Commit 8cd4556

Merge pull request #1 from anmerinoto/JENNY_ALAVA

Jenny alava

2 parents 9b791ef + 2054114

File tree

1 file changed

+69 -15 lines changed


lessons/02_web_scraping.ipynb

Lines changed: 69 additions & 15 deletions
@@ -47,19 +47,62 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 1,
    "metadata": {},
-   "outputs": [],
-   "source": [
-    "%pip install requests"
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Requirement already satisfied: requests in c:\\users\\jjala\\appdata\\local\\programs\\python\\python313\\lib\\site-packages (2.32.5)\n",
+      "Requirement already satisfied: charset_normalizer<4,>=2 in c:\\users\\jjala\\appdata\\local\\programs\\python\\python313\\lib\\site-packages (from requests) (3.4.3)\n",
+      "Requirement already satisfied: idna<4,>=2.5 in c:\\users\\jjala\\appdata\\local\\programs\\python\\python313\\lib\\site-packages (from requests) (3.10)\n",
+      "Requirement already satisfied: urllib3<3,>=1.21.1 in c:\\users\\jjala\\appdata\\local\\programs\\python\\python313\\lib\\site-packages (from requests) (2.5.0)\n",
+      "Requirement already satisfied: certifi>=2017.4.17 in c:\\users\\jjala\\appdata\\local\\programs\\python\\python313\\lib\\site-packages (from requests) (2025.8.3)\n",
+      "Note: you may need to restart the kernel to use updated packages.\n"
+     ]
+    }
+   ],
+   "source": [
+    "# 🌐 The requests library is needed to make HTTP requests and download web pages.\n",
+    "# 🕸️ This is fundamental for web scraping (extracting information from web pages).\n",
+    "%pip install requests "
    ]
   },
   {
    "cell_type": "code",
    "execution_count": null,
    "metadata": {},
-   "outputs": [],
-   "source": [
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Collecting beautifulsoup4\n",
+      " Downloading beautifulsoup4-4.13.4-py3-none-any.whl.metadata (3.8 kB)\n",
+      "Collecting soupsieve>1.2 (from beautifulsoup4)\n",
+      " Downloading soupsieve-2.7-py3-none-any.whl.metadata (4.6 kB)\n",
+      "Collecting typing-extensions>=4.0.0 (from beautifulsoup4)\n",
+      " Downloading typing_extensions-4.14.1-py3-none-any.whl.metadata (3.0 kB)\n",
+      "Downloading beautifulsoup4-4.13.4-py3-none-any.whl (187 kB)\n",
+      "Downloading soupsieve-2.7-py3-none-any.whl (36 kB)\n",
+      "Downloading typing_extensions-4.14.1-py3-none-any.whl (43 kB)\n",
+      "Installing collected packages: typing-extensions, soupsieve, beautifulsoup4\n",
+      "\n",
+      " ------------- -------------------------- 1/3 [soupsieve]\n",
+      " -------------------------- ------------- 2/3 [beautifulsoup4]\n",
+      " -------------------------- ------------- 2/3 [beautifulsoup4]\n",
+      " -------------------------- ------------- 2/3 [beautifulsoup4]\n",
+      " ---------------------------------------- 3/3 [beautifulsoup4]\n",
+      "\n",
+      "Successfully installed beautifulsoup4-4.13.4 soupsieve-2.7 typing-extensions-4.14.1\n",
+      "Note: you may need to restart the kernel to use updated packages.\n"
+     ]
+    }
+   ],
+   "source": [
+    "# 🥣 The %pip install beautifulsoup4 command installs the Beautiful Soup 4 library in your Jupyter Notebook environment.\n",
+    "# 🕸️ Beautiful Soup is essential for parsing and extracting information from HTML and XML files, which makes web scraping easier.\n",
     "%pip install beautifulsoup4"
    ]
   },
@@ -72,9 +115,25 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 3,
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Collecting lxml\n",
+      " Downloading lxml-6.0.1-cp313-cp313-win_amd64.whl.metadata (3.9 kB)\n",
+      "Downloading lxml-6.0.1-cp313-cp313-win_amd64.whl (4.0 MB)\n",
+      " ---------------------------------------- 0.0/4.0 MB ? eta -:--:--\n",
+      " ----- ---------------------------------- 0.5/4.0 MB 5.7 MB/s eta 0:00:01\n",
+      " ---------------------------------------- 4.0/4.0 MB 15.9 MB/s 0:00:00\n",
+      "Installing collected packages: lxml\n",
+      "Successfully installed lxml-6.0.1\n",
+      "Note: you may need to restart the kernel to use updated packages.\n"
+     ]
+    }
+   ],
    "source": [
     "%pip install lxml"
    ]
@@ -988,7 +1047,7 @@
  "metadata": {
   "anaconda-cloud": {},
   "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
+   "display_name": "Python 3",
    "language": "python",
    "name": "python3"
   },
@@ -1002,12 +1061,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.8.13"
-  },
-  "vscode": {
-   "interpreter": {
-    "hash": "b6f9fe9f4b7182690503d8ecc2bae97b0ee3ebf54e877167ae4d28c119a56988"
-   }
+   "version": "3.13.6"
   }
  },
  "nbformat": 4,
