1_Data_Creation_Solutions (2).ipynb ADDED
@@ -0,0 +1,1087 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "markdown",
5
+ "metadata": {
6
+ "id": "4ba6aba8"
7
+ },
8
+ "source": [
9
+ "# 🤖 **Data Collection, Creation, Storage, and Processing**\n"
10
+ ]
11
+ },
12
+ {
13
+ "cell_type": "markdown",
14
+ "metadata": {
15
+ "id": "jpASMyIQMaAq"
16
+ },
17
+ "source": [
18
+ "## **1.** 📦 Install required packages"
19
+ ]
20
+ },
21
+ {
22
+ "cell_type": "code",
23
+ "execution_count": 1,
24
+ "metadata": {
25
+ "colab": {
26
+ "base_uri": "https://localhost:8080/"
27
+ },
28
+ "id": "f48c8f8c",
29
+ "outputId": "13d0dd5e-82c6-489f-b1f0-e970186a4eb7"
30
+ },
31
+ "outputs": [
32
+ {
33
+ "output_type": "stream",
34
+ "name": "stdout",
35
+ "text": [
36
+ "Requirement already satisfied: beautifulsoup4 in /usr/local/lib/python3.12/dist-packages (4.13.5)\n",
37
+ "Requirement already satisfied: pandas in /usr/local/lib/python3.12/dist-packages (2.2.2)\n",
38
+ "Requirement already satisfied: matplotlib in /usr/local/lib/python3.12/dist-packages (3.10.0)\n",
39
+ "Requirement already satisfied: seaborn in /usr/local/lib/python3.12/dist-packages (0.13.2)\n",
40
+ "Requirement already satisfied: numpy in /usr/local/lib/python3.12/dist-packages (2.0.2)\n",
41
+ "Requirement already satisfied: textblob in /usr/local/lib/python3.12/dist-packages (0.19.0)\n",
42
+ "Requirement already satisfied: soupsieve>1.2 in /usr/local/lib/python3.12/dist-packages (from beautifulsoup4) (2.8.3)\n",
43
+ "Requirement already satisfied: typing-extensions>=4.0.0 in /usr/local/lib/python3.12/dist-packages (from beautifulsoup4) (4.15.0)\n",
44
+ "Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.12/dist-packages (from pandas) (2.9.0.post0)\n",
45
+ "Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.12/dist-packages (from pandas) (2025.2)\n",
46
+ "Requirement already satisfied: tzdata>=2022.7 in /usr/local/lib/python3.12/dist-packages (from pandas) (2025.3)\n",
47
+ "Requirement already satisfied: contourpy>=1.0.1 in /usr/local/lib/python3.12/dist-packages (from matplotlib) (1.3.3)\n",
48
+ "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.12/dist-packages (from matplotlib) (0.12.1)\n",
49
+ "Requirement already satisfied: fonttools>=4.22.0 in /usr/local/lib/python3.12/dist-packages (from matplotlib) (4.61.1)\n",
50
+ "Requirement already satisfied: kiwisolver>=1.3.1 in /usr/local/lib/python3.12/dist-packages (from matplotlib) (1.4.9)\n",
51
+ "Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.12/dist-packages (from matplotlib) (26.0)\n",
52
+ "Requirement already satisfied: pillow>=8 in /usr/local/lib/python3.12/dist-packages (from matplotlib) (11.3.0)\n",
53
+ "Requirement already satisfied: pyparsing>=2.3.1 in /usr/local/lib/python3.12/dist-packages (from matplotlib) (3.3.2)\n",
54
+ "Requirement already satisfied: nltk>=3.9 in /usr/local/lib/python3.12/dist-packages (from textblob) (3.9.1)\n",
55
+ "Requirement already satisfied: click in /usr/local/lib/python3.12/dist-packages (from nltk>=3.9->textblob) (8.3.1)\n",
56
+ "Requirement already satisfied: joblib in /usr/local/lib/python3.12/dist-packages (from nltk>=3.9->textblob) (1.5.3)\n",
57
+ "Requirement already satisfied: regex>=2021.8.3 in /usr/local/lib/python3.12/dist-packages (from nltk>=3.9->textblob) (2025.11.3)\n",
58
+ "Requirement already satisfied: tqdm in /usr/local/lib/python3.12/dist-packages (from nltk>=3.9->textblob) (4.67.3)\n",
59
+ "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.12/dist-packages (from python-dateutil>=2.8.2->pandas) (1.17.0)\n"
60
+ ]
61
+ }
62
+ ],
63
+ "source": [
64
+ "!pip install beautifulsoup4 pandas matplotlib seaborn numpy textblob"
65
+ ]
66
+ },
67
+ {
68
+ "cell_type": "markdown",
69
+ "metadata": {
70
+ "id": "lquNYCbfL9IM"
71
+ },
72
+ "source": [
73
+ "## **2.** ⛏ Web-scrape all book titles, prices, and ratings from books.toscrape.com"
74
+ ]
75
+ },
76
+ {
77
+ "cell_type": "markdown",
78
+ "metadata": {
79
+ "id": "0IWuNpxxYDJF"
80
+ },
81
+ "source": [
82
+ "### *a. Initial setup*\n",
83
+ "Define the base url of the website you will scrape as well as how and what you will scrape"
84
+ ]
85
+ },
86
+ {
87
+ "cell_type": "code",
88
+ "execution_count": 2,
89
+ "metadata": {
90
+ "id": "91d52125"
91
+ },
92
+ "outputs": [],
93
+ "source": [
94
+ "import requests\n",
95
+ "from bs4 import BeautifulSoup\n",
96
+ "import pandas as pd\n",
97
+ "import time\n",
98
+ "\n",
99
+ "base_url = \"https://books.toscrape.com/catalogue/page-{}.html\"\n",
100
+ "headers = {\"User-Agent\": \"Mozilla/5.0\"}\n",
101
+ "\n",
102
+ "titles, prices, ratings = [], [], []"
103
+ ]
104
+ },
105
+ {
106
+ "cell_type": "markdown",
107
+ "metadata": {
108
+ "id": "oCdTsin2Yfp3"
109
+ },
110
+ "source": [
111
+ "### *b. Fill titles, prices, and ratings from the web pages*"
112
+ ]
113
+ },
114
+ {
115
+ "cell_type": "code",
116
+ "execution_count": 3,
117
+ "metadata": {
118
+ "id": "xqO5Y3dnYhxt"
119
+ },
120
+ "outputs": [],
121
+ "source": [
122
+ "# Loop through all 50 pages\n",
123
+ "for page in range(1, 51):\n",
124
+ " url = base_url.format(page)\n",
125
+ " response = requests.get(url, headers=headers)\n",
126
+ " soup = BeautifulSoup(response.content, \"html.parser\")\n",
127
+ " books = soup.find_all(\"article\", class_=\"product_pod\")\n",
128
+ "\n",
129
+ " for book in books:\n",
130
+ " titles.append(book.h3.a[\"title\"])\n",
131
+ " prices.append(float(book.find(\"p\", class_=\"price_color\").text[1:]))\n",
132
+ " ratings.append(book.p.get(\"class\")[1])\n",
133
+ "\n",
134
+ " time.sleep(0.5) # polite scraping delay"
135
+ ]
136
+ },
137
+ {
138
+ "cell_type": "markdown",
139
+ "metadata": {
140
+ "id": "T0TOeRC4Yrnn"
141
+ },
142
+ "source": [
143
+ "### *c. ✋🏻🛑⛔️ Create a dataframe df_books that contains the now complete \"title\", \"price\", and \"rating\" objects*"
144
+ ]
145
+ },
146
+ {
147
+ "cell_type": "code",
148
+ "execution_count": 4,
149
+ "metadata": {
150
+ "id": "l5FkkNhUYTHh"
151
+ },
152
+ "outputs": [],
153
+ "source": [
154
+ "# 🗂️ Create DataFrame\n",
155
+ "df_books = pd.DataFrame({\n",
156
+ " \"title\": titles,\n",
157
+ " \"price\": prices,\n",
158
+ " \"rating\": ratings\n",
159
+ "})"
160
+ ]
161
+ },
162
+ {
163
+ "cell_type": "markdown",
164
+ "metadata": {
165
+ "id": "duI5dv3CZYvF"
166
+ },
167
+ "source": [
168
+ "### *d. Save web-scraped dataframe either as a CSV or Excel file*"
169
+ ]
170
+ },
171
+ {
172
+ "cell_type": "code",
173
+ "execution_count": 5,
174
+ "metadata": {
175
+ "id": "lC1U_YHtZifh"
176
+ },
177
+ "outputs": [],
178
+ "source": [
179
+ "# 💾 Save to CSV\n",
180
+ "df_books.to_csv(\"books_data.csv\", index=False)\n",
181
+ "\n",
182
+ "# 💾 Or save to Excel\n",
183
+ "# df_books.to_excel(\"books_data.xlsx\", index=False)"
184
+ ]
185
+ },
186
+ {
187
+ "cell_type": "markdown",
188
+ "metadata": {
189
+ "id": "qMjRKMBQZlJi"
190
+ },
191
+ "source": [
192
+ "### *e. ✋🏻🛑⛔️ View first few lines*"
193
+ ]
194
+ },
195
+ {
196
+ "cell_type": "code",
197
+ "execution_count": 6,
198
+ "metadata": {
199
+ "colab": {
200
+ "base_uri": "https://localhost:8080/",
201
+ "height": 0
202
+ },
203
+ "id": "O_wIvTxYZqCK",
204
+ "outputId": "349b36b0-c008-4fd5-d4a4-dba38ae18337"
205
+ },
206
+ "outputs": [
207
+ {
208
+ "output_type": "execute_result",
209
+ "data": {
210
+ "text/plain": [
211
+ " title price rating\n",
212
+ "0 A Light in the Attic 51.77 Three\n",
213
+ "1 Tipping the Velvet 53.74 One\n",
214
+ "2 Soumission 50.10 One\n",
215
+ "3 Sharp Objects 47.82 Four\n",
216
+ "4 Sapiens: A Brief History of Humankind 54.23 Five"
217
+ ],
218
+ "text/html": [
219
+ "\n",
220
+ " <div id=\"df-04c87660-4415-45e9-ad3b-3fa19d9402c2\" class=\"colab-df-container\">\n",
221
+ " <div>\n",
222
+ "<style scoped>\n",
223
+ " .dataframe tbody tr th:only-of-type {\n",
224
+ " vertical-align: middle;\n",
225
+ " }\n",
226
+ "\n",
227
+ " .dataframe tbody tr th {\n",
228
+ " vertical-align: top;\n",
229
+ " }\n",
230
+ "\n",
231
+ " .dataframe thead th {\n",
232
+ " text-align: right;\n",
233
+ " }\n",
234
+ "</style>\n",
235
+ "<table border=\"1\" class=\"dataframe\">\n",
236
+ " <thead>\n",
237
+ " <tr style=\"text-align: right;\">\n",
238
+ " <th></th>\n",
239
+ " <th>title</th>\n",
240
+ " <th>price</th>\n",
241
+ " <th>rating</th>\n",
242
+ " </tr>\n",
243
+ " </thead>\n",
244
+ " <tbody>\n",
245
+ " <tr>\n",
246
+ " <th>0</th>\n",
247
+ " <td>A Light in the Attic</td>\n",
248
+ " <td>51.77</td>\n",
249
+ " <td>Three</td>\n",
250
+ " </tr>\n",
251
+ " <tr>\n",
252
+ " <th>1</th>\n",
253
+ " <td>Tipping the Velvet</td>\n",
254
+ " <td>53.74</td>\n",
255
+ " <td>One</td>\n",
256
+ " </tr>\n",
257
+ " <tr>\n",
258
+ " <th>2</th>\n",
259
+ " <td>Soumission</td>\n",
260
+ " <td>50.10</td>\n",
261
+ " <td>One</td>\n",
262
+ " </tr>\n",
263
+ " <tr>\n",
264
+ " <th>3</th>\n",
265
+ " <td>Sharp Objects</td>\n",
266
+ " <td>47.82</td>\n",
267
+ " <td>Four</td>\n",
268
+ " </tr>\n",
269
+ " <tr>\n",
270
+ " <th>4</th>\n",
271
+ " <td>Sapiens: A Brief History of Humankind</td>\n",
272
+ " <td>54.23</td>\n",
273
+ " <td>Five</td>\n",
274
+ " </tr>\n",
275
+ " </tbody>\n",
276
+ "</table>\n",
277
+ "</div>\n",
278
+ " <div class=\"colab-df-buttons\">\n",
279
+ "\n",
280
+ " <div class=\"colab-df-container\">\n",
281
+ " <button class=\"colab-df-convert\" onclick=\"convertToInteractive('df-04c87660-4415-45e9-ad3b-3fa19d9402c2')\"\n",
282
+ " title=\"Convert this dataframe to an interactive table.\"\n",
283
+ " style=\"display:none;\">\n",
284
+ "\n",
285
+ " <svg xmlns=\"http://www.w3.org/2000/svg\" height=\"24px\" viewBox=\"0 -960 960 960\">\n",
286
+ " <path d=\"M120-120v-720h720v720H120Zm60-500h600v-160H180v160Zm220 220h160v-160H400v160Zm0 220h160v-160H400v160ZM180-400h160v-160H180v160Zm440 0h160v-160H620v160ZM180-180h160v-160H180v160Zm440 0h160v-160H620v160Z\"/>\n",
287
+ " </svg>\n",
288
+ " </button>\n",
289
+ "\n",
290
+ " <style>\n",
291
+ " .colab-df-container {\n",
292
+ " display:flex;\n",
293
+ " gap: 12px;\n",
294
+ " }\n",
295
+ "\n",
296
+ " .colab-df-convert {\n",
297
+ " background-color: #E8F0FE;\n",
298
+ " border: none;\n",
299
+ " border-radius: 50%;\n",
300
+ " cursor: pointer;\n",
301
+ " display: none;\n",
302
+ " fill: #1967D2;\n",
303
+ " height: 32px;\n",
304
+ " padding: 0 0 0 0;\n",
305
+ " width: 32px;\n",
306
+ " }\n",
307
+ "\n",
308
+ " .colab-df-convert:hover {\n",
309
+ " background-color: #E2EBFA;\n",
310
+ " box-shadow: 0px 1px 2px rgba(60, 64, 67, 0.3), 0px 1px 3px 1px rgba(60, 64, 67, 0.15);\n",
311
+ " fill: #174EA6;\n",
312
+ " }\n",
313
+ "\n",
314
+ " .colab-df-buttons div {\n",
315
+ " margin-bottom: 4px;\n",
316
+ " }\n",
317
+ "\n",
318
+ " [theme=dark] .colab-df-convert {\n",
319
+ " background-color: #3B4455;\n",
320
+ " fill: #D2E3FC;\n",
321
+ " }\n",
322
+ "\n",
323
+ " [theme=dark] .colab-df-convert:hover {\n",
324
+ " background-color: #434B5C;\n",
325
+ " box-shadow: 0px 1px 3px 1px rgba(0, 0, 0, 0.15);\n",
326
+ " filter: drop-shadow(0px 1px 2px rgba(0, 0, 0, 0.3));\n",
327
+ " fill: #FFFFFF;\n",
328
+ " }\n",
329
+ " </style>\n",
330
+ "\n",
331
+ " <script>\n",
332
+ " const buttonEl =\n",
333
+ " document.querySelector('#df-04c87660-4415-45e9-ad3b-3fa19d9402c2 button.colab-df-convert');\n",
334
+ " buttonEl.style.display =\n",
335
+ " google.colab.kernel.accessAllowed ? 'block' : 'none';\n",
336
+ "\n",
337
+ " async function convertToInteractive(key) {\n",
338
+ " const element = document.querySelector('#df-04c87660-4415-45e9-ad3b-3fa19d9402c2');\n",
339
+ " const dataTable =\n",
340
+ " await google.colab.kernel.invokeFunction('convertToInteractive',\n",
341
+ " [key], {});\n",
342
+ " if (!dataTable) return;\n",
343
+ "\n",
344
+ " const docLinkHtml = 'Like what you see? Visit the ' +\n",
345
+ " '<a target=\"_blank\" href=https://colab.research.google.com/notebooks/data_table.ipynb>data table notebook</a>'\n",
346
+ " + ' to learn more about interactive tables.';\n",
347
+ " element.innerHTML = '';\n",
348
+ " dataTable['output_type'] = 'display_data';\n",
349
+ " await google.colab.output.renderOutput(dataTable, element);\n",
350
+ " const docLink = document.createElement('div');\n",
351
+ " docLink.innerHTML = docLinkHtml;\n",
352
+ " element.appendChild(docLink);\n",
353
+ " }\n",
354
+ " </script>\n",
355
+ " </div>\n",
356
+ "\n",
357
+ "\n",
358
+ " </div>\n",
359
+ " </div>\n"
360
+ ],
361
+ "application/vnd.google.colaboratory.intrinsic+json": {
362
+ "type": "dataframe",
363
+ "variable_name": "df_books",
364
+ "summary": "{\n \"name\": \"df_books\",\n \"rows\": 1000,\n \"fields\": [\n {\n \"column\": \"title\",\n \"properties\": {\n \"dtype\": \"string\",\n \"num_unique_values\": 999,\n \"samples\": [\n \"The Grownup\",\n \"Persepolis: The Story of a Childhood (Persepolis #1-2)\",\n \"Ayumi's Violin\"\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"price\",\n \"properties\": {\n \"dtype\": \"number\",\n \"std\": 14.446689669952772,\n \"min\": 10.0,\n \"max\": 59.99,\n \"num_unique_values\": 903,\n \"samples\": [\n 19.73,\n 55.65,\n 46.31\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n },\n {\n \"column\": \"rating\",\n \"properties\": {\n \"dtype\": \"category\",\n \"num_unique_values\": 5,\n \"samples\": [\n \"One\",\n \"Two\",\n \"Four\"\n ],\n \"semantic_type\": \"\",\n \"description\": \"\"\n }\n }\n ]\n}"
365
+ }
366
+ },
367
+ "metadata": {},
368
+ "execution_count": 6
369
+ }
370
+ ],
371
+ "source": [
372
+ "df_books.head()"
373
+ ]
374
+ },
375
+ {
376
+ "cell_type": "markdown",
377
+ "metadata": {
378
+ "id": "p-1Pr2szaqLk"
379
+ },
380
+ "source": [
381
+ "## **3.** 🧩 Create a meaningful connection between real & synthetic datasets"
382
+ ]
383
+ },
384
+ {
385
+ "cell_type": "markdown",
386
+ "metadata": {
387
+ "id": "SIaJUGIpaH4V"
388
+ },
389
+ "source": [
390
+ "### *a. Initial setup*"
391
+ ]
392
+ },
393
+ {
394
+ "cell_type": "code",
395
+ "execution_count": 7,
396
+ "metadata": {
397
+ "id": "-gPXGcRPuV_9"
398
+ },
399
+ "outputs": [],
400
+ "source": [
401
+ "import numpy as np\n",
402
+ "import random\n",
403
+ "from datetime import datetime\n",
404
+ "import warnings\n",
405
+ "\n",
406
+ "warnings.filterwarnings(\"ignore\")\n",
407
+ "random.seed(2025)\n",
408
+ "np.random.seed(2025)"
409
+ ]
410
+ },
411
+ {
412
+ "cell_type": "markdown",
413
+ "metadata": {
414
+ "id": "pY4yCoIuaQqp"
415
+ },
416
+ "source": [
417
+ "### *b. Generate popularity scores based on rating (with some randomness) with a generate_popularity_score function*"
418
+ ]
419
+ },
420
+ {
421
+ "cell_type": "code",
422
+ "execution_count": 8,
423
+ "metadata": {
424
+ "id": "mnd5hdAbaNjz"
425
+ },
426
+ "outputs": [],
427
+ "source": [
428
+ "def generate_popularity_score(rating):\n",
429
+ " base = {\"One\": 2, \"Two\": 3, \"Three\": 3, \"Four\": 4, \"Five\": 4}.get(rating, 3)\n",
430
+ " trend_factor = random.choices([-1, 0, 1], weights=[1, 3, 2])[0]\n",
431
+ " return int(np.clip(base + trend_factor, 1, 5))"
432
+ ]
433
+ },
434
+ {
435
+ "cell_type": "markdown",
436
+ "metadata": {
437
+ "id": "n4-TaNTFgPak"
438
+ },
439
+ "source": [
440
+ "### *c. ✋🏻🛑⛔️ Run the function to create a \"popularity_score\" column from \"rating\"*"
441
+ ]
442
+ },
443
+ {
444
+ "cell_type": "code",
445
+ "execution_count": 9,
446
+ "metadata": {
447
+ "id": "V-G3OCUCgR07"
448
+ },
449
+ "outputs": [],
450
+ "source": [
451
+ "df_books[\"popularity_score\"] = df_books[\"rating\"].apply(generate_popularity_score)"
452
+ ]
453
+ },
454
+ {
455
+ "cell_type": "markdown",
456
+ "metadata": {
457
+ "id": "HnngRNTgacYt"
458
+ },
459
+ "source": [
460
+ "### *d. Decide on the sentiment_label based on the popularity score with a get_sentiment function*"
461
+ ]
462
+ },
463
+ {
464
+ "cell_type": "code",
465
+ "execution_count": 10,
466
+ "metadata": {
467
+ "id": "kUtWmr8maZLZ"
468
+ },
469
+ "outputs": [],
470
+ "source": [
471
+ "def get_sentiment(popularity_score):\n",
472
+ " if popularity_score <= 2:\n",
473
+ " return \"negative\"\n",
474
+ " elif popularity_score == 3:\n",
475
+ " return \"neutral\"\n",
476
+ " else:\n",
477
+ " return \"positive\""
478
+ ]
479
+ },
480
+ {
481
+ "cell_type": "markdown",
482
+ "metadata": {
483
+ "id": "HF9F9HIzgT7Z"
484
+ },
485
+ "source": [
486
+ "### *e. ✋🏻🛑⛔️ Run the function to create a \"sentiment_label\" column from \"popularity_score\"*"
487
+ ]
488
+ },
489
+ {
490
+ "cell_type": "code",
491
+ "execution_count": 11,
492
+ "metadata": {
493
+ "id": "tafQj8_7gYCG"
494
+ },
495
+ "outputs": [],
496
+ "source": [
497
+ "df_books[\"sentiment_label\"] = df_books[\"popularity_score\"].apply(get_sentiment)"
498
+ ]
499
+ },
500
+ {
501
+ "cell_type": "markdown",
502
+ "metadata": {
503
+ "id": "T8AdKkmASq9a"
504
+ },
505
+ "source": [
506
+ "## **4.** 📈 Generate synthetic book sales data of 18 months"
507
+ ]
508
+ },
509
+ {
510
+ "cell_type": "markdown",
511
+ "metadata": {
512
+ "id": "OhXbdGD5fH0c"
513
+ },
514
+ "source": [
515
+ "### *a. Create a generate_sales_profile function that would generate sales patterns based on sentiment_label (with some randomness)*"
516
+ ]
517
+ },
518
+ {
519
+ "cell_type": "code",
520
+ "execution_count": 12,
521
+ "metadata": {
522
+ "id": "qkVhYPXGbgEn"
523
+ },
524
+ "outputs": [],
525
+ "source": [
526
+ "def generate_sales_profile(sentiment):\n",
527
+ " months = pd.date_range(end=datetime.today(), periods=18, freq=\"ME\")\n",
528
+ "\n",
529
+ " if sentiment == \"positive\":\n",
530
+ " base = random.randint(200, 300)\n",
531
+ " trend = np.linspace(base, base + random.randint(20, 60), len(months))\n",
532
+ " elif sentiment == \"negative\":\n",
533
+ " base = random.randint(20, 80)\n",
534
+ " trend = np.linspace(base, base - random.randint(10, 30), len(months))\n",
535
+ " else: # neutral\n",
536
+ " base = random.randint(80, 160)\n",
537
+ " trend = np.full(len(months), base + random.randint(-10, 10))\n",
538
+ "\n",
539
+ " seasonality = 10 * np.sin(np.linspace(0, 3 * np.pi, len(months)))\n",
540
+ " noise = np.random.normal(0, 5, len(months))\n",
541
+ " monthly_sales = np.clip(trend + seasonality + noise, a_min=0, a_max=None).astype(int)\n",
542
+ "\n",
543
+ " return list(zip(months.strftime(\"%Y-%m\"), monthly_sales))"
544
+ ]
545
+ },
546
+ {
547
+ "cell_type": "markdown",
548
+ "metadata": {
549
+ "id": "L2ak1HlcgoTe"
550
+ },
551
+ "source": [
552
+ "### *b. Run the function as part of building sales_data*"
553
+ ]
554
+ },
555
+ {
556
+ "cell_type": "code",
557
+ "execution_count": 13,
558
+ "metadata": {
559
+ "id": "SlJ24AUafoDB"
560
+ },
561
+ "outputs": [],
562
+ "source": [
563
+ "sales_data = []\n",
564
+ "for _, row in df_books.iterrows():\n",
565
+ " records = generate_sales_profile(row[\"sentiment_label\"])\n",
566
+ " for month, units in records:\n",
567
+ " sales_data.append({\n",
568
+ " \"title\": row[\"title\"],\n",
569
+ " \"month\": month,\n",
570
+ " \"units_sold\": units,\n",
571
+ " \"sentiment_label\": row[\"sentiment_label\"]\n",
572
+ " })"
573
+ ]
574
+ },
575
+ {
576
+ "cell_type": "markdown",
577
+ "metadata": {
578
+ "id": "4IXZKcCSgxnq"
579
+ },
580
+ "source": [
581
+ "### *c. ✋🏻🛑⛔️ Create a df_sales DataFrame from sales_data*"
582
+ ]
583
+ },
584
+ {
585
+ "cell_type": "code",
586
+ "execution_count": 14,
587
+ "metadata": {
588
+ "id": "wcN6gtiZg-ws"
589
+ },
590
+ "outputs": [],
591
+ "source": [
592
+ "df_sales = pd.DataFrame(sales_data)"
593
+ ]
594
+ },
595
+ {
596
+ "cell_type": "markdown",
597
+ "metadata": {
598
+ "id": "EhIjz9WohAmZ"
599
+ },
600
+ "source": [
601
+ "### *d. Save df_sales as synthetic_sales_data.csv & view first few lines*"
602
+ ]
603
+ },
604
+ {
605
+ "cell_type": "code",
606
+ "execution_count": 15,
607
+ "metadata": {
608
+ "colab": {
609
+ "base_uri": "https://localhost:8080/"
610
+ },
611
+ "id": "MzbZvLcAhGaH",
612
+ "outputId": "c692bb04-7263-4115-a2ba-c72fe0180722"
613
+ },
614
+ "outputs": [
615
+ {
616
+ "output_type": "stream",
617
+ "name": "stdout",
618
+ "text": [
619
+ " title month units_sold sentiment_label\n",
620
+ "0 A Light in the Attic 2024-08 100 neutral\n",
621
+ "1 A Light in the Attic 2024-09 109 neutral\n",
622
+ "2 A Light in the Attic 2024-10 102 neutral\n",
623
+ "3 A Light in the Attic 2024-11 107 neutral\n",
624
+ "4 A Light in the Attic 2024-12 108 neutral\n"
625
+ ]
626
+ }
627
+ ],
628
+ "source": [
629
+ "df_sales.to_csv(\"synthetic_sales_data.csv\", index=False)\n",
630
+ "\n",
631
+ "print(df_sales.head())"
632
+ ]
633
+ },
634
+ {
635
+ "cell_type": "markdown",
636
+ "metadata": {
637
+ "id": "7g9gqBgQMtJn"
638
+ },
639
+ "source": [
640
+ "## **5.** 🎯 Generate synthetic customer reviews"
641
+ ]
642
+ },
643
+ {
644
+ "cell_type": "markdown",
645
+ "metadata": {
646
+ "id": "Gi4y9M9KuDWx"
647
+ },
648
+ "source": [
649
+ "### *a. ✋🏻🛑⛔️ Ask ChatGPT to create a list of 50 distinct generic book review texts for the sentiment labels \"positive\", \"neutral\", and \"negative\" called synthetic_reviews_by_sentiment*"
650
+ ]
651
+ },
652
+ {
653
+ "cell_type": "code",
654
+ "execution_count": 16,
655
+ "metadata": {
656
+ "id": "b3cd2a50"
657
+ },
658
+ "outputs": [],
659
+ "source": [
660
+ "synthetic_reviews_by_sentiment = {\n",
661
+ " \"positive\": [\n",
662
+ " \"A compelling and heartwarming read that stayed with me long after I finished.\",\n",
663
+ " \"Brilliantly written! The characters were unforgettable and the plot was engaging.\",\n",
664
+ " \"One of the best books I've read this year — inspiring and emotionally rich.\",\n",
665
+ " \"The author's storytelling was vivid and powerful. Highly recommended!\",\n",
666
+ " \"An absolute masterpiece. I couldn't put it down from start to finish.\",\n",
667
+ " \"Gripping, intelligent, and beautifully crafted — I loved every page.\",\n",
668
+ " \"The emotional depth and layered narrative were just perfect.\",\n",
669
+ " \"A thought-provoking journey with stunning character development.\",\n",
670
+ " \"Everything about this book just clicked. A top-tier read!\",\n",
671
+ " \"A flawless blend of emotion, intrigue, and style. Truly impressive.\",\n",
672
+ " \"Absolutely stunning work of fiction. Five stars from me.\",\n",
673
+ " \"Remarkably executed with breathtaking prose.\",\n",
674
+ " \"The pacing was perfect and I was hooked from page one.\",\n",
675
+ " \"Heartfelt and hopeful — a story well worth telling.\",\n",
676
+ " \"A vivid journey through complex emotions and stunning imagery.\",\n",
677
+ " \"This book had soul. Every word felt like it mattered.\",\n",
678
+ " \"It delivered more than I ever expected. Powerful and wise.\",\n",
679
+ " \"The characters leapt off the page and into my heart.\",\n",
680
+ " \"I could see every scene clearly in my mind — beautifully descriptive.\",\n",
681
+ " \"Refreshing, original, and impossible to forget.\",\n",
682
+ " \"A radiant celebration of resilience and love.\",\n",
683
+ " \"Powerful themes handled with grace and insight.\",\n",
684
+ " \"An unforgettable literary experience.\",\n",
685
+ " \"The best book club pick we’ve had all year.\",\n",
686
+ " \"A layered, lyrical narrative that resonates deeply.\",\n",
687
+ " \"Surprising, profound, and deeply humane.\",\n",
688
+ " \"One of those rare books I wish I could read again for the first time.\",\n",
689
+ " \"Both epic and intimate — a perfect balance.\",\n",
690
+ " \"It reads like a love letter to the human spirit.\",\n",
691
+ " \"Satisfying and uplifting with a memorable ending.\",\n",
692
+ " \"This novel deserves every bit of praise it gets.\",\n",
693
+ " \"Introspective, emotional, and elegantly composed.\",\n",
694
+ " \"A tour de force in contemporary fiction.\",\n",
695
+ " \"Left me smiling, teary-eyed, and completely fulfilled.\",\n",
696
+ " \"A novel with the rare ability to entertain and enlighten.\",\n",
697
+ " \"Incredibly moving. I highlighted so many lines.\",\n",
698
+ " \"A smart, sensitive take on relationships and identity.\",\n",
699
+ " \"You feel wiser by the end of it.\",\n",
700
+ " \"A gorgeously crafted tale about hope and second chances.\",\n",
701
+ " \"Poignant and real — a beautiful escape.\",\n",
702
+ " \"Brims with insight and authenticity.\",\n",
703
+ " \"Compelling characters and a satisfying plot.\",\n",
704
+ " \"An empowering and important read.\",\n",
705
+ " \"Elegantly crafted and deeply humane.\",\n",
706
+ " \"Taut storytelling that never lets go.\",\n",
707
+ " \"Each chapter offered a new treasure.\",\n",
708
+ " \"Lyrical writing that stays with you.\",\n",
709
+ " \"A wonderful blend of passion and thoughtfulness.\",\n",
710
+ " \"Uplifting, honest, and completely engrossing.\",\n",
711
+ " \"This one made me believe in storytelling again.\"\n",
712
+ " ],\n",
713
+ " \"neutral\": [\n",
714
+ " \"An average book — not great, but not bad either.\",\n",
715
+ " \"Some parts really stood out, others felt a bit flat.\",\n",
716
+ " \"It was okay overall. A decent way to pass the time.\",\n",
717
+ " \"The writing was fine, though I didn’t fully connect with the story.\",\n",
718
+ " \"Had a few memorable moments but lacked depth in some areas.\",\n",
719
+ " \"A mixed experience — neither fully engaging nor forgettable.\",\n",
720
+ " \"There was potential, but it didn't quite come together for me.\",\n",
721
+ " \"A reasonable effort that just didn’t leave a lasting impression.\",\n",
722
+ " \"Serviceable but not something I'd go out of my way to recommend.\",\n",
723
+ " \"Not much to dislike, but not much to rave about either.\",\n",
724
+ " \"It had its strengths, though they didn’t shine consistently.\",\n",
725
+ " \"I’m on the fence — parts were enjoyable, others not so much.\",\n",
726
+ " \"The book had a unique concept but lacked execution.\",\n",
727
+ " \"A middle-of-the-road read.\",\n",
728
+ " \"Engaging at times, but it lost momentum.\",\n",
729
+ " \"Would have benefited from stronger character development.\",\n",
730
+ " \"It passed the time, but I wouldn't reread it.\",\n",
731
+ " \"The plot had some holes that affected immersion.\",\n",
732
+ " \"Mediocre pacing made it hard to stay invested.\",\n",
733
+ " \"Satisfying in parts, underwhelming in others.\",\n",
734
+ " \"Neutral on this one — didn’t love it or hate it.\",\n",
735
+ " \"Fairly forgettable but with glimpses of promise.\",\n",
736
+ " \"The themes were solid, but not well explored.\",\n",
737
+ " \"Competent, just not compelling.\",\n",
738
+ " \"Had moments of clarity and moments of confusion.\",\n",
739
+ " \"I didn’t regret reading it, but I wouldn’t recommend it.\",\n",
740
+ " \"Readable, yet uninspired.\",\n",
741
+ " \"There was a spark, but it didn’t ignite.\",\n",
742
+ " \"A slow burn that didn’t quite catch fire.\",\n",
743
+ " \"I expected more nuance given the premise.\",\n",
744
+ " \"A safe, inoffensive choice.\",\n",
745
+ " \"Some parts lagged, others piqued my interest.\",\n",
746
+ " \"Decent, but needed polish.\",\n",
747
+ " \"Moderately engaging but didn’t stick the landing.\",\n",
748
+ " \"It simply lacked that emotional punch.\",\n",
749
+ " \"Just fine — no better, no worse.\",\n",
750
+ " \"Some thoughtful passages amid otherwise dry writing.\",\n",
751
+ " \"I appreciated the ideas more than the execution.\",\n",
752
+ " \"Struggled with cohesion.\",\n",
753
+ " \"Solidly average.\",\n",
754
+ " \"Good on paper, flat in practice.\",\n",
755
+ " \"A few bright spots, but mostly dim.\",\n",
756
+ " \"The kind of book that fades from memory.\",\n",
757
+ " \"It scratched the surface but didn’t dig deep.\",\n",
758
+ " \"Standard fare with some promise.\",\n",
759
+ " \"Okay, but not memorable.\",\n",
760
+ " \"Had potential that went unrealized.\",\n",
761
+ " \"Could have been tighter, sharper, deeper.\",\n",
762
+ " \"A blend of mediocrity and mild interest.\",\n",
763
+ " \"I kept reading, but barely.\"\n",
764
+ " ],\n",
765
+ " \"negative\": [\n",
766
+ " \"I struggled to get through this one — it just didn’t grab me.\",\n",
767
+ " \"The plot was confusing and the characters felt underdeveloped.\",\n",
768
+ " \"Disappointing. I had high hopes, but they weren't met.\",\n",
769
+ " \"Uninspired writing and a story that never quite took off.\",\n",
770
+ " \"Unfortunately, it was dull and predictable throughout.\",\n",
771
+ " \"The pacing dragged and I couldn’t find anything compelling.\",\n",
772
+ " \"This felt like a chore to read — lacked heart and originality.\",\n",
773
+ " \"Nothing really worked for me in this book.\",\n",
774
+ " \"A frustrating read that left me unsatisfied.\",\n",
775
+ " \"I kept hoping it would improve, but it never did.\",\n",
776
+ " \"The characters didn’t feel real, and the dialogue was forced.\",\n",
777
+ " \"I couldn't connect with the story at all.\",\n",
778
+ " \"A slow, meandering narrative with little payoff.\",\n",
779
+ " \"Tried too hard to be deep, but just felt empty.\",\n",
780
+ " \"The tone was uneven and confusing.\",\n",
781
+ " \"Way too repetitive and lacking progression.\",\n",
782
+ " \"The ending was abrupt and unsatisfying.\",\n",
783
+ " \"No emotional resonance — I felt nothing throughout.\",\n",
784
+ " \"I expected much more, but this fell flat.\",\n",
785
+ " \"Poorly edited and full of clichés.\",\n",
786
+ " \"The premise was interesting, but poorly executed.\",\n",
787
+ " \"Just didn’t live up to the praise.\",\n",
788
+ " \"A disjointed mess from start to finish.\",\n",
789
+ " \"Overly long and painfully dull.\",\n",
790
+ " \"Dialogue that felt robotic and unrealistic.\",\n",
791
+ " \"A hollow shell of what it could’ve been.\",\n",
792
+ " \"It lacked a coherent structure.\",\n",
793
+ " \"More confusing than complex.\",\n",
794
+ " \"Reading it felt like a task, not a treat.\",\n",
795
+ " \"There was no tension, no emotion — just words.\",\n",
796
+ " \"Characters with no motivation or development.\",\n",
797
+ " \"The plot twists were nonsensical.\",\n",
798
+ " \"Regret buying this book.\",\n",
799
+ " \"Nothing drew me in, nothing made me stay.\",\n",
800
+ " \"Too many subplots and none were satisfying.\",\n",
801
+ " \"Tedious and unimaginative.\",\n",
802
+ " \"Like reading a rough draft.\",\n",
803
+ " \"Disjointed, distant, and disappointing.\",\n",
804
+ " \"A lot of buildup with no payoff.\",\n",
805
+ " \"I don’t understand the hype.\",\n",
806
+ " \"This book simply didn’t work.\",\n",
807
+ " \"Forgettable in every sense.\",\n",
808
+ " \"More effort should’ve gone into editing.\",\n",
809
+ " \"The story lost its way early on.\",\n",
810
+ " \"It dragged endlessly.\",\n",
811
+ " \"I kept checking how many pages were left.\",\n",
812
+ " \"This lacked vision and clarity.\",\n",
813
+ " \"I expected substance — got fluff.\",\n",
814
+ " \"It failed to make me care.\"\n",
815
+ " ]\n",
816
+ "}"
817
+ ]
818
+ },
819
+ {
820
+ "cell_type": "markdown",
821
+ "metadata": {
822
+ "id": "fQhfVaDmuULT"
823
+ },
824
+ "source": [
825
+ "### *b. Generate 10 reviews per book using random sampling from the corresponding 50*"
826
+ ]
827
+ },
828
+ {
829
+ "cell_type": "code",
830
+ "execution_count": 17,
831
+ "metadata": {
832
+ "id": "l2SRc3PjuTGM"
833
+ },
834
+ "outputs": [],
835
+ "source": [
836
+ "review_rows = []\n",
837
+ "for _, row in df_books.iterrows():\n",
838
+ " title = row['title']\n",
839
+ " sentiment_label = row['sentiment_label']\n",
840
+ " review_pool = synthetic_reviews_by_sentiment[sentiment_label]\n",
841
+ " sampled_reviews = random.sample(review_pool, 10)\n",
842
+ " for review_text in sampled_reviews:\n",
843
+ " review_rows.append({\n",
844
+ " \"title\": title,\n",
845
+ " \"sentiment_label\": sentiment_label,\n",
846
+ " \"review_text\": review_text,\n",
847
+ " \"rating\": row['rating'],\n",
848
+ " \"popularity_score\": row['popularity_score']\n",
849
+ " })"
850
+ ]
851
+ },
852
+ {
853
+ "cell_type": "markdown",
854
+ "metadata": {
855
+ "id": "bmJMXF-Bukdm"
856
+ },
857
+ "source": [
858
+ "### *c. Create the final dataframe df_reviews & save it as synthetic_book_reviews.csv*"
859
+ ]
860
+ },
861
+ {
862
+ "cell_type": "code",
863
+ "execution_count": 18,
864
+ "metadata": {
865
+ "id": "ZUKUqZsuumsp"
866
+ },
867
+ "outputs": [],
868
+ "source": [
869
+ "df_reviews = pd.DataFrame(review_rows)\n",
870
+ "df_reviews.to_csv(\"synthetic_book_reviews.csv\", index=False)"
871
+ ]
872
+ },
873
+ {
874
+ "cell_type": "code",
875
+ "execution_count": 19,
876
+ "metadata": {
877
+ "colab": {
878
+ "base_uri": "https://localhost:8080/"
879
+ },
880
+ "id": "3946e521",
881
+ "outputId": "514d7bef-0488-4933-b03c-953b9e8a7f66"
882
+ },
883
+ "outputs": [
884
+ {
885
+ "output_type": "stream",
886
+ "name": "stdout",
887
+ "text": [
888
+ "✅ Wrote synthetic_title_level_features.csv\n",
889
+ "✅ Wrote synthetic_monthly_revenue_series.csv\n"
890
+ ]
891
+ }
892
+ ],
893
+ "source": [
894
+ "\n",
895
+ "# ============================================================\n",
896
+ "# ✅ Create \"R-ready\" derived inputs (root-level files)\n",
897
+ "# ============================================================\n",
898
+ "# These two files make the R notebook robust and fast:\n",
899
+ "# 1) synthetic_title_level_features.csv -> regression-ready, one row per title\n",
900
+ "# 2) synthetic_monthly_revenue_series.csv -> forecasting-ready, one row per month\n",
901
+ "\n",
902
+ "import numpy as np\n",
903
+ "\n",
904
+ "def _safe_num(s):\n",
905
+ " return pd.to_numeric(\n",
906
+ " pd.Series(s).astype(str).str.replace(r\"[^0-9.]\", \"\", regex=True),\n",
907
+ " errors=\"coerce\"\n",
908
+ " )\n",
909
+ "\n",
910
+ "# --- Clean book metadata (price/rating) ---\n",
911
+ "df_books_r = df_books.copy()\n",
912
+ "if \"price\" in df_books_r.columns:\n",
913
+ " df_books_r[\"price\"] = _safe_num(df_books_r[\"price\"])\n",
914
+ "if \"rating\" in df_books_r.columns:\n",
915
+ " df_books_r[\"rating\"] = _safe_num(df_books_r[\"rating\"])\n",
916
+ "\n",
917
+ "df_books_r[\"title\"] = df_books_r[\"title\"].astype(str).str.strip()\n",
918
+ "\n",
919
+ "# --- Clean sales ---\n",
920
+ "df_sales_r = df_sales.copy()\n",
921
+ "df_sales_r[\"title\"] = df_sales_r[\"title\"].astype(str).str.strip()\n",
922
+ "df_sales_r[\"month\"] = pd.to_datetime(df_sales_r[\"month\"], errors=\"coerce\")\n",
923
+ "df_sales_r[\"units_sold\"] = _safe_num(df_sales_r[\"units_sold\"])\n",
924
+ "\n",
925
+ "# --- Clean reviews ---\n",
926
+ "df_reviews_r = df_reviews.copy()\n",
927
+ "df_reviews_r[\"title\"] = df_reviews_r[\"title\"].astype(str).str.strip()\n",
928
+ "df_reviews_r[\"sentiment_label\"] = df_reviews_r[\"sentiment_label\"].astype(str).str.lower().str.strip()\n",
929
+ "if \"rating\" in df_reviews_r.columns:\n",
930
+ " df_reviews_r[\"rating\"] = _safe_num(df_reviews_r[\"rating\"])\n",
931
+ "if \"popularity_score\" in df_reviews_r.columns:\n",
932
+ " df_reviews_r[\"popularity_score\"] = _safe_num(df_reviews_r[\"popularity_score\"])\n",
933
+ "\n",
934
+ "# --- Sentiment shares per title (from reviews) ---\n",
935
+ "sent_counts = (\n",
936
+ " df_reviews_r.groupby([\"title\", \"sentiment_label\"])\n",
937
+ " .size()\n",
938
+ " .unstack(fill_value=0)\n",
939
+ ")\n",
940
+ "for lab in [\"positive\", \"neutral\", \"negative\"]:\n",
941
+ " if lab not in sent_counts.columns:\n",
942
+ " sent_counts[lab] = 0\n",
943
+ "\n",
944
+ "sent_counts[\"total_reviews\"] = sent_counts[[\"positive\", \"neutral\", \"negative\"]].sum(axis=1)\n",
945
+ "den = sent_counts[\"total_reviews\"].replace(0, np.nan)\n",
946
+ "sent_counts[\"share_positive\"] = sent_counts[\"positive\"] / den\n",
947
+ "sent_counts[\"share_neutral\"] = sent_counts[\"neutral\"] / den\n",
948
+ "sent_counts[\"share_negative\"] = sent_counts[\"negative\"] / den\n",
949
+ "sent_counts = sent_counts.reset_index()\n",
950
+ "\n",
951
+ "# --- Sales aggregation per title ---\n",
952
+ "sales_by_title = (\n",
953
+ " df_sales_r.dropna(subset=[\"title\"])\n",
954
+ " .groupby(\"title\", as_index=False)\n",
955
+ " .agg(\n",
956
+ " months_observed=(\"month\", \"nunique\"),\n",
957
+ " avg_units_sold=(\"units_sold\", \"mean\"),\n",
958
+ " total_units_sold=(\"units_sold\", \"sum\"),\n",
959
+ " )\n",
960
+ ")\n",
961
+ "\n",
962
+ "# --- Title-level features (join sales + books + sentiment) ---\n",
963
+ "df_title = (\n",
964
+ " sales_by_title\n",
965
+ " .merge(df_books_r[[\"title\", \"price\", \"rating\"]], on=\"title\", how=\"left\")\n",
966
+ " .merge(sent_counts[[\"title\", \"share_positive\", \"share_neutral\", \"share_negative\", \"total_reviews\"]],\n",
967
+ " on=\"title\", how=\"left\")\n",
968
+ ")\n",
969
+ "\n",
970
+ "df_title[\"avg_revenue\"] = df_title[\"avg_units_sold\"] * df_title[\"price\"]\n",
971
+ "df_title[\"total_revenue\"] = df_title[\"total_units_sold\"] * df_title[\"price\"]\n",
972
+ "\n",
973
+ "df_title.to_csv(\"synthetic_title_level_features.csv\", index=False)\n",
974
+ "print(\"✅ Wrote synthetic_title_level_features.csv\")\n",
975
+ "\n",
976
+ "# --- Monthly revenue series (proxy: units_sold * price) ---\n",
977
+ "monthly_rev = (\n",
978
+ " df_sales_r.merge(df_books_r[[\"title\", \"price\"]], on=\"title\", how=\"left\")\n",
979
+ ")\n",
980
+ "monthly_rev[\"revenue\"] = monthly_rev[\"units_sold\"] * monthly_rev[\"price\"]\n",
981
+ "\n",
982
+ "df_monthly = (\n",
983
+ " monthly_rev.dropna(subset=[\"month\"])\n",
984
+ " .groupby(\"month\", as_index=False)[\"revenue\"]\n",
985
+ " .sum()\n",
986
+ " .rename(columns={\"revenue\": \"total_revenue\"})\n",
987
+ " .sort_values(\"month\")\n",
988
+ ")\n",
989
+ "# if revenue is all NA (e.g., missing price), fallback to units_sold as a teaching proxy\n",
990
+ "if df_monthly[\"total_revenue\"].notna().sum() == 0:\n",
991
+ " df_monthly = (\n",
992
+ " df_sales_r.dropna(subset=[\"month\"])\n",
993
+ " .groupby(\"month\", as_index=False)[\"units_sold\"]\n",
994
+ " .sum()\n",
995
+ " .rename(columns={\"units_sold\": \"total_revenue\"})\n",
996
+ " .sort_values(\"month\")\n",
997
+ " )\n",
998
+ "\n",
999
+ "df_monthly[\"month\"] = pd.to_datetime(df_monthly[\"month\"], errors=\"coerce\").dt.strftime(\"%Y-%m-%d\")\n",
1000
+ "df_monthly.to_csv(\"synthetic_monthly_revenue_series.csv\", index=False)\n",
1001
+ "print(\"✅ Wrote synthetic_monthly_revenue_series.csv\")\n"
1002
+ ]
1003
+ },
1004
+ {
1005
+ "cell_type": "markdown",
1006
+ "metadata": {
1007
+ "id": "RYvGyVfXuo54"
1008
+ },
1009
+ "source": [
1010
+ "### *d. ✋🏻🛑⛔️ View the first few lines*"
1011
+ ]
1012
+ },
1013
+ {
1014
+ "cell_type": "code",
1015
+ "execution_count": 20,
1016
+ "metadata": {
1017
+ "colab": {
1018
+ "base_uri": "https://localhost:8080/"
1019
+ },
1020
+ "id": "xfE8NMqOurKo",
1021
+ "outputId": "191730ba-d5e2-4df7-97d2-99feb0b704af"
1022
+ },
1023
+ "outputs": [
1024
+ {
1025
+ "output_type": "stream",
1026
+ "name": "stdout",
1027
+ "text": [
1028
+ " title sentiment_label \\\n",
1029
+ "0 A Light in the Attic neutral \n",
1030
+ "1 A Light in the Attic neutral \n",
1031
+ "2 A Light in the Attic neutral \n",
1032
+ "3 A Light in the Attic neutral \n",
1033
+ "4 A Light in the Attic neutral \n",
1034
+ "\n",
1035
+ " review_text rating popularity_score \n",
1036
+ "0 Had potential that went unrealized. Three 3 \n",
1037
+ "1 The themes were solid, but not well explored. Three 3 \n",
1038
+ "2 It simply lacked that emotional punch. Three 3 \n",
1039
+ "3 Serviceable but not something I'd go out of my... Three 3 \n",
1040
+ "4 Standard fare with some promise. Three 3 \n"
1041
+ ]
1042
+ }
1043
+ ],
1044
+ "source": [
1045
+ "print(df_reviews.head())"
1046
+ ]
1047
+ }
1048
+ ],
1049
+ "metadata": {
1050
+ "colab": {
1051
+ "collapsed_sections": [
1052
+ "jpASMyIQMaAq",
1053
+ "lquNYCbfL9IM",
1054
+ "0IWuNpxxYDJF",
1055
+ "oCdTsin2Yfp3",
1056
+ "T0TOeRC4Yrnn",
1057
+ "duI5dv3CZYvF",
1058
+ "qMjRKMBQZlJi",
1059
+ "p-1Pr2szaqLk",
1060
+ "SIaJUGIpaH4V",
1061
+ "pY4yCoIuaQqp",
1062
+ "n4-TaNTFgPak",
1063
+ "HnngRNTgacYt",
1064
+ "HF9F9HIzgT7Z",
1065
+ "T8AdKkmASq9a",
1066
+ "OhXbdGD5fH0c",
1067
+ "L2ak1HlcgoTe",
1068
+ "4IXZKcCSgxnq",
1069
+ "EhIjz9WohAmZ",
1070
+ "Gi4y9M9KuDWx",
1071
+ "fQhfVaDmuULT",
1072
+ "bmJMXF-Bukdm",
1073
+ "RYvGyVfXuo54"
1074
+ ],
1075
+ "provenance": []
1076
+ },
1077
+ "kernelspec": {
1078
+ "display_name": "Python 3",
1079
+ "name": "python3"
1080
+ },
1081
+ "language_info": {
1082
+ "name": "python"
1083
+ }
1084
+ },
1085
+ "nbformat": 4,
1086
+ "nbformat_minor": 0
1087
+ }
2a_Python_Analysis_Solutions (1).ipynb ADDED
The diff for this file is too large to render. See raw diff