Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
@@ -4,18 +4,26 @@ from sklearn.cluster import KMeans
|
|
4 |
from folium.plugins import MarkerCluster
|
5 |
import requests
|
6 |
from io import BytesIO
|
7 |
-
import streamlit as st
|
8 |
|
9 |
-
# Load data from Excel
|
10 |
def load_data(url):
|
11 |
-
# Fetch the file from the URL
|
12 |
response = requests.get(url)
|
13 |
# Read the Excel file from the response content
|
14 |
lat_long_data = pd.read_excel(BytesIO(response.content), sheet_name="lat long", engine='openpyxl')
|
15 |
measurement_data = pd.read_excel(BytesIO(response.content), sheet_name="measurement data", engine='openpyxl')
|
16 |
-
|
17 |
# Merge data on school_id_giga
|
18 |
-
merged_data = pd.merge(
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
19 |
return merged_data
|
20 |
|
21 |
# Perform clustering to find data center location
|
@@ -23,8 +31,11 @@ def find_data_center(df, n_clusters=1):
|
|
23 |
kmeans = KMeans(n_clusters=n_clusters, random_state=0).fit(df[["latitude", "longitude"]])
|
24 |
return kmeans.cluster_centers_
|
25 |
|
26 |
-
#
|
27 |
def plot_map(df, center):
|
|
|
|
|
|
|
28 |
map = folium.Map(location=[center[0][0], center[0][1]], zoom_start=10)
|
29 |
marker_cluster = MarkerCluster().add_to(map)
|
30 |
|
@@ -33,7 +44,7 @@ def plot_map(df, center):
|
|
33 |
folium.Marker(
|
34 |
location=[row["latitude"], row["longitude"]],
|
35 |
popup=(
|
36 |
-
f"School Name: {row
|
37 |
f"Download Speed: {row['download_speed']} Mbps<br>"
|
38 |
f"Upload Speed: {row['upload_speed']} Mbps<br>"
|
39 |
f"Latency: {row['latency']} ms"
|
@@ -41,62 +52,68 @@ def plot_map(df, center):
|
|
41 |
icon=folium.Icon(color="blue", icon="info-sign")
|
42 |
).add_to(marker_cluster)
|
43 |
|
44 |
-
# Add data center
|
45 |
folium.Marker(
|
46 |
location=[center[0][0], center[0][1]],
|
47 |
popup="Proposed Data Center",
|
48 |
icon=folium.Icon(color="red", icon="cloud")
|
49 |
).add_to(map)
|
|
|
50 |
return map
|
51 |
|
52 |
-
# Calculate
|
53 |
-
def
|
54 |
-
# Calculate
|
55 |
-
|
|
|
|
|
56 |
|
57 |
-
# Assuming
|
58 |
-
|
59 |
-
|
|
|
60 |
|
61 |
-
#
|
62 |
-
|
63 |
-
before_latency = before_data['latency'].mean()
|
64 |
-
|
65 |
-
after_bandwidth = after_data[['download_speed', 'upload_speed']].mean()
|
66 |
-
after_latency = after_data['latency'].mean()
|
67 |
|
68 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
69 |
|
70 |
# Main function to run the application
|
71 |
def main():
|
72 |
-
url = "https://huggingface.co/spaces/engralimalik/lace/resolve/main/data%20barbados.xlsx"
|
73 |
df = load_data(url)
|
74 |
|
75 |
-
# Find the
|
76 |
center = find_data_center(df)
|
77 |
|
78 |
-
#
|
79 |
map = plot_map(df, center)
|
80 |
map.save("index.html")
|
81 |
|
82 |
-
# Calculate
|
83 |
-
|
84 |
-
|
85 |
-
# Display the map in Streamlit
|
86 |
-
st.title("Impact of Data Center Addition")
|
87 |
-
st.markdown("Here’s the map of the schools and the proposed data center location.")
|
88 |
-
st.components.v1.html(open("index.html", "r").read(), height=500)
|
89 |
|
90 |
-
# Display
|
91 |
-
|
92 |
-
|
93 |
-
|
94 |
-
|
95 |
-
|
96 |
-
st.subheader("After Data Center Addition")
|
97 |
-
st.write(f"Average Download Speed: {after_bandwidth['download_speed']} Mbps")
|
98 |
-
st.write(f"Average Upload Speed: {after_bandwidth['upload_speed']} Mbps")
|
99 |
-
st.write(f"Average Latency: {after_latency} ms")
|
100 |
|
101 |
if __name__ == "__main__":
|
102 |
main()
|
|
|
4 |
from folium.plugins import MarkerCluster
|
5 |
import requests
|
6 |
from io import BytesIO
|
|
|
7 |
|
8 |
+
# Load data from Excel URL
def load_data(url):
    """Download an Excel workbook and return the merged school dataset.

    Parameters
    ----------
    url : str
        Direct-download URL of a workbook containing a "lat long" sheet
        and a "measurement data" sheet.

    Returns
    -------
    pandas.DataFrame
        Inner join of both sheets on ``school_id_giga``, with column
        names stripped of surrounding whitespace.

    Raises
    ------
    requests.HTTPError
        If the download returns a non-2xx status.
    """
    response = requests.get(url)
    # Fail loudly on a bad download instead of handing pandas an HTML
    # error page that would produce a confusing parse failure.
    response.raise_for_status()

    # Fetch the payload once and reuse the bytes for both sheet reads.
    content = response.content
    lat_long_data = pd.read_excel(BytesIO(content), sheet_name="lat long", engine='openpyxl')
    measurement_data = pd.read_excel(BytesIO(content), sheet_name="measurement data", engine='openpyxl')

    # Merge data on school_id_giga
    merged_data = pd.merge(
        lat_long_data,
        measurement_data,
        left_on="school_id_giga",
        right_on="school_id_giga",
        how="inner"
    )

    # Strip any extra spaces from column names
    merged_data.columns = merged_data.columns.str.strip()

    return merged_data
|
28 |
|
29 |
# Perform clustering to find data center location
|
|
|
31 |
kmeans = KMeans(n_clusters=n_clusters, random_state=0).fit(df[["latitude", "longitude"]])
|
32 |
return kmeans.cluster_centers_
|
33 |
|
34 |
+
# Plot the map with markers
|
35 |
def plot_map(df, center):
|
36 |
+
# Print column names for debugging
|
37 |
+
print(df.columns)
|
38 |
+
|
39 |
map = folium.Map(location=[center[0][0], center[0][1]], zoom_start=10)
|
40 |
marker_cluster = MarkerCluster().add_to(map)
|
41 |
|
|
|
44 |
folium.Marker(
|
45 |
location=[row["latitude"], row["longitude"]],
|
46 |
popup=(
|
47 |
+
f"School Name: {row.get('school_name', 'N/A')}<br>"
|
48 |
f"Download Speed: {row['download_speed']} Mbps<br>"
|
49 |
f"Upload Speed: {row['upload_speed']} Mbps<br>"
|
50 |
f"Latency: {row['latency']} ms"
|
|
|
52 |
icon=folium.Icon(color="blue", icon="info-sign")
|
53 |
).add_to(marker_cluster)
|
54 |
|
55 |
+
# Add data center location
|
56 |
folium.Marker(
|
57 |
location=[center[0][0], center[0][1]],
|
58 |
popup="Proposed Data Center",
|
59 |
icon=folium.Icon(color="red", icon="cloud")
|
60 |
).add_to(map)
|
61 |
+
|
62 |
return map
|
63 |
|
64 |
+
# Calculate the impact of data center on latency and bandwidth
def calculate_impact(df, center, improvement=0.2):
    """Estimate average network metrics before and after the data center.

    Parameters
    ----------
    df : pandas.DataFrame
        Must contain 'latency', 'download_speed' and 'upload_speed' columns.
    center : array-like
        Proposed data-center coordinates. Unused by the current model;
        kept for interface compatibility with existing callers.
    improvement : float, optional
        Assumed fractional improvement (default 0.2, i.e. 20% lower
        latency and 20% higher bandwidth).

    Returns
    -------
    tuple
        (projected latency, projected download, projected upload,
         baseline latency, baseline download, baseline upload).
    """
    # Baseline averages across all schools before the data center.
    avg_latency_before = df['latency'].mean()
    avg_download_before = df['download_speed'].mean()
    avg_upload_before = df['upload_speed'].mean()

    # Assuming the data center reduces latency and increases bandwidth
    # by `improvement` (20% by default).
    latency_reduction = avg_latency_before * (1.0 - improvement)
    download_increase = avg_download_before * (1.0 + improvement)
    upload_increase = avg_upload_before * (1.0 + improvement)

    # Return the new statistics alongside the baselines.
    return (latency_reduction, download_increase, upload_increase,
            avg_latency_before, avg_download_before, avg_upload_before)
|
|
|
|
|
|
|
|
|
78 |
|
79 |
+
# Display the impact of the data center on latency and bandwidth
def display_impact(latency_reduction, download_increase, upload_increase, avg_latency_before, avg_download_before, avg_upload_before):
    """Package before/after network statistics into a nested dict of
    human-readable strings (two-decimal values with units)."""
    def _section(latency, download, upload):
        # One report section: each metric formatted with its unit.
        return {
            "Latency": f"{latency:.2f} ms",
            "Download Speed": f"{download:.2f} Mbps",
            "Upload Speed": f"{upload:.2f} Mbps"
        }

    return {
        "Before Data Center": _section(avg_latency_before, avg_download_before, avg_upload_before),
        "After Data Center": _section(latency_reduction, download_increase, upload_increase)
    }
|
95 |
|
96 |
# Main function to run the application
def main():
    """End-to-end driver: load the school data, site the data center via
    clustering, render and save the map, and report the projected
    network impact to stdout."""
    url = "https://huggingface.co/spaces/engralimalik/lace/resolve/main/data%20barbados.xlsx"  # URL of your Excel file
    df = load_data(url)

    # Find the data center location using clustering
    center = find_data_center(df)

    # Create the map and save it. Named `school_map` rather than `map`
    # to avoid shadowing the builtin.
    school_map = plot_map(df, center)
    school_map.save("index.html")

    # Calculate the impact of adding the data center
    latency_reduction, download_increase, upload_increase, avg_latency_before, avg_download_before, avg_upload_before = calculate_impact(df, center)

    # Display the impact
    impact_data = display_impact(latency_reduction, download_increase, upload_increase, avg_latency_before, avg_download_before, avg_upload_before)
    print("Impact of Data Center on Latency and Bandwidth:")
    print(impact_data)

    print("Map has been saved as index.html.")
|
|
|
|
|
|
|
|
117 |
|
118 |
# Script entry point: run the pipeline only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()
|