engralimalik committed
Commit 70cf329 · verified · 1 Parent(s): ffb3516

Update app.py

Files changed (1)
  1. app.py +17 -17
app.py CHANGED
@@ -4,11 +4,12 @@ from sklearn.cluster import KMeans
 from folium.plugins import MarkerCluster
 import requests
 from io import BytesIO
+import streamlit as st
 
 # Load data from Excel URL with error handling
 def load_data(url):
     try:
-        print("Loading data from:", url)
+        st.write(f"Loading data from {url}...")
         response = requests.get(url)
         if response.status_code == 200:
             lat_long_data = pd.read_excel(BytesIO(response.content), sheet_name="lat long", engine='openpyxl')
@@ -25,19 +26,19 @@ def load_data(url):
 
             # Strip any extra spaces from column names
             merged_data.columns = merged_data.columns.str.strip()
-            print("Data loaded successfully")
+            st.write("Data loaded successfully")
             return merged_data
         else:
-            print(f"Failed to load data. Status code: {response.status_code}")
+            st.write(f"Failed to load data. Status code: {response.status_code}")
             return pd.DataFrame()
     except Exception as e:
-        print(f"Error loading data: {e}")
+        st.write(f"Error loading data: {e}")
         return pd.DataFrame()
 
 # Perform clustering to find data center location
 def find_data_center(df, n_clusters=1):
     if df.empty:
-        print("Dataframe is empty, skipping clustering")
+        st.write("Dataframe is empty, skipping clustering")
         return None
     kmeans = KMeans(n_clusters=n_clusters, random_state=0).fit(df[["latitude", "longitude"]])
     return kmeans.cluster_centers_
@@ -45,7 +46,7 @@ def find_data_center(df, n_clusters=1):
 # Plot the map with markers
 def plot_map(df, center):
     if df.empty:
-        print("Dataframe is empty, skipping map plotting")
+        st.write("Dataframe is empty, skipping map plotting")
         return None
 
     map = folium.Map(location=[center[0][0], center[0][1]], zoom_start=10)
@@ -76,7 +77,7 @@ def plot_map(df, center):
 # Calculate the impact of data center on latency and bandwidth
 def calculate_impact(df, center):
     if df.empty:
-        print("Dataframe is empty, skipping impact calculation")
+        st.write("Dataframe is empty, skipping impact calculation")
         return None
     avg_latency_before = df['latency'].mean()
     avg_download_before = df['download_speed'].mean()
@@ -105,7 +106,8 @@ def display_impact(latency_reduction, download_increase, upload_increase, avg_la
         }
     }
 
-    return impact_data
+    st.write("Impact of Data Center on Latency and Bandwidth:")
+    st.write(impact_data)
 
 # Main function to run the application
 def main():
@@ -113,28 +115,26 @@ def main():
     df = load_data(url)
 
     if df.empty:
-        print("No data to process, exiting application.")
+        st.write("No data to process, exiting application.")
         return
 
     # Find the data center location using clustering
     center = find_data_center(df)
     if center is None:
-        print("Could not find data center, exiting application.")
+        st.write("Could not find data center, exiting application.")
         return
 
-    # Create the map and save it
+    # Create the map and embed it in the Streamlit app
    map = plot_map(df, center)
     if map:
-        map.save("index.html")
+        from io import BytesIO
+        map_html = map._repr_html_()  # Get the HTML representation of the map
+        st.markdown(map_html, unsafe_allow_html=True)
 
     # Calculate the impact of adding the data center
     latency_reduction, download_increase, upload_increase, avg_latency_before, avg_download_before, avg_upload_before = calculate_impact(df, center)
     if latency_reduction is not None:
-        impact_data = display_impact(latency_reduction, download_increase, upload_increase, avg_latency_before, avg_download_before, avg_upload_before)
-        print("Impact of Data Center on Latency and Bandwidth:")
-        print(impact_data)
-
-        print("Map has been saved as index.html.")
+        display_impact(latency_reduction, download_increase, upload_increase, avg_latency_before, avg_download_before, avg_upload_before)
 
 if __name__ == "__main__":
     main()
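
Note on the new embedding step: `st.markdown(..., unsafe_allow_html=True)` sanitizes the HTML it receives and may not execute the Leaflet scripts inside folium's `_repr_html_()` output, so the map can render blank. Below is a minimal sketch of an alternative using Streamlit's bundled `streamlit.components.v1` module; the coordinates are placeholders standing in for the KMeans cluster center computed in app.py, and the `height` value is illustrative.

```python
import folium
import streamlit.components.v1 as components

# Placeholder center, standing in for kmeans.cluster_centers_[0] from app.py.
center_lat, center_lon = 24.7136, 46.6753

# Build a small folium map with a single marker at the proposed site.
m = folium.Map(location=[center_lat, center_lon], zoom_start=10)
folium.Marker([center_lat, center_lon], popup="Proposed data center").add_to(m)

# components.html renders the map in an iframe and runs its JavaScript,
# which st.markdown(..., unsafe_allow_html=True) generally does not.
components.html(m._repr_html_(), height=500)
```

The third-party streamlit-folium package (`st_folium`) is another commonly used option, at the cost of an extra dependency.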