import gradio as gr
import requests
import json
import os
from PIL import Image
import moviepy.video.io.ImageSequenceClip as ic
from pathlib import Path
import bs4
import datetime
import urllib.request
import uuid
import io
import base64
main_directory = "https://services.swpc.noaa.gov/images/animations/"
sdo_source = "https://sdo.gsfc.nasa.gov/assets/img/browse/"
sdo_source_format = "https://sdo.gsfc.nasa.gov/assets/img/browse/YEAR/MONTH/DAY/DATE_IDENT_SIZE_TOOL.jpg"
sdo_obj = ["HMIB","HMIBC","HMIIC","HMIIF","HMID","HMII","HMI171",
"0094","0131","0171","0193","0211",
"0304","0335","1600","1700","4500",
"211193171","211193171n","211193171rg",
"094335193","304211171"]
sdo_size= [256,512,1024,2048,4096]
html = """
PAGE_LINK
"""
# Gradio custom CSS: flex-box gallery layouts and white "card" styling for
# the image containers (generic, raw/animation, SOHO, and SDO variants).
css="""
.img_box{
display: flex;
flex-direction: column;
flex-flow: unset;
flex-wrap: wrap;
justify-content: space-around;
}
.img_class{
background: #ffffff;
max-width: 48%;
font-family: monospace;
border-top: #9300ff;
border-style: inset;
margin-top: 5px;
}
.img_class_raw{
background: #ffffff;
width: 100%;
font-family: monospace;
border-top: #9300ff;
border-style: inset;
margin-top: 5px;
display:flex;
flex-direction:column;
}
.img_box_soho{
display: flex;
flex-direction: row;
flex-wrap: wrap;
justify-content: space-between;
}
.img_class_soho{
background: #ffffff;
font-family: monospace;
border-top: #9300ff;
border-style: inset;
margin-top: 5px;
max-width: 48%;
}
.img_class_sdo{
background: #ffffff;
font-family: monospace;
border-top: #9300ff;
border-style: inset;
margin-top: 5px;
max-width: 25%;
font-size: small;
}
"""
def make_animation(url1="", url2="", url3="", url4="", url5="", url6="", url7="", url8="", cnt=1):
    """Build an animated GIF from the newest ``cnt`` images under the URL path
    assembled from the dropdown selections and return it embedded in HTML.

    Each ``urlN`` is a path segment chosen in the UI; unset gradio dropdowns
    contribute the literal strings "None" or "[]", so the assembled URL is
    truncated at the first occurrence of either.  Returns an HTML string with
    the GIF inlined as a base64 data URL, or "" when nothing could be built.
    """
    get_url = f'{main_directory}{url1}{url2}{url3}{url4}{url5}{url6}{url7}{url8}'
    # Truncate at placeholder text produced by unset dropdowns.
    get_url = get_url.split("None")[0]
    get_url = get_url.split("[]")[0]
    print(f'{get_url}')

    # Scrape hrefs out of the directory-listing HTML.
    link_box = []
    if not get_url.endswith('.json'):
        feed1 = requests.get(get_url)
        for line in feed1.text.split("href="):
            spl2 = line.split(">")[0]
            if spl2.endswith(('.json"', '.png"', '.gif"', '.jpg"')):
                link_box.append(spl2.strip('"'))
    if not link_box:
        # Original code indexed link_box[0] unconditionally and crashed on an
        # empty listing; bail out cleanly instead.
        print("passed")
        return ""
    link_box.sort(reverse=True)  # listing filenames sort newest-first

    html_out = ""
    # Keep only image links among the newest `cnt` entries.
    gif_box = [f'{get_url}{line}' for line in link_box[:cnt]
               if line.endswith(('.png', '.gif', '.jpg'))]
    if gif_box:
        gif_box.sort()  # play frames oldest-first
        print("Making GIF...")
        frames = []
        temp_gif_path = None
        try:
            uid = uuid.uuid4()
            for i, ea in enumerate(gif_box):
                # Unique frame filenames so concurrent requests don't clobber
                # each other (original reused tmp{i}.png across all calls).
                frame_path = f'tmp_{uid}_{i}.png'
                urllib.request.urlretrieve(ea, frame_path)
                frames.append(frame_path)
            fps = 60
            clip = ic.ImageSequenceClip(frames, fps=fps)
            temp_gif_path = f"temp_{uid}.gif"
            clip.write_gif(temp_gif_path, fps=fps)
            with open(temp_gif_path, 'rb') as gif_file:
                gif_bytes = gif_file.read()
            # Inline the GIF as a data URL so the HTML needs no served file.
            gif_base64 = base64.b64encode(gif_bytes).decode('utf-8')
            gif_data_url = f"data:image/gif;base64,{gif_base64}"
            # NOTE(review): markup reconstructed — the original literal was
            # corrupted; uses the .img_class_raw card style defined in `css`.
            html_out = (
                f'<div class="img_class_raw">'
                f'<img src="{gif_data_url}">'
                f'<a href="{get_url}" target="_blank">{get_url}</a>'
                f'</div>'
            )
        except Exception as e:
            # Best-effort: report and fall through with whatever html_out holds.
            print("ERROR HAPPENED")
            print(e)
        finally:
            # Remove temporary frame and GIF files (original leaked them).
            cleanup = list(frames)
            if temp_gif_path:
                cleanup.append(temp_gif_path)
            for path in cleanup:
                try:
                    os.remove(path)
                except OSError:
                    pass
    else:
        print("passed")
    return html_out
def load_json(url1="", url2="", url3="", url4="", url5="", url6="", url7="", url8=""):
    """Fetch the resource at the URL assembled from the dropdown selections.

    Returns a ``(html, json_text)`` pair: ``(None, text)`` for ``.json`` URLs,
    ``(html, None)`` with an image card for png/gif/jpg URLs, and
    ``(None, None)`` for anything else (e.g. a directory path).
    """
    get_url = f'{main_directory}{url1}{url2}{url3}{url4}{url5}{url6}{url7}{url8}'
    # Unset dropdowns contribute "None"/"[]"; truncate at the first occurrence.
    get_url = get_url.split("None")[0]
    get_url = get_url.split("[]")[0]
    print(f'{get_url}')
    if get_url.endswith('.json'):
        feed1 = requests.get(get_url)
        return None, feed1.text
    elif get_url.endswith(('.png', '.gif', '.jpg')):
        # NOTE(review): markup reconstructed — the original literal was
        # corrupted (`html_out=f"'`); uses the .img_class_raw card style.
        html_out = (
            f'<div class="img_class_raw">'
            f'<img src="{get_url}">'
            f'<a href="{get_url}" target="_blank">{get_url}</a>'
            f'</div>'
        )
        return html_out, None
    return None, None
def make_tree(url1="", url2="", url3="", url4="", url5="", url6="", url7="", url8=""):
    """Scrape the directory listing at the assembled URL and return a
    gr.update that fills the next dropdown with the links found.

    Returns None when the path already points at a ``.json`` leaf.
    """
    target = f'{main_directory}{url1}{url2}{url3}{url4}{url5}{url6}{url7}{url8}'
    print(f'######### :: {target}')
    if target.endswith('.json'):
        return None
    found = []
    listing = requests.get(target)
    for chunk in listing.text.split("href="):
        candidate = chunk.split(">")[0]
        print(candidate)
        # Keep sub-directories and json/png/gif/jpg leaves.
        if candidate.endswith(('/"', '.json"', '.png"', '.gif"', '.jpg"')):
            found.append(candidate.strip('""'))
    return gr.update(choices=[entry for entry in found], interactive=True)
def get_images():
    """Return an HTML gallery of every image listed under the geospace and
    top-level image directories.

    NOTE(review): the card markup was reconstructed — the original literals
    were corrupted.  The URLs preserve the original concatenation
    (``main_directory`` + "images/...") even though ``main_directory`` already
    ends in "images/animations/" — confirm that is the intended path.
    """
    html_out = '<div class="img_box">'
    # The original duplicated this scrape loop for each directory verbatim;
    # folded into one loop over both.
    for directory in (f'{main_directory}images/geospace/',
                      f'{main_directory}images/'):
        feed = requests.get(directory)
        for line in feed.text.split("href="):
            name = line.split(">")[0].strip('""')
            print(name)
            if name.endswith(('.png', '.gif', '.jpg')):
                html_out += (
                    f'<div class="img_class">'
                    f'<img src="{directory}{name}">'
                    f'<a href="{directory}{name}" target="_blank">{name}</a>'
                    f'</div>'
                )
    html_out += '</div>'
    return html_out
def run():
    """Build the navigation tree (for its side effects) and return the
    image-gallery HTML for the initial page load."""
    make_tree()
    return get_images()
# Placeholder date/time globals; real values are computed when the UI builds.
datestamp = ["1", "1", "1"]
timestamp = ["1", "1", "1"]
datetimestamp = "1:1:1"
def get_time():
    """Return a gr.update refreshing the clock banner with the current time.

    NOTE(review): the banner says "UTC" but ``datetime.datetime.now()``
    returns host-local time — confirm the deployment host runs in UTC.
    """
    # "YYYY-MM-DD HH:MM:SS" — microseconds stripped.
    datetimestamp = str(datetime.datetime.now()).split(".")[0]
    html_out = f"""Current: {datetimestamp} UTC
"""
    # Removed: dead year/month/day/hour/minute locals and the commented-out
    # multi-output return — they were never used (see the disabled
    # app.load wiring below).
    return gr.update(value=html_out)
with gr.Blocks() as app:
    # Seed the clock banner with the build-time timestamp.
    datetimestamp = str(datetime.datetime.now()).split(".")[0]
    datestamp = datetimestamp.replace(" ", "-").split("-")
    timestamp = datestamp[3].split(":")
    time_html = gr.HTML(f"""Current: {datetimestamp} UTC
""")

    # Frame count plus the first four path-segment dropdowns.
    with gr.Row():
        gr.Markdown("Frames:")
        cnt = gr.Number(label="Count", minimum=1, maximum=1000, precision=0, value=1, min_width=10)
        drop1 = gr.Dropdown()
        drop2 = gr.Dropdown()
        drop3 = gr.Dropdown()
        drop4 = gr.Dropdown()
    # Remaining four path-segment dropdowns.
    with gr.Row():
        drop5 = gr.Dropdown()
        drop6 = gr.Dropdown()
        drop7 = gr.Dropdown()
        drop8 = gr.Dropdown()

    load_btn = gr.Button("Load")
    anim_btn = gr.Button("Make Animation")
    html_raw = gr.HTML()
    links = gr.JSON()

    # On page load: refresh the clock, then populate the first dropdown.
    app.load(get_time, None, [time_html]).then(make_tree, None, drop1)
    load_btn.click(load_json, [drop1, drop2, drop3, drop4, drop5, drop6, drop7, drop8], [html_raw, links])
    anim_btn.click(make_animation, [drop1, drop2, drop3, drop4, drop5, drop6, drop7, drop8, cnt], [html_raw])
    # Each selection drills one level deeper and fills the next dropdown.
    drop1.change(make_tree, drop1, [drop2])
    drop2.change(make_tree, [drop1, drop2], [drop3])
    drop3.change(make_tree, [drop1, drop2, drop3], [drop4])
    drop4.change(make_tree, [drop1, drop2, drop3, drop4], [drop5])
    drop5.change(make_tree, [drop1, drop2, drop3, drop4, drop5], [drop6])
    drop6.change(make_tree, [drop1, drop2, drop3, drop4, drop5, drop6], [drop7])
    drop7.change(make_tree, [drop1, drop2, drop3, drop4, drop5, drop6, drop7], [drop8])

app.queue(default_concurrency_limit=10).launch()