import copy
import gradio as gr
from huggingface_hub import webhook_endpoint, WebhookPayload
from fastapi import Request
# import other libraries
from specklepy.api.client import SpeckleClient
from specklepy.api.credentials import get_default_account, get_local_accounts
from specklepy.transports.server import ServerTransport
from specklepy.api import operations
from specklepy.objects.geometry import Polyline, Point
import pandas as pd
import numpy as np
import json
import os
from utils import *
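# Helper functions used below (getSpeckleStream, get_dataframe,
# aggregate_data_optimized, updateStreamAnalysisFast) come from utils.py.

# The Speckle personal access token is read from the SPECKLE_TOKEN environment variable.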
speckle_token = os.environ.get("SPECKLE_TOKEN")
current_directory = os.path.dirname(os.path.abspath(__file__))
# Path to the config.json file
config_file_path = os.path.join(current_directory, "config.json")
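# Keys read from config.json in this script (values here are placeholders):
# {
#   "streamID": "<Speckle stream id>",
#   "branchA": "<source branch name>",
#   "branchB": "<target branch name>",
#   "EXCLUDE_COLS": ["<columns ignored by the aggregation>"],
#   "UUID_COL": "<unique id column>",
#   "REFERENCE_COL": "<reference column>"
# }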
with open(config_file_path, 'r') as f:
    config = json.load(f)
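# Connect to the Speckle server and authenticate with the token from the environment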
CLIENT = SpeckleClient(host="https://speckle.xyz/")
CLIENT.authenticate_with_token(token=speckle_token)
print(config.keys())
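# Stream ID and the two branch names involved in the merge, as defined in config.json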
branchA = config["branchA"]
branchB = config["branchB"]
STREAM_ID = config["streamID"]
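# Webhook endpoint exposed by this Space via huggingface_hub's @webhook_endpoint.
# It accepts two kinds of POST payloads (both wrapped in a top-level "payload" key):
#   - a custom trigger:  {"source": "notionTrigger", "action": ..., "streamName": ..., "branchName": ...}
#   - a Speckle webhook: {"event": {"event_name": ..., "data": {...}}, "stream": {"id": ...}}
# After validating the event against config.json, it merges the data from branchA
# into branchB and commits the result back to the Speckle stream.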
@webhook_endpoint
async def mergeStreams(request: Request):
    # Initialize flag
    should_continue = False

    # Read the request body as JSON
    payload = await request.json()

    print("============= payload =============")
    print(payload)
    print("============= config =============")
    print(config)

    payload = payload["payload"]
    # webhook calls can come from different sources
    if payload.get('source') == 'notionTrigger':
        action = payload.get('action')
        streamName = payload.get('streamName')
        branchName = payload.get('branchName')
        update_source = "notionTrigger"
        should_continue = True
    else:
        update_source = "speckleWebhook"
        event_name = payload["event"]["event_name"]
        streamid = payload.get("stream", {}).get("id")

        # Extract branchName; commit_update events carry it in the "old" commit data
        if event_name == "commit_update":
            branchName = payload.get("event", {}).get("data", {}).get("old", {}).get("branchName")
        else:
            branchName = payload.get("event", {}).get("data", {}).get("commit", {}).get("branchName")
        # List of valid event types
        valid_event_types = ["commit_create", "commit_delete", "commit_update"]

        if event_name in valid_event_types:
            if streamid == STREAM_ID:
                if branchName == branchA:
                    should_continue = True
                else:
                    print(f"Branch name {branchName} does not match branchA from config.")
            else:
                print(f"Stream id {streamid} does not match the stream ID from config.")
        else:
            print(f"Event type {event_name} is not one of the specified types.")
    # If the flag is True, continue running the main part of the code
    if should_continue:
        # get the current state of branch A
        stream = getSpeckleStream(STREAM_ID,
                                  branchA,
                                  CLIENT,
                                  commit_id="")

        # navigate to the list with Speckle objects of interest
        try:
            stream_data = stream["@Data"]["@{0}"]
        except Exception:
            print("something went wrong, trying again with non-capital 'd'")
            try:
                stream_data = stream["@data"]["@{0}"]
            except Exception:
                print("check on speckle.com how to access the data")
        # transform stream_data to dataframe (create a backup copy of this dataframe)
        df = get_dataframe(stream_data, return_original_df=False)
        df_A = df.copy()
        # get the current state of branch B
        stream = getSpeckleStream(STREAM_ID,
                                  branchB,
                                  CLIENT,
                                  commit_id="")

        # navigate to the list with Speckle objects of interest
        try:
            stream_data = stream["@Data"]["@{0}"]
        except Exception:
            print("something went wrong, trying again with non-capital 'd'")
            try:
                stream_data = stream["@data"]["@{0}"]
            except Exception:
                print("check on speckle.com how to access the data")

        # transform stream_data to dataframe (create a backup copy of this dataframe)
        df = get_dataframe(stream_data, return_original_df=False)
        df_B = df.copy()
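        # Column settings from config.json used by the aggregation step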
        excludeCol = config["EXCLUDE_COLS"]
        uuidCol = config["UUID_COL"]
        refCol = config["REFERENCE_COL"]
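        # Merge the two dataframes; aggregate_data_optimized also returns a log dict,
        # which is not used further in this script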
        aggregated_df_b, log_dict = aggregate_data_optimized(df_A.copy(), df_B.copy(), uuidCol, refCol, excludeCol)
        # additional cleanup: drop the geometry column (either casing) and fill NaNs
        aggregated_df_b_noGeo = aggregated_df_b.drop(columns=['@geometry', '@Geometry'], errors='ignore')
        aggregated_df_b_noGeo = aggregated_df_b_noGeo.fillna("NA")
        print(aggregated_df_b_noGeo)
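        # Commit the merged data back to branch B and build the commit URL to return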
        commit_id = updateStreamAnalysisFast(
            client=CLIENT,
            stream_id=STREAM_ID,
            branch_name=branchB,
            new_data=aggregated_df_b_noGeo,
            geometryGroupPath=["@Data", "@{0}"],
            match_by_id="id",
            # openai_key=None,
            return_original=False,
            comm_message="auto commit from HF; Triggered by: " + update_source)

        return "https://speckle.xyz/streams/" + STREAM_ID + "/commits/" + commit_id