|
import json |
|
import re |
|
import sys |
|
import time |
|
import copy |
|
from notion_client import Client |
|
|
|
from specklepy.api.client import SpeckleClient |
|
from specklepy.api.credentials import get_default_account, get_local_accounts |
|
from specklepy.transports.server import ServerTransport |
|
from specklepy.api import operations |
|
from specklepy.objects.geometry import Polyline, Point |
|
from specklepy.objects import Base |
|
import os |
|
from functools import wraps |
|
|
|
import gradio as gr |
|
import requests |
|
from huggingface_hub import webhook_endpoint, WebhookPayload |
|
from fastapi import Request |
|
import datetime |
|
|
|
import gradio as gr |
|
|
|
|
|
from utils import * |
|
|
|
# --- Module-level setup: load routing config, authenticate Notion and Speckle ---

# Directory containing this file; config.json is expected alongside it.
current_directory = os.path.dirname(os.path.abspath(__file__))

# Stream/branch routing table consumed by the webhook handler below.
config_file_path = os.path.join(current_directory, "config.json")
with open(config_file_path, "r", encoding="utf-8") as f:
    config = json.load(f)

# API tokens come from the environment (set as secrets on the host).
speckle_token = os.environ.get("SPECKLE_TOKEN")
notion_token = os.environ.get("NOTION_TOKEN")

# Notion client, shared by all webhook invocations.
notion = Client(auth=notion_token)

# Speckle client, shared by all webhook invocations.
CLIENT = SpeckleClient(host="https://speckle.xyz/")
CLIENT.authenticate_with_token(token=speckle_token)
|
|
|
|
|
def _extract_data_container(stream, keys):
    """Return the first object list found under stream[key]["@{0}"].

    Speckle commits are inconsistent about the container key (e.g. "@Data"
    vs "@data" vs "@analysis_geometry"), so candidate keys are probed in
    order. Raises KeyError when none of the candidates is present, instead
    of silently continuing with undefined data.
    """
    for key in keys:
        try:
            return stream[key]["@{0}"]
        except (KeyError, TypeError):
            continue
    raise KeyError(
        f"none of the container keys {keys} found in stream; "
        "check on speckle.com how to access the data"
    )


def mainFunc(STREAM_ID, SOURCE_BRANCH, TARGET_BRANCH, UUID_COL, ATTR_METADATA, KPI_METADATA, DEFAULT_ATTRIBUTES, DATASET_NAME):
    """Sync a filtered copy of SOURCE_BRANCH into TARGET_BRANCH of a Speckle stream.

    Reads attribute/KPI metadata from two Notion databases, keeps only the
    whitelisted attributes on every source object, re-attaches geometry from
    the existing target branch (matched via UUID_COL), and commits the merged
    payload to TARGET_BRANCH.

    Parameters
    ----------
    STREAM_ID : str
        Speckle stream id.
    SOURCE_BRANCH : str
        Branch holding the raw data objects.
    TARGET_BRANCH : str or list
        Target branch name, or a 2-element list [old, new] meaning "derive the
        target name by replacing `old` with `new` in SOURCE_BRANCH".
    UUID_COL : str
        Attribute name used to match source objects to target geometry.
    ATTR_METADATA, KPI_METADATA : str
        Notion database ids for attribute and KPI metadata tables.
    DEFAULT_ATTRIBUTES : list
        Attribute names always kept regardless of the Notion whitelist.
        Not mutated by this function.
    DATASET_NAME : str
        Stamped onto every object and the commit root.
    """
    # A list-valued TARGET_BRANCH is an [old, new] replacement rule.
    if isinstance(TARGET_BRANCH, list):
        TARGET_BRANCH = SOURCE_BRANCH.replace(TARGET_BRANCH[0], TARGET_BRANCH[1])

    # Pull the attribute whitelist + metadata from Notion (sleeps keep us
    # under the Notion API rate limit).
    databaseFUll_pages = fetch_all_database_pages(notion, ATTR_METADATA)
    time.sleep(1)
    kpi_database_pages = fetch_all_database_pages(notion, KPI_METADATA)
    time.sleep(1)

    attributeMetaData, availableAttributes = notionTable2JSON(databaseFUll_pages, kpi_database_pages)

    # Copy DEFAULT_ATTRIBUTES: this webhook process is long-lived, so
    # appending to the config-owned list directly would grow it on every
    # invocation.
    attributesOfInterest = list(DEFAULT_ATTRIBUTES)
    for page in databaseFUll_pages:
        attributesOfInterest.append(get_property_value(page, "name"))
    if UUID_COL not in attributesOfInterest:
        attributesOfInterest.append(UUID_COL)

    # Source branch: the raw data objects to filter.
    stream = getSpeckleStream(STREAM_ID, SOURCE_BRANCH, CLIENT)
    stream_data = _extract_data_container(stream, ("@Data", "@data"))

    # Target branch: provides the geometry to re-attach.
    streamTrg = getSpeckleStream(STREAM_ID, TARGET_BRANCH, CLIENT)
    stream_dataTrg = _extract_data_container(streamTrg, ("@Data", "@analysis_geometry"))

    time.sleep(1)

    # Map uuid -> index into the target geometry list.
    idMapper = {}
    for i, obj in enumerate(stream_dataTrg):
        idMapper[obj.__dict__["uuid"]] = i

    streamData_new = []
    log = {"removedDatapoints": 0, "removedID": [], "avg_attrRemoved": 0, "removedAttr": []}
    for obj in stream_data:
        objDict = obj.__dict__

        curUUid = objDict[UUID_COL]
        targetGeoRow = idMapper.get(curUUid)
        if targetGeoRow is None:
            # No matching geometry on the target branch: drop the datapoint
            # instead of crashing, and record it in the commit log.
            log["removedDatapoints"] += 1
            log["removedID"].append(curUUid)
            continue

        # Keep only whitelisted attributes; count everything dropped.
        datanew = Base()
        for k, v in objDict.items():
            if k in attributesOfInterest:
                datanew[k] = v
            else:
                log["avg_attrRemoved"] += 1
                log["removedAttr"].append(k)

        datanew["dataset"] = DATASET_NAME

        # Geometry key casing varies between commits; try both.
        trgDict = stream_dataTrg[targetGeoRow].__dict__
        try:
            datanew["@geometry"] = trgDict["@geometry"]
        except KeyError:
            datanew["@Geometry"] = trgDict["@Geometry"]
        streamData_new.append(datanew)

    # Average over the source size; guard against an empty source branch.
    if stream_data:
        log["avg_attrRemoved"] = log["avg_attrRemoved"] / len(stream_data)
    log["removedAttr"] = list(set(log["removedAttr"]))

    # Assemble the new commit object: filtered data + serialized metadata.
    stream_new = copy.deepcopy(stream)
    stream_new["@Data"]["@{0}"] = streamData_new
    stream_new["logs"] = json.dumps(log)
    stream_new["attributeMetaData"] = json.dumps(attributeMetaData)
    stream_new["availableAttributes"] = json.dumps(availableAttributes)
    stream_new["datasetMetadata"] = DATASET_NAME

    # Push the object and create a commit on the target branch.
    transport = ServerTransport(client=CLIENT, stream_id=STREAM_ID)
    object_id = operations.send(stream_new, [transport])

    time.sleep(1)

    commit_id = CLIENT.commit.create(
        STREAM_ID,
        object_id=object_id,
        message="auto commit, removed datapoints: " + str(log["removedDatapoints"])
                + "; avg. removed attributes: " + str(int(log["avg_attrRemoved"])),
        branch_name=TARGET_BRANCH,
    )

    print(commit_id)
|
|
|
|
|
@webhook_endpoint
async def update_streams(request: Request):
    """Speckle webhook endpoint: re-run mainFunc for a configured stream/branch.

    Expects a Speckle webhook payload. Only commit_create / commit_delete /
    commit_update events on stream+branch combinations present in config.json
    trigger a sync; everything else is logged and ignored.
    """
    payload = await request.json()

    print("============= payload =============")
    print(payload)
    print("============= config ==============")
    print(config)

    # Speckle nests the actual event payload one level down.
    payload = payload["payload"]

    event_name = payload["event"]["event_name"]
    streamName = payload.get("stream", {}).get("name")

    # commit_update events carry the branch under "old"; the other commit
    # events carry it under "commit".
    if event_name == "commit_update":
        branchName = payload.get("event", {}).get("data", {}).get("old", {}).get("branchName")
    else:
        branchName = payload.get("event", {}).get("data", {}).get("commit", {}).get("branchName")

    valid_event_types = ["commit_create", "commit_delete", "commit_update"]

    # Guard clauses: ignore anything not explicitly configured.
    if event_name not in valid_event_types:
        print(f"Event type {event_name} is not one of the specified types.")
        return
    if streamName not in config:
        print(f"Stream name {streamName} not found in config.")
        return
    if branchName not in config[streamName]:
        print(f"Branch name {branchName} not found in config for stream {streamName}.")
        return

    config_entry = config[streamName][branchName]

    mainFunc(
        config_entry["STREAM_ID"],
        config_entry["SOURCE_BRANCH"],
        config_entry["TARGET_BRANCH"],
        config_entry["UUID_COL"],
        config_entry["ATTR_METADATA"],
        config_entry["KPI_METADATA"],
        config_entry["DEFAULT_ATTRIBUTES"],
        # DATASET_NAME is optional in config; mainFunc receives None when absent.
        config_entry.get("DATASET_NAME"),
    )
|
|