import os
import subprocess
import io
import base64
import threading
import logging
import tempfile
import shutil
import uuid
import json
import re
from pathlib import Path

from flask import request
from dash import Dash, dcc, html, Input, Output, State, ctx, no_update
import dash_bootstrap_components as dbc
import openai
import anthropic
from google import generativeai as genai
import requests
import graphviz

logging.basicConfig(level=logging.INFO)

os.environ['PATH'] += ':/usr/bin:/usr/local/bin:/home/user/.local/bin'

# Sanity-check that the Graphviz "dot" binary is reachable on PATH.
try:
    result = subprocess.run(['which', 'dot'], check=True, capture_output=True, text=True)
    print(f"dot location: {result.stdout}")
    result = subprocess.run(['dot', '-V'], check=True, capture_output=True, text=True)
    print(f"Graphviz version: {result.stdout}")
except Exception as e:
    print(f"Error checking Graphviz: {e}")
    print(f"Current PATH: {os.environ['PATH']}")

openai.api_key = os.getenv("OPENAI_API_KEY")
if not openai.api_key:
    logging.warning("OPENAI_API_KEY not set. GPT-3.5 model will not be available.")

google_api_key = os.getenv("GOOGLE_API_KEY")
if google_api_key:
    try:
        genai.configure(api_key=google_api_key)
    except Exception as e:
        logging.error(f"Failed to configure Google Gemini: {e}")
        genai = None
else:
    genai = None
    logging.warning("GOOGLE_API_KEY not set. Gemini model will not be available.")

anthropic_api_key = os.getenv("ANTHROPIC_API_KEY")
if not anthropic_api_key:
    logging.warning("ANTHROPIC_API_KEY not set. Claude model will not be available.")

grok_api_key = os.getenv("GROK_API_KEY")
if not grok_api_key:
    logging.warning("GROK_API_KEY not set. Grok model will not be available.")

SESSION_DIR = Path(tempfile.gettempdir()) / "arch_diagram_sessions"
SESSION_DIR.mkdir(parents=True, exist_ok=True)

session_locks = {}
session_data = {}


def get_or_create_session_id():
    sid = request.cookies.get("sid")
    if not sid or not isinstance(sid, str) or len(sid) < 8:
        sid = str(uuid.uuid4())
    return sid


def get_session_dir(sid):
    p = SESSION_DIR / sid
    p.mkdir(parents=True, exist_ok=True)
    return p


def get_session_lock(sid):
    if sid not in session_locks:
        session_locks[sid] = threading.Lock()
    return session_locks[sid]


def get_session_data(sid):
    if sid not in session_data:
        session_data[sid] = {"diagram_base64": None, "description": "", "model": "gpt-3.5-turbo"}
    return session_data[sid]


def save_session_state(sid, data):
    session_data[sid] = data
    sdir = get_session_dir(sid)
    with open(sdir / "session.json", "w") as f:
        json.dump(data, f)


def load_session_state(sid):
    sdir = get_session_dir(sid)
    sf = sdir / "session.json"
    if sf.exists():
        try:
            with open(sf) as f:
                session_data[sid] = json.load(f)
        except Exception as e:
            logging.error(f"Session load error: {e}")


def clear_session(sid):
    sdir = get_session_dir(sid)
    try:
        if sdir.exists():
            shutil.rmtree(sdir)
        session_data.pop(sid, None)
        session_locks.pop(sid, None)
        logging.info(f"Session {sid} cleared.")
    except Exception as e:
        logging.error(f"Error clearing session {sid}: {e}")


def strip_markdown_codeblock(text):
    # Extract the body of a ```lang ... ``` fenced block; fall back to the raw text.
    if text is None:
        return ""
    pattern = r"^```[a-zA-Z0-9]*\s*([\s\S]*?)```$"
    match = re.search(pattern, text.strip(), re.MULTILINE)
    if match:
        return match.group(1).strip()
    return text.strip()


def generate_diagram(dot_code, sid):
    dot_code = dot_code.strip()
    try:
        src = graphviz.Source(dot_code)
        out_img = src.pipe(format="png")
        diagram_base64 = base64.b64encode(out_img).decode('utf-8')
        return diagram_base64
    except Exception as e:
        logging.error(f"Graphviz error: {e}")
        raise Exception("Graphviz rendering failed. Please check your description or try again.")
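
# A minimal usage sketch for the two helpers above (illustrative only; the
# sample reply string below is hypothetical, not a recorded model output):
#
#   sample_reply = "```dot\ndigraph G { web -> api; api -> db }\n```"
#   dot_code = strip_markdown_codeblock(sample_reply)
#   # dot_code == "digraph G { web -> api; api -> db }"
#   png_b64 = generate_diagram(dot_code, sid="example")
#   # png_b64 is a base64-encoded PNG string, ready to embed in a data: URI.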


def get_ai_response(model, user_prompt):
    instruction = (
        "You are an expert architecture-to-graphviz converter. "
        "Given any architecture description, output ONLY the Graphviz DOT code to represent the architecture, "
        "using the best layout for architectural diagrams (using digraph and edge/node attributes as needed). "
        "Output ONLY the code, in a markdown ```dot block, and nothing else. "
        "No explanations, no intro or outro, just the code. "
    )
    prompt = f"{instruction}\n\n{user_prompt}"

    if model == 'gpt-3.5-turbo':
        # Uses the pre-1.0 openai SDK interface (openai.ChatCompletion).
        response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": prompt}]
        )
        return response.choices[0].message.content
    elif model == 'gemini-1.5-flash-latest':
        if genai is None:
            raise Exception("Gemini API not configured.")
        # Use the same model id the dropdown advertises.
        model_obj = genai.GenerativeModel('gemini-1.5-flash-latest')
        response = model_obj.generate_content(prompt)
        return response.text
    elif model == 'claude-3-5-haiku-20241022':
        if not anthropic_api_key:
            raise Exception("Anthropic API not configured.")
        client = anthropic.Anthropic(api_key=anthropic_api_key)
        # The instruction is folded into the user message, so no system prompt is passed.
        response = client.messages.create(
            model="claude-3-5-haiku-20241022",
            max_tokens=2048,
            messages=[{"role": "user", "content": prompt}]
        )
        return response.content[0].text
    elif model == 'grok-3-mini-fast-beta':
        if not grok_api_key:
            raise Exception("Grok API not configured.")
        # grok-3-mini-fast-beta is an xAI model; use xAI's OpenAI-compatible endpoint.
        grok_url = "https://api.x.ai/v1/chat/completions"
        headers = {
            "Authorization": f"Bearer {grok_api_key}",
            "Content-Type": "application/json"
        }
        data = {
            "model": "grok-3-mini-fast-beta",
            "messages": [{"role": "user", "content": prompt}]
        }
        response = requests.post(grok_url, json=data, headers=headers)
        response.raise_for_status()
        return response.json()['choices'][0]['message']['content']
    else:
        raise Exception("Error: Invalid model selected.")


app = Dash(__name__, external_stylesheets=[dbc.themes.BOOTSTRAP])

app.layout = dbc.Container(fluid=True, children=[
    dbc.Row([
        dbc.Col([
            html.H1("Architecture Diagram Generator", className="mb-4"),
        ], width=12)
    ], className="mt-2"),
    dbc.Row([
        dbc.Col([
            dbc.Card([
                dbc.CardHeader("Controls"),
                dbc.CardBody([
                    html.Div([
                        dcc.Dropdown(
                            id='model-dropdown',
                            options=[
                                {'label': 'GPT-3.5 Turbo', 'value': 'gpt-3.5-turbo'},
                                {'label': 'Gemini 1.5 Flash', 'value': 'gemini-1.5-flash-latest'},
                                {'label': 'Claude 3.5 Haiku', 'value': 'claude-3-5-haiku-20241022'},
                                {'label': 'Grok 3 Mini Fast', 'value': 'grok-3-mini-fast-beta'}
                            ],
                            value="gpt-3.5-turbo",
                            className="mb-3"
                        ),
                        dbc.Textarea(
                            id='description-input',
                            placeholder="Enter architecture description here...",
                            style={'height': '200px', 'whiteSpace': 'pre-wrap', 'wordWrap': 'break-word'},
                            className="mb-3"
                        ),
                        dbc.Button("Generate Diagram", id='generate-button', color="primary", className="me-2 mb-2"),
                        dbc.Button("Download Diagram", id='download-button', color="secondary", className="mb-2", disabled=True),
                        dbc.Button("Clear Session", id='clear-session-button', color="danger", className="mb-2", style={'marginTop': '0.5em'}),
                        dcc.Download(id="download-diagram"),
                        html.Div(id='status-message', className="mb-3"),
                    ]),
                ])
            ])
        ], width=4, style={'backgroundColor': '#f8f9fa', 'minHeight': '100vh'}),
        dbc.Col([
            dbc.Card([
                dbc.CardHeader("Diagram"),
                dbc.CardBody([
                    dcc.Loading(
                        id="loading",
                        type="default",
                        fullscreen=False,
                        children=[
                            html.Img(
                                id='diagram-output',
                                src="",
                                style={'width': '100%', 'minHeight': '300px'}
                            ),
                        ]
                    )
                ])
            ]),
        ], width=8, style={'backgroundColor': '#fff', 'minHeight': '100vh'}),
    ])
])
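
# Session cookie handshake (descriptive note): the after_request hook below
# sets a "sid" cookie on every response, so the initial page load (a plain GET)
# normally establishes the id that later callback POSTs read via
# get_or_create_session_id(). If a callback fires before any cookie exists,
# it falls back to a freshly generated id for that request.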
request.cookies.get("sid") if not sid or not isinstance(sid, str) or len(sid) < 8: sid = str(uuid.uuid4()) response.set_cookie("sid", sid, max_age=60*60*24*30, httponly=False, samesite="Lax") except Exception as e: logging.error(f"Error setting cookie: {e}") return response @app.callback( Output('diagram-output', 'src'), Output('status-message', 'children'), Output('download-button', 'disabled'), Output('description-input', 'value'), Output('model-dropdown', 'value'), Output('download-diagram', 'data'), Input('generate-button', 'n_clicks'), Input('download-button', 'n_clicks'), Input('clear-session-button', 'n_clicks'), State('description-input', 'value'), State('model-dropdown', 'value'), prevent_initial_call=True ) def main_callback(generate_clicks, download_clicks, clear_clicks, description, model): trigger = ctx.triggered_id try: sid = get_or_create_session_id() except Exception as e: logging.error(f"Session id error: {e}") return "", "Session error. Try refreshing the page.", True, "", "gpt-3.5-turbo", None lock = get_session_lock(sid) if trigger == "generate-button": if not description: return no_update, "Please enter a description.", True, no_update, no_update, None with lock: try: ai_response = get_ai_response(model, description) dot_code = strip_markdown_codeblock(ai_response) diagram_base64 = generate_diagram(dot_code, sid) sdata = get_session_data(sid) sdata["diagram_base64"] = diagram_base64 sdata["description"] = description sdata["model"] = model save_session_state(sid, sdata) logging.info(f"Session {sid}: Diagram generated successfully.") return f'data:image/png;base64,{diagram_base64}', "Diagram generated successfully!", False, description, model, None except Exception as e: logging.error(f"Session {sid}: Error generating diagram: {str(e)}") return no_update, f"Error: {str(e)}", True, no_update, no_update, None elif trigger == "download-button": with lock: load_session_state(sid) sdata = get_session_data(sid) diagram_base64 = sdata.get("diagram_base64", None) if not diagram_base64: return no_update, "No diagram to download.", True, sdata.get("description", ""), sdata.get("model", "gpt-3.5-turbo"), None try: diagram_bytes = base64.b64decode(diagram_base64) logging.info(f"Session {sid}: Diagram downloaded.") return no_update, "Download started.", False, sdata.get("description", ""), sdata.get("model", "gpt-3.5-turbo"), dcc.send_bytes(diagram_bytes, "architecture_diagram.png") except Exception as e: logging.error(f"Session {sid}: Download error: {e}") return no_update, f"Download error: {e}", False, sdata.get("description", ""), sdata.get("model", "gpt-3.5-turbo"), None elif trigger == "clear-session-button": with lock: clear_session(sid) logging.info(f"Session {sid}: Session cleared by user.") return "", "Session cleared. All data deleted.", True, "", "gpt-3.5-turbo", None else: with lock: load_session_state(sid) sdata = get_session_data(sid) diagram_base64 = sdata.get("diagram_base64", None) description_val = sdata.get("description", "") model_val = sdata.get("model", "gpt-3.5-turbo") if diagram_base64: return f'data:image/png;base64,{diagram_base64}', "", False, description_val, model_val, None else: return "", "", True, description_val, model_val, None if __name__ == '__main__': print("Starting the Dash application...") app.run(debug=True, host='0.0.0.0', port=7860, threaded=True) print("Dash application has finished running.")