"""
app.py – Enterprise SQL Agent (Gradio + smolagents + MCP)
HubSpot Integration Only
"""
import json
import os
import pathlib
import pprint

import gradio as gr
from mcp import StdioServerParameters
from smolagents import MCPClient, CodeAgent
from smolagents.models import LiteLLMModel, InferenceClientModel
# ───────────────────────── 1. Choose base LLM ──────────────────────────
OPENAI_KEY = os.getenv("OPENAI_API_KEY")
OPENAI_MODEL = os.getenv("OPENAI_MODEL", "gpt-4o")
GEMINI_KEY = os.getenv("GOOGLE_API_KEY")
GEM_MODEL = os.getenv("GOOGLE_MODEL", "gemini-pro")
HF_MODEL_ID = os.getenv("HF_MODEL_ID", "microsoft/Phi-3-mini-4k-instruct")
HF_TOKEN = os.getenv("HF_API_TOKEN")
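
# Pick the first provider whose API key is set: OpenAI, then Gemini,
# then the Hugging Face Inference fallback.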
if OPENAI_KEY:
    BASE_MODEL = LiteLLMModel(model_id=f"openai/{OPENAI_MODEL}", api_key=OPENAI_KEY)
    ACTIVE = f"OpenAI · {OPENAI_MODEL}"
elif GEMINI_KEY:
    # LiteLLM routes Google AI Studio models under the "gemini/" prefix.
    BASE_MODEL = LiteLLMModel(model_id=f"gemini/{GEM_MODEL}", api_key=GEMINI_KEY)
    ACTIVE = f"Gemini · {GEM_MODEL}"
else:
    # InferenceClientModel takes the HF token via the `token` argument.
    BASE_MODEL = InferenceClientModel(model_id=HF_MODEL_ID, token=HF_TOKEN, timeout=90)
    ACTIVE = f"Hugging Face · {HF_MODEL_ID}"
# ───────────────────────── 2. MCP server path ──────────────────────────
SERVER_PATH = pathlib.Path(__file__).with_name("mcp_server.py")
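# The HubSpot MCP server (mcp_server.py) is expected alongside this file.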
# ───────────────────────── 3. Chat callback ────────────────────────────
def respond(message: str, history: list):
    """Prompt → CodeAgent → MCP tools → string reply."""
    params = StdioServerParameters(command="python", args=[str(SERVER_PATH)])
    try:
        with MCPClient(params) as tools:
            answer = CodeAgent(tools=tools, model=BASE_MODEL).run(message)
    except Exception as e:
        answer = f"Error while querying tools: {e}"
    # Ensure plain-text output: CodeAgent.run() may return dicts or lists.
    if not isinstance(answer, str):
        try:
            answer = json.dumps(answer, indent=2, ensure_ascii=False)
        except (TypeError, ValueError):
            answer = pprint.pformat(answer, width=100)
    history += [
        {"role": "user", "content": message},
        {"role": "assistant", "content": answer},
    ]
    return history, history
# ───────────────────────── 4. Gradio UI ────────────────────────────────
with gr.Blocks(title="Enterprise SQL Agent") as demo:
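    # Per-session chat history: a list of {"role": ..., "content": ...} dicts.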
    state = gr.State([])
    gr.Markdown("## 🏢 Enterprise SQL Agent – query your data with natural language")
    chat = gr.Chatbot(type="messages", label="Conversation")
    box = gr.Textbox(
        placeholder="e.g. Who are my inactive Northeast customers?",
        show_label=False,
    )
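    # On submit: run the agent, then refresh both the visible chat and the state.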
    box.submit(respond, [box, state], [chat, state])
    with gr.Accordion("Example prompts", open=False):
        gr.Markdown(
            "* Who are my **Northeast** customers with no orders in 6 months?\n"
            "* List customers sorted by **LastOrderDate**.\n"
            "* Draft re-engagement emails for inactive accounts."
        )
    gr.Markdown(f"_Powered by MCP · smolagents · Gradio • Active model – **{ACTIVE}**_")
if __name__ == "__main__":
    demo.launch()
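    # demo.launch(share=True)  # optionally expose a temporary public URL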