"""
This is a Gradio MCP client that connects to my MCP server (mcp-rag-workflow).
This script initializes a Gradio interface for an agent that uses tools from the MCP server.
It connects to the MCP server, retrieves available tools, and sets up a chat interface where users can interact with the agent.
"""
import os
from dotenv import load_dotenv
load_dotenv() # Load environment variables from .env file
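# Expected .env contents (assumed example; substitute your own Hugging Face access token):
#   HUGGINGFACE_API_TOKEN=hf_xxxxxxxxxxxxxxxxxxxx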
import gradio as gr
from smolagents import InferenceClientModel, CodeAgent, MCPClient
try:
    # Connect to the remote MCP server over SSE and fetch the tools it exposes.
    mcp_client = MCPClient(
        {
            "url": "https://agents-mcp-hackathon-mcp-rag-workflow.hf.space/gradio_api/mcp/sse",
            "transport": "sse",
        }
    )
    tools = mcp_client.get_tools()

    # Build a code agent that pairs a Hugging Face Inference model with the MCP tools.
    model = InferenceClientModel(token=os.getenv("HUGGINGFACE_API_TOKEN"))
    agent = CodeAgent(tools=[*tools], model=model)
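    # Optional sanity check (hypothetical example, not part of the original script):
    #   print(agent.run("What is the combat potential of the SU-35?"))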
    mcp_description = """
**Example Queries**:
- "What are the main features of the fuel system of the SU-35?"
- "What is the combat potential of the SU-35?"
- "Write me a report on the origin of the universe."
- "Write me a report on the impact of climate change on polar bears."
"""
    demo = gr.ChatInterface(
        fn=lambda message, history: str(agent.run(message)),
        chatbot=gr.Chatbot(
            height=450,
            placeholder="Ask me about the Sukhoi SU-35 or ask me to write a report on any topic.",
        ),
        type="messages",
        title="A Gradio MCP client that uses Tools from my Hackathon MCP server",
        examples=[
            "What are the main features of the fuel system of the SU-35?",
            "What is the combat potential of the SU-35?",
            "Write me a report on the origin of the universe.",
        ],
        description=mcp_description,
    )
    demo.launch()
finally:
    # Always release the connection to the MCP server, even if setup or launch fails.
    mcp_client.disconnect()