inoculatemedia committed
Commit c442081 · verified · 1 Parent(s): 326b7a2

Rename gradio_mcp_server.py to app.py

Files changed (1)
  1. gradio_mcp_server.py → app.py +49 -2
gradio_mcp_server.py → app.py RENAMED
@@ -39,6 +39,47 @@ async def generate_image(prompt: str, space_id: str = "ysharma/SanaSprint") -> str:
 
 
 @mcp.tool()
+
+from mcp.server.fastmcp import FastMCP
+from gradio_client import Client
+import sys
+import io
+import json
+import gradio as gr
+from huggingface_hub import InferenceClient
+mcp = FastMCP("gradio-spaces")
+
+clients = {}
+
+def get_client(space_id: str) -> Client:
+    """Get or create a Gradio client for the specified space."""
+    if space_id not in clients:
+        clients[space_id] = Client(space_id)
+    return clients[space_id]
+
+
+@mcp.tool()
+async def generate_image(prompt: str, space_id: str = "inoculatemedia/SanaSprint") -> str:
+    """Generate an image using Flux.
+
+    Args:
+        prompt: Text prompt describing the image to generate
+        space_id: inoculatemedia/SanaSprint
+    """
+    client = get_client(space_id)
+    result = client.predict(
+        prompt=prompt,
+        model_size="1.6B",
+        seed=0,
+        randomize_seed=True,
+        width=1024,
+        height=1024,
+        guidance_scale=4.5,
+        num_inference_steps=2,
+        api_name="/infer"
+    )
+    return result
+
 async def run_dia_tts(prompt: str, space_id: str = "ysharma/Dia-1.6B") -> str:
     """Text-to-Speech Synthesis.
 
@@ -65,5 +106,11 @@ if __name__ == "__main__":
     import sys
     import io
     sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
-
-    mcp.run(transport='stdio')
+    gr.on(
+        triggers=[run_button.click, prompt.submit],
+        fn = infer,
+        inputs = [prompt, model_size, seed, randomize_seed, width, height, guidance_scale, num_inference_steps], # Add model_size to inputs
+        outputs = [result, seed]
+    )
+
+    mcp.run(transport='stdio')
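The second hunk keeps mcp.run(transport='stdio'), so the renamed app.py still starts as a stdio MCP server; the gr.on(...) wiring it adds references Blocks components (run_button, prompt, result, ...) that are not part of this diff. Below is a minimal sketch of exercising the server over stdio using the official mcp Python SDK client helpers; it assumes the SDK is installed and that app.py starts cleanly with `python app.py`, and the example prompt is illustrative.

# Minimal sketch: drive the renamed app.py over stdio with the mcp SDK client.
# Assumptions: the `mcp` Python SDK is installed and `python app.py` starts the
# FastMCP server shown in this diff without errors.
import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

server_params = StdioServerParameters(command="python", args=["app.py"])

async def main():
    async with stdio_client(server_params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            tools = await session.list_tools()
            print([t.name for t in tools.tools])  # expect generate_image, run_dia_tts
            result = await session.call_tool(
                "generate_image",
                arguments={"prompt": "a watercolor fox in a snowy forest"},
            )
            print(result)

asyncio.run(main())

The same command/args pair is what an MCP host would typically use in its server configuration to launch app.py over stdio.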