celinah HF Staff commited on
Commit
d6bf2e7
·
1 Parent(s): 7865775

add download

Browse files
Files changed (1) hide show
  1. app.py +106 -70
app.py CHANGED
@@ -1,7 +1,9 @@
1
  import os
 
2
 
3
  import gradio as gr
4
  import numpy as np
 
5
  from dotenv import load_dotenv
6
  from huggingface_hub import InferenceClient, login
7
 
@@ -12,6 +14,26 @@ MAX_IMAGE_SIZE = 2048
12
  TOKEN = None
13
 
14
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
15
  def get_token(oauth_token: gr.OAuthToken | None):
16
  global TOKEN
17
  if oauth_token and oauth_token.token:
@@ -64,82 +86,96 @@ css="""
64
 
65
  with gr.Blocks(css=css) as demo:
66
  demo.load(get_token, inputs=None, outputs=None)
 
67
  with gr.Sidebar():
68
  gr.Markdown("# Inference Provider")
69
- gr.Markdown("This Space showcases the black-forest-labs/FLUX.1-dev model, served by the nebius API. Sign in with your Hugging Face account to use this API.")
 
 
70
  button = gr.LoginButton("Sign in")
71
  button.click(fn=get_token, inputs=[], outputs=[])
72
-
73
- with gr.Column(elem_id="col-container"):
74
- gr.Markdown(f"""# FLUX.1 [schnell] with fal-ai through HF Inference Providers ⚡
75
- learn more about HF Inference Providers [here](https://huggingface.co/docs/inference-providers/index)""")
76
-
77
- with gr.Row():
78
-
79
- prompt = gr.Text(
80
- label="Prompt",
81
- show_label=False,
82
- max_lines=1,
83
- placeholder="Enter your prompt",
84
- container=False,
85
- )
86
-
87
- run_button = gr.Button("Run", scale=0)
88
-
89
- result = gr.Image(label="Result", show_label=False, format="png")
90
-
91
- with gr.Accordion("Advanced Settings", open=False):
92
-
93
- seed = gr.Slider(
94
- label="Seed",
95
- minimum=0,
96
- maximum=MAX_SEED,
97
- step=1,
98
- value=42,
99
- )
100
-
101
- with gr.Row():
102
-
103
- width = gr.Slider(
104
- label="Width",
105
- minimum=256,
106
- maximum=MAX_IMAGE_SIZE,
107
- step=32,
108
- value=1024,
109
  )
110
-
111
- height = gr.Slider(
112
- label="Height",
113
- minimum=256,
114
- maximum=MAX_IMAGE_SIZE,
115
- step=32,
116
- value=1024,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
117
  )
118
-
119
- with gr.Row():
120
-
121
-
122
- num_inference_steps = gr.Slider(
123
- label="Number of inference steps",
124
- minimum=1,
125
- maximum=50,
126
- step=1,
127
- value=25,
128
  )
129
-
130
- gr.Examples(
131
- examples = examples,
132
- fn = generate,
133
- inputs = [prompt],
134
- outputs = [result, seed],
135
- cache_examples="lazy"
136
- )
137
 
138
- gr.on(
139
- triggers=[run_button.click, prompt.submit],
140
- fn = generate,
141
- inputs = [prompt, seed, width, height, num_inference_steps],
142
- outputs = [result, seed]
143
- )
 
 
 
 
 
 
 
 
 
144
 
145
- demo.launch(mcp_server=True)
 
 
1
  import os
2
+ import tempfile
3
 
4
  import gradio as gr
5
  import numpy as np
6
+ import requests
7
  from dotenv import load_dotenv
8
  from huggingface_hub import InferenceClient, login
9
 
 
14
  TOKEN = None
15
 
16
 
17
def download_image_locally(image_url: str, local_path: str = "downloaded_image.png") -> str:
    """Download an image from a URL to a local path.

    Args:
        image_url (str): The URL of the image to download. Must be an http(s) URL.
        local_path (str, optional): The path (including filename) where the file
            should be saved. Defaults to "downloaded_image.png".

    Returns:
        str: The filesystem path of the saved file - suitable for returning to a
            **gr.File** output, or as an MCP tool response.

    Raises:
        ValueError: If ``image_url`` is not an http or https URL.
        requests.HTTPError: If the server responds with an error status code.
    """
    from urllib.parse import urlparse

    # Enforce the documented contract: only http(s) URLs are accepted. This
    # function is exposed as an MCP tool, so the URL is untrusted input;
    # rejecting other schemes (file://, ftp://, ...) up front gives a clear
    # error instead of relying on requests' adapter behavior.
    scheme = urlparse(image_url).scheme.lower()
    if scheme not in ("http", "https"):
        raise ValueError(f"image_url must be an http(s) URL, got scheme {scheme!r}")

    response = requests.get(image_url, timeout=30)
    response.raise_for_status()

    # If the caller passed only a filename, save into a temporary directory to
    # avoid permission issues (the working directory may be read-only).
    if os.path.dirname(local_path) == "":
        tmp_dir = tempfile.gettempdir()
        local_path = os.path.join(tmp_dir, local_path)

    with open(local_path, "wb") as f:
        f.write(response.content)
    return local_path
36
+
37
  def get_token(oauth_token: gr.OAuthToken | None):
38
  global TOKEN
39
  if oauth_token and oauth_token.token:
 
86
 
87
  with gr.Blocks(css=css) as demo:
88
  demo.load(get_token, inputs=None, outputs=None)
89
+
90
  with gr.Sidebar():
91
  gr.Markdown("# Inference Provider")
92
+ gr.Markdown(
93
+ "This Space showcases the black-forest-labs/FLUX.1-dev model, served by the nebius API. Sign in with your Hugging Face account to use this API."
94
+ )
95
  button = gr.LoginButton("Sign in")
96
  button.click(fn=get_token, inputs=[], outputs=[])
97
+
98
+ with gr.Tabs():
99
+ with gr.TabItem("Generate Image"):
100
+ with gr.Column(elem_id="col-container"):
101
+ gr.Markdown(
102
+ """# FLUX.1 [schnell] with fal‑ai through HF Inference Providers ⚡\nLearn more about HF Inference Providers [here](https://huggingface.co/docs/inference-providers/index)"""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
103
  )
104
+
105
+ with gr.Row():
106
+ prompt = gr.Text(
107
+ label="Prompt",
108
+ show_label=False,
109
+ max_lines=1,
110
+ placeholder="Enter your prompt",
111
+ container=False,
112
+ )
113
+ run_button = gr.Button("Run", scale=0)
114
+
115
+ result = gr.Image(label="Result", show_label=False, format="png")
116
+
117
+ with gr.Accordion("Advanced Settings", open=False):
118
+ seed = gr.Slider(
119
+ label="Seed",
120
+ minimum=0,
121
+ maximum=MAX_SEED,
122
+ step=1,
123
+ value=42,
124
+ )
125
+ with gr.Row():
126
+ width = gr.Slider(
127
+ label="Width",
128
+ minimum=256,
129
+ maximum=MAX_IMAGE_SIZE,
130
+ step=32,
131
+ value=1024,
132
+ )
133
+ height = gr.Slider(
134
+ label="Height",
135
+ minimum=256,
136
+ maximum=MAX_IMAGE_SIZE,
137
+ step=32,
138
+ value=1024,
139
+ )
140
+ num_inference_steps = gr.Slider(
141
+ label="Number of inference steps",
142
+ minimum=1,
143
+ maximum=50,
144
+ step=1,
145
+ value=25,
146
+ )
147
+
148
+ gr.Examples(
149
+ examples=examples,
150
+ fn=generate,
151
+ inputs=[prompt],
152
+ outputs=[result, seed],
153
+ cache_examples="lazy",
154
  )
155
+
156
+ gr.on(
157
+ triggers=[run_button.click, prompt.submit],
158
+ fn=generate,
159
+ inputs=[prompt, seed, width, height, num_inference_steps],
160
+ outputs=[result, seed],
161
+ show_api=True,
 
 
 
162
  )
 
 
 
 
 
 
 
 
163
 
164
+ with gr.TabItem("Download Image"):
165
+ gr.Markdown("## Download an image from a URL and make it available as a file")
166
+ url_input = gr.Text(label="Image URL", placeholder="https://example.com/cool-image.png")
167
+ filename_input = gr.Text(
168
+ label="Save as (filename)", value="downloaded_image.png", placeholder="downloaded_image.png"
169
+ )
170
+ download_btn = gr.Button("Download")
171
+ file_output = gr.File(label="Downloaded file")
172
+
173
+ download_btn.click(
174
+ fn=download_image_locally,
175
+ inputs=[url_input, filename_input],
176
+ outputs=[file_output],
177
+ show_api=True,
178
+ )
179
 
180
+ if __name__ == "__main__":
181
+ demo.launch(mcp_server=True)