celinah (HF Staff) committed
Commit 153fade · 1 Parent(s): d6bf2e7
Files changed (1):
  1. app.py +70 -70
app.py CHANGED
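Only this hunk of app.py changes in the commit; the callbacks it wires up (get_token, generate, download_image_locally) and the MAX_SEED / MAX_IMAGE_SIZE constants are defined earlier in the file and are untouched. For orientation, here is a minimal, hypothetical sketch of what the generate callback presumably looks like, assuming huggingface_hub's InferenceClient routed through the fal-ai provider; the actual implementation in app.py may handle the OAuth token or the seed differently.

```python
# Hypothetical sketch of the `generate` callback referenced in the diff below.
# Assumes huggingface_hub's InferenceClient with provider="fal-ai"; app.py's
# real implementation may differ in details (token handling, seed randomization).
import gradio as gr
from huggingface_hub import InferenceClient


def generate(
    prompt: str,
    seed: int = 42,
    width: int = 1024,
    height: int = 1024,
    num_inference_steps: int = 25,
    oauth_token: gr.OAuthToken | None = None,  # injected by Gradio after "Sign in"
):
    client = InferenceClient(
        provider="fal-ai",  # route the request through fal.ai via HF Inference Providers
        token=oauth_token.token if oauth_token else None,
    )
    image = client.text_to_image(
        prompt,
        model="black-forest-labs/FLUX.1-schnell",
        width=width,
        height=height,
        num_inference_steps=num_inference_steps,
        seed=seed,
    )
    # Matches the outputs=[result, seed] wiring used by gr.on() and gr.Examples below.
    return image, seed
```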
@@ -95,87 +95,87 @@ with gr.Blocks(css=css) as demo:
     button = gr.LoginButton("Sign in")
     button.click(fn=get_token, inputs=[], outputs=[])

-    with gr.Tabs():
-        with gr.TabItem("Generate Image"):
-            with gr.Column(elem_id="col-container"):
-                gr.Markdown(
-                    """# FLUX.1 [schnell] with fal-ai through HF Inference Providers ⚡\nLearn more about HF Inference Providers [here](https://huggingface.co/docs/inference-providers/index)"""
-                )

-                with gr.Row():
-                    prompt = gr.Text(
-                        label="Prompt",
-                        show_label=False,
-                        max_lines=1,
-                        placeholder="Enter your prompt",
-                        container=False,
-                    )
-                    run_button = gr.Button("Run", scale=0)

-                result = gr.Image(label="Result", show_label=False, format="png")

-                with gr.Accordion("Advanced Settings", open=False):
-                    seed = gr.Slider(
-                        label="Seed",
-                        minimum=0,
-                        maximum=MAX_SEED,
-                        step=1,
-                        value=42,
-                    )
-                    with gr.Row():
-                        width = gr.Slider(
-                            label="Width",
-                            minimum=256,
-                            maximum=MAX_IMAGE_SIZE,
-                            step=32,
-                            value=1024,
-                        )
-                        height = gr.Slider(
-                            label="Height",
-                            minimum=256,
-                            maximum=MAX_IMAGE_SIZE,
-                            step=32,
-                            value=1024,
-                        )
-                    num_inference_steps = gr.Slider(
-                        label="Number of inference steps",
-                        minimum=1,
-                        maximum=50,
-                        step=1,
-                        value=25,
-                    )

-                gr.Examples(
-                    examples=examples,
-                    fn=generate,
-                    inputs=[prompt],
-                    outputs=[result, seed],
-                    cache_examples="lazy",
                 )
-
-            gr.on(
-                triggers=[run_button.click, prompt.submit],
-                fn=generate,
-                inputs=[prompt, seed, width, height, num_inference_steps],
-                outputs=[result, seed],
-                show_api=True,
             )

-        with gr.TabItem("Download Image"):
-            gr.Markdown("## Download an image from a URL and make it available as a file")
-            url_input = gr.Text(label="Image URL", placeholder="https://example.com/cool-image.png")
-            filename_input = gr.Text(
-                label="Save as (filename)", value="downloaded_image.png", placeholder="downloaded_image.png"
             )
-            download_btn = gr.Button("Download")
-            file_output = gr.File(label="Downloaded file")

-            download_btn.click(
-                fn=download_image_locally,
-                inputs=[url_input, filename_input],
-                outputs=[file_output],
                 show_api=True,
             )

 if __name__ == "__main__":
     demo.launch(mcp_server=True)
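The replacement block that follows drops the explicit gr.Tabs() container and renames gr.TabItem to gr.Tab. In Gradio, gr.TabItem is an alias of gr.Tab, and Tab blocks placed directly inside gr.Blocks are grouped into an implicit Tabs container, so the two layouts behave the same; the rest of the hunk only shifts by one indentation level. A minimal sketch of the two equivalent patterns (not taken from app.py):

```python
# Minimal sketch, not from app.py: gr.TabItem is an alias of gr.Tab, and bare
# Tab blocks inside gr.Blocks get an implicit Tabs container, so both layouts
# render the same tabbed UI.
import gradio as gr

# Old pattern in this hunk: explicit Tabs container.
with gr.Blocks() as demo_old:
    with gr.Tabs():
        with gr.TabItem("Generate Image"):
            gr.Markdown("tab one")
        with gr.TabItem("Download Image"):
            gr.Markdown("tab two")

# New pattern in this hunk: Tab used directly, one nesting level less.
with gr.Blocks() as demo_new:
    with gr.Tab("Generate Image"):
        gr.Markdown("tab one")
    with gr.Tab("Download Image"):
        gr.Markdown("tab two")
```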
 
@@ -95,87 +95,87 @@ with gr.Blocks(css=css) as demo:
     button = gr.LoginButton("Sign in")
     button.click(fn=get_token, inputs=[], outputs=[])


+    with gr.Tab("Generate Image"):
+        with gr.Column(elem_id="col-container"):
+            gr.Markdown(
+                """# FLUX.1 [schnell] with fal-ai through HF Inference Providers ⚡\nLearn more about HF Inference Providers [here](https://huggingface.co/docs/inference-providers/index)"""
+            )

+            with gr.Row():
+                prompt = gr.Text(
+                    label="Prompt",
+                    show_label=False,
+                    max_lines=1,
+                    placeholder="Enter your prompt",
+                    container=False,
+                )
+                run_button = gr.Button("Run", scale=0)

+            result = gr.Image(label="Result", show_label=False, format="png")

+            with gr.Accordion("Advanced Settings", open=False):
+                seed = gr.Slider(
+                    label="Seed",
+                    minimum=0,
+                    maximum=MAX_SEED,
+                    step=1,
+                    value=42,
                 )
+                with gr.Row():
+                    width = gr.Slider(
+                        label="Width",
+                        minimum=256,
+                        maximum=MAX_IMAGE_SIZE,
+                        step=32,
+                        value=1024,
+                    )
+                    height = gr.Slider(
+                        label="Height",
+                        minimum=256,
+                        maximum=MAX_IMAGE_SIZE,
+                        step=32,
+                        value=1024,
+                    )
+                num_inference_steps = gr.Slider(
+                    label="Number of inference steps",
+                    minimum=1,
+                    maximum=50,
+                    step=1,
+                    value=25,
                 )

+            gr.Examples(
+                examples=examples,
+                fn=generate,
+                inputs=[prompt],
+                outputs=[result, seed],
+                cache_examples="lazy",
             )

+        gr.on(
+            triggers=[run_button.click, prompt.submit],
+            fn=generate,
+            inputs=[prompt, seed, width, height, num_inference_steps],
+            outputs=[result, seed],
             show_api=True,
         )

+    with gr.Tab("Download Image"):
+        gr.Markdown("## Download an image from a URL and make it available as a file")
+        url_input = gr.Text(label="Image URL", placeholder="https://example.com/cool-image.png")
+        filename_input = gr.Text(
+            label="Save as (filename)", value="downloaded_image.png", placeholder="downloaded_image.png"
+        )
+        download_btn = gr.Button("Download")
+        file_output = gr.File(label="Downloaded file")
+
+        download_btn.click(
+            fn=download_image_locally,
+            inputs=[url_input, filename_input],
+            outputs=[file_output],
+            show_api=True,
+        )
+
 if __name__ == "__main__":
     demo.launch(mcp_server=True)
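The Download tab's button is wired to download_image_locally, which is defined earlier in app.py and not shown in this diff. A hypothetical sketch of such a helper, assuming a plain requests download (the real helper may differ):

```python
# Hypothetical sketch of the `download_image_locally` helper wired to the
# Download button; app.py's actual implementation may differ.
import requests


def download_image_locally(image_url: str, filename: str = "downloaded_image.png") -> str:
    """Fetch the image at `image_url`, write it to `filename`, and return the
    local path so the gr.File output can serve it for download."""
    response = requests.get(image_url, timeout=30)
    response.raise_for_status()
    with open(filename, "wb") as f:
        f.write(response.content)
    return filename
```

Both callbacks are registered with show_api=True, so they remain visible on the Space's API page, and demo.launch(mcp_server=True) additionally exposes those endpoints as MCP tools in recent Gradio 5.x releases, which is presumably why the download helper returns a file path rather than only displaying the image.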