celinah HF Staff committed on
Commit
41211aa
·
1 Parent(s): f8caef0
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -1,5 +1,4 @@
1
  import os
2
- import random
3
 
4
  import gradio as gr
5
  import numpy as np
@@ -11,7 +10,7 @@ TOKEN = None
11
 
12
  def get_token(oauth_token: gr.OAuthToken | None):
13
  global TOKEN
14
- TOKEN = oauth_token.token if oauth_token else None
15
 
16
  def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, num_inference_steps=4, progress=gr.Progress(track_tqdm=True)):
17
  client = InferenceClient(provider="fal-ai", token=TOKEN)
@@ -42,7 +41,8 @@ with gr.Blocks(css=css) as demo:
42
  with gr.Sidebar():
43
  gr.Markdown("# Inference Provider")
44
  gr.Markdown("This Space showcases the black-forest-labs/FLUX.1-dev model, served by the nebius API. Sign in with your Hugging Face account to use this API.")
45
- button = gr.LoginButton("Sign in", fn=get_token)
 
46
 
47
  with gr.Column(elem_id="col-container"):
48
  gr.Markdown(f"""# FLUX.1 [schnell] with fal-ai through HF Inference Providers ⚡
 
1
  import os
 
2
 
3
  import gradio as gr
4
  import numpy as np
 
10
 
11
def get_token(oauth_token: gr.OAuthToken | None):
    """Cache a Hugging Face token in the module-level TOKEN global.

    Preference order: the HF_TOKEN environment variable, then the token from
    the OAuth sign-in flow, else None.

    Args:
        oauth_token: OAuth token injected by gradio after sign-in, or None
            when the user is not signed in.
    """
    global TOKEN
    # BUG FIX: the original expression
    #   os.getenv("HF_TOKEN") or oauth_token.token if oauth_token else None
    # parses as `(os.getenv("HF_TOKEN") or oauth_token.token) if oauth_token else None`
    # because the conditional expression has the lowest precedence — so the
    # HF_TOKEN env var was silently ignored whenever the user was not signed
    # in. Parenthesize the OAuth fallback so the env var always wins.
    TOKEN = os.getenv("HF_TOKEN") or (oauth_token.token if oauth_token else None)
14
 
15
  def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, num_inference_steps=4, progress=gr.Progress(track_tqdm=True)):
16
  client = InferenceClient(provider="fal-ai", token=TOKEN)
 
41
  with gr.Sidebar():
42
  gr.Markdown("# Inference Provider")
43
  gr.Markdown("This Space showcases the black-forest-labs/FLUX.1-dev model, served by the nebius API. Sign in with your Hugging Face account to use this API.")
44
+ button = gr.LoginButton("Sign in")
45
+ button.click(fn=get_token, inputs=button, outputs=[])
46
 
47
  with gr.Column(elem_id="col-container"):
48
  gr.Markdown(f"""# FLUX.1 [schnell] with fal-ai through HF Inference Providers ⚡