celinah (HF Staff) committed
Commit f8caef0 · 1 Parent(s): 9823696
Files changed (1)
  1. app.py +8 -3
app.py CHANGED
@@ -7,10 +7,14 @@ from huggingface_hub import InferenceClient, login
 
 MAX_SEED = np.iinfo(np.int32).max
 MAX_IMAGE_SIZE = 2048
+TOKEN = None
 
+def get_token(oauth_token: gr.OAuthToken | None):
+    global TOKEN
+    TOKEN = oauth_token.token if oauth_token else None
 
-def infer(oauth_token: gr.OAuthToken | None, prompt, seed=42, randomize_seed=False, width=1024, height=1024, num_inference_steps=4, progress=gr.Progress(track_tqdm=True)):
-    client = InferenceClient(provider="fal-ai", token=oauth_token.token)
+def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, num_inference_steps=4, progress=gr.Progress(track_tqdm=True)):
+    client = InferenceClient(provider="fal-ai", token=TOKEN)
     image = client.text_to_image(
         prompt=prompt,
         width=width,
@@ -38,7 +42,8 @@ with gr.Blocks(css=css) as demo:
     with gr.Sidebar():
         gr.Markdown("# Inference Provider")
         gr.Markdown("This Space showcases the black-forest-labs/FLUX.1-dev model, served by the nebius API. Sign in with your Hugging Face account to use this API.")
-        button = gr.LoginButton("Sign in")
+        button = gr.LoginButton("Sign in", fn=get_token)
+
     with gr.Column(elem_id="col-container"):
         gr.Markdown(f"""# FLUX.1 [schnell] with fal-ai through HF Inference Providers ⚡
 learn more about HF Inference Providers [here](https://huggingface.co/docs/inference-providers/index)""")
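
For reference, a minimal standalone sketch of the call the refactored `infer()` makes through `huggingface_hub`; the prompt, model id, and `HF_TOKEN` environment variable below are illustrative assumptions, not part of this commit:

```python
# Minimal sketch, assuming a recent huggingface_hub release with
# inference-provider support and a valid token in HF_TOKEN.
import os

from huggingface_hub import InferenceClient

client = InferenceClient(provider="fal-ai", token=os.environ.get("HF_TOKEN"))

image = client.text_to_image(
    prompt="an astronaut riding a horse",      # illustrative prompt
    width=1024,
    height=1024,
    num_inference_steps=4,
    model="black-forest-labs/FLUX.1-schnell",  # illustrative model id
)
image.save("out.png")  # text_to_image returns a PIL image
```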