fix login
app.py
CHANGED
@@ -5,7 +5,7 @@ import gradio as gr
 import numpy as np
 import requests
 from dotenv import load_dotenv
-from huggingface_hub import InferenceClient
+from huggingface_hub import InferenceClient
 
 load_dotenv()
 
@@ -34,7 +34,7 @@ def download_image_locally(image_url: str, local_path: str = "downloaded_image.p
         f.write(response.content)
     return local_path
 
-def
+def login(oauth_token: gr.OAuthToken | None):
     global TOKEN
     if oauth_token and oauth_token.token:
         print("Received OAuth token, logging in...")
@@ -85,15 +85,15 @@ css="""
 """
 
 with gr.Blocks(css=css) as demo:
-
+
 
     with gr.Sidebar():
         gr.Markdown("# Inference Provider")
         gr.Markdown(
             "This Space showcases the black-forest-labs/FLUX.1-dev model, served by the nebius API. Sign in with your Hugging Face account to use this API."
         )
-
-
+
+        demo.load(login, inputs=None, outputs=None)
     with gr.Column(elem_id="col-container"):
         gr.Markdown(
             """# FLUX.1 [schnell] with fal-ai through HF Inference Providers ⚡\nLearn more about HF Inference Providers [here](https://huggingface.co/docs/inference-providers/index)"""
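For context, here is a minimal, self-contained sketch of how the pieces this commit touches could fit together in app.py. The body of `login()` beyond the lines shown in the diff, the `HF_TOKEN` fallback, and the `generate()` helper with its `InferenceClient` call are assumptions for illustration, not the Space's actual code.

```python
import os

import gradio as gr
from dotenv import load_dotenv
from huggingface_hub import InferenceClient

load_dotenv()
TOKEN = os.getenv("HF_TOKEN")  # assumed fallback token from .env; variable name is hypothetical


def login(oauth_token: gr.OAuthToken | None):
    # Gradio injects the signed-in user's OAuth token into parameters
    # type-hinted as gr.OAuthToken; stash it for later API calls.
    global TOKEN
    if oauth_token and oauth_token.token:
        print("Received OAuth token, logging in...")
        TOKEN = oauth_token.token


def generate(prompt: str):
    # Hypothetical helper: route the request through the nebius provider
    # using whichever token login() captured.
    client = InferenceClient(provider="nebius", api_key=TOKEN)
    return client.text_to_image(prompt, model="black-forest-labs/FLUX.1-dev")


with gr.Blocks() as demo:
    with gr.Sidebar():
        gr.Markdown("# Inference Provider")
        gr.LoginButton()  # "Sign in with your Hugging Face account"

    with gr.Column():
        prompt = gr.Textbox(label="Prompt")
        result = gr.Image(label="Result")
        prompt.submit(generate, inputs=prompt, outputs=result)

    # The commit's key change: run login() when the page loads so the
    # OAuth token is picked up as soon as the Space renders.
    demo.load(login, inputs=None, outputs=None)

if __name__ == "__main__":
    demo.launch()
```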