Axel-Student committed · Commit fe63753 · Parent(s): 6fc88c7

login client
app.py CHANGED
@@ -2,7 +2,7 @@ import os
 import torch
 from diffusers import FluxPipeline # type: ignore
 import gradio as gr # type: ignore
-from huggingface_hub import login
+from huggingface_hub import login, InferenceClient
 
 pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16)
 pipe.enable_model_cpu_offload() #save some VRAM by offloading the model to CPU. Remove this if you have enough GPU power
@@ -10,6 +10,11 @@ token = os.getenv("HF_TOKEN")
 login(token=token)
 
 
+client = InferenceClient(
+    provider="together",
+    api_key="token"
+)
+
 prompt = "A cat holding a sign that says hello world"
 image = pipe(
     prompt,
@@ -19,8 +24,9 @@ image = pipe(
     num_inference_steps=50,
     max_sequence_length=512,
     generator=torch.Generator("cpu").manual_seed(0),
-    use_auth_token=
+    use_auth_token=token
 ).images[0]
+
 image.save("flux-dev.png")
 
 gradio_app = gr.Interface(
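Note on the committed change: app.py reads the access token with token = os.getenv("HF_TOKEN"), but the new InferenceClient is given api_key="token" (the literal string), and use_auth_token=token is placed inside the generation call rather than the download step. A minimal sketch of how the same pieces could be wired together is below; it assumes HF_TOKEN holds the token intended for both the gated FLUX.1-dev download and the Together-backed InferenceClient, and it otherwise follows the committed app.py. This is an illustrative rearrangement, not the author's code.

import os

import torch
from diffusers import FluxPipeline
from huggingface_hub import InferenceClient, login

# Assumption: HF_TOKEN is the Space secret holding the Hugging Face access token.
token = os.getenv("HF_TOKEN")
login(token=token)

# Pass the token variable, not the literal string "token".
client = InferenceClient(provider="together", api_key=token)

# Authentication for gated repos is handled at download time (login() or the
# token argument of from_pretrained), not inside the generation call.
pipe = FluxPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev",
    torch_dtype=torch.bfloat16,
    token=token,
)
pipe.enable_model_cpu_offload()  # offload to CPU to save VRAM; remove if enough GPU memory

image = pipe(
    "A cat holding a sign that says hello world",
    num_inference_steps=50,
    max_sequence_length=512,
    generator=torch.Generator("cpu").manual_seed(0),
).images[0]
image.save("flux-dev.png")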