celinah (HF Staff) committed
Commit 6466623 · Parent(s): 8148403
Files changed (2):
  1. app.py +5 -4
  2. requirements.txt +2 -1
app.py CHANGED
@@ -6,8 +6,10 @@ import gradio as gr
 import numpy as np
 import requests
 from dotenv import load_dotenv
+
 from huggingface_hub import InferenceClient
 
+
 load_dotenv()
 
 MAX_SEED = np.iinfo(np.int32).max
@@ -15,13 +17,12 @@ MAX_IMAGE_SIZE = 2048
 TOKEN = None
 FAL_KEY = None
 
+
 def download_locally(url: str, local_path: str = "downloaded_file.png") -> str:
     """Download an image or a video from a URL to a local path.
-
     Args:
         url (str): The URL of the image to download. Must be an http(s) URL.
         local_path (str, optional): The path (including filename) where the file should be saved. Defaults to "downloaded_file.png".
-
     Returns:
         str: The filesystem path of the saved file – suitable for returning to a **gr.File** output, or as an MCP tool response.
     """
@@ -41,7 +42,6 @@ def download_locally(url: str, local_path: str = "downloaded_file.png") -> str:
 def login_hf(oauth_token: gr.OAuthToken | None):
     """
     Login to Hugging Face and check initial key statuses.
-
     Args:
         oauth_token (gr.OAuthToken | None): The OAuth token from Hugging Face.
     """
@@ -52,6 +52,7 @@ def login_hf(oauth_token: gr.OAuthToken | None):
     else:
         print("No OAuth token provided, using environment variable HF_TOKEN.")
         TOKEN = os.environ.get("HF_TOKEN")
+    print("TOKEN: ", TOKEN)
 
 
 def login_fal(fal_key_from_ui: str | None):
@@ -68,6 +69,7 @@ def generate_image(prompt: str, seed: int = 42, width: int = 1024, height: int =
     else:
         FAL_KEY = os.environ.get("FAL_KEY")
         print("FAL_KEY is configured from environment variable.")
+    print("FAL_KEY: ", FAL_KEY)
 
 
 def generate_image(prompt: str, seed: int = 42, width: int = 1024, height: int = 1024, num_inference_steps: int = 25):
@@ -85,7 +87,6 @@ def generate_image(prompt: str, seed: int = 42, width: int = 1024, height: int =
         num_inference_steps (int, default=25):
             The number of denoising steps. More denoising steps usually lead to a higher quality image at the
             expense of slower inference.
-
     """
     client = InferenceClient(provider="fal-ai", token=TOKEN)
     image = client.text_to_image(
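For orientation, a minimal sketch of how the truncated generate_image body presumably completes the client.text_to_image(...) call with the parameters shown above; the keyword names follow huggingface_hub's InferenceClient.text_to_image, and the return statement is an assumption, not taken from this commit.

# Minimal sketch (assumption): how generate_image likely finishes the call shown in the diff.
from huggingface_hub import InferenceClient

def generate_image_sketch(prompt, seed=42, width=1024, height=1024, num_inference_steps=25, token=None):
    # provider="fal-ai" and token= appear in the diff; the keyword arguments below are assumed
    client = InferenceClient(provider="fal-ai", token=token)
    # text_to_image returns a PIL.Image.Image
    image = client.text_to_image(
        prompt,
        seed=seed,
        width=width,
        height=height,
        num_inference_steps=num_inference_steps,
    )
    return image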
requirements.txt CHANGED
@@ -1,4 +1,5 @@
 huggingface-hub
 numpy
 python-dotenv
-fal-client
+fal-client
+gradio
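As context for the python-dotenv and environment-variable handling this commit touches, a minimal sketch of the token-resolution pattern used in app.py; the .env file contents and the fallback order are assumptions based on the lines visible in the diff.

# Minimal sketch (assumption): how HF_TOKEN / FAL_KEY resolution works in app.py.
# load_dotenv() reads key=value pairs from a local .env file into os.environ,
# and the login_* helpers fall back to those variables when nothing is supplied
# through the UI / OAuth.
import os
from dotenv import load_dotenv

load_dotenv()  # e.g. a .env file containing HF_TOKEN=... and FAL_KEY=...

TOKEN = os.environ.get("HF_TOKEN")   # passed to InferenceClient(provider="fal-ai", token=TOKEN)
FAL_KEY = os.environ.get("FAL_KEY")  # used by the fal-client integration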