Diffusers
Safetensors

I have a problem with the model in Google Colab

#1
by adamdahdah - opened

import torch
from diffusers import DiffusionPipeline

# Load the TwinFlow pipeline from the Hub in bfloat16 and place it on the GPU.
# NOTE(review): this call 404s because inclusionAI/TwinFlow does not ship a
# model_index.json — the repo is not packaged as a diffusers pipeline (confirmed
# by the maintainers below); use the official inference code instead.
# switch to "mps" for apple devices
pipe = DiffusionPipeline.from_pretrained("inclusionAI/TwinFlow", dtype=torch.bfloat16, device_map="cuda")

prompt = "Astronaut in a jungle, cold color palette, muted colors, detailed, 8k"
# pipe(...) returns a pipeline output whose .images is a list of PIL images.
image = pipe(prompt).images[0]


HTTPError Traceback (most recent call last)
/usr/local/lib/python3.12/dist-packages/huggingface_hub/utils/_http.py in hf_raise_for_status(response, endpoint_name)
401 try:
--> 402 response.raise_for_status()
403 except HTTPError as e:

14 frames/usr/local/lib/python3.12/dist-packages/requests/models.py in raise_for_status(self)
1025 if http_error_msg:
-> 1026 raise HTTPError(http_error_msg, response=self)
1027

HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/inclusionAI/TwinFlow/resolve/main/model_index.json

The above exception was the direct cause of the following exception:

EntryNotFoundError Traceback (most recent call last)
/tmp/ipython-input-1606966401.py in <cell line: 0>()
3
4 # switch to "mps" for apple devices
----> 5 pipe = DiffusionPipeline.from_pretrained("inclusionAI/TwinFlow", dtype=torch.bfloat16, device_map="cuda")
6
7 prompt = "Astronaut in a jungle, cold color palette, muted colors, detailed, 8k"

/usr/local/lib/python3.12/dist-packages/huggingface_hub/utils/_validators.py in _inner_fn(*args, **kwargs)
112 kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.name, has_token=has_token, kwargs=kwargs)
113
--> 114 return fn(*args, **kwargs)
115
116 return _inner_fn # type: ignore

/usr/local/lib/python3.12/dist-packages/diffusers/pipelines/pipeline_utils.py in from_pretrained(cls, pretrained_model_name_or_path, **kwargs)
827 " is neither a valid local path nor a valid repo id. Please check the parameter."
828 )
--> 829 cached_folder = cls.download(
830 pretrained_model_name_or_path,
831 cache_dir=cache_dir,

/usr/local/lib/python3.12/dist-packages/huggingface_hub/utils/_validators.py in _inner_fn(*args, **kwargs)
112 kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.name, has_token=has_token, kwargs=kwargs)
113
--> 114 return fn(*args, **kwargs)
115
116 return _inner_fn # type: ignore

/usr/local/lib/python3.12/dist-packages/diffusers/pipelines/pipeline_utils.py in download(cls, pretrained_model_name, **kwargs)
1603
1604 if not local_files_only:
-> 1605 config_file = hf_hub_download(
1606 pretrained_model_name,
1607 cls.config_name,

/usr/local/lib/python3.12/dist-packages/huggingface_hub/utils/_validators.py in _inner_fn(*args, **kwargs)
112 kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.name, has_token=has_token, kwargs=kwargs)
113
--> 114 return fn(*args, **kwargs)
115
116 return _inner_fn # type: ignore

/usr/local/lib/python3.12/dist-packages/huggingface_hub/file_download.py in hf_hub_download(repo_id, filename, subfolder, repo_type, revision, library_name, library_version, cache_dir, local_dir, user_agent, force_download, proxies, etag_timeout, token, local_files_only, headers, endpoint, resume_download, force_filename, local_dir_use_symlinks)
1005 )
1006 else:
-> 1007 return _hf_hub_download_to_cache_dir(
1008 # Destination
1009 cache_dir=cache_dir,

/usr/local/lib/python3.12/dist-packages/huggingface_hub/file_download.py in _hf_hub_download_to_cache_dir(cache_dir, repo_id, filename, repo_type, revision, endpoint, etag_timeout, headers, proxies, token, local_files_only, force_download)
1068 # Try to get metadata (etag, commit_hash, url, size) from the server.
1069 # If we can't, a HEAD request error is returned.
-> 1070 (url_to_download, etag, commit_hash, expected_size, xet_file_data, head_call_error) = _get_metadata_or_catch_error(
1071 repo_id=repo_id,
1072 filename=filename,

/usr/local/lib/python3.12/dist-packages/huggingface_hub/file_download.py in _get_metadata_or_catch_error(repo_id, filename, repo_type, revision, endpoint, proxies, etag_timeout, headers, token, local_files_only, relative_filename, storage_folder)
1541 try:
1542 try:
-> 1543 metadata = get_hf_file_metadata(
1544 url=url, proxies=proxies, timeout=etag_timeout, headers=headers, token=token, endpoint=endpoint
1545 )

/usr/local/lib/python3.12/dist-packages/huggingface_hub/utils/_validators.py in _inner_fn(*args, **kwargs)
112 kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.name, has_token=has_token, kwargs=kwargs)
113
--> 114 return fn(*args, **kwargs)
115
116 return _inner_fn # type: ignore

/usr/local/lib/python3.12/dist-packages/huggingface_hub/file_download.py in get_hf_file_metadata(url, token, proxies, timeout, library_name, library_version, user_agent, headers, endpoint)
1458
1459 # Retrieve metadata
-> 1460 r = _request_wrapper(
1461 method="HEAD",
1462 url=url,

/usr/local/lib/python3.12/dist-packages/huggingface_hub/file_download.py in _request_wrapper(method, url, follow_relative_redirects, **params)
281 # Recursively follow relative redirects
282 if follow_relative_redirects:
--> 283 response = _request_wrapper(
284 method=method,
285 url=url,

/usr/local/lib/python3.12/dist-packages/huggingface_hub/file_download.py in _request_wrapper(method, url, follow_relative_redirects, **params)
305 # Perform request and return if status_code is not in the retry list.
306 response = http_backoff(method=method, url=url, **params)
--> 307 hf_raise_for_status(response)
308 return response
309

/usr/local/lib/python3.12/dist-packages/huggingface_hub/utils/_http.py in hf_raise_for_status(response, endpoint_name)
411 elif error_code == "EntryNotFound":
412 message = f"{response.status_code} Client Error." + "\n\n" + f"Entry Not Found for url: {response.url}."
--> 413 raise _format(EntryNotFoundError, message, response) from e
414
415 elif error_code == "GatedRepo":

EntryNotFoundError: 404 Client Error. (Request ID: Root=1-6937c675-6533d0ac4368b16b0a02219b;d27b6f6d-d646-4747-8b75-25f0efa1ee4f)

Entry Not Found for url: https://huggingface.co/inclusionAI/TwinFlow/resolve/main/model_index.json.

inclusionAI org

Hi, we have not tested the model with the diffusers pipeline yet (it is in our plan).

We recommend using our official inference code at https://github.com/inclusionAI/TwinFlow

Sign up or log in to comment