import gradio as gr
from huggingface_hub import HfApi
import uuid
from slugify import slugify
import os
import json
import subprocess
import tempfile
import requests
import shutil
import time
from pathlib import Path
from typing import Optional, Dict, List
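
# Configuration note (derived from the code below): the Space reads HF_TOKEN
# (required; used by HfApi to create and upload Spaces) and FIREWORKS_API_KEY
# (optional; enables AI-generated app.py, otherwise create_smart_template() is
# used) from the environment. The GitHub personal access token for the
# "HF Space -> GitHub" tab is entered through the UI, not an env var.
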
def is_lfs_pointer_file(filepath): | |
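    """Return True if *filepath* looks like a Git LFS pointer file.

    LFS pointers are small text stubs whose content starts with
    'version https://git-lfs.github.com/spec/v1'; only the first
    100 bytes are inspected.
    """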
try: | |
with open(filepath, 'rb') as f: | |
header = f.read(100) | |
return header.startswith(b'version https://git-lfs.github.com/spec/v1') | |
except: | |
return False | |
def remove_lfs_files(folder): | |
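    """Walk *folder* (skipping .git) and delete any Git LFS pointer files.

    Returns the list of deleted paths relative to *folder*.
    """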
removed_files = [] | |
for root, dirs, files in os.walk(folder): | |
if '.git' in root: | |
continue | |
for file in files: | |
filepath = os.path.join(root, file) | |
if is_lfs_pointer_file(filepath): | |
os.remove(filepath) | |
removed_files.append(filepath.replace(folder + os.sep, '')) | |
return removed_files | |
def analyze_repository(src_path: Path) -> Dict: | |
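    """Scan a cloned repository and collect heuristics for app generation.

    Returns a dict with detected dependencies (most '==' pins relaxed to '>='),
    a README excerpt and one-line description, likely entry points, key Python
    files that import common ML/CV libraries, model weight files, and config
    files.
    """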
analysis = { | |
"has_requirements": False, | |
"has_readme": False, | |
"main_language": "python", | |
"key_files": [], | |
"dependencies": [], | |
"description": "", | |
"entry_points": [], | |
"model_files": [], | |
"config_files": [] | |
} | |
req_file = src_path / "requirements.txt" | |
if req_file.exists(): | |
analysis["has_requirements"] = True | |
try: | |
reqs = req_file.read_text(encoding="utf-8").strip().split("\n") | |
cleaned_deps = [] | |
for r in reqs: | |
r = r.strip() | |
if r and not r.startswith("#"): | |
if "opencv-python==4.10.0" in r: | |
r = "opencv-python>=4.10.0.82" | |
elif "opencv-python==4.10" in r: | |
r = "opencv-python>=4.10.0.82" | |
if "==" in r and not r.startswith("git+"): | |
pkg_name = r.split("==")[0] | |
if pkg_name.lower() in ["torch", "tensorflow", "transformers", "numpy"]: | |
cleaned_deps.append(r) | |
else: | |
version = r.split("==")[1] | |
if version.count('.') == 1: | |
version = version + ".0" | |
cleaned_deps.append(f"{pkg_name}>={version}") | |
else: | |
cleaned_deps.append(r) | |
analysis["dependencies"] = cleaned_deps | |
except: | |
analysis["dependencies"] = [] | |
for readme_name in ["README.md", "readme.md", "README.rst", "README.txt"]: | |
readme_file = src_path / readme_name | |
if readme_file.exists(): | |
analysis["has_readme"] = True | |
try: | |
readme_content = readme_file.read_text(encoding="utf-8") | |
analysis["readme_content"] = readme_content[:5000] | |
lines = readme_content.split("\n") | |
for i, line in enumerate(lines[:10]): | |
if line.strip() and not line.startswith("#") and not line.startswith("!"): | |
analysis["description"] = line.strip() | |
break | |
            except Exception:
                pass
            break  # use the first README variant found; don't overwrite it with later ones
py_files = list(src_path.glob("**/*.py")) | |
for py_file in py_files[:20]: | |
if "__pycache__" not in str(py_file) and ".git" not in str(py_file): | |
relative_path = py_file.relative_to(src_path) | |
if any(name in py_file.name for name in ["main.py", "app.py", "demo.py", "run.py", "server.py", "streamlit_app.py"]): | |
analysis["entry_points"].append(str(relative_path)) | |
try: | |
content = py_file.read_text(encoding="utf-8")[:1000] | |
if "if __name__" in content and "main" in content: | |
analysis["entry_points"].append(str(relative_path)) | |
if any(lib in content for lib in ["torch", "tensorflow", "transformers", "numpy", "pandas", "cv2", "PIL"]): | |
analysis["key_files"].append({ | |
"path": str(relative_path), | |
"preview": content[:500] | |
}) | |
except: | |
pass | |
model_extensions = [".pth", ".pt", ".ckpt", ".h5", ".pb", ".onnx", ".safetensors"] | |
for ext in model_extensions: | |
model_files = list(src_path.glob(f"**/*{ext}")) | |
for mf in model_files[:5]: | |
if ".git" not in str(mf): | |
analysis["model_files"].append(str(mf.relative_to(src_path))) | |
config_patterns = ["config.json", "config.yaml", "config.yml", "*.json", "*.yaml"] | |
for pattern in config_patterns: | |
config_files = list(src_path.glob(pattern)) | |
for cf in config_files[:5]: | |
if ".git" not in str(cf): | |
analysis["config_files"].append(str(cf.relative_to(src_path))) | |
return analysis | |
def generate_gradio_app(repo_url: str, analysis: Dict) -> Dict: | |
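    """Generate app.py/requirements.txt for the repo via Fireworks AI.

    Builds a context string from the repository analysis and asks the Fireworks
    chat-completions API for JSON with keys app_py, requirements_txt and
    summary. Falls back to create_smart_template() when FIREWORKS_API_KEY is
    missing or the API call fails; returns None if the model's response cannot
    be parsed or validated.
    """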
context = f"""Repository URL: {repo_url} | |
Repository Analysis: | |
- Description: {analysis.get('description', 'N/A')} | |
- Main Dependencies: {', '.join(analysis['dependencies'][:10])} | |
- Entry Points: {', '.join(analysis['entry_points'][:5])} | |
- Model Files: {', '.join(analysis['model_files'][:3])} | |
- Config Files: {', '.join(analysis['config_files'][:3])} | |
Key Files Found: | |
""" | |
for kf in analysis.get('key_files', [])[:3]: | |
context += f"\n--- {kf['path']} ---\n{kf['preview']}\n" | |
if analysis.get('readme_content'): | |
context += f"\n--- README.md (excerpt) ---\n{analysis['readme_content'][:2000]}\n" | |
system_prompt = """You are an expert at creating Gradio apps from GitHub repositories. | |
Your task is to generate a complete, working Gradio interface that demonstrates the main functionality of the repository. | |
CRITICAL REQUIREMENTS: | |
1. The app.py must be FULLY FUNCTIONAL and runnable | |
2. DO NOT use 'from agent import' or any repository-specific imports that won't exist | |
3. Handle errors gracefully with clear user feedback | |
4. Include API key inputs when external services are required | |
5. Create intuitive UI components for the main features | |
6. Always use gradio>=5.35.0 | |
Return ONLY valid JSON with these exact keys: | |
- app_py: Complete Gradio app code | |
- requirements_txt: All necessary dependencies including gradio>=5.35.0 | |
- summary: Brief description of what the app does""" | |
fireworks_key = os.getenv("FIREWORKS_API_KEY") | |
if fireworks_key: | |
try: | |
url = "https://api.fireworks.ai/inference/v1/chat/completions" | |
payload = { | |
"model": "accounts/fireworks/models/qwen3-coder-480b-a35b-instruct", | |
"max_tokens": 4096, | |
"top_p": 1, | |
"top_k": 40, | |
"presence_penalty": 0, | |
"frequency_penalty": 0, | |
"temperature": 0.6, | |
"messages": [ | |
{"role": "system", "content": system_prompt}, | |
{"role": "user", "content": f"Create a fully functional Gradio app for this repository:\n\n{context[:8000]}"} | |
] | |
} | |
headers = { | |
"Accept": "application/json", | |
"Content-Type": "application/json", | |
"Authorization": f"Bearer {fireworks_key.strip()}" | |
} | |
r = requests.post(url, headers=headers, data=json.dumps(payload), timeout=30) | |
if r.status_code == 200: | |
response_text = r.json()["choices"][0]["message"]["content"] | |
print("β Fireworks AIλ‘ μ± μμ± μ±κ³΅") | |
try: | |
if "```json" in response_text: | |
start = response_text.find("```json") + 7 | |
end = response_text.find("```", start) | |
response_text = response_text[start:end].strip() | |
elif "```" in response_text: | |
start = response_text.find("```") + 3 | |
end = response_text.find("```", start) | |
response_text = response_text[start:end].strip() | |
result = json.loads(response_text) | |
if not all(key in result for key in ["app_py", "requirements_txt", "summary"]): | |
raise ValueError("Missing required keys in response") | |
if "gradio" not in result.get("requirements_txt", "").lower(): | |
result["requirements_txt"] = "gradio>=5.35.0\n" + result.get("requirements_txt", "") | |
return result | |
except (json.JSONDecodeError, ValueError) as e: | |
print(f"β οΈ JSON νμ± μ€λ₯: {e}") | |
return None | |
except Exception as e: | |
print(f"β οΈ Fireworks AI API μ€λ₯: {e}") | |
print("βΉοΈ AI APIκ° μμ΄ κΈ°λ³Έ ν νλ¦Ώμ μμ±ν©λλ€.") | |
return create_smart_template(repo_url, analysis) | |
def create_smart_template(repo_url: str, analysis: Dict) -> Dict: | |
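    """Build a fallback Gradio app without calling any AI service.

    Chooses a 3D, computer-vision, NLP, or generic template based on the repo
    name and detected dependencies, and returns a dict with app_py,
    requirements_txt and summary.
    """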
repo_name = Path(repo_url.rstrip("/")).name | |
description = analysis.get("description", "A project deployed from GitHub") if analysis else "A project deployed from GitHub" | |
deps = " ".join(analysis.get("dependencies", [])) if analysis else "" | |
has_cv = any(lib in deps for lib in ["cv2", "PIL", "pillow", "opencv"]) | |
has_nlp = any(lib in deps for lib in ["transformers", "nltk", "spacy"]) | |
has_3d = any(lib in deps for lib in ["gaussian", "rasterizer", "plyfile", "trimesh"]) | |
requirements = ["gradio>=5.35.0"] | |
if analysis and analysis.get("dependencies"): | |
filtered_deps = [] | |
for dep in analysis["dependencies"][:15]: | |
if not dep.startswith("git+") and not dep.startswith("-e") and not dep.startswith("file:"): | |
if "==" in dep and dep.split("==")[0].lower() not in ["torch", "tensorflow", "numpy"]: | |
pkg_name = dep.split("==")[0] | |
version = dep.split("==")[1] | |
filtered_deps.append(f"{pkg_name}>={version}") | |
else: | |
filtered_deps.append(dep) | |
requirements.extend(filtered_deps) | |
if has_3d or "gaussian" in repo_name.lower(): | |
app_code = f'''import gradio as gr | |
import os | |
def process_3d(input_file): | |
if input_file is None: | |
return "Please upload a 3D file or image" | |
info = """ | |
## ⚠️ Build Requirements Notice
This project requires: | |
1. CUDA-enabled GPU | |
2. Custom C++/CUDA extensions compilation | |
Original repository: {repo_url} | |
""" | |
return info | |
with gr.Blocks(title="{repo_name}") as demo: | |
gr.Markdown(f""" | |
# {repo_name.replace("-", " ").title()} | |
{description} | |
This space was created from: [{repo_url}]({repo_url}) | |
""") | |
with gr.Row(): | |
with gr.Column(): | |
input_file = gr.File(label="Upload 3D File or Image") | |
process_btn = gr.Button("Process", variant="primary") | |
with gr.Column(): | |
output_info = gr.Markdown() | |
process_btn.click( | |
fn=process_3d, | |
inputs=input_file, | |
outputs=output_info | |
) | |
if __name__ == "__main__": | |
demo.launch() | |
''' | |
elif has_cv: | |
app_code = f'''import gradio as gr | |
from PIL import Image | |
import numpy as np | |
def process_image(image): | |
if image is None: | |
return None, "Please upload an image" | |
img_array = np.array(image) | |
processed = Image.fromarray(img_array) | |
info = f"Image shape: {{img_array.shape}}" | |
return processed, info | |
with gr.Blocks(title="{repo_name}") as demo: | |
gr.Markdown(f""" | |
# {repo_name.replace("-", " ").title()} | |
{description} | |
This space was created from: [{repo_url}]({repo_url}) | |
""") | |
with gr.Row(): | |
with gr.Column(): | |
input_image = gr.Image(label="Input Image", type="pil") | |
process_btn = gr.Button("Process Image", variant="primary") | |
with gr.Column(): | |
output_image = gr.Image(label="Output Image") | |
output_info = gr.Textbox(label="Information") | |
process_btn.click( | |
fn=process_image, | |
inputs=input_image, | |
outputs=[output_image, output_info] | |
) | |
if __name__ == "__main__": | |
demo.launch() | |
''' | |
elif has_nlp: | |
app_code = f'''import gradio as gr | |
def process_text(text, max_length=100): | |
if not text: | |
return "Please enter some text" | |
word_count = len(text.split()) | |
char_count = len(text) | |
result = f""" | |
**Analysis Results:** | |
- Word count: {{word_count}} | |
- Character count: {{char_count}} | |
- Average word length: {{char_count/max(word_count, 1):.1f}} | |
""" | |
return result | |
with gr.Blocks(title="{repo_name}") as demo: | |
gr.Markdown(f""" | |
# {repo_name.replace("-", " ").title()} | |
{description} | |
This space was created from: [{repo_url}]({repo_url}) | |
""") | |
with gr.Row(): | |
with gr.Column(): | |
input_text = gr.Textbox( | |
label="Input Text", | |
placeholder="Enter your text here...", | |
lines=5 | |
) | |
max_length = gr.Slider( | |
minimum=10, | |
maximum=500, | |
value=100, | |
label="Max Length" | |
) | |
process_btn = gr.Button("Process Text", variant="primary") | |
with gr.Column(): | |
output_text = gr.Markdown(label="Results") | |
process_btn.click( | |
fn=process_text, | |
inputs=[input_text, max_length], | |
outputs=output_text | |
) | |
if __name__ == "__main__": | |
demo.launch() | |
''' | |
else: | |
app_code = f'''import gradio as gr | |
def main_function(input_data): | |
if not input_data: | |
return "Please provide input" | |
result = f"Processed successfully! Input received: {{input_data}}" | |
return result | |
with gr.Blocks(title="{repo_name}") as demo: | |
gr.Markdown(f""" | |
# {repo_name.replace("-", " ").title()} | |
{description} | |
This space was created from: [{repo_url}]({repo_url}) | |
""") | |
with gr.Row(): | |
with gr.Column(): | |
input_data = gr.Textbox( | |
label="Input", | |
placeholder="Enter your input here...", | |
lines=3 | |
) | |
process_btn = gr.Button("Process", variant="primary") | |
with gr.Column(): | |
output_data = gr.Textbox(label="Output") | |
process_btn.click( | |
fn=main_function, | |
inputs=input_data, | |
outputs=output_data | |
) | |
if __name__ == "__main__": | |
demo.launch() | |
''' | |
return { | |
"app_py": app_code, | |
"requirements_txt": "\n".join(requirements), | |
"summary": f"Smart template created for {repo_name}" | |
} | |
def clone(repo_git, repo_hf, sdk_type, skip_lfs, enable_smart_generation): | |
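    """Clone a GitHub repository and deploy it as a Hugging Face Space.

    Generator that yields progress messages for the Gradio UI. Optionally
    strips Git LFS pointer files, optionally generates app.py/requirements.txt
    via generate_gradio_app(), then creates the Space and uploads the folder.
    Requires the HF_TOKEN environment variable.
    """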
folder = str(uuid.uuid4()) | |
hf_token = os.getenv("HF_TOKEN") | |
if not hf_token: | |
yield "β Error: HF_TOKEN not found in environment variables." | |
return | |
try: | |
yield "π Starting clone process..." | |
api = HfApi(token=hf_token) | |
try: | |
user_info = api.whoami() | |
username = user_info["name"] | |
yield f"β Authenticated as: {username}" | |
except Exception as e: | |
yield f"β Authentication failed: {str(e)}" | |
return | |
yield f"π₯ Cloning repository from {repo_git}..." | |
env = os.environ.copy() | |
env['GIT_LFS_SKIP_SMUDGE'] = '1' | |
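        # GIT_LFS_SKIP_SMUDGE=1 makes `git clone` check out LFS files as small
        # pointer stubs instead of downloading the real blobs up front.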
clone_cmd = ['git', 'clone', '--recurse-submodules', repo_git, folder] | |
subprocess.run(clone_cmd, check=True, env=env) | |
if not skip_lfs: | |
yield "π¦ Attempting to download LFS files..." | |
try: | |
subprocess.run(['git', 'lfs', 'install'], cwd=folder, check=True) | |
lfs_result = subprocess.run(['git', 'lfs', 'pull'], cwd=folder, capture_output=True, text=True) | |
if lfs_result.returncode != 0: | |
yield f"β οΈ Warning: LFS download failed" | |
skip_lfs = True | |
else: | |
yield "β LFS files downloaded successfully" | |
except Exception as e: | |
yield f"β οΈ LFS error: {str(e)}" | |
skip_lfs = True | |
if skip_lfs: | |
yield "π§Ή Removing LFS pointer files..." | |
removed_files = remove_lfs_files(folder) | |
if removed_files: | |
yield f"π Removed {len(removed_files)} LFS pointer files" | |
if enable_smart_generation: | |
yield "π Analyzing repository structure..." | |
folder_path = Path(folder) | |
analysis = analyze_repository(folder_path) | |
yield "π€ Generating smart Gradio app..." | |
generated = generate_gradio_app(repo_git, analysis) | |
if generated and isinstance(generated, dict) and "app_py" in generated: | |
app_path = folder_path / "app.py" | |
app_path.write_text(generated["app_py"], encoding="utf-8") | |
yield "β Smart app.py generated" | |
req_path = folder_path / "requirements.txt" | |
existing_reqs = [] | |
if req_path.exists(): | |
try: | |
existing_reqs = req_path.read_text(encoding="utf-8").strip().split("\n") | |
except: | |
existing_reqs = [] | |
new_reqs = generated["requirements_txt"].strip().split("\n") if generated["requirements_txt"] else [] | |
all_reqs = set() | |
git_reqs = [] | |
torch_reqs = [] | |
regular_reqs = [] | |
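                # Merge existing and generated requirements into three buckets:
                # torch/CUDA-related packages (written first), regular packages
                # (gradio is force-added if missing), and git+ URLs (written last).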
for req in existing_reqs + new_reqs: | |
req = req.strip() | |
if not req or req.startswith("#"): | |
continue | |
if req.startswith("git+"): | |
git_reqs.append(req) | |
elif "torch" in req.lower() or "cuda" in req.lower(): | |
torch_reqs.append(req) | |
else: | |
regular_reqs.append(req) | |
has_gradio = any("gradio" in req for req in regular_reqs) | |
if not has_gradio: | |
regular_reqs.append("gradio>=5.35.0") | |
final_reqs = [] | |
if torch_reqs: | |
final_reqs.extend(sorted(set(torch_reqs))) | |
final_reqs.append("") | |
final_reqs.extend(sorted(set(regular_reqs))) | |
if git_reqs: | |
final_reqs.append("") | |
final_reqs.extend(sorted(set(git_reqs))) | |
req_content = "\n".join(final_reqs) | |
req_path.write_text(req_content, encoding="utf-8") | |
yield "β Requirements.txt updated" | |
readme_path = folder_path / "README.md" | |
readme_content = f"""--- | |
title: {repo_hf.replace("-", " ").title()} | |
emoji: 🚀
colorFrom: blue | |
colorTo: green | |
sdk: {sdk_type} | |
sdk_version: "5.35.0" | |
app_file: app.py | |
pinned: false | |
--- | |
# {repo_hf.replace("-", " ").title()} | |
{analysis.get('description', 'Deployed from GitHub repository')} | |
Deployed from: {repo_git} | |
""" | |
readme_path.write_text(readme_content, encoding="utf-8") | |
yield "β README.md created/updated" | |
git_dir = os.path.join(folder, '.git') | |
if os.path.exists(git_dir): | |
shutil.rmtree(git_dir) | |
yield "π§Ή Removed .git directory" | |
gitattributes_path = os.path.join(folder, '.gitattributes') | |
if os.path.exists(gitattributes_path): | |
with open(gitattributes_path, 'r') as f: | |
lines = f.readlines() | |
new_lines = [] | |
for line in lines: | |
if 'filter=lfs' not in line: | |
new_lines.append(line) | |
if new_lines: | |
with open(gitattributes_path, 'w') as f: | |
f.writelines(new_lines) | |
else: | |
os.remove(gitattributes_path) | |
yield "ποΈ Creating Hugging Face Space..." | |
repo_id = f"{username}/{slugify(repo_hf)}" | |
space_created = False | |
for attempt in range(3): | |
try: | |
yield f" Creating Space: {repo_id} (attempt {attempt + 1}/3)" | |
try: | |
existing_space = api.space_info(repo_id=repo_id, token=hf_token) | |
yield f" βΉοΈ Space already exists: {existing_space.id}" | |
space_created = True | |
break | |
except: | |
pass | |
create_result = api.create_repo( | |
repo_id=repo_id, | |
repo_type="space", | |
space_sdk=sdk_type, | |
exist_ok=True, | |
private=False, | |
token=hf_token | |
) | |
time.sleep(3) | |
space_info = api.space_info(repo_id=repo_id, token=hf_token) | |
yield f" β Space created successfully: {space_info.id}" | |
space_created = True | |
break | |
except Exception as e: | |
error_msg = str(e) | |
if "429" in error_msg or "Too Many Requests" in error_msg: | |
yield f"β Rate Limit Error - Try again in 17-24 hours" | |
raise Exception(f"Rate limit reached.") | |
yield f" β οΈ Attempt {attempt + 1} failed: {error_msg[:100]}..." | |
if attempt < 2: | |
yield " Retrying in 5 seconds..." | |
time.sleep(5) | |
else: | |
yield f" β Failed to create space after 3 attempts" | |
raise Exception(f"Could not create space: {error_msg}") | |
if not space_created: | |
raise Exception("Failed to create space") | |
folder_size = sum(os.path.getsize(os.path.join(dirpath, filename)) | |
for dirpath, dirnames, filenames in os.walk(folder) | |
for filename in filenames) / (1024 * 1024) | |
yield f"π Folder size: {folder_size:.2f} MB" | |
file_count = sum(len(files) for _, _, files in os.walk(folder)) | |
yield f"π Total files to upload: {file_count}" | |
upload_success = False | |
max_retries = 3 | |
for attempt in range(max_retries): | |
try: | |
if attempt > 0: | |
yield f"π€ Upload attempt {attempt + 1}/{max_retries}..." | |
time.sleep(5) | |
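                # Folder size decides the upload path: above ~500 MB the
                # resumable, multi-commit upload_large_folder is used,
                # otherwise a single upload_folder commit is made.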
                if folder_size > 500:
                    yield "📤 Uploading large folder to Hugging Face..."
                    # upload_large_folder() manages its own commits and uses the
                    # HfApi token, so commit_message/token kwargs are not passed.
                    api.upload_large_folder(
                        folder_path=folder,
                        repo_id=repo_id,
                        repo_type="space",
                        ignore_patterns=["*.pyc", "__pycache__", ".git*", ".DS_Store", "*.egg-info"]
                    )
                else:
                    yield "📤 Uploading to Hugging Face..."
                    api.upload_folder(
                        folder_path=folder,
                        repo_id=repo_id,
                        repo_type="space",
                        token=hf_token,
                        commit_message="Deploy from GitHub repository",
                        ignore_patterns=["*.pyc", "__pycache__", ".git*", ".DS_Store", "*.egg-info"]
                    )
                upload_success = True
                yield "✅ Upload completed successfully"
                break
except Exception as upload_error: | |
error_msg = str(upload_error) | |
if "404" in error_msg and attempt < max_retries - 1: | |
yield f" β οΈ Upload failed (404). Retrying..." | |
time.sleep(10) | |
try: | |
space_info = api.space_info(repo_id=repo_id, token=hf_token) | |
yield f" β Space confirmed to exist" | |
except: | |
yield " π Attempting to recreate space..." | |
try: | |
api.create_repo( | |
repo_id=repo_id, | |
repo_type="space", | |
space_sdk=sdk_type, | |
exist_ok=True, | |
private=False, | |
token=hf_token | |
) | |
yield " β Space recreated" | |
except Exception as recreate_error: | |
yield f" β Could not recreate space: {str(recreate_error)}" | |
elif "LFS pointer" in error_msg: | |
yield "β Upload failed due to remaining LFS pointer files" | |
raise upload_error | |
elif attempt == max_retries - 1: | |
yield f"β Upload failed after {max_retries} attempts" | |
raise upload_error | |
else: | |
yield f" β οΈ Upload failed: {error_msg[:100]}..." | |
if not upload_success: | |
raise Exception("Upload failed after all retries") | |
shutil.rmtree(folder) | |
space_url = f"https://huggingface.co/spaces/{repo_id}" | |
yield f""" | |
✅ **Successfully created Space!**
🔗 **Your Space URL**: [{space_url}]({space_url})
📋 **Summary:**
- Space ID: `{repo_id}` | |
- Source: {repo_git} | |
- SDK: {sdk_type} | |
- Smart Generation: {'Enabled' if enable_smart_generation else 'Disabled'} | |
- LFS Files: {'Skipped' if skip_lfs else 'Included'} | |
""" | |
if skip_lfs: | |
yield "\nβ οΈ LFS files were removed." | |
if enable_smart_generation: | |
yield "\nπ€ AI-generated Gradio interface was created" | |
except subprocess.CalledProcessError as e: | |
if os.path.exists(folder): | |
shutil.rmtree(folder) | |
yield f"β Git error: {str(e)}" | |
except Exception as e: | |
if os.path.exists(folder): | |
shutil.rmtree(folder) | |
yield f"β Error: {str(e)}" | |
def space_to_github(hf_username, hf_space_name, github_username, github_repo_name, github_token): | |
"""Clone HuggingFace Space and push to GitHub""" | |
tmp_dir = None | |
try: | |
        # Build the Hugging Face Space git URL
        hf_repo_url = f"https://huggingface.co/spaces/{hf_username}/{hf_space_name}.git"
        # Create a temporary directory and clone into it
        tmp_dir = tempfile.mkdtemp()
        yield f"📥 Cloning HF Space: {hf_username}/{hf_space_name}..."
        # Clone without downloading LFS blobs
        env = os.environ.copy()
        env['GIT_LFS_SKIP_SMUDGE'] = '1'  # skip LFS files
        clone_cmd = ['git', 'clone', hf_repo_url, tmp_dir]
result = subprocess.run(clone_cmd, env=env, capture_output=True, text=True) | |
if result.returncode != 0: | |
raise Exception(f"Clone failed: {result.stderr}") | |
yield "β HF Space cloned successfully" | |
# GitHub μΈμ¦ λ° λ ν¬ μμ± | |
yield "π Authenticating with GitHub..." | |
# GitHub API νΈμΆλ‘ λ ν¬ μμ± | |
headers = { | |
"Authorization": f"token {github_token}", | |
"Accept": "application/vnd.github.v3+json" | |
} | |
        # Fetch the authenticated user's info
user_response = requests.get("https://api.github.com/user", headers=headers) | |
if user_response.status_code != 200: | |
raise Exception("GitHub authentication failed. Please check your token.") | |
actual_github_username = user_response.json()["login"] | |
        # Handle a mismatch between the entered username and the authenticated one
if github_username.lower() != actual_github_username.lower(): | |
yield f"β οΈ Note: Using authenticated user '{actual_github_username}' instead of '{github_username}'" | |
github_username = actual_github_username | |
yield f"β Authenticated as GitHub user: {github_username}" | |
# λ ν¬μ§ν 리 μμ± | |
yield f"π¦ Creating GitHub repository: {github_repo_name}..." | |
create_data = { | |
"name": github_repo_name, | |
"private": False, | |
"auto_init": False, | |
"description": f"Exported from HuggingFace Space: {hf_username}/{hf_space_name}" | |
} | |
create_response = requests.post( | |
"https://api.github.com/user/repos", | |
headers=headers, | |
json=create_data | |
) | |
if create_response.status_code == 201: | |
repo_info = create_response.json() | |
github_url = repo_info['html_url'] | |
yield f"β GitHub repository created: {github_url}" | |
elif create_response.status_code == 422: | |
# λ ν¬κ° μ΄λ―Έ μ‘΄μ¬ν¨ | |
github_url = f"https://github.com/{github_username}/{github_repo_name}" | |
yield f"βΉοΈ Repository already exists: {github_url}" | |
else: | |
error_msg = create_response.json().get('message', 'Unknown error') | |
raise Exception(f"Failed to create repository: {error_msg}") | |
        # Point the git remote at GitHub and push
        yield "📤 Pushing to GitHub..."
        os.chdir(tmp_dir)
        # Remove the existing origin (the HF Space remote)
        subprocess.run(['git', 'remote', 'remove', 'origin'], capture_output=True)
        # Add the GitHub remote (token embedded for push authentication)
        github_remote_url = f"https://{github_username}:{github_token}@github.com/{github_username}/{github_repo_name}.git"
        subprocess.run(['git', 'remote', 'add', 'origin', github_remote_url], check=True)
        # Push to the main branch
push_result = subprocess.run( | |
['git', 'push', '-u', 'origin', 'HEAD:main', '--force'], | |
capture_output=True, | |
text=True | |
) | |
if push_result.returncode != 0: | |
            # Retry pushing to the master branch
push_result = subprocess.run( | |
['git', 'push', '-u', 'origin', 'HEAD:master', '--force'], | |
capture_output=True, | |
text=True | |
) | |
if push_result.returncode != 0: | |
raise Exception(f"Push failed: {push_result.stderr}") | |
yield "β Successfully pushed to GitHub!" | |
# μ΅μ’ κ²°κ³Ό μΆλ ₯ - ν΄λ¦ κ°λ₯ν λ§ν¬ | |
final_url = f"https://github.com/{github_username}/{github_repo_name}" | |
yield f""" | |
π **Export Complete!** | |
π¦ **GitHub Repository**: [{final_url}]({final_url}) | |
π **Details:** | |
- Source: HuggingFace Space `{hf_username}/{hf_space_name}` | |
- Destination: GitHub `{github_username}/{github_repo_name}` | |
π§ **Next Steps:** | |
```bash | |
git clone {final_url}.git | |
cd {github_repo_name} | |
``` | |
""" | |
except Exception as e: | |
yield f"β Error: {str(e)}" | |
finally: | |
        # Clean up the temporary directory
        if tmp_dir and os.path.exists(tmp_dir):
            os.chdir("/")  # move out of tmp_dir before deleting it
shutil.rmtree(tmp_dir) | |
css = """ | |
/* Modern Professional UI */ | |
.container { | |
max-width: 1200px !important; | |
margin: auto; | |
padding: 20px; | |
} | |
/* Gradient backgrounds */ | |
.main-header { | |
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); | |
padding: 2rem; | |
border-radius: 15px; | |
margin-bottom: 2rem; | |
color: white; | |
text-align: center; | |
box-shadow: 0 10px 30px rgba(0,0,0,0.1); | |
} | |
/* Tab styling */ | |
.tabs { | |
box-shadow: 0 4px 6px rgba(0,0,0,0.07); | |
border-radius: 12px; | |
overflow: hidden; | |
} | |
/* Card-like sections */ | |
.input-section { | |
background: white; | |
padding: 25px; | |
border-radius: 12px; | |
box-shadow: 0 2px 12px rgba(0,0,0,0.08); | |
margin-bottom: 20px; | |
border: 1px solid rgba(0,0,0,0.05); | |
} | |
/* Output box styling */ | |
.output-box { | |
min-height: 400px !important; | |
max-height: 600px !important; | |
overflow-y: auto !important; | |
font-family: 'Monaco', 'Menlo', 'Ubuntu Mono', monospace !important; | |
font-size: 13px !important; | |
line-height: 1.6 !important; | |
background: linear-gradient(to bottom, #1e1e1e, #2d2d30) !important; | |
color: #d4d4d4 !important; | |
padding: 20px !important; | |
border-radius: 10px !important; | |
border: 1px solid rgba(255,255,255,0.1) !important; | |
} | |
/* Custom button styling */ | |
.primary-btn { | |
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%) !important; | |
color: white !important; | |
border: none !important; | |
padding: 12px 30px !important; | |
font-size: 16px !important; | |
font-weight: 600 !important; | |
border-radius: 8px !important; | |
cursor: pointer !important; | |
transition: all 0.3s ease !important; | |
box-shadow: 0 4px 15px rgba(102, 126, 234, 0.4) !important; | |
} | |
.primary-btn:hover { | |
transform: translateY(-2px) !important; | |
box-shadow: 0 6px 20px rgba(102, 126, 234, 0.6) !important; | |
} | |
/* Input field styling */ | |
input[type="text"], input[type="password"], textarea { | |
border: 2px solid #e2e8f0 !important; | |
border-radius: 8px !important; | |
padding: 10px 15px !important; | |
font-size: 14px !important; | |
transition: all 0.3s ease !important; | |
background: #f8fafc !important; | |
} | |
input[type="text"]:focus, input[type="password"]:focus, textarea:focus { | |
border-color: #667eea !important; | |
box-shadow: 0 0 0 3px rgba(102, 126, 234, 0.1) !important; | |
background: white !important; | |
} | |
/* Radio button group styling */ | |
.radio-group { | |
background: #f8fafc; | |
padding: 15px; | |
border-radius: 8px; | |
border: 1px solid #e2e8f0; | |
} | |
/* Checkbox styling */ | |
input[type="checkbox"] { | |
width: 20px !important; | |
height: 20px !important; | |
cursor: pointer !important; | |
} | |
/* Label styling */ | |
label { | |
font-weight: 600 !important; | |
color: #334155 !important; | |
margin-bottom: 8px !important; | |
display: block !important; | |
font-size: 14px !important; | |
} | |
/* Info text styling */ | |
.info-text { | |
color: #64748b !important; | |
font-size: 13px !important; | |
margin-top: 5px !important; | |
} | |
/* Status badges */ | |
.status-badge { | |
display: inline-block; | |
padding: 6px 12px; | |
border-radius: 20px; | |
font-size: 13px; | |
font-weight: 600; | |
margin: 5px 0; | |
} | |
.status-success { | |
background: #10b981; | |
color: white; | |
} | |
.status-error { | |
background: #ef4444; | |
color: white; | |
} | |
.status-warning { | |
background: #f59e0b; | |
color: white; | |
} | |
/* Tab button styling */ | |
button.tab-nav { | |
padding: 12px 24px !important; | |
font-weight: 600 !important; | |
font-size: 15px !important; | |
border-bottom: 3px solid transparent !important; | |
transition: all 0.3s ease !important; | |
} | |
button.tab-nav:hover { | |
background: rgba(102, 126, 234, 0.05) !important; | |
} | |
button.tab-nav.selected { | |
border-bottom-color: #667eea !important; | |
color: #667eea !important; | |
} | |
/* Markdown styling */ | |
.markdown-text h1 { | |
color: #1e293b !important; | |
font-size: 28px !important; | |
font-weight: 700 !important; | |
margin-bottom: 10px !important; | |
} | |
.markdown-text h2 { | |
color: #334155 !important; | |
font-size: 20px !important; | |
font-weight: 600 !important; | |
margin-top: 20px !important; | |
} | |
.markdown-text a { | |
color: #667eea !important; | |
text-decoration: none !important; | |
font-weight: 500 !important; | |
} | |
.markdown-text a:hover { | |
text-decoration: underline !important; | |
} | |
.markdown-text code { | |
background: #f1f5f9 !important; | |
padding: 2px 6px !important; | |
border-radius: 4px !important; | |
font-size: 13px !important; | |
color: #e11d48 !important; | |
} | |
/* Scrollbar styling */ | |
.output-box::-webkit-scrollbar { | |
width: 10px; | |
} | |
.output-box::-webkit-scrollbar-track { | |
background: #2d2d30; | |
border-radius: 5px; | |
} | |
.output-box::-webkit-scrollbar-thumb { | |
background: #555; | |
border-radius: 5px; | |
} | |
.output-box::-webkit-scrollbar-thumb:hover { | |
background: #666; | |
} | |
/* Animation */ | |
@keyframes fadeIn { | |
from { opacity: 0; transform: translateY(10px); } | |
to { opacity: 1; transform: translateY(0); } | |
} | |
.input-section { | |
animation: fadeIn 0.5s ease; | |
} | |
/* Responsive design */ | |
@media (max-width: 768px) { | |
.container { | |
padding: 10px; | |
} | |
.main-header { | |
padding: 1.5rem; | |
} | |
.input-section { | |
padding: 15px; | |
} | |
} | |
""" | |
with gr.Blocks(css=css, theme=gr.themes.Soft( | |
primary_hue="purple", | |
secondary_hue="purple", | |
neutral_hue="slate" | |
)) as demo: | |
gr.HTML(""" | |
<div class="main-header"> | |
<h1 style="font-size: 2.5rem; margin: 0; font-weight: 700;"> | |
🚀 Repository Converter Pro
</h1> | |
<p style="font-size: 1.1rem; margin-top: 10px; opacity: 0.95;"> | |
Seamlessly transfer repositories between GitHub and HuggingFace | |
</p> | |
</div> | |
""") | |
with gr.Tabs(elem_classes="tabs"): | |
with gr.Tab("π GitHub β HF Space", elem_id="tab1"): | |
with gr.Row(): | |
with gr.Column(scale=5): | |
with gr.Group(elem_classes="input-section"): | |
gr.Markdown("### π¦ Source Repository") | |
repo_git = gr.Textbox( | |
label="GitHub Repository URL", | |
placeholder="https://github.com/username/repository", | |
info="Enter the full GitHub repository URL" | |
) | |
gr.Markdown("### π― Destination Settings") | |
repo_hf = gr.Textbox( | |
label="HuggingFace Space Name", | |
placeholder="my-awesome-space", | |
info="Choose a unique name for your Space" | |
) | |
sdk_choices = gr.Radio( | |
["gradio", "streamlit", "docker", "static"], | |
label="Space SDK Type", | |
value="gradio", | |
elem_classes="radio-group" | |
) | |
gr.Markdown("### βοΈ Advanced Options") | |
with gr.Row(): | |
skip_lfs = gr.Checkbox( | |
label="Skip Large Files (LFS)", | |
value=True, | |
info="Recommended for faster deployment" | |
) | |
enable_smart_generation = gr.Checkbox( | |
label="π€ AI-Powered App Generation", | |
value=False, | |
info="Generate app.py automatically" | |
) | |
btn_to_hf = gr.Button("π Deploy to HuggingFace", | |
variant="primary", | |
elem_classes="primary-btn", | |
size="lg") | |
with gr.Column(scale=7): | |
with gr.Group(elem_classes="input-section"): | |
gr.Markdown("### π Deployment Progress") | |
output_to_hf = gr.Textbox( | |
label="", | |
lines=20, | |
elem_classes="output-box", | |
interactive=False, | |
show_copy_button=True | |
) | |
# Status indicator | |
if not os.getenv("HF_TOKEN"): | |
gr.HTML(""" | |
<div style="background: #fef2f2; border: 1px solid #fecaca; border-radius: 8px; padding: 12px; margin-top: 10px;"> | |
<span style="color: #dc2626; font-weight: 600;">β οΈ Configuration Required:</span> | |
<span style="color: #7f1d1d;"> Please set HF_TOKEN in Space settings</span> | |
</div> | |
""") | |
else: | |
gr.HTML(""" | |
<div style="background: #f0fdf4; border: 1px solid #bbf7d0; border-radius: 8px; padding: 12px; margin-top: 10px;"> | |
<span style="color: #16a34a; font-weight: 600;">β Ready:</span> | |
<span style="color: #14532d;"> HF_TOKEN configured successfully</span> | |
</div> | |
""") | |
btn_to_hf.click( | |
fn=clone, | |
inputs=[repo_git, repo_hf, sdk_choices, skip_lfs, enable_smart_generation], | |
outputs=output_to_hf | |
) | |
with gr.Tab("π€ HF Space β GitHub", elem_id="tab2"): | |
with gr.Row(): | |
with gr.Column(scale=5): | |
with gr.Group(elem_classes="input-section"): | |
gr.Markdown("### π― Source Space") | |
hf_username = gr.Textbox( | |
label="HuggingFace Username", | |
placeholder="your-hf-username", | |
info="Your HuggingFace account name" | |
) | |
hf_space_name = gr.Textbox( | |
label="Space Name", | |
placeholder="your-space-name", | |
info="Name of the Space to export" | |
) | |
gr.Markdown("### π¦ GitHub Destination") | |
github_username = gr.Textbox( | |
label="GitHub Username", | |
placeholder="your-github-username", | |
info="Your GitHub account name" | |
) | |
github_repo_name = gr.Textbox( | |
label="Repository Name", | |
placeholder="new-repo-name", | |
info="Name for the new GitHub repository" | |
) | |
gr.Markdown("### π Authentication") | |
github_token = gr.Textbox( | |
label="GitHub Personal Access Token", | |
type="password", | |
placeholder="ghp_xxxxxxxxxxxxxxxxxxxx", | |
info="Generate at: github.com/settings/tokens (needs 'repo' scope)" | |
) | |
                        btn_to_github = gr.Button("📤 Export to GitHub",
variant="primary", | |
elem_classes="primary-btn", | |
size="lg") | |
with gr.Column(scale=7): | |
with gr.Group(elem_classes="input-section"): | |
gr.Markdown("### π Export Progress") | |
output_to_github = gr.Textbox( | |
label="", | |
lines=20, | |
elem_classes="output-box", | |
interactive=False, | |
show_copy_button=True | |
) | |
btn_to_github.click( | |
fn=space_to_github, | |
inputs=[hf_username, hf_space_name, github_username, github_repo_name, github_token], | |
outputs=output_to_github | |
) | |
gr.Markdown(""" | |
--- | |
<div style="text-align: center; margin-top: 30px; color: #64748b;"> | |
<h3 style="color: #334155;">π οΈ Quick Start Guide</h3> | |
<div style="display: flex; justify-content: space-around; margin-top: 20px; flex-wrap: wrap;"> | |
<div style="flex: 1; min-width: 250px; margin: 10px; padding: 20px; background: #f8fafc; border-radius: 10px;"> | |
<h4 style="color: #667eea;">GitHub β HuggingFace</h4> | |
<p>Deploy any GitHub repository as a HuggingFace Space with automatic LFS handling and optional AI-powered interface generation.</p> | |
</div> | |
<div style="flex: 1; min-width: 250px; margin: 10px; padding: 20px; background: #f8fafc; border-radius: 10px;"> | |
<h4 style="color: #667eea;">HuggingFace β GitHub</h4> | |
<p>Export your HuggingFace Spaces to GitHub for version control, collaboration, and backup.</p> | |
</div> | |
</div> | |
<div style="margin-top: 30px;"> | |
<h4 style="color: #334155;">Required Tokens</h4> | |
<p> | |
<code style="background: #e2e8f0; padding: 4px 8px; border-radius: 4px;">HF_TOKEN</code> for GitHub β HF | | |
<code style="background: #e2e8f0; padding: 4px 8px; border-radius: 4px;">GitHub PAT</code> for HF β GitHub | | |
<code style="background: #e2e8f0; padding: 4px 8px; border-radius: 4px;">FIREWORKS_API_KEY</code> for AI features | |
</p> | |
</div> | |
</div> | |
""") | |
if __name__ == "__main__": | |
demo.launch() |