| """ | |
| BPY MCP Server - Blender Chat Interface with Glass Theme | |
| CPU-only 3D generation with SmolLM3 | |
| """ | |
| import os | |
| import tempfile | |
| import uuid | |
| from pathlib import Path | |
| import gradio as gr | |
| import numpy as np | |
| from huggingface_hub import snapshot_download | |
| # OpenVINO imports | |
| import openvino_genai as ov_genai | |
| # Blender Python API | |
| import bpy | |
| # Global model | |
| SMOLLM3_PIPE = None | |
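
# SMOLLM3_PIPE acts as a lazy module-level singleton: load_smollm3() fills it on
# the first chat request, so Space startup and the initial render do not have to
# wait for the model download.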
# Glassmorphism CSS
GLASS_CSS = """
/* Fullscreen render as background */
#render-bg {
    position: fixed !important;
    top: 0 !important;
    left: 0 !important;
    width: 100vw !important;
    height: 100vh !important;
    z-index: 0 !important;
    object-fit: cover !important;
    pointer-events: none !important;
}
#render-bg img {
    width: 100% !important;
    height: 100% !important;
    object-fit: cover !important;
}
/* Transparent container */
.gradio-container {
    background: transparent !important;
    position: relative;
    z-index: 1;
}
/* Chat overlay with glass effect */
.glass-chat {
    position: relative !important;
    z-index: 10 !important;
    background: rgba(0, 0, 0, 0.3) !important;
    backdrop-filter: blur(20px) !important;
    -webkit-backdrop-filter: blur(20px) !important;
    border-radius: 20px !important;
    border: 1px solid rgba(255, 255, 255, 0.1) !important;
}
.glass-chat .bubble-wrap {
    background: transparent !important;
}
.glass-chat .message {
    background: rgba(255, 255, 255, 0.1) !important;
    backdrop-filter: blur(10px) !important;
    -webkit-backdrop-filter: blur(10px) !important;
    border: 1px solid rgba(255, 255, 255, 0.15) !important;
    border-radius: 16px !important;
}
/* User message */
.glass-chat .message.user {
    background: rgba(100, 150, 255, 0.2) !important;
}
/* Bot message */
.glass-chat .message.bot {
    background: rgba(255, 255, 255, 0.1) !important;
}
/* Input textbox glass */
.glass-input textarea {
    background: rgba(255, 255, 255, 0.1) !important;
    backdrop-filter: blur(10px) !important;
    border: 1px solid rgba(255, 255, 255, 0.2) !important;
    border-radius: 12px !important;
    color: white !important;
}
/* Buttons glass */
button.primary {
    background: rgba(100, 150, 255, 0.3) !important;
    backdrop-filter: blur(10px) !important;
    border: 1px solid rgba(255, 255, 255, 0.2) !important;
}
/* Header transparent */
.app-header, header {
    background: transparent !important;
}
/* Light text on glass */
.glass-chat .message p, .glass-chat .message code {
    color: white !important;
}
/* Hide default background */
.main, .contain, .wrap {
    background: transparent !important;
}
body {
    background: #1a1a2e !important;
}
"""
def load_smollm3():
    """Load SmolLM3 OpenVINO model for text generation"""
    global SMOLLM3_PIPE
    if SMOLLM3_PIPE is not None:
        return SMOLLM3_PIPE

    print("Loading SmolLM3 INT4 OpenVINO...")
    model_path = snapshot_download("dev-bjoern/smollm3-int4-ov")
    SMOLLM3_PIPE = ov_genai.LLMPipeline(model_path, device="CPU")
    print("SmolLM3 loaded")
    return SMOLLM3_PIPE
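
# Minimal usage sketch for the pipeline above (assumes openvino_genai is installed
# and "dev-bjoern/smollm3-int4-ov" is an OpenVINO-exported model repo):
#   pipe = load_smollm3()                          # downloads/caches on first call
#   text = pipe.generate("Hello", max_new_tokens=16)
# snapshot_download() returns the local cache directory and LLMPipeline reads the
# exported files from that folder directly; no conversion happens at runtime.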
def render_scene() -> str:
    """Render current Blender scene to image"""
    output_dir = tempfile.mkdtemp()
    render_path = f"{output_dir}/render_{uuid.uuid4().hex[:8]}.png"

    # Setup render settings
    bpy.context.scene.render.filepath = render_path
    bpy.context.scene.render.image_settings.file_format = 'PNG'
    bpy.context.scene.render.resolution_x = 1920
    bpy.context.scene.render.resolution_y = 1080
    bpy.context.scene.render.resolution_percentage = 50

    # Render
    bpy.ops.render.render(write_still=True)
    return render_path
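
# Note on render_scene(): with resolution_percentage = 50 the written PNG is
# effectively 960x540, which keeps CPU-only renders short. The function does not
# set scene.render.engine, so whichever engine the current scene is configured
# with is used; bpy.ops.render.render(write_still=True) writes the frame to
# scene.render.filepath.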
def execute_bpy_code(code: str) -> bool:
    """Execute generated bpy Python code, returning True on success"""
    try:
        # Strip Markdown code fences if the model wrapped its answer in them
        if "```python" in code:
            code = code.split("```python")[1].split("```")[0]
        elif "```" in code:
            parts = code.split("```")
            if len(parts) > 1:
                code = parts[1]
        # bpy is injected via the exec globals, so the generated import is redundant
        code = code.replace("import bpy", "# import bpy (already loaded)")

        # Execute
        exec(code, {"bpy": bpy, "math": __import__("math")})
        return True
    except Exception as e:
        print(f"Exec error: {e}")
        return False
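
# Note on execute_bpy_code(): exec() runs the generated script with "bpy" and
# "math" pre-bound in its globals; Python still injects builtins, so imports
# inside the script keep working. Returning a bool instead of re-raising lets
# chat_with_blender() decide whether to render or to show the failed script.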
def chat_with_blender(message: str, history: list):
    """
    Chat with SmolLM3 - generates a bpy script and executes it
    """
    try:
        pipe = load_smollm3()

        # Prompt for bpy script generation
        prompt = f"""You are a Blender Python (bpy) expert. Write a short bpy script for: {message}
Rules:
1. Start with: bpy.ops.object.select_all(action='SELECT'); bpy.ops.object.delete()
2. Use bpy.ops.mesh.primitive_* for objects
3. Add a camera: bpy.ops.object.camera_add(location=(x,y,z))
4. Add a light: bpy.ops.object.light_add(type='SUN')
5. Set simple materials with bpy.data.materials.new()
Python code only, no explanations. Start with import bpy."""

        # Generate script
        result = pipe.generate(prompt, max_new_tokens=512)

        # Execute the script
        success = execute_bpy_code(result)

        if success:
            # Render scene
            render_path = render_scene()
            response = f"Scene created!\n\n```python\n{result}\n```"
            return response, render_path
        else:
            return f"Error while executing:\n```python\n{result}\n```", None
    except Exception as e:
        return f"Error: {e}", None
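
# Illustrative end-to-end call (hypothetical prompt): the model reply is treated
# as a bare bpy script, executed in-process, and on success the fresh render path
# is returned so Gradio can swap the background image.
#   reply, png_path = chat_with_blender("Create a red sphere on a plane", [])
#   # reply -> markdown with the generated script, png_path -> rendered PNG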
def create_initial_scene():
    """Create a default scene for startup"""
    try:
        # Clear
        bpy.ops.object.select_all(action='SELECT')
        bpy.ops.object.delete()

        # Add cube
        bpy.ops.mesh.primitive_cube_add(location=(0, 0, 0))
        cube = bpy.context.active_object

        # Material
        mat = bpy.data.materials.new(name="BlueMat")
        mat.diffuse_color = (0.2, 0.4, 0.8, 1.0)
        cube.data.materials.append(mat)

        # Camera
        bpy.ops.object.camera_add(location=(5, -5, 4))
        cam = bpy.context.active_object
        cam.rotation_euler = (1.1, 0, 0.8)
        bpy.context.scene.camera = cam

        # Light
        bpy.ops.object.light_add(type='SUN', location=(5, 5, 10))

        return render_scene()
    except Exception as e:
        print(f"Initial scene error: {e}")
        return None
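
# Note on create_initial_scene(): the camera rotation (1.1, 0, 0.8) radians is a
# rough hand-tuned tilt/turn that frames the cube at the origin from (5, -5, 4);
# it is not a computed look-at constraint.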
# Gradio interface
with gr.Blocks(css=GLASS_CSS, theme=gr.themes.Glass(), title="BPY Chat") as demo:
    # State for the latest render
    render_state = gr.State(value=None)

    # Fullscreen render background
    with gr.Column(elem_id="render-bg"):
        render_output = gr.Image(
            value=create_initial_scene,
            label="",
            show_label=False,
            interactive=False,
            show_download_button=False
        )

    # Chat interface overlay
    gr.Markdown("## Blender Chat", elem_classes="glass-title")
    chatbot = gr.Chatbot(
        elem_classes="glass-chat",
        height=400,
        placeholder="Describe a 3D scene..."
    )
    with gr.Row():
        msg = gr.Textbox(
            placeholder="e.g. 'Create a pyramid with a red material'",
            show_label=False,
            elem_classes="glass-input",
            scale=9
        )
        submit_btn = gr.Button("Send", variant="primary", scale=1)

    # Chat logic
    def respond(message, chat_history):
        if not message.strip():
            return "", chat_history, None
        response, render_path = chat_with_blender(message, chat_history)
        chat_history.append((message, response))
        return "", chat_history, render_path

    submit_btn.click(
        respond,
        [msg, chatbot],
        [msg, chatbot, render_output]
    )
    msg.submit(
        respond,
        [msg, chatbot],
        [msg, chatbot, render_output]
    )

    gr.Markdown("""
---
**MCP Server:** `https://dev-bjoern-bpy-mcp.hf.space/gradio_api/mcp/sse`
""")

if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", server_port=7860, mcp_server=True)
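
# mcp_server=True additionally exposes the app over the Model Context Protocol
# (the SSE endpoint under /gradio_api/mcp/sse shown in the footer). This assumes
# a Gradio build with MCP support, e.g. installed via the "gradio[mcp]" extra.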