"""
BPY MCP Server - Blender Chat Interface
CPU-only 3D generation with SmolLM3
"""
import math
import tempfile
import uuid

import gradio as gr
from huggingface_hub import snapshot_download

import openvino_genai as ov_genai
import bpy

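# Module-level cache so the LLM pipeline is loaded only once per process.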
SMOLLM3_PIPE = None


def load_smollm3():
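    """Load the INT4 OpenVINO SmolLM3 pipeline once and cache it in the module global."""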
    global SMOLLM3_PIPE
    if SMOLLM3_PIPE is not None:
        return SMOLLM3_PIPE
    print("Loading SmolLM3...")
    model_path = snapshot_download("dev-bjoern/smollm3-int4-ov")
    SMOLLM3_PIPE = ov_genai.LLMPipeline(model_path, device="CPU")
    print("SmolLM3 loaded")
    return SMOLLM3_PIPE


def export_glb() -> str:
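    """Export the current Blender scene to a temporary GLB file and return its path."""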
    output_dir = tempfile.mkdtemp()
    glb_path = f"{output_dir}/scene_{uuid.uuid4().hex[:8]}.glb"
    bpy.ops.export_scene.gltf(filepath=glb_path, export_format='GLB')
    return glb_path


def execute_bpy_code(code: str) -> bool:
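    """Extract Python from a possibly fenced LLM response and execute it against bpy."""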
    try:
        if "```python" in code:
            code = code.split("```python")[1].split("```")[0]
        elif "```" in code:
            parts = code.split("```")
            if len(parts) > 1:
                code = parts[1]
        # bpy is injected via the exec globals below; drop bare `import bpy` lines
        # so the model's boilerplate import doesn't mangle names like bpy_extras.
        code = "\n".join(line for line in code.splitlines() if line.strip() != "import bpy")
        exec(code, {"bpy": bpy, "math": math})
        return True
    except Exception as e:
        print(f"Error: {e}")
        return False


def chat_with_blender(message: str, history: list):
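    """Prompt SmolLM3 for bpy code, run it, and return (chat reply, GLB path or None)."""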
    try:
        pipe = load_smollm3()

        prompt = f"""Write bpy Python code for: {message}

Rules:
1. Clear scene: bpy.ops.object.select_all(action='SELECT'); bpy.ops.object.delete()
2. Use bpy.ops.mesh.primitive_* for objects
3. Add camera: bpy.ops.object.camera_add()
4. Add light: bpy.ops.object.light_add(type='SUN')

Only Python code, no explanations."""

        result = pipe.generate(prompt, max_new_tokens=512)
        success = execute_bpy_code(result)

        if success:
            glb_path = export_glb()
            return f"Done!\n```python\n{result}\n```", glb_path
        else:
            return f"Error:\n```python\n{result}\n```", None

    except Exception as e:
        return f"Error: {e}", None


with gr.Blocks(title="BPY Chat") as demo:
    gr.Markdown("## Blender Chat")

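    # Tuple-style (user, bot) history; newer Gradio releases may warn and prefer the "messages" format.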
    chatbot = gr.Chatbot(height=400)
    model_output = gr.Model3D(label="3D Scene")

    with gr.Row():
        msg = gr.Textbox(placeholder="Describe a 3D scene...", show_label=False, scale=9)
        btn = gr.Button("Send", variant="primary", scale=1)

    def respond(message, chat_history):
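        # Gradio event handler: generate, append to the chat history, clear the textbox.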
        if not message.strip():
            return "", chat_history, None
        response, glb_path = chat_with_blender(message, chat_history)
        chat_history.append((message, response))
        return "", chat_history, glb_path

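    # Both the Send button and pressing Enter trigger the same handler.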
    btn.click(respond, [msg, chatbot], [msg, chatbot, model_output])
    msg.submit(respond, [msg, chatbot], [msg, chatbot, model_output])


if __name__ == "__main__":
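    # mcp_server=True exposes the app's endpoints as MCP tools alongside the web UI
    # (assumes a Gradio version with MCP support installed).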
    demo.launch(server_name="0.0.0.0", server_port=7860, mcp_server=True)