File size: 3,741 Bytes
d3b96a3
0467e53
133c124
d3b96a3
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
0467e53
d3b96a3
 
 
 
 
 
cc0d1d2
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
0467e53
133c124
0467e53
 
 
8906f9a
 
133c124
 
 
 
 
 
 
 
0467e53
133c124
 
d3b96a3
0467e53
133c124
d3b96a3
 
133c124
d3b96a3
133c124
d3b96a3
0467e53
d3b96a3
0467e53
 
 
 
 
d3b96a3
0467e53
d3b96a3
133c124
 
 
 
cc0d1d2
 
133c124
0467e53
d3b96a3
 
133c124
d3b96a3
 
ab65f1a
0467e53
133c124
cc0d1d2
 
133c124
 
0467e53
 
133c124
 
 
 
cc0d1d2
09f0931
 
cc0d1d2
d3b96a3
cc0d1d2
 
d3b96a3
 
 
0467e53
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
"""
BPY MCP Server - Blender Chat Interface
CPU-only 3D generation with SmolLM3
"""
import os
import tempfile
import uuid

import gradio as gr
from huggingface_hub import snapshot_download

import openvino_genai as ov_genai
import bpy

SMOLLM3_PIPE = None


def load_smollm3():
    """Return the process-wide SmolLM3 pipeline, building it lazily on first call.

    Downloads the INT4 OpenVINO snapshot from the HF Hub once, then caches the
    CPU ``LLMPipeline`` in the module-level ``SMOLLM3_PIPE`` singleton.
    """
    global SMOLLM3_PIPE
    if SMOLLM3_PIPE is None:
        print("Loading SmolLM3...")
        local_dir = snapshot_download("dev-bjoern/smollm3-int4-ov")
        SMOLLM3_PIPE = ov_genai.LLMPipeline(local_dir, device="CPU")
        print("SmolLM3 loaded")
    return SMOLLM3_PIPE


def render_scene() -> str:
    """Render the current Blender scene to a PNG and return the image path.

    Uses the Cycles engine on CPU with a low sample count so rendering works
    in a headless (no GPU/display) environment. Output goes to a fresh
    temporary directory with a short random file name.
    """
    render_path = f"{tempfile.mkdtemp()}/render_{uuid.uuid4().hex[:8]}.png"

    scene = bpy.context.scene

    # Cycles on CPU works without a display or GPU; 32 samples keeps it fast.
    scene.render.engine = 'CYCLES'
    scene.cycles.device = 'CPU'
    scene.cycles.samples = 32

    # Output settings: 800x600 PNG at full resolution.
    scene.render.filepath = render_path
    scene.render.image_settings.file_format = 'PNG'
    scene.render.resolution_x = 800
    scene.render.resolution_y = 600
    scene.render.resolution_percentage = 100

    bpy.ops.render.render(write_still=True)
    return render_path


def export_glb() -> str:
    """Export the current scene as a binary glTF (.glb) file and return its path."""
    target = f"{tempfile.mkdtemp()}/scene_{uuid.uuid4().hex[:8]}.glb"
    bpy.ops.export_scene.gltf(filepath=target, export_format='GLB')
    return target


def extract_python_code(text: str) -> str:
    """Pull the Python payload out of a possibly markdown-fenced LLM reply.

    Handles ```python fences, generic ``` fences (dropping a leading
    ``python``/``py`` language-tag line that would otherwise be executed as a
    bare name), and strips redundant ``import bpy`` statements since ``bpy``
    is injected into the exec namespace directly.
    """
    if "```python" in text:
        text = text.split("```python")[1].split("```")[0]
    elif "```" in text:
        parts = text.split("```")
        if len(parts) > 1:
            text = parts[1]
            # A generic fence may still carry a language tag on its first
            # line (e.g. ```py); remove it so exec() never sees it.
            first, _, rest = text.partition("\n")
            if first.strip().lower() in {"python", "py"}:
                text = rest
    return text.replace("import bpy", "")


def execute_bpy_code(code: str) -> bool:
    """Extract and execute LLM-generated Python against the live bpy session.

    Returns True on success, False if extraction or execution raised; the
    error is printed rather than propagated so the chat UI can report it.

    SECURITY: exec() of model-generated code is inherently unsafe; this is
    acceptable only because the app runs inside an isolated Space/container.
    """
    try:
        exec(extract_python_code(code), {"bpy": bpy, "math": __import__("math")})
        return True
    except Exception as e:
        print(f"Error: {e}")
        return False


def chat_with_blender(message: str, history: list):
    """Turn one chat message into executed bpy code plus a rendered preview.

    Prompts SmolLM3 for scene-building code, runs it, and renders on success.
    Returns a ``(reply_markdown, render_path_or_None)`` tuple; any failure is
    reported in the reply text instead of raising, so the UI never crashes.
    """
    try:
        pipe = load_smollm3()

        prompt = f"""Write bpy Python code for: {message}

Rules:
1. Clear scene: bpy.ops.object.select_all(action='SELECT'); bpy.ops.object.delete()
2. Use bpy.ops.mesh.primitive_* for objects
3. Add camera: bpy.ops.object.camera_add()
4. Add light: bpy.ops.object.light_add(type='SUN')

Only Python code, no explanations."""

        generated = pipe.generate(prompt, max_new_tokens=512)

        # Guard-style flow: bail out with the error reply unless execution worked.
        if not execute_bpy_code(generated):
            return f"Error:\n```python\n{generated}\n```", None
        return f"Done!\n```python\n{generated}\n```", render_scene()
    except Exception as e:
        # Boundary handler: surface any unexpected failure as chat text.
        return f"Error: {e}", None


with gr.Blocks(title="BPY Chat") as demo:
    gr.Markdown("## Blender Chat")

    # type="messages" is required: respond() appends openai-style
    # {"role": ..., "content": ...} dicts, which the default legacy tuple
    # format would reject or mis-render.
    chatbot = gr.Chatbot(type="messages", height=300)
    render_output = gr.Image(label="Render Preview")

    with gr.Row():
        msg = gr.Textbox(placeholder="Describe a 3D scene...", show_label=False, scale=9)
        btn = gr.Button("Send", variant="primary", scale=1)

    def respond(message, chat_history):
        """Handle one chat turn: clear textbox, extend history, update render.

        Returns (cleared_textbox, updated_history, render_path_or_None);
        blank/whitespace-only messages are ignored.
        """
        if not message.strip():
            return "", chat_history, None
        response, render_path = chat_with_blender(message, chat_history)
        chat_history.append({"role": "user", "content": message})
        chat_history.append({"role": "assistant", "content": response})
        return "", chat_history, render_path

    btn.click(respond, [msg, chatbot], [msg, chatbot, render_output])
    msg.submit(respond, [msg, chatbot], [msg, chatbot, render_output])


if __name__ == "__main__":
    # mcp_server=True additionally exposes the app's functions over MCP.
    demo.launch(server_name="0.0.0.0", server_port=7860, mcp_server=True)