dev-bjoern Claude committed
Commit 54d8ff9 · 1 Parent(s): 367770c

feat: Add full Infinigen Docker setup with HuggingFace Inference API


- Dockerfile with Infinigen + Blender (cloned from GitHub)
- Gradio app with Scene Composer, Terrain Engineer, AI Recommendations
- HuggingFace Inference API support via pydantic-ai (see the sketch below)
- MCP Server enabled

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
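
The HuggingFace Inference API wiring referenced above comes down to the `get_model()` helper in `app.py`. Here is a minimal standalone sketch of that wiring, assuming `HF_TOKEN` is set in the environment and the `pydantic-ai[huggingface]` extra installed by the Dockerfile is available:

```python
import os

from pydantic_ai import Agent
from pydantic_ai.models.huggingface import HuggingFaceModel
from pydantic_ai.providers.huggingface import HuggingFaceProvider

# Same provider/model combination that app.py builds in get_model();
# provider_name is optional and routes to Cerebras, Together, Nebius or Groq.
provider = HuggingFaceProvider(api_key=os.environ["HF_TOKEN"])
model = HuggingFaceModel("openai/gpt-oss-20b", provider=provider)

agent = Agent(model, system_prompt="You are a scene composer for Infinigen.")
result = agent.run_sync("Create a forest scene")
print(result.data)  # app.py reads .data; newer pydantic-ai exposes the same text as .output
```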

Files changed (3)
  1. Dockerfile +180 -0
  2. README.md +6 -7
  3. app.py +28 -21
Dockerfile ADDED
@@ -0,0 +1,180 @@
+ # Multi-stage build: Builder stage for compilation + Runtime stage for execution
+ FROM python:3.11-slim AS builder
+
+ # Python environment setup for UV
+ ENV UV_COMPILE_BYTECODE=1
+ ENV UV_LINK_MODE=copy
+ ENV UV_CACHE_DIR=/tmp/.uv-cache
+
+ # System dependencies installation for build stage
+ RUN apt-get update && \
+ apt-get install -y --no-install-recommends \
+ curl \
+ ca-certificates \
+ tzdata \
+ build-essential \
+ git \
+ cmake \
+ ninja-build \
+ libgtk-3-dev \
+ libpng-dev \
+ libjpeg-dev \
+ libwebp-dev \
+ libtiff5-dev \
+ libopenexr-dev \
+ libopenblas-dev \
+ libx11-dev \
+ libavutil-dev \
+ libavcodec-dev \
+ libavformat-dev \
+ libswscale-dev \
+ libswresample-dev \
+ libssl-dev \
+ libva-dev \
+ libmfx-dev \
+ libgstreamer1.0-dev \
+ libgstreamer-plugins-base1.0-dev \
+ opencl-headers \
+ ocl-icd-opencl-dev \
+ xvfb \
+ xauth \
+ g++ \
+ gcc \
+ libomp-dev \
+ libgomp1 \
+ coreutils \
+ findutils \
+ bash \
+ procps \
+ # Additional dependencies for terrain compilation
+ libc6-dev \
+ pkg-config \
+ make \
+ dos2unix \
+ # CUDA dependencies - install NVIDIA CUDA toolkit
+ wget \
+ software-properties-common \
+ && rm -rf /var/lib/apt/lists/*
+
+ # Install CUDA toolkit for terrain compilation
+ RUN wget https://developer.download.nvidia.com/compute/cuda/12.4.0/local_installers/cuda_12.4.0_550.54.14_linux.run -O cuda_installer.run && \
+ chmod +x cuda_installer.run && \
+ ./cuda_installer.run --no-opengl-libs --no-man-page --override --silent --toolkit && \
+ rm cuda_installer.run && \
+ echo 'export PATH=/usr/local/cuda/bin${PATH:+:${PATH}}' >> /etc/profile && \
+ echo 'export LD_LIBRARY_PATH=/usr/local/cuda/lib64${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}' >> /etc/profile
+
+ # Create user for build process
+ RUN useradd -ms /bin/bash user
+
+ # Install uv
+ RUN curl -LsSf https://astral.sh/uv/install.sh | sh
+ ENV PATH="/root/.local/bin:/usr/local/bin:/usr/bin:/bin:$PATH"
+
+ # Create virtual environment
+ RUN uv venv /app/.venv
+ ENV VIRTUAL_ENV=/app/.venv
+ ENV PATH="/app/.venv/bin:$PATH"
+
+ # Set up working directory
+ WORKDIR /app
+
+ # Clone infinigen from GitHub (via a temp dir so the pre-created /app/.venv is preserved)
+ RUN git clone https://github.com/bjoernbethge/infinigen.git /tmp/infinigen && cp -a /tmp/infinigen/. /app/ && rm -rf /tmp/infinigen
+
+ # Install Python dependencies
+ RUN sh -c "ulimit -n 4096 && uv sync --frozen --extra terrain --extra vis"
+
+ # Compile terrain libraries with CUDA support
+ RUN chmod +x scripts/install/compile_terrain.sh && \
+ dos2unix scripts/install/compile_terrain.sh && \
+ dos2unix infinigen/OcMesher/install.sh && \
+ bash -c "set -e; cd /app && bash scripts/install/compile_terrain.sh" && \
+ echo "Terrain compilation completed successfully with CUDA support"
+
+ # Compile Cython terrain libraries
+ RUN bash -c "source .venv/bin/activate && python setup.py build_ext --inplace" && \
+ echo "Cython terrain compilation completed successfully"
+
+ # Install additional runtime dependencies
+ RUN bash -c "source .venv/bin/activate && pip install PyOpenGL-accelerate"
+
+ # Install Gradio and HuggingFace dependencies (quote the specs so the shell does not treat '>=' as a redirect)
+ RUN sh -c "ulimit -n 4096 && uv add 'gradio>=5.0.0' 'pydantic-ai[huggingface]>=1.0.8'"
+
+ # Set ownership
+ RUN chown -R user:user /app/.venv && \
+ chown -R user:user /root/.local
+
+ # ============================================
+ # Runtime stage - minimal image for execution
+ # ============================================
+ FROM python:3.11-slim AS runtime
+
+ # Runtime environment setup
+ ENV UV_COMPILE_BYTECODE=1
+ ENV UV_CACHE_DIR=/tmp/.uv-cache
+ ENV DISPLAY=:99
+ ENV BLENDER_HEADLESS=1
+
+ # Minimal runtime dependencies with NVIDIA GPU support
+ RUN apt-get update && \
+ apt-get install -y --no-install-recommends \
+ curl \
+ ca-certificates \
+ tzdata \
+ libx11-6 \
+ libglib2.0-0 \
+ libgtk-3-0 \
+ libgomp1 \
+ libomp5 \
+ libopenblas0 \
+ libpng16-16 \
+ libjpeg62-turbo \
+ libtiff6 \
+ libwebp7 \
+ libopenexr-3-1-30 \
+ libssl3 \
+ libgstreamer1.0-0 \
+ libgstreamer-plugins-base1.0-0 \
+ opencl-headers \
+ ocl-icd-opencl-dev \
+ xvfb \
+ xauth \
+ bash \
+ procps \
+ && rm -rf /var/lib/apt/lists/*
+
+ # Create user (HuggingFace Spaces standard)
+ RUN useradd -ms /bin/bash user
+
+ # Copy built artifacts from builder stage
+ COPY --from=builder /app /app
+ COPY --from=builder /root/.local /root/.local
+
+ # Set up environment
+ ENV PATH="/app/.venv/bin:/root/.local/bin:/usr/local/bin:/usr/bin:/bin:$PATH"
+ ENV VIRTUAL_ENV=/app/.venv
+ ENV HOME=/home/user
+
+ # Set up working directory
+ WORKDIR /app
+
+ # Create startup script for Gradio
+ RUN echo '#!/bin/bash\nXvfb :99 -screen 0 1024x768x24 > /dev/null 2>&1 &\nexport DISPLAY=:99\ncd /app\nexec python app.py' > /usr/local/bin/start-gradio.sh && \
+ chmod +x /usr/local/bin/start-gradio.sh
+
+ # Copy Gradio app
+ COPY app.py /app/app.py
+
+ # Create logs directory
+ RUN mkdir -p /app/logs && chown -R user:user /app
+
+ # Switch to user
+ USER user
+
+ # Expose Gradio port
+ EXPOSE 7860
+
+ # Default command
+ CMD ["/usr/local/bin/start-gradio.sh"]
README.md CHANGED
@@ -3,10 +3,8 @@ title: Infinigen Agents
  emoji: 🌍
  colorFrom: green
  colorTo: blue
- sdk: gradio
- sdk_version: 6.0.2
- python_version: "3.10"
- app_file: app.py
+ sdk: docker
+ app_port: 7860
  pinned: false
  license: mit
  tags:
@@ -15,12 +13,13 @@ tags:
  - 3d
  - procedural-generation
  - infinigen
- short_description: "AI Agents for Procedural 3D World Generation"
+ - blender
+ short_description: "AI Agents for Procedural 3D World Generation with Blender"
  ---

  # 🌍 Infinigen Agents

- AI-powered agents for procedural 3D world generation using [Infinigen](https://github.com/princeton-vl/infinigen).
+ AI-powered agents for procedural 3D world generation using [Infinigen](https://github.com/princeton-vl/infinigen) + Blender.

  ## Features

@@ -32,7 +31,7 @@ AI-powered agents for procedural 3D world generation using [Infinigen](https://g

  ## AI Backend

- Uses HuggingFace Inference API by default:
+ Uses HuggingFace Inference API:
  - Model: `openai/gpt-oss-20b`
  - Configurable providers: Cerebras, Together, Nebius, Groq

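The provider list above maps onto HuggingFace Inference Providers. As a hedged sanity check outside the Space (not part of the app itself, which goes through pydantic-ai), a recent `huggingface_hub` can call the same model and provider directly; model and provider names are taken from the README:

```python
import os

from huggingface_hub import InferenceClient

# provider can be "cerebras", "together", "nebius" or "groq";
# omitting it lets HuggingFace auto-route the request.
client = InferenceClient(provider="cerebras", api_key=os.environ["HF_TOKEN"])

completion = client.chat.completions.create(
    model="openai/gpt-oss-20b",
    messages=[{"role": "user", "content": "Suggest lighting for a forest scene."}],
)
print(completion.choices[0].message.content)
```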
app.py CHANGED
@@ -1,9 +1,16 @@
  """
  Infinigen Agents - AI-powered procedural 3D generation
+ Full version with Infinigen + Blender in Docker container
  """
  import os
+ import sys
  import gradio as gr
  from typing import Dict, Any
+ from pathlib import Path
+
+ # Add infinigen to path (in Docker container)
+ sys.path.insert(0, "/app")
+ sys.path.insert(0, "/app/infinigen")

  # HuggingFace token from Space secrets
  HF_TOKEN = os.environ.get("HF_TOKEN")
@@ -19,11 +26,11 @@ def get_model():
  if AI_MODEL == "huggingface":
  from pydantic_ai.models.huggingface import HuggingFaceModel
  from pydantic_ai.providers.huggingface import HuggingFaceProvider
-
+
  provider_kwargs = {"api_key": HF_TOKEN}
  if HF_PROVIDER:
  provider_kwargs["provider_name"] = HF_PROVIDER
-
+
  return HuggingFaceModel(HF_MODEL_ID, provider=HuggingFaceProvider(**provider_kwargs))
  else:
  return f"openai:gpt-4o-mini"
@@ -33,14 +40,14 @@ def compose_scene(scene_type: str, seed: int, complexity: str) -> Dict[str, Any]
  """Compose a scene using AI agent."""
  try:
  from pydantic_ai import Agent
-
+
  agent = Agent(
  get_model(),
  system_prompt=f"""You are a scene composer for Infinigen.
  Create a {complexity} complexity {scene_type} scene with seed {seed}.
  Respond with JSON containing: scene_type, seed, assets, lighting, camera."""
  )
-
+
  result = agent.run_sync(f"Create a {scene_type} scene")
  return {
  "success": True,
@@ -57,14 +64,14 @@ def generate_terrain(terrain_type: str, seed: int, resolution: int) -> Dict[str,
  """Generate terrain using AI agent."""
  try:
  from pydantic_ai import Agent
-
+
  agent = Agent(
  get_model(),
  system_prompt=f"""You are a terrain engineer for Infinigen.
  Generate {terrain_type} terrain with resolution {resolution}.
  Respond with terrain parameters: heightmap settings, erosion, materials."""
  )
-
+
  result = agent.run_sync(f"Generate {terrain_type} terrain")
  return {
  "success": True,
@@ -81,13 +88,13 @@ def get_recommendations(scene_type: str) -> str:
  """Get AI recommendations for scene generation."""
  try:
  from pydantic_ai import Agent
-
+
  agent = Agent(
  get_model(),
  system_prompt="""You are an expert on Infinigen procedural generation.
  Provide recommendations for assets, terrain, lighting, and camera setup."""
  )
-
+
  result = agent.run_sync(f"Recommend settings for a {scene_type} scene in Infinigen")
  return str(result.data)
  except Exception as e:
@@ -99,10 +106,10 @@ with gr.Blocks(title="Infinigen Agents") as demo:
  gr.Markdown("""
  # 🌍 Infinigen Agents
  **AI-powered procedural 3D world generation**
-
- Using HuggingFace Inference API with pydantic-ai
+
+ Full version with Infinigen + Blender - Using HuggingFace Inference API
  """)
-
+
  with gr.Tab("Scene Composer"):
  with gr.Row():
  scene_type = gr.Dropdown(
@@ -112,12 +119,12 @@ with gr.Blocks(title="Infinigen Agents") as demo:
  )
  scene_seed = gr.Number(label="Seed", value=42)
  complexity = gr.Dropdown(["low", "medium", "high"], label="Complexity", value="medium")
-
+
  compose_btn = gr.Button("🎬 Compose Scene", variant="primary")
  scene_output = gr.JSON(label="Scene Result")
-
+
  compose_btn.click(compose_scene, [scene_type, scene_seed, complexity], scene_output)
-
+
  with gr.Tab("Terrain Engineer"):
  with gr.Row():
  terrain_type = gr.Dropdown(
@@ -127,12 +134,12 @@ with gr.Blocks(title="Infinigen Agents") as demo:
  )
  terrain_seed = gr.Number(label="Seed", value=42)
  resolution = gr.Slider(128, 2048, value=512, step=128, label="Resolution")
-
+
  terrain_btn = gr.Button("🏔️ Generate Terrain", variant="primary")
  terrain_output = gr.JSON(label="Terrain Result")
-
+
  terrain_btn.click(generate_terrain, [terrain_type, terrain_seed, resolution], terrain_output)
-
+
  with gr.Tab("AI Recommendations"):
  rec_scene_type = gr.Dropdown(
  ["forest", "desert", "mountain", "canyon", "coast"],
@@ -141,16 +148,16 @@ with gr.Blocks(title="Infinigen Agents") as demo:
  )
  rec_btn = gr.Button("💡 Get Recommendations", variant="primary")
  rec_output = gr.Textbox(label="AI Recommendations", lines=10)
-
+
  rec_btn.click(get_recommendations, rec_scene_type, rec_output)
-
+
  gr.Markdown(f"""
  ---
  ### Configuration
  - **AI Model**: {AI_MODEL}
  - **HF Model**: {HF_MODEL_ID}
  - **Provider**: {HF_PROVIDER or 'auto'}
-
+
  ### MCP Server
  ```json
  {{
@@ -165,4 +172,4 @@ with gr.Blocks(title="Infinigen Agents") as demo:


  if __name__ == "__main__":
- demo.launch(mcp_server=True)
+ demo.launch(server_name="0.0.0.0", server_port=7860, mcp_server=True)
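
Once the container is up on port 7860, the three handlers can also be exercised programmatically. A sketch using `gradio_client`; the `api_name` value is an assumption derived from the function name in `app.py`, so check `client.view_api()` for the routes Gradio actually exposes:

```python
from gradio_client import Client

client = Client("http://localhost:7860")  # or the deployed Space URL
client.view_api()                         # lists the exposed endpoints

# Assumed endpoint name based on the compose_scene() handler above.
result = client.predict(
    "forest",   # scene_type
    42,         # seed
    "medium",   # complexity
    api_name="/compose_scene",
)
print(result)
```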