FROM python:3.9

WORKDIR /app

# Install system dependencies
RUN apt-get update && apt-get install -y \
    build-essential \
    curl \
    git \
    software-properties-common \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements first to leverage Docker cache
COPY requirements.txt .

# Install Python packages
RUN pip install --no-cache-dir -r requirements.txt
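
# Note: requirements.txt is not shown here. Based on what this image actually
# uses, it is assumed to include at least fastapi and uvicorn for the web
# server, plus transformers, torch, and accelerate (accelerate is required by
# transformers when device_map='auto' is passed below). A minimal sketch:
#
#   fastapi
#   uvicorn
#   transformers
#   torch
#   accelerate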

# Pre-download the model at build time so the weights are cached inside the image
# and the first request does not have to fetch them
RUN python -c "from transformers import AutoTokenizer, AutoModelForCausalLM; \
    model_id='mradermacher/Huihui-gemma-3n-E4B-it-abliterated-GGUF'; \
    tokenizer = AutoTokenizer.from_pretrained(model_id); \
    model = AutoModelForCausalLM.from_pretrained(model_id, device_map='auto')"

# Copy the rest of the application
COPY . .

# Set environment variables (PYTHONUNBUFFERED=1 flushes Python output immediately so logs show up in real time)
ENV HOST=0.0.0.0
ENV PORT=7860
ENV PYTHONUNBUFFERED=1

# Expose the port HF Spaces expects
EXPOSE 7860

# Start the FastAPI app
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860", "--workers", "1"]
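
# The CMD above assumes the repository contains an app.py module exposing a
# FastAPI instance named `app`. A minimal sketch of such a module, reusing the
# same model-loading calls as the pre-download step (hypothetical, not part of
# this Dockerfile):
#
#   from fastapi import FastAPI
#   from transformers import AutoTokenizer, AutoModelForCausalLM
#
#   model_id = "mradermacher/Huihui-gemma-3n-E4B-it-abliterated-GGUF"
#   tokenizer = AutoTokenizer.from_pretrained(model_id)
#   model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")
#
#   app = FastAPI()
#
#   @app.post("/generate")
#   def generate(prompt: str):
#       inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
#       output_ids = model.generate(**inputs, max_new_tokens=128)
#       return {"text": tokenizer.decode(output_ids[0], skip_special_tokens=True)}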