# Use Python 3.9 as the base image
FROM python:3.9

# Set working directory in the container
WORKDIR /app

# Create a non-root user and set permissions
RUN useradd -m myuser && chown -R myuser:myuser /app
USER myuser

# Set Hugging Face cache directory
ENV HF_HOME=/app/.cache/huggingface

# Update PATH for uvicorn
ENV PATH="/home/myuser/.local/bin:${PATH}"

# Upgrade pip to the latest version
RUN pip install --upgrade pip

# Copy requirements.txt and install dependencies
COPY --chown=myuser:myuser requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
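
# NOTE (assumption): requirements.txt is not shown in this file; based on the
# steps above and below, it is expected to list at least fastapi, uvicorn,
# transformers, and torch.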

# Clear cache and pre-download models
RUN rm -rf /app/.cache/huggingface/* && \
    python -c "from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM; pipeline('text-classification', model='Hello-SimpleAI/chatgpt-detector-roberta'); pipeline('text-classification', model='openai-community/roberta-large-openai-detector'); pipeline('text-classification', model='sabaridsnfuji/arabic-ai-text-detector'); AutoTokenizer.from_pretrained('gpt2'); AutoModelForCausalLM.from_pretrained('gpt2'); AutoTokenizer.from_pretrained('aubmindlab/aragpt2-base'); AutoModelForCausalLM.from_pretrained('aubmindlab/aragpt2-base')"

# Copy the application code
COPY --chown=myuser:myuser . .

# Run the FastAPI app with Uvicorn
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]