# Use Python 3.9 as the base image
FROM python:3.9
# Set working directory in the container
WORKDIR /app
# Create a non-root user and set permissions
RUN useradd -m myuser && chown -R myuser:myuser /app
USER myuser
# Set Hugging Face cache directory
ENV HF_HOME=/app/.cache/huggingface
# Update PATH for uvicorn
ENV PATH="/home/myuser/.local/bin:${PATH}"
# Upgrade pip to the latest version
RUN pip install --upgrade pip
# Copy requirements.txt and install dependencies
COPY --chown=myuser:myuser requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
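# Note: requirements.txt itself is not shown here; given the transformers usage below
# and the Uvicorn CMD at the end, it is assumed to include at least fastapi, uvicorn,
# transformers, and torch.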
# Clear any stale cache, then pre-download all models at build time so the
# container does not have to fetch them from the Hugging Face Hub at startup
RUN rm -rf /app/.cache/huggingface/* && python -c "\
from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM; \
pipeline('text-classification', model='Hello-SimpleAI/chatgpt-detector-roberta'); \
pipeline('text-classification', model='openai-community/roberta-large-openai-detector'); \
pipeline('text-classification', model='sabaridsnfuji/arabic-ai-text-detector'); \
AutoTokenizer.from_pretrained('gpt2'); \
AutoModelForCausalLM.from_pretrained('gpt2'); \
AutoTokenizer.from_pretrained('aubmindlab/aragpt2-base'); \
AutoModelForCausalLM.from_pretrained('aubmindlab/aragpt2-base')"
# Copy the application code
COPY --chown=myuser:myuser . .
# Run the FastAPI app with Uvicorn
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"] |