File size: 1,354 Bytes
72b0049
 
 
 
 
 
 
 
 
bc45070
8886c5e
 
 
72b0049
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ff93ce0
 
 
72b0049
 
 
 
 
 
bc45070
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
# Use an official Python runtime as a parent image.
# NOTE(review): buster is EOL and no longer receives security updates —
# consider python:3.8-slim-bookworm once the app is re-tested against it.
FROM python:3.8-slim-buster

# Hugging Face repo id of the model baked into the image; overridable at
# build time: docker build --build-arg MODEL_NAME=org/model .
ARG MODEL_NAME=WhereIsAI/UAE-Large-V1

# Expose the model name/path and HF cache location to the running container.
ENV MODEL_NAME=${MODEL_NAME}
ENV MODEL_PATH=/models/${MODEL_NAME}
ENV HF_HOME=/.cache

# The HF cache must be writable by ANY uid because some runtimes (e.g.
# Hugging Face Spaces) start the container as an arbitrary non-root user —
# the world-writable mode is deliberate, not an oversight.
RUN mkdir /.cache && chmod 777 /.cache

# Install Python dependencies first so this layer stays cached until
# requirements.txt itself changes.
COPY requirements.txt /app/requirements.txt
RUN pip install --no-cache-dir -r /app/requirements.txt

# Install Git and Git LFS in ONE layer and clean the apt lists afterwards.
# Fixes vs. the previous version:
#   * `apt-get install git-lfs` was missing -y and would hang/fail the
#     non-interactive build;
#   * `apt-get update` and the installs were split across layers (stale
#     package-index risk) and /var/lib/apt/lists was never removed.
# NOTE(review): the packagecloud script is piped straight to bash, unpinned
# and unchecksummed — prefer the distro git-lfs package or a checksummed
# download when the build is next revisited.
RUN apt-get update \
    && apt-get install -y --no-install-recommends \
        ca-certificates \
        curl \
        git \
    && curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | bash \
    && apt-get install -y --no-install-recommends git-lfs \
    && git lfs install \
    && rm -rf /var/lib/apt/lists/*

# Clone the model, pull the LFS payloads, then prune everything the runtime
# never reads — all in the SAME layer. The previous Dockerfile ran
# `rm -rf .../onnx` in a *later* layer, so the "deleted" files still
# persisted inside the clone layer and the image never actually shrank.
# The .git directory (which keeps a second copy of every LFS object) is
# pruned for the same reason; the app only reads the checked-out weights.
RUN git clone https://huggingface.co/${MODEL_NAME} /models/${MODEL_NAME} \
    && cd /models/${MODEL_NAME} \
    && git lfs pull \
    && rm -rf /models/${MODEL_NAME}/onnx /models/${MODEL_NAME}/.git

# Copy the FastAPI app and the legacy start script into the container.
COPY ./app /app
COPY start.sh /start.sh

# start.sh is kept executable for compatibility even though CMD below no
# longer uses it.
RUN chmod +x /start.sh

# Set the working directory (created automatically if missing).
WORKDIR /app

# Documentation only — the FastAPI app is expected to listen on 8080.
EXPOSE 8080

# NOTE(review): no USER directive — the container runs as root unless the
# orchestrator overrides the uid; consider adding a non-root user once the
# app's write paths are confirmed.
# Exec form: the app is PID 1 and receives SIGTERM directly on `docker stop`.
CMD ["python3", "/app/app.py"]