|
#!/bin/bash |
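# Docker management script for GPT Local: builds the project image, pushes it to
# Docker Hub, runs it as a container (web, terminal, or dev mode), and offers
# docker-compose, cleanup, and status helpers. See the help text in main() below.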
|
|
|
|
|
|
|
set -e |
|
|
|
|
|
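# Load environment variables from .env if present (comment lines are skipped).
# Note: this simple loader does not handle values that contain spaces.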
if [ -f .env ]; then
    export $(grep -v '^#' .env | xargs)
fi
|
|
|
|
|
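# ANSI color codes for console output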
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
|
|
|
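# Logging helpers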
print_status() {
    echo -e "${BLUE}[INFO]${NC} $1"
}

print_success() {
    echo -e "${GREEN}[SUCCESS]${NC} $1"
}

print_warning() {
    echo -e "${YELLOW}[WARNING]${NC} $1"
}

print_error() {
    echo -e "${RED}[ERROR]${NC} $1"
}
|
|
|
|
|
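# Image configuration; DOCKER_USERNAME, DOCKER_IMAGE_NAME, and DOCKER_IMAGE_VERSION
# can be overridden via the environment or the .env file.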
DOCKER_USERNAME=${DOCKER_USERNAME:-"drtv"}
IMAGE_NAME=${DOCKER_IMAGE_NAME:-"gpt-local"}
VERSION=${DOCKER_IMAGE_VERSION:-"latest"}
FULL_IMAGE_NAME="$DOCKER_USERNAME/$IMAGE_NAME:$VERSION"
|
|
|
|
|
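# Build the Docker image, tagging it with both $VERSION and latest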
build_image() {
    print_status "Building Docker image: $FULL_IMAGE_NAME"

    create_optimized_dockerfile

    if docker build -t "$FULL_IMAGE_NAME" -t "$DOCKER_USERNAME/$IMAGE_NAME:latest" .; then
        print_success "Image built successfully: $FULL_IMAGE_NAME"

        # Report the size of the freshly built image
        local size
        size=$(docker images --format "{{.Size}}" "$FULL_IMAGE_NAME" | head -1)
        print_status "Image size: $size"
    else
        print_error "Failed to build the image"
        return 1
    fi
}
|
|
|
|
|
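# Generate an optimized Dockerfile for the project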
create_optimized_dockerfile() {
    cat > Dockerfile << 'EOF'
FROM python:3.11-slim

LABEL maintainer="GPT Local Team"
LABEL description="Local GPT chat system powered by Hugging Face"
LABEL version="1.0"

# Python and model-cache environment
ENV PYTHONPATH=/app
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
ENV HF_HOME=/app/.cache/huggingface
ENV TRANSFORMERS_CACHE=/app/.cache/huggingface
ENV TORCH_HOME=/app/.cache/torch

# System dependencies
RUN apt-get update && apt-get install -y \
    curl \
    git \
    build-essential \
    && rm -rf /var/lib/apt/lists/* \
    && apt-get clean

WORKDIR /app

# Install Python dependencies first to take advantage of layer caching
COPY requirements.txt requirements-dev.txt ./
RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir -r requirements.txt

# Copy the application code
COPY . .

# Create model, cache, and log directories
RUN mkdir -p models models_cache logs .cache/huggingface .cache/torch

RUN chmod +x *.py && \
    chmod +x *.sh 2>/dev/null || true

EXPOSE 7860

HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
    CMD curl -f http://localhost:7860/ || exit 1

CMD ["python3", "main.py"]
EOF

    print_success "Optimized Dockerfile created"
}
|
|
|
|
|
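# Push the versioned and latest tags to Docker Hub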
push_image() {
    print_status "Pushing image to Docker Hub: $FULL_IMAGE_NAME"

    # Check the Docker Hub login. Note that 'docker info' does not always report a
    # Username (e.g. when a credential helper is used), so this check may be strict.
    if ! docker info | grep -q "Username: $DOCKER_USERNAME"; then
        print_error "You are not logged in to Docker Hub as $DOCKER_USERNAME"
        print_status "Run: echo 'your_token' | docker login -u $DOCKER_USERNAME --password-stdin"
        return 1
    fi

    # Push the versioned tag
    if docker push "$FULL_IMAGE_NAME"; then
        print_success "Image $FULL_IMAGE_NAME pushed successfully"
    else
        print_error "Failed to push image with tag $VERSION"
        return 1
    fi

    # Push the latest tag
    if docker push "$DOCKER_USERNAME/$IMAGE_NAME:latest"; then
        print_success "latest tag pushed successfully"
        print_status "Image available at: https://hub.docker.com/r/$DOCKER_USERNAME/$IMAGE_NAME"
    else
        print_error "Failed to push the latest tag"
        return 1
    fi
}
|
|
|
|
|
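# Run the image as a container in one of three modes: web (default), terminal, or dev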
run_container() {
    local mode=${1:-"web"}

    print_status "Running container in mode: $mode"

    # Remove any previous container with the same name
    docker stop "$DOCKER_USERNAME-$IMAGE_NAME" 2>/dev/null || true
    docker rm "$DOCKER_USERNAME-$IMAGE_NAME" 2>/dev/null || true

    case $mode in
        "web")
            # Detached web UI with persistent model cache and logs
            docker run -d \
                --name "$DOCKER_USERNAME-$IMAGE_NAME" \
                -p 7860:7860 \
                -v "$(pwd)/models_cache:/app/models_cache" \
                -v "$(pwd)/logs:/app/logs" \
                -e HUGGINGFACE_TOKEN="$HUGGINGFACE_TOKEN" \
                -e DEFAULT_MODEL="$DEFAULT_MODEL" \
                -e DEVICE="$DEVICE" \
                "$FULL_IMAGE_NAME"

            print_success "Web container running at: http://localhost:7860"
            ;;
        "terminal")
            # Interactive terminal chat
            docker run -it \
                --name "$DOCKER_USERNAME-$IMAGE_NAME-terminal" \
                -v "$(pwd)/models_cache:/app/models_cache" \
                -e HUGGINGFACE_TOKEN="$HUGGINGFACE_TOKEN" \
                -e DEFAULT_MODEL="$DEFAULT_MODEL" \
                -e DEVICE="$DEVICE" \
                "$FULL_IMAGE_NAME" \
                python3 chat_terminal.py
            ;;
        "dev")
            # Development shell with the project directory bind-mounted
            docker run -it \
                --name "$DOCKER_USERNAME-$IMAGE_NAME-dev" \
                -p 7860:7860 \
                -v "$(pwd):/app" \
                -e HUGGINGFACE_TOKEN="$HUGGINGFACE_TOKEN" \
                -e DEFAULT_MODEL="$DEFAULT_MODEL" \
                -e DEVICE="$DEVICE" \
                "$FULL_IMAGE_NAME" \
                bash
            ;;
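        *)
            # Fail fast on an unrecognized mode instead of silently doing nothing
            print_error "Unknown mode: $mode (use web, terminal, or dev)"
            return 1
            ;;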
|
esac |
|
} |
|
|
|
|
|
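# Start a service with docker-compose and follow its logs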
run_compose() {
    local service=${1:-"gpt-local-web"}

    print_status "Running with docker-compose: $service"

    if docker-compose up --build -d "$service"; then
        print_success "Service $service is up"

        if [ "$service" = "gpt-local-web" ]; then
            print_status "Web interface available at: http://localhost:7860"
        fi

        print_status "Service logs (Ctrl+C to exit):"
        docker-compose logs -f "$service"
    else
        print_error "docker-compose failed to start $service"
        return 1
    fi
}
|
|
|
|
|
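# Stop and remove project containers and prune dangling images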
cleanup() {
    print_status "Cleaning up containers and images..."

    # Stop and remove all containers belonging to this project
    docker stop $(docker ps -q --filter "name=$DOCKER_USERNAME-$IMAGE_NAME") 2>/dev/null || true
    docker rm $(docker ps -aq --filter "name=$DOCKER_USERNAME-$IMAGE_NAME") 2>/dev/null || true

    # Remove dangling images
    docker image prune -f

    print_success "Cleanup complete"
}
|
|
|
|
|
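# Show running containers, local images, and overall Docker disk usage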
status() {
    print_status "GPT Local container status:"

    echo "Running containers:"
    docker ps --filter "name=$DOCKER_USERNAME-$IMAGE_NAME" --format "table {{.Names}}\t{{.Status}}\t{{.Ports}}"

    echo ""
    echo "Available images:"
    docker images --filter "reference=$DOCKER_USERNAME/$IMAGE_NAME" --format "table {{.Repository}}\t{{.Tag}}\t{{.Size}}\t{{.CreatedSince}}"

    echo ""
    echo "Docker disk usage:"
    docker system df
}
|
|
|
|
|
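# Entry point: dispatch on the first CLI argument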
main() {
    local action=${1:-"help"}
    local param=${2:-}

    case $action in
        "build")
            build_image
            ;;
        "push")
            build_image
            push_image
            ;;
        "run")
            run_container "$param"
            ;;
        "compose")
            run_compose "$param"
            ;;
        "cleanup")
            cleanup
            ;;
        "status")
            status
            ;;
        "full")
            build_image
            push_image
            print_status "🎉 Full pipeline completed!"
            ;;
        *)
            echo "🐳 Docker management script for GPT Local"
            echo ""
            echo "Usage: $0 [command] [parameter]"
            echo ""
            echo "Commands:"
            echo "  build                  - Build the Docker image"
            echo "  push                   - Build the image and push it to Docker Hub"
            echo "  run [web|terminal|dev] - Run a container"
            echo "  compose [service]      - Run with docker-compose"
            echo "  cleanup                - Clean up containers and images"
            echo "  status                 - Show current status"
            echo "  full                   - Full pipeline (build + push)"
            echo ""
            echo "Examples:"
            echo "  $0 build           # Build the image"
            echo "  $0 push            # Build and push"
            echo "  $0 run web         # Run the web interface"
            echo "  $0 run terminal    # Run the terminal chat"
            echo "  $0 compose         # Run with docker-compose"
            echo ""
            echo "Environment variables (.env file):"
            echo "  DOCKER_USERNAME=$DOCKER_USERNAME"
            echo "  DOCKER_IMAGE_NAME=$IMAGE_NAME"
            echo "  DOCKER_IMAGE_VERSION=$VERSION"
            echo "  HUGGINGFACE_TOKEN=***"
            ;;
    esac
}
|
|
|
|
|
main "$@" |
|
|