#!/bin/bash
# start.sh - Main startup script for the Docker container

set -e

echo "============================================="
echo "Starting X AI Assistant Space"
echo "============================================="

# Function to log with timestamp
log() {
    echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1"
}
# Function to check if a service is running
check_service() {
    local service_name=$1
    local check_command=$2
    local max_attempts=$3
    local attempt=1

    log "Checking $service_name..."
    while [ $attempt -le $max_attempts ]; do
        if eval "$check_command"; then
            log "$service_name is ready!"
            return 0
        fi
        log "Waiting for $service_name... (attempt $attempt/$max_attempts)"
        sleep 3
        ((attempt++))
    done

    log "ERROR: $service_name failed to start after $max_attempts attempts"
    return 1
}
# Create necessary directories
log "Creating directories..."
mkdir -p /app/logs
mkdir -p /app/models
mkdir -p /app/data
mkdir -p /root/.ollama
# Start Ollama service in the background
log "Starting Ollama service..."
ollama serve > /app/logs/ollama.log 2>&1 &
OLLAMA_PID=$!
log "Ollama started with PID: $OLLAMA_PID"

# Wait for Ollama to be ready
check_service "Ollama" "curl -s http://localhost:11434/api/tags > /dev/null 2>&1" 20
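# Note: /api/tags is a cheap Ollama API call, which is why it doubles as a readiness
# probe here. Ollama serves on 127.0.0.1:11434 by default; set OLLAMA_HOST before
# `ollama serve` if the API needs to be reachable from outside this container.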
# Run model setup script
log "Running model setup..."
if [ -f "/app/model_setup.sh" ]; then
    bash /app/model_setup.sh
else
    log "No model_setup.sh found, skipping model setup"
fi
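# model_setup.sh is assumed to pull whatever models app.py queries. A minimal
# sketch (the model tag below is a placeholder, not something this script defines):
#
#   #!/bin/bash
#   set -e
#   ollama pull llama3.2   # replace with the model your app actually uses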
# Start the Python application
log "Starting Gradio application..."
cd /app

# Set Python path
export PYTHONPATH="/app:$PYTHONPATH"

# Start the main application
python3 app.py &
APP_PID=$!
log "Gradio app started with PID: $APP_PID"

# Wait for the app to be ready
check_service "Gradio App" "curl -s http://localhost:7860 > /dev/null 2>&1" 10
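# 7860 is Gradio's default port and the port a Docker-based Hugging Face Space
# exposes unless app_port is overridden, so app.py is assumed to launch its
# Gradio interface bound to 0.0.0.0:7860.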
log "============================================="
log "🚀 X AI Assistant Space is now running!"
log "============================================="
log "Gradio Interface: http://localhost:7860"
log "Ollama API: http://localhost:11434"
log "============================================="
# Function to handle shutdown
cleanup() {
    log "Shutting down services..."

    if [ -n "$APP_PID" ] && kill -0 $APP_PID 2>/dev/null; then
        log "Stopping Gradio app (PID: $APP_PID)..."
        kill -TERM $APP_PID
        wait $APP_PID 2>/dev/null || true
    fi

    if [ -n "$OLLAMA_PID" ] && kill -0 $OLLAMA_PID 2>/dev/null; then
        log "Stopping Ollama (PID: $OLLAMA_PID)..."
        kill -TERM $OLLAMA_PID
        wait $OLLAMA_PID 2>/dev/null || true
    fi

    log "Cleanup completed"
    exit 0
}
# Set up signal handlers
trap cleanup SIGTERM SIGINT
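# SIGTERM is what `docker stop` (and the Spaces runtime) sends on shutdown, so the
# trap gives both child processes a chance to exit cleanly before the container dies;
# kill -0 below only probes whether a PID is still alive, it sends no signal.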
# Monitor processes and keep container running
while true; do
    # Check if Ollama is still running
    if ! kill -0 $OLLAMA_PID 2>/dev/null; then
        log "ERROR: Ollama process died unexpectedly"
        exit 1
    fi

    # Check if Gradio app is still running
    if ! kill -0 $APP_PID 2>/dev/null; then
        log "ERROR: Gradio app process died unexpectedly"
        exit 1
    fi

    sleep 10
done
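# Assumed wiring in the Space's Dockerfile (sketch only; paths not confirmed here):
#   COPY start.sh /app/start.sh
#   RUN chmod +x /app/start.sh
#   CMD ["/app/start.sh"]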