import os
import tempfile
import nltk
import logging
import sys
from flask_cors import CORS
from flask import Flask, request, jsonify, render_template, make_response, Response
from werkzeug.utils import secure_filename
from dotenv import load_dotenv
import json
import shutil
from threading import Thread, Event
import queue
import time
# Load environment variables
load_dotenv()
# Create directories in /tmp which is writable
BASE_DIR = '/tmp' # Use direct /tmp path for Hugging Face
log_dir = os.path.join(BASE_DIR, 'app_logs')
cache_dir = os.path.join(BASE_DIR, 'app_cache')
nltk_data_dir = os.path.join(BASE_DIR, 'nltk_data')
gensim_data_dir = os.path.join(BASE_DIR, 'gensim-data')
upload_dir = os.path.join(BASE_DIR, 'uploads')
ans_image_dir = os.path.join(BASE_DIR, 'ans_image')
images_dir = os.path.join(BASE_DIR, 'images')
log_file = os.path.join(log_dir, 'app.log')  # written by the FileHandler below, read by /check_logs
# Global variables for model caching and initialization status
global_models = {}
initialization_complete = Event()
notification_queue = queue.Queue()
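# Producer/consumer wiring: request handlers (e.g. compute_marks below) put
# progress dicts onto notification_queue, and the /notifications SSE endpoint
# drains it to stream status updates to the browser.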
def ensure_directory(path):
"""Create directory and ensure full permissions with better error handling"""
if os.path.exists(path):
return path
try:
os.makedirs(path, mode=0o777, exist_ok=True)
return path
except Exception as e:
try:
os.makedirs(path, mode=0o755, exist_ok=True)
return path
except Exception as nested_e:
print(f"Warning: Could not create directory {path}: {nested_e}")
return path
# Logging setup: stream to stdout and also persist to app.log so the
# /check_logs endpoint has a file to read from
ensure_directory(log_dir)
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.StreamHandler(sys.stdout),
        logging.FileHandler(log_file)
    ]
)
logger = logging.getLogger(__name__)
def log_print(message, level="INFO"):
if level == "INFO":
logger.info(message)
elif level == "ERROR":
logger.error(message)
elif level == "WARNING":
logger.warning(message)
def initialize_resources():
"""Initialize all required resources"""
try:
# Create essential directories first
for directory in [nltk_data_dir, gensim_data_dir]:
ensure_directory(directory)
        # Initialize NLTK. 'punkt' lives under tokenizers/ while 'stopwords'
        # and 'wordnet' live under corpora/, so each resource is looked up at
        # its real path instead of assuming tokenizers/ for all of them
        required_nltk_data = {
            'stopwords': 'corpora/stopwords',
            'punkt': 'tokenizers/punkt',
            'wordnet': 'corpora/wordnet'
        }
        for data, resource_path in required_nltk_data.items():
            try:
                nltk.data.find(resource_path)
            except LookupError:
                try:
                    log_print(f"Downloading NLTK data: {data}")
                    nltk.download(data, download_dir=nltk_data_dir, quiet=True)
                except Exception as e:
                    log_print(f"Error downloading NLTK data {data}: {e}", "ERROR")
# Initialize models
try:
get_or_load_model('fasttext')
except Exception as e:
log_print(f"Warning: Could not preload models: {e}", "WARNING")
except Exception as e:
log_print(f"Error during initialization: {e}", "ERROR")
finally:
# Signal that initialization is complete
initialization_complete.set()
# Create essential directories
essential_dirs = [cache_dir, upload_dir, images_dir]
for directory in essential_dirs:
ensure_directory(directory)
# Point cache downloads at the writable /tmp locations: HF_HOME is honored by
# the Hugging Face libraries, GENSIM_DATA_DIR by gensim.downloader
os.environ['HF_HOME'] = cache_dir
os.environ['GENSIM_DATA_DIR'] = gensim_data_dir
# Add the custom directory to NLTK's search path
nltk.data.path.insert(0, nltk_data_dir)
# Start initialization in background
initialization_thread = Thread(target=initialize_resources, daemon=True)
initialization_thread.start()
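# The daemon thread lets the server bind its port immediately; the heavy
# endpoints block on initialization_complete (see wait_for_initialization
# below) until background setup has finished.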
# Heavy project imports are done here, after the cache environment variables
# above are in place (Flask and werkzeug names were already imported at the top)
import torch
from HTR.app import extract_text_from_image
from correct_answer_generation.answer_generation_database_creation import database_creation, answer_generation
from similarity_check.tf_idf.tf_idf_score import create_tfidf_values, tfidf_answer_score
from similarity_check.semantic_meaning_check.semantic import question_vector_sentence, question_vector_word, fasttext_similarity
from similarity_check.llm_based_scoring.llm import llm_score
app = Flask(__name__)
app.config['JSON_SORT_KEYS'] = False
app.config['JSONIFY_PRETTYPRINT_REGULAR'] = False
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024 # 16MB max file size
# Create a temporary directory for file uploads
UPLOAD_FOLDER = tempfile.mkdtemp()
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
logger.info(f"Using temporary upload folder: {UPLOAD_FOLDER}")
# Configure CORS for all origins
CORS(app, resources={
r"/*": {
"origins": "*",
"methods": ["GET", "POST", "OPTIONS"],
"allow_headers": ["Content-Type", "Authorization", "Accept"],
"expose_headers": ["Content-Type"]
}
})
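# With this configuration any origin may call the API; an illustrative
# preflight such as
#   curl -X OPTIONS -H "Origin: https://example.com" http://localhost:7860/compute_marks
# (hypothetical origin and host) would be answered with the methods and
# headers listed above.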
# Global error handler for all exceptions
@app.errorhandler(Exception)
def handle_exception(e):
# Log the error for debugging
app.logger.error(f"Unhandled exception: {str(e)}")
return jsonify({
"status": "error",
"error": "Internal server error",
"message": str(e)
}), 500
# Handle 404 errors
@app.errorhandler(404)
def not_found_error(error):
return jsonify({
"status": "error",
"error": "Not found",
"message": "The requested resource was not found"
}), 404
# Handle 400 Bad Request
@app.errorhandler(400)
def bad_request_error(error):
return jsonify({
"status": "error",
"error": "Bad request",
"message": str(error)
}), 400
@app.route('/')
def index():
try:
response = make_response(render_template('index.html'))
response.headers['Content-Type'] = 'text/html; charset=utf-8'
return response
except Exception as e:
return jsonify({"error": str(e)}), 500
def new_value(value, old_min, old_max, new_min, new_max):
    """Linearly rescale value from [old_min, old_max] to [new_min, new_max]"""
    return new_min + ((value - old_min) * (new_max - new_min)) / (old_max - old_min)
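# Example: new_value(0.8, 0, 1, 0, 5) == 4.0, i.e. a combined similarity of
# 0.8 becomes 4 marks on the 0-5 scale used by compute_marks below.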
@app.route('/compute_answers', methods=['POST'])
def compute_answers():
try:
# Check content type
if not request.is_json and not request.form:
return jsonify({
"error": "Invalid request format",
"message": "Request must be either JSON or form data"
}), 400
log_print("\n=== Starting Answer Computation ===")
query_file = request.files.get('query_file')
if not query_file:
return jsonify({
"error": "Missing data",
"message": "Missing query file"
}), 400
try:
queries = query_file.read().decode('utf-8').splitlines()
if not queries:
return jsonify({
"error": "Invalid data",
"message": "No queries found in file"
}), 400
log_print(f"Received queries: {queries}")
except UnicodeDecodeError:
return jsonify({
"error": "Invalid encoding",
"message": "Invalid file encoding"
}), 400
file_type = request.form.get('file_type')
if not file_type:
log_print("Missing file type", "ERROR")
return jsonify({"error": "Missing file type"}), 400
ans_csv_file = request.files.get('ans_csv_file')
if file_type == "csv":
if not ans_csv_file:
log_print("Missing answer CSV file", "ERROR")
return jsonify({"error": "Missing answer CSV file"}), 400
try:
ans_csv_file = ans_csv_file.read().decode('utf-8').splitlines()
                c_answers = []
                for line in ans_csv_file:
                    # Rows may pack several answers separated by a literal
                    # "\n" escape sequence; real newlines were already
                    # consumed by splitlines() above
                    c_answers.append(line.split('\\n'))
log_print(f"Processed CSV answers: {c_answers}")
return jsonify({"answers": c_answers}), 200
except UnicodeDecodeError:
log_print("Invalid CSV file encoding", "ERROR")
return jsonify({"error": "Invalid CSV file encoding"}), 400
c_answers = []
if file_type == 'pdf':
# Create a temporary directory for PDF files
pdf_dir = os.path.join(cache_dir, 'pdf_files')
os.makedirs(pdf_dir, exist_ok=True)
# Save uploaded PDF files
pdf_files = []
for file in request.files.getlist('pdf_files[]'):
if file.filename.endswith('.pdf'):
filename = secure_filename(file.filename)
filepath = os.path.join(pdf_dir, filename)
file.save(filepath)
pdf_files.append(filepath)
if not pdf_files:
log_print("No PDF files uploaded", "ERROR")
return jsonify({"error": "No PDF files uploaded"}), 400
log_print(f"Processing {len(pdf_files)} PDF files")
# Process PDFs
for pdf_file in pdf_files:
database_creation(pdf_file)
# Generate answers
for query in queries:
ans = []
for pdf_file in pdf_files:
ans.append(answer_generation(pdf_file, query))
c_answers.append(ans)
# Clean up PDF directory
try:
shutil.rmtree(pdf_dir)
except Exception as e:
log_print(f"Warning: Could not clean up PDF directory: {e}", "WARNING")
else:
log_print(f"Unsupported file type: {file_type}", "ERROR")
return jsonify({"error": "Unsupported file type"}), 400
log_print(f"Generated answers: {c_answers}")
return jsonify({"answers": c_answers}), 200
except Exception as e:
log_print(f"Error in compute_answers: {str(e)}", "ERROR")
error_msg = str(e).encode('ascii', 'ignore').decode('ascii')
return jsonify({"error": error_msg}), 500
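# Illustrative client call for this endpoint (a sketch: the host/port assume
# the defaults at the bottom of this file, and the file names are hypothetical):
#   import requests
#   files = {'query_file': open('queries.txt', 'rb'),
#            'ans_csv_file': open('answers.csv', 'rb')}
#   r = requests.post('http://localhost:7860/compute_answers',
#                     files=files, data={'file_type': 'csv'})
#   print(r.json()['answers'])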
def validate_folder_structure(files):
"""Validate the folder structure of uploaded files"""
try:
# Get unique student folders
student_folders = set()
for file in files:
if not file or not file.filename:
continue
path_parts = file.filename.split('/')
if len(path_parts) >= 2:
student_folders.add(path_parts[-2])
if not student_folders:
return False, "No valid student folders found"
# Check if each student folder has the same number of files
file_counts = {}
for file in files:
if not file or not file.filename:
continue
path_parts = file.filename.split('/')
if len(path_parts) >= 2:
student = path_parts[-2]
file_counts[student] = file_counts.get(student, 0) + 1
if not file_counts:
return False, "No valid files found in student folders"
# Check if all students have the same number of files
counts = list(file_counts.values())
if len(set(counts)) > 1:
return False, "Inconsistent number of files across student folders"
return True, f"Valid folder structure with {len(student_folders)} students and {counts[0]} files each"
except Exception as e:
return False, f"Error validating folder structure: {str(e)}"
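# Expected upload layout: the parent folder of each image is treated as the
# student id and every student must submit the same number of images, e.g.
#   student_A/q1.jpg, student_A/q2.jpg,
#   student_B/q1.jpg, student_B/q2.jpg
# (folder and file names here are illustrative).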
@app.route('/notifications')
def notifications():
def generate():
error_count = 0
max_errors = 3
while True:
try:
# Get notification from queue (non-blocking)
try:
notification = notification_queue.get_nowait()
if notification:
yield "data: " + json.dumps(notification) + "\n\n"
error_count = 0 # Reset error count on successful notification
except queue.Empty:
# If no notification, yield empty to keep connection alive
yield "data: " + json.dumps({"type": "ping"}) + "\n\n"
time.sleep(0.5) # Keep the connection alive
except Exception as e:
error_count += 1
error_msg = str(e).encode('ascii', 'ignore').decode('ascii')
log_print(f"Error in notification stream: {error_msg}", "ERROR")
yield "data: " + json.dumps({
"type": "error",
"message": f"Server error: {error_msg}"
}) + "\n\n"
if error_count >= max_errors:
break
return Response(generate(), mimetype='text/event-stream')
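# Each event above is framed as "data: <json>\n\n" per the SSE protocol; a
# browser client could consume the stream with, for example:
#   new EventSource('/notifications').onmessage = (e) => JSON.parse(e.data);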
@app.route('/compute_marks', methods=['POST'])
def compute_marks():
try:
log_print("\n=== Starting Compute Marks Process ===")
# Wait for initialization to complete
if not initialization_complete.is_set():
log_print("Waiting for initialization...")
wait_for_initialization()
if not initialization_complete.is_set():
log_print("Initialization timeout", "ERROR")
return jsonify({
"status": "error",
"error": "Server initialization",
"message": "Server is still initializing. Please try again in a moment."
}), 503
# Check content type
if not request.is_json and not request.form:
log_print("Invalid request format", "ERROR")
return jsonify({
"status": "error",
"error": "Invalid request format",
"message": "Request must be either JSON or form data"
}), 400
# Get correct answers
correct_answers = request.form.getlist('correct_answers[]')
if not correct_answers:
log_print("No correct answers provided", "ERROR")
return jsonify({
"status": "error",
"error": "Missing data",
"message": "No correct answers provided"
}), 400
log_print(f"Received {len(correct_answers)} correct answers")
# Create TFIDF values for correct answers
try:
log_print("Starting TFIDF value creation...")
notification_queue.put({
"type": "info",
"message": "Starting TFIDF value creation..."
})
max_tfidf = create_tfidf_values(correct_answers)
log_print("TFIDF values created successfully")
notification_queue.put({
"type": "success",
"message": "TFIDF values created successfully"
})
except Exception as e:
error_msg = str(e).encode('ascii', 'ignore').decode('ascii')
log_print(f"TFIDF error: {error_msg}", "ERROR")
notification_queue.put({
"type": "error",
"message": f"TFIDF error: {error_msg}"
})
return jsonify({
"status": "error",
"error": "TFIDF error",
"message": f"Error creating TFIDF values: {error_msg}"
}), 400
# Get all uploaded files
files = request.files.getlist('file')
if not files:
log_print("No files uploaded", "ERROR")
return jsonify({
"status": "error",
"error": "Missing data",
"message": "No files uploaded"
}), 400
log_print(f"Received {len(files)} files")
        # Check total file size without reading every upload into memory
        total_size = 0
        for file in files:
            file.seek(0, os.SEEK_END)
            total_size += file.tell()
            file.seek(0)  # reset pointer for later processing
        max_size = 15 * 1024 * 1024  # 15MB
if total_size > max_size:
log_print(f"File size limit exceeded: {total_size / 1024 / 1024:.1f}MB", "ERROR")
return jsonify({
"status": "error",
"error": "File size limit exceeded",
"message": f"Total file size must be less than 15MB (current size: {total_size / 1024 / 1024:.1f}MB)"
}), 413
# Validate folder structure
is_valid, message = validate_folder_structure(files)
log_print(f"Folder structure validation: {message}")
if not is_valid:
log_print(f"Invalid folder structure: {message}", "ERROR")
return jsonify({
"status": "error",
"error": "Invalid folder structure",
"message": message
}), 400
# Create a temporary directory with proper permissions
try:
base_temp_dir = os.path.join(BASE_DIR, 'temp_processing')
ensure_directory(base_temp_dir)
log_print(f"Created temporary directory: {base_temp_dir}")
except Exception as e:
log_print(f"Error creating temp directory: {str(e)}", "ERROR")
return jsonify({
"status": "error",
"error": "Server error",
"message": "Could not create temporary directory"
}), 500
# Dictionary to store results by student folder and image name
results = {}
failed_files = []
extracted_texts = {}
processed_count = 0
total_files = len(files)
log_print(f"Starting to process {total_files} files...")
try:
# Process each file
for file in files:
try:
# Validate file
if not file or not file.filename:
log_print("Skipping invalid file", "WARNING")
continue
log_print(f"Processing file: {file.filename}")
notification_queue.put({
"type": "info",
"message": f"Processing file: {file.filename}"
})
# Get folder structure from file path
path_parts = file.filename.split('/')
if len(path_parts) < 2:
log_print(f"Invalid file path structure: {file.filename}", "WARNING")
continue
student_folder = path_parts[-2]
filename = path_parts[-1]
log_print(f"Student folder: {student_folder}, Filename: {filename}")
# Validate file type
if not is_valid_image_file(filename):
log_print(f"Invalid file type: {filename}", "WARNING")
failed_files.append({
"file": file.filename,
"error": "Invalid file type. Only .jpg, .jpeg, and .png files are allowed."
})
continue
# Initialize student results if not exists
if student_folder not in results:
results[student_folder] = {}
log_print(f"Initialized results for student: {student_folder}")
# Save and process file with proper permissions
try:
student_dir = os.path.join(base_temp_dir, student_folder)
ensure_directory(student_dir)
# Use a more reliable filename
safe_filename = secure_filename(f"{student_folder}_{filename}")
filepath = os.path.join(student_dir, safe_filename)
# Save file with proper permissions
file.save(filepath)
os.chmod(filepath, 0o666) # Make file readable/writable
log_print(f"Saved file to: {filepath}")
# Extract text
log_print(f"Starting text extraction from: {filepath}")
notification_queue.put({
"type": "info",
"message": f"Starting text extraction from: {filename}"
})
extracted_text = extract_text_from_image(filepath)
if not extracted_text:
log_print(f"No text extracted from: {filepath}", "WARNING")
notification_queue.put({
"type": "warning",
"message": f"No text could be extracted from: {filename}"
})
failed_files.append({
"file": file.filename,
"error": "No text could be extracted from the image"
})
continue
log_print(f"Text extracted successfully from: {filename}")
notification_queue.put({
"type": "success",
"message": f"Text extracted successfully from: {filename}"
})
# Clean up the file immediately after processing
try:
os.remove(filepath)
except Exception as e:
log_print(f"Warning: Could not remove temp file {filepath}: {e}", "WARNING")
log_print(f"Extracted text: {extracted_text[:100]}...")
# Send notification for extracted text immediately
notification_data = {
"type": "extracted_text",
"filename": filename,
"text": extracted_text[:200] + ("..." if len(extracted_text) > 200 else "")
}
notification_queue.put(notification_data)
log_print(f"Notification queued for {filename}")
# Store extracted text for final response
extracted_texts[filename] = extracted_text
# Clean the extracted text for JSON
extracted_text = extracted_text.encode('ascii', 'ignore').decode('ascii')
# Find best matching answer
best_score = 0
best_answer_index = 0
for i, correct_answer in enumerate(correct_answers):
try:
# Clean the correct answer for comparison
clean_correct_answer = correct_answer.encode('ascii', 'ignore').decode('ascii')
# Calculate similarity scores
log_print("Starting similarity score calculations...")
notification_queue.put({
"type": "info",
"message": "Starting similarity score calculations..."
})
log_print(f"Calculating scores for answer {i+1}...")
notification_queue.put({
"type": "info",
"message": f"Calculating scores for answer {i+1}..."
})
semantic_score = question_vector_sentence(extracted_text, clean_correct_answer)
word_score = question_vector_word(extracted_text, clean_correct_answer)
tfidf_score = tfidf_answer_score(extracted_text, clean_correct_answer, max_tfidf)
ft_score = fasttext_similarity(extracted_text, clean_correct_answer)
llm_marks = llm_score(clean_correct_answer, extracted_text)
# Send individual scores to frontend
notification_queue.put({
"type": "info",
"message": f"Scores for answer {i+1}:\n" +
f"Semantic: {semantic_score:.2f}\n" +
f"Word: {word_score:.2f}\n" +
f"TFIDF: {tfidf_score:.2f}\n" +
f"FastText: {ft_score:.2f}\n" +
f"LLM: {llm_marks:.2f}"
})
combined_score = (
semantic_score * 0.3 +
word_score * 0.2 +
tfidf_score * 0.2 +
ft_score * 0.2 +
llm_marks * 0.1
)
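                                # The weights above sum to 1.0, so combined_score
                                # stays in [0, 1] assuming each component score
                                # is itself normalized to [0, 1]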
log_print(f"Scores for answer {i+1}: semantic={semantic_score}, word={word_score}, tfidf={tfidf_score}, ft={ft_score}, llm={llm_marks}, combined={combined_score}")
if combined_score > best_score:
best_score = combined_score
best_answer_index = i
except Exception as score_error:
error_msg = str(score_error).encode('ascii', 'ignore').decode('ascii')
log_print(f"Error calculating scores for {filepath}: {error_msg}", "ERROR")
notification_queue.put({
"type": "error",
"message": f"Error calculating scores for {filename}: {error_msg}"
})
failed_files.append({
"file": file.filename,
"error": f"Error calculating scores: {error_msg}"
})
continue
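                        # best_score is the highest combined similarity in [0, 1];
                        # rescale it to the final 0-5 mark scale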
marks = new_value(best_score, 0, 1, 0, 5)
results[student_folder][filename] = round(marks, 2)
log_print(f"Assigned marks for {filename} in {student_folder}: {marks}")
notification_queue.put({
"type": "success",
"message": f"Assigned marks for {filename}: {marks}"
})
# Update progress
processed_count += 1
progress_data = {
"type": "progress",
"processed": processed_count,
"total": total_files,
"current_file": filename
}
notification_queue.put(progress_data)
log_print(f"Progress update: {processed_count}/{total_files} files processed")
except Exception as e:
error_msg = str(e).encode('ascii', 'ignore').decode('ascii')
log_print(f"Error processing file {file.filename}: {error_msg}", "ERROR")
notification_queue.put({
"type": "error",
"message": f"Error processing file {file.filename}: {error_msg}"
})
failed_files.append({
"file": file.filename,
"error": error_msg
})
continue
except Exception as e:
error_msg = str(e).encode('ascii', 'ignore').decode('ascii')
log_print(f"Error processing file {file.filename}: {error_msg}", "ERROR")
notification_queue.put({
"type": "error",
"message": f"Error processing file {file.filename}: {error_msg}"
})
failed_files.append({
"file": file.filename,
"error": error_msg
})
continue
finally:
# Clean up temp directory
try:
shutil.rmtree(base_temp_dir)
log_print("Cleaned up temporary directory")
except Exception as e:
log_print(f"Warning: Could not clean up temporary directory: {e}", "WARNING")
if not results:
log_print("No results computed", "ERROR")
return jsonify({
"status": "error",
"error": "Processing error",
"message": "No results computed"
}), 400
# Clean the results for JSON response
clean_results = {}
for student, scores in results.items():
clean_student = student.encode('ascii', 'ignore').decode('ascii')
clean_scores = {}
for filename, mark in scores.items():
clean_filename = filename.encode('ascii', 'ignore').decode('ascii')
clean_scores[clean_filename] = mark
clean_results[clean_student] = clean_scores
log_print(f"Final results: {clean_results}")
response_data = {
"status": "success",
"results": clean_results,
"failed_files": [{
"file": f["file"].encode('ascii', 'ignore').decode('ascii'),
"error": f["error"].encode('ascii', 'ignore').decode('ascii')
} for f in failed_files],
"extracted_texts": {
filename: text.encode('ascii', 'ignore').decode('ascii')
for filename, text in extracted_texts.items()
}
}
log_print("Sending response...")
response = jsonify(response_data)
response.headers['Content-Type'] = 'application/json'
log_print("=== Compute Marks Process Completed ===")
return response
except Exception as e:
error_msg = str(e).encode('ascii', 'ignore').decode('ascii')
log_print(f"Error in compute_marks: {error_msg}", "ERROR")
return jsonify({
"status": "error",
"error": "Server error",
"message": f"Error computing marks: {error_msg}"
}), 500
@app.route('/check_logs')
def check_logs():
try:
# Ensure log directory exists
ensure_directory(log_dir)
# If log file doesn't exist, create it
if not os.path.exists(log_file):
with open(log_file, 'w') as f:
f.write("Log file created.\n")
# Read last 1000 lines of logs
with open(log_file, 'r') as f:
logs = f.readlines()[-1000:]
return jsonify({
"status": "success",
"logs": "".join(logs)
})
except Exception as e:
log_print(f"Error reading logs: {str(e)}", "ERROR")
return jsonify({
"status": "error",
"error": str(e)
}), 500
def is_valid_image_file(filename):
    """Validate the image file extension (.jpg, .jpeg, or .png only)"""
    try:
        valid_extensions = {'.jpg', '.jpeg', '.png'}
        return os.path.splitext(filename)[1].lower() in valid_extensions
    except Exception:
        return False
def allowed_file(filename, allowed_extensions):
return '.' in filename and \
filename.rsplit('.', 1)[1].lower() in allowed_extensions
def get_or_load_model(model_name):
"""Get a model from cache or load it if not present"""
if model_name not in global_models:
try:
if model_name == 'fasttext':
from gensim.models import KeyedVectors
log_print(f"Loading {model_name} model...")
model_path = os.path.join(gensim_data_dir, 'fasttext-wiki-news-subwords-300', 'fasttext-wiki-news-subwords-300.gz')
if not os.path.exists(model_path):
from gensim.downloader import load
log_print("Downloading fasttext model...")
global_models[model_name] = load('fasttext-wiki-news-subwords-300')
else:
global_models[model_name] = KeyedVectors.load_word2vec_format(model_path)
log_print(f"Successfully loaded {model_name} model")
elif model_name == 'llm':
# Add your LLM model loading here if needed
pass
except Exception as e:
log_print(f"Error loading {model_name} model: {e}", "ERROR")
raise
return global_models.get(model_name)
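# Usage sketch: get_or_load_model('fasttext') returns the cached KeyedVectors
# on every call after the first, so repeated scoring requests do not
# re-download or reload the large fasttext vectors.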
def wait_for_initialization(timeout=300):
"""Wait for initialization to complete with timeout"""
if not initialization_complete.wait(timeout):
log_print("Warning: Initialization timed out", "WARNING")
# Gate the heavy endpoints on initialization having completed
@app.before_request
def ensure_initialization():
"""Ensure all resources are initialized before processing requests"""
if request.endpoint in ['compute_marks', 'compute_answers']:
wait_for_initialization()
def cleanup_temp_files():
"""Clean up temporary files with proper error handling"""
try:
# Clean up the temporary processing directory
temp_processing_dir = os.path.join(BASE_DIR, 'temp_processing')
if os.path.exists(temp_processing_dir):
shutil.rmtree(temp_processing_dir, ignore_errors=True)
# Clean up the images directory
if os.path.exists(images_dir):
for file in os.listdir(images_dir):
try:
file_path = os.path.join(images_dir, file)
if os.path.isfile(file_path):
os.unlink(file_path)
except Exception as e:
log_print(f"Warning: Could not delete file {file_path}: {e}", "WARNING")
except Exception as e:
log_print(f"Error cleaning up temporary files: {e}", "ERROR")
if __name__ == '__main__':
try:
# Create essential directories
for directory in essential_dirs:
ensure_directory(directory)
# Configure server for long-running requests
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0
        app.config['MAX_CONTENT_LENGTH'] = 50 * 1024 * 1024  # 50MB max request size (overrides the 16MB set at import time)
# Add additional server configurations
app.config['PERMANENT_SESSION_LIFETIME'] = 3600 # 1 hour session lifetime
app.config['SESSION_COOKIE_SECURE'] = True
app.config['SESSION_COOKIE_HTTPONLY'] = True
# Start the Flask app with increased timeout
port = int(os.environ.get('PORT', 7860))
log_print(f"Starting server on port {port}")
log_print("Server configuration:")
log_print(f"- Max content length: {app.config['MAX_CONTENT_LENGTH'] / 1024 / 1024}MB")
log_print(f"- Threaded: True")
log_print(f"- Debug mode: False")
# Run the server with proper configuration
app.run(
host='0.0.0.0',
port=port,
debug=False,
use_reloader=False,
threaded=True
)
except Exception as e:
log_print(f"Fatal error starting server: {str(e)}", "ERROR")
raise
finally:
log_print("Cleaning up temporary files...")
cleanup_temp_files()
log_print("Server shutdown complete")