# core/errors.py

USER_FRIENDLY_ERRORS = {
    "connection refused": "Unable to connect to the local AI server. Is Ollama running?",
    "503 service unavailable": "The advanced model is warming up (may take 2–4 minutes). Please try again shortly.",
    "timeout": "Request took too long. Check your internet connection or try again later.",
    "invalid token": "Authentication failed. Please verify your API keys are correct.",
    "model not found": "Selected model isn't loaded. Try pulling it with 'ollama pull <model>'.",
}

def translate_error(exception: Exception) -> str:
    """
    Translate a raw exception into a user-friendly message.

    Falls back to a truncated version of the original message if no
    known key phrase matches.
    """
    raw = str(exception)
    exc_str = raw.lower()
    for key_phrase, friendly_msg in USER_FRIENDLY_ERRORS.items():
        if key_phrase in exc_str:
            return friendly_msg
    # Default fallback: show the original error, truncated, in a friendlier way.
    return f"Something unexpected happened: {raw[:100]}{'...' if len(raw) > 100 else ''}"
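
# --- Usage sketch (illustrative addition, not part of the original module) ---
# A minimal example of how translate_error() might be wired into a caller.
# `request_completion` below is a hypothetical stand-in for any backend call
# that can raise (an HTTP client, an Ollama wrapper, etc.); the mapping above
# only requires that the raised exception's text contains a known key phrase.
if __name__ == "__main__":
    def request_completion() -> str:
        # Simulate a backend that is not reachable.
        raise ConnectionError("connection refused")

    try:
        request_completion()
    except Exception as exc:
        # Surface the friendly message instead of the raw exception text.
        print(translate_error(exc))
        # -> "Unable to connect to the local AI server. Is Ollama running?"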