# AI-Life-Coach-Streamlit2 / diagnose_ollama.py
# Author: rdune71
# Commit 758943e: Add model selection dropdown and fix model detection logic
#!/usr/bin/env python3
"""
Diagnostic script to test Ollama connectivity
"""
import requests
from utils.config import config
def test_ollama_connectivity(custom_ngrok_url=None, timeout=10):
    """Test whether an Ollama server is reachable from the current environment.

    Probes the ``/api/tags`` endpoint, prints a human-readable diagnostic
    report, and lists any models the server has loaded.

    Args:
        custom_ngrok_url: Optional base URL override (e.g. an ngrok tunnel).
            Falls back to ``config.ollama_host`` when not provided.
        timeout: Seconds to wait for the HTTP request (default 10, matching
            the previous hard-coded value).

    Returns:
        bool: True when the host answered with HTTP 200 and at least one
        model is loaded; False otherwise. (The function previously always
        returned None, so returning a bool is backward-compatible.)
    """
    test_url = custom_ngrok_url if custom_ngrok_url else config.ollama_host
    print("=== Ollama Connectivity Diagnostic ===")
    print(f"Testing Ollama Host: {test_url}")
    print()
    # ngrok serves an HTML browser-warning page unless this header is sent,
    # which would otherwise break JSON parsing below.
    headers = {
        "ngrok-skip-browser-warning": "true",
        "User-Agent": "AI-Life-Coach-Diagnostic",
    }
    healthy = False
    # Test 1: Check if we can reach the Ollama host
    print("Test 1: Checking Ollama host connectivity...")
    try:
        response = requests.get(
            f"{test_url}/api/tags",
            headers=headers,
            timeout=timeout,
        )
        print(f" Status Code: {response.status_code}")
        if response.status_code == 200:
            print(" βœ“ Successfully connected to Ollama host")
            try:
                data = response.json()
                models = data.get("models", [])
                print(f" Available Models: {len(models)}")
                for model in models:
                    print(f" - {model.get('name', 'Unknown model')} (Size: {model.get('size', 'Unknown')})")
                if models:
                    print("\n βœ… Ollama is working correctly with models loaded!")
                    print(" You should be able to use the AI Life Coach app now.")
                    healthy = True
                else:
                    print("\n ⚠️ Ollama is running but no models are loaded.")
                    print(" Try pulling a model: ollama pull mistral")
            except Exception as e:
                # Likely a non-JSON body (e.g. an ngrok interstitial page).
                print(f" Error parsing response: {e}")
                print(f" Response text: {response.text[:200]}...")
        else:
            print(f" βœ— Unexpected status code: {response.status_code}")
            print(f" Response: {response.text[:200]}...")
    except requests.exceptions.Timeout:
        print(" βœ— Request timed out (took more than 10 seconds)")
        print(" This may indicate network issues or an unresponsive Ollama server")
    except requests.exceptions.ConnectionError as e:
        print(f" βœ— Connection error: {e}")
        print(" This may indicate that the Ollama server is not running or the URL is incorrect")
    except Exception as e:
        # Last-resort catch so a diagnostic tool never crashes on the user.
        print(f" βœ— Unexpected error: {e}")
    print()
    print("=== Diagnostic Complete ===")
    print()
    print("Troubleshooting Tips:")
    print("1. If connection fails, verify your Ollama server is running: ollama serve")
    print("2. If no models found, pull a model: ollama pull mistral")
    print("3. If ngrok issues, verify your tunnel is active and URL is correct")
    print("4. If timeout issues, check firewall settings and network connectivity")
    return healthy
if __name__ == "__main__":
    import sys

    # An optional first CLI argument overrides the configured Ollama/ngrok URL.
    override_url = sys.argv[1] if len(sys.argv) > 1 else None
    if override_url:
        print(f"Using custom ngrok URL: {override_url}")
    test_ollama_connectivity(override_url)