File size: 3,256 Bytes
da339ca
 
8b21538
da339ca
 
 
 
 
8b21538
da339ca
 
8b21538
 
da339ca
8b21538
da339ca
 
 
 
 
 
 
 
 
 
 
 
8b21538
da339ca
 
 
 
 
 
 
 
 
 
 
 
758943e
9cf1bcf
758943e
 
 
9cf1bcf
758943e
 
9cf1bcf
da339ca
 
 
 
 
 
 
 
 
9cf1bcf
da339ca
 
9cf1bcf
da339ca
 
 
 
 
9cf1bcf
 
 
 
 
 
da339ca
 
8b21538
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
#!/usr/bin/env python3
"""
Diagnostic script to test Ollama connectivity
"""

import requests
from utils.config import config

def test_ollama_connectivity(custom_ngrok_url=None):
    """Run a connectivity diagnostic against the Ollama host and print results.

    Args:
        custom_ngrok_url: Optional URL that overrides the configured host
            (``config.ollama_host``) when provided.
    """

    target = custom_ngrok_url or config.ollama_host

    print("=== Ollama Connectivity Diagnostic ===")
    print(f"Testing Ollama Host: {target}")
    print()

    # ngrok serves a browser interstitial by default; this header skips it
    # so the API response comes back as JSON.
    request_headers = {
        "ngrok-skip-browser-warning": "true",
        "User-Agent": "AI-Life-Coach-Diagnostic",
    }

    # Test 1: hit the model-listing endpoint to prove the host is reachable.
    print("Test 1: Checking Ollama host connectivity...")
    try:
        resp = requests.get(
            f"{target}/api/tags",
            headers=request_headers,
            timeout=10,
        )
        print(f"  Status Code: {resp.status_code}")

        if resp.status_code != 200:
            print(f"  ✗ Unexpected status code: {resp.status_code}")
            print(f"  Response: {resp.text[:200]}...")
        else:
            print("  ✓ Successfully connected to Ollama host")
            try:
                payload = resp.json()
                model_list = payload.get("models", [])
                print(f"  Available Models: {len(model_list)}")
                for entry in model_list:
                    print(f"    - {entry.get('name', 'Unknown model')} (Size: {entry.get('size', 'Unknown')})")

                if model_list:
                    print("\n  ✅ Ollama is working correctly with models loaded!")
                    print("  You should be able to use the AI Life Coach app now.")
                else:
                    print("\n  ⚠️  Ollama is running but no models are loaded.")
                    print("  Try pulling a model: ollama pull mistral")

            except Exception as err:
                # Host answered 200 but the body was not the expected JSON.
                print(f"  Error parsing response: {err}")
                print(f"  Response text: {resp.text[:200]}...")

    except requests.exceptions.Timeout:
        print("  ✗ Request timed out (took more than 10 seconds)")
        print("    This may indicate network issues or an unresponsive Ollama server")
    except requests.exceptions.ConnectionError as err:
        print(f"  ✗ Connection error: {err}")
        print("    This may indicate that the Ollama server is not running or the URL is incorrect")
    except Exception as err:
        print(f"  ✗ Unexpected error: {err}")

    print()
    print("=== Diagnostic Complete ===")
    print()
    print("Troubleshooting Tips:")
    print("1. If connection fails, verify your Ollama server is running: ollama serve")
    print("2. If no models found, pull a model: ollama pull mistral")
    print("3. If ngrok issues, verify your tunnel is active and URL is correct")
    print("4. If timeout issues, check firewall settings and network connectivity")

if __name__ == "__main__":
    import sys

    # A URL passed as the first CLI argument overrides the configured host.
    if len(sys.argv) > 1:
        custom_url = sys.argv[1]
        print(f"Using custom ngrok URL: {custom_url}")
    else:
        custom_url = None

    test_ollama_connectivity(custom_url)