"""
Diagnostic script to test Ollama connectivity
"""

import requests

from utils.config import config


def test_ollama_connectivity(custom_ngrok_url=None):
    """Test if Ollama is reachable from the current environment."""

    # Prefer an explicitly supplied ngrok URL; otherwise fall back to the configured host
    test_url = custom_ngrok_url if custom_ngrok_url else config.ollama_host

    print("=== Ollama Connectivity Diagnostic ===")
    print(f"Testing Ollama Host: {test_url}")
    print()
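
    # "ngrok-skip-browser-warning" bypasses ngrok's interstitial warning page on free-tier tunnels;
    # the custom User-Agent simply identifies this diagnostic client in server logs.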
    headers = {
        "ngrok-skip-browser-warning": "true",
        "User-Agent": "AI-Life-Coach-Diagnostic"
    }

    print("Test 1: Checking Ollama host connectivity...")
    try:
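        # GET /api/tags asks the Ollama server for the list of locally installed models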
        response = requests.get(
            f"{test_url}/api/tags",
            headers=headers,
            timeout=10
        )
        print(f" Status Code: {response.status_code}")
        if response.status_code == 200:
            print(" ✅ Successfully connected to Ollama host")
            try:
                data = response.json()
                models = data.get("models", [])
                print(f" Available Models: {len(models)}")
                for model in models:
                    print(f" - {model.get('name', 'Unknown model')} (Size: {model.get('size', 'Unknown')})")

                if models:
                    print("\n ✅ Ollama is working correctly with models loaded!")
                    print(" You should be able to use the AI Life Coach app now.")
                else:
                    print("\n ⚠️ Ollama is running but no models are loaded.")
                    print(" Try pulling a model: ollama pull mistral")

            except Exception as e:
                print(f" Error parsing response: {e}")
                print(f" Response text: {response.text[:200]}...")
        else:
            print(f" ❌ Unexpected status code: {response.status_code}")
            print(f" Response: {response.text[:200]}...")

    except requests.exceptions.Timeout:
        print(" ❌ Request timed out (took more than 10 seconds)")
        print(" This may indicate network issues or an unresponsive Ollama server")
    except requests.exceptions.ConnectionError as e:
        print(f" ❌ Connection error: {e}")
        print(" This may indicate that the Ollama server is not running or the URL is incorrect")
    except Exception as e:
        print(f" ❌ Unexpected error: {e}")

    print()
    print("=== Diagnostic Complete ===")
    print()
    print("Troubleshooting Tips:")
    print("1. If the connection fails, verify your Ollama server is running: ollama serve")
    print("2. If no models are found, pull a model: ollama pull mistral")
    print("3. If you are using ngrok, verify the tunnel is active and the URL is correct")
    print("4. If requests time out, check firewall settings and network connectivity")


if __name__ == "__main__":
    import sys

    # An optional first CLI argument overrides the configured Ollama host (e.g. an ngrok URL)
    custom_url = None
    if len(sys.argv) > 1:
        custom_url = sys.argv[1]
        print(f"Using custom ngrok URL: {custom_url}")

    test_ollama_connectivity(custom_url)