#!/usr/bin/env python3
"""
Test script to verify llama.cpp installation
"""
def test_llamacpp_import():
    """Test if llama-cpp-python can be imported"""
    try:
        from llama_cpp import Llama
        print("✅ llama-cpp-python imported successfully")
        return True
    except ImportError as e:
        print(f"❌ Failed to import llama-cpp-python: {e}")
        return False
def test_basic_functionality():
    """Test basic llama.cpp functionality without a model"""
    try:
        from llama_cpp import Llama
        print("✅ llama.cpp classes accessible")
        # Check that the Llama class exposes its completion API without loading a model
        assert hasattr(Llama, "create_completion")
        print("✅ Llama class instantiable (without model)")
        return True
    except Exception as e:
        print(f"❌ Error testing basic functionality: {e}")
        return False
def test_dependencies():
    """Test other required dependencies"""
    dependencies = [
        "gradio",
        "numpy",
        "json",
        "huggingface_hub"
    ]
    all_good = True
    for dep in dependencies:
        try:
            __import__(dep)
            print(f"✅ {dep} imported successfully")
        except ImportError as e:
            print(f"❌ Failed to import {dep}: {e}")
            all_good = False
    return all_good
if __name__ == "__main__":
print("Testing llama.cpp installation for Hugging Face Space...")
print("=" * 60)
tests = [
("llama-cpp-python import", test_llamacpp_import),
("Basic functionality", test_basic_functionality),
("Dependencies", test_dependencies),
]
results = []
for test_name, test_func in tests:
print(f"\nπ§ͺ Running: {test_name}")
result = test_func()
results.append(result)
print("\n" + "=" * 60)
print("Test Summary:")
if all(results):
print("π All tests passed! llama.cpp is ready for use.")
else:
print("β οΈ Some tests failed. Check the output above.")
print("This might be expected if running before dependencies are installed.") |