# NOTE: the three lines that were here ("Spaces:", "Running", "Running") were
# Hugging Face Space UI residue captured during extraction, not part of the
# script; converted to this comment so the file parses as Python.
#!/usr/bin/env python3 | |
""" | |
Installation verification script for llama.cpp in Hugging Face Space | |
Run this to verify that llama.cpp is properly installed and configured | |
""" | |
import subprocess | |
import sys | |
import os | |
def run_command(command, description):
    """Execute *command* through the shell and report whether it succeeded.

    Prints a status line labeled with *description*, echoes captured stdout
    on success or stderr on failure, and returns True only for exit code 0.
    A 30-second timeout or any launch error is reported and returns False.
    """
    print(f"π {description}...")
    try:
        proc = subprocess.run(
            command, shell=True, capture_output=True, text=True, timeout=30
        )
    except subprocess.TimeoutExpired:
        print(f"β° {description} - TIMEOUT")
        return False
    except Exception as exc:
        print(f"β {description} - ERROR: {exc}")
        return False

    if proc.returncode != 0:
        print(f"β {description} - FAILED")
        err = proc.stderr.strip()
        if err:
            print(f" Error: {err}")
        return False

    print(f"β {description} - SUCCESS")
    out = proc.stdout.strip()
    if out:
        print(f" Output: {out}")
    return True
def check_python_version():
    """Check that the running interpreter is Python 3.8 or newer.

    Prints the detected version and a compatibility verdict.

    Returns:
        bool: True when the interpreter satisfies the minimum version.
    """
    version = sys.version_info
    print(f"π Python version: {version.major}.{version.minor}.{version.micro}")
    # Compare (major, minor) as a tuple. The previous test
    # `major >= 3 and minor >= 8` wrongly rejected any future major
    # release with a small minor (e.g. 4.0, since 0 < 8).
    if (version.major, version.minor) >= (3, 8):
        print("β Python version is compatible")
        return True
    print("β Python version should be 3.8 or higher")
    return False
def check_system_packages():
    """Verify that the native build toolchain is available on PATH.

    Probes gcc, g++, cmake and make via `which`, reporting each one.
    Every tool is checked (no short-circuit) so the user sees the full
    picture; returns True only when all of them are found.
    """
    required = ("gcc", "g++", "cmake", "make")
    # List comprehension (not a generator) so every probe runs even
    # after the first failure, matching the original behavior.
    outcomes = [run_command(f"which {tool}", f"Checking {tool}") for tool in required]
    return all(outcomes)
def install_and_test_llamacpp():
    """Install llama-cpp-python with pip, then confirm it imports.

    Uses the current interpreter (`sys.executable`) for both steps so the
    package lands in the right environment. Returns True only when the
    install succeeds and the subsequent import test passes.
    """
    print("\nπ¦ Installing llama-cpp-python...")
    # Step 1: pip install (verbose, so build output is visible in logs).
    if not run_command(
        f"{sys.executable} -m pip install llama-cpp-python --verbose",
        "Installing llama-cpp-python",
    ):
        print("β Failed to install llama-cpp-python")
        return False
    # Step 2: import smoke test in a fresh subprocess.
    return run_command(
        f"{sys.executable} -c 'from llama_cpp import Llama; print(\"Import successful\")'",
        "Testing llama-cpp-python import",
    )
def main():
    """Run every verification check in sequence and print a summary.

    Returns:
        bool: True only when all checks passed.
    """
    print("π llama.cpp Installation Verification for Hugging Face Space")
    print("=" * 70)
    checks = [
        ("Python Version", check_python_version),
        ("System Packages", check_system_packages),
        ("llama-cpp-python Installation", install_and_test_llamacpp),
    ]
    results = []
    for title, probe in checks:
        print(f"\nπ§ͺ Running: {title}")
        print("-" * 40)
        results.append(probe())
        print()
    print("=" * 70)
    print("π VERIFICATION SUMMARY:")
    # Pair each check with its outcome instead of indexing by position.
    for (title, _), passed in zip(checks, results):
        status = "β PASSED" if passed else "β FAILED"
        print(f" {title}: {status}")
    ok = all(results)
    if ok:
        print("\nπ ALL CHECKS PASSED!")
        print("β llama.cpp is successfully installed and ready to use.")
        print("\nπ Next steps:")
        print(" 1. Run 'python test_llamacpp.py' to test the integration")
        print(" 2. Start your Gradio app with 'python app.py'")
        print(" 3. Upload a GGUF model file to enable full functionality")
    else:
        print("\nβ οΈ SOME CHECKS FAILED!")
        print("β Please review the errors above and fix them before proceeding.")
        print("\nπ§ Common solutions:")
        print(" - Ensure build tools are installed (build-essential, cmake)")
        print(" - Check that you have sufficient memory and disk space")
        print(" - Try reinstalling with: pip install --force-reinstall llama-cpp-python")
    return ok
if __name__ == "__main__":
    # Exit status mirrors the verification outcome: 0 = all checks passed.
    sys.exit(0 if main() else 1)