json-structured / install_verify.py
Dev8709's picture
Updated
44b5c36
raw
history blame
4.14 kB
#!/usr/bin/env python3
"""
Installation verification script for llama.cpp in Hugging Face Space
Run this to verify that llama.cpp is properly installed and configured
"""
import subprocess
import sys
import os
def run_command(command, description):
    """Run a shell command, print a progress report, and return True on exit code 0.

    Captured stdout/stderr is echoed for diagnostics; execution is capped
    at 30 seconds so a hung tool cannot stall the whole verification.
    """
    print(f"πŸ” {description}...")
    try:
        proc = subprocess.run(
            command, shell=True, capture_output=True, text=True, timeout=30
        )
    except subprocess.TimeoutExpired:
        print(f"⏰ {description} - TIMEOUT")
        return False
    except Exception as exc:  # defensive: surface anything unexpected as a failure
        print(f"❌ {description} - ERROR: {exc}")
        return False

    if proc.returncode != 0:
        print(f"❌ {description} - FAILED")
        if proc.stderr.strip():
            print(f" Error: {proc.stderr.strip()}")
        return False

    print(f"βœ… {description} - SUCCESS")
    if proc.stdout.strip():
        print(f" Output: {proc.stdout.strip()}")
    return True
def check_python_version(min_version=(3, 8)):
    """Check that the running interpreter meets *min_version*.

    Args:
        min_version: (major, minor) tuple giving the minimum acceptable
            Python version. Defaults to (3, 8), matching the original check.

    Returns:
        True if the interpreter is at least *min_version*, else False.
    """
    version = sys.version_info
    print(f"🐍 Python version: {version.major}.{version.minor}.{version.micro}")
    # Tuple comparison handles version ordering correctly. The previous
    # `major >= 3 and minor >= 8` test would wrongly reject e.g. Python 4.0
    # (minor 0 < 8) even though it is newer than 3.8.
    if version[:2] >= min_version:
        print("βœ… Python version is compatible")
        return True
    else:
        print(f"❌ Python version should be {min_version[0]}.{min_version[1]} or higher")
        return False
def check_system_packages():
    """Verify the build toolchain (gcc, g++, cmake, make) is on the PATH.

    Every tool is probed even after a failure so the report covers all of
    them; returns True only when all probes succeed.
    """
    required = ("gcc", "g++", "cmake", "make")
    statuses = [
        run_command(f"which {tool}", f"Checking {tool}") for tool in required
    ]
    return all(statuses)
def install_and_test_llamacpp():
    """Install llama-cpp-python with pip, then verify the module imports.

    Returns True only when both the installation and the import check
    succeed; the import runs in a subprocess so a crash in the native
    extension cannot take down this script.
    """
    print("\nπŸ“¦ Installing llama-cpp-python...")
    pip_cmd = f"{sys.executable} -m pip install llama-cpp-python --verbose"
    if not run_command(pip_cmd, "Installing llama-cpp-python"):
        print("❌ Failed to install llama-cpp-python")
        return False

    import_cmd = (
        f"{sys.executable} -c "
        "'from llama_cpp import Llama; print(\"Import successful\")'"
    )
    return run_command(import_cmd, "Testing llama-cpp-python import")
def main():
    """Run every verification check, print a summary, and return overall success."""
    print("πŸš€ llama.cpp Installation Verification for Hugging Face Space")
    print("=" * 70)

    # (label, callable) pairs, executed in order.
    checks = [
        ("Python Version", check_python_version),
        ("System Packages", check_system_packages),
        ("llama-cpp-python Installation", install_and_test_llamacpp),
    ]

    results = []
    for label, check in checks:
        print(f"\nπŸ§ͺ Running: {label}")
        print("-" * 40)
        results.append(check())
        print()

    print("=" * 70)
    print("πŸ“Š VERIFICATION SUMMARY:")
    for (label, _), passed in zip(checks, results):
        status = "βœ… PASSED" if passed else "❌ FAILED"
        print(f" {label}: {status}")

    overall = all(results)
    if overall:
        print("\nπŸŽ‰ ALL CHECKS PASSED!")
        print("βœ… llama.cpp is successfully installed and ready to use.")
        print("\nπŸ“ Next steps:")
        print(" 1. Run 'python test_llamacpp.py' to test the integration")
        print(" 2. Start your Gradio app with 'python app.py'")
        print(" 3. Upload a GGUF model file to enable full functionality")
    else:
        print("\n⚠️ SOME CHECKS FAILED!")
        print("❌ Please review the errors above and fix them before proceeding.")
        print("\nπŸ”§ Common solutions:")
        print(" - Ensure build tools are installed (build-essential, cmake)")
        print(" - Check that you have sufficient memory and disk space")
        print(" - Try reinstalling with: pip install --force-reinstall llama-cpp-python")
    return overall
if __name__ == "__main__":
    # Mirror the overall verification result in the process exit code
    # so CI / shell callers can branch on it (0 = all checks passed).
    success = main()
    sys.exit(0 if success else 1)