#!/usr/bin/env python3
"""
Installation verification script for llama.cpp in Hugging Face Space.

Run this to verify that llama.cpp is properly installed and configured.
"""

import subprocess
import sys


def run_command(command, description, timeout=30):
    """Run a shell command, print a status report, and return success.

    Args:
        command: Shell command line to execute (runs with ``shell=True``;
            only ever called with internally-built strings, never user input).
        description: Human-readable label used in the status messages.
        timeout: Seconds to wait before aborting the command. Defaults to 30
            to stay backward-compatible with existing callers; long-running
            steps (e.g. compiling llama-cpp-python) pass a larger value.

    Returns:
        True if the command exited with status 0, False otherwise
        (including on timeout or any unexpected error).
    """
    print(f"🔍 {description}...")
    try:
        result = subprocess.run(
            command,
            shell=True,
            capture_output=True,
            text=True,
            timeout=timeout,
        )
        if result.returncode == 0:
            print(f"✅ {description} - SUCCESS")
            if result.stdout.strip():
                print(f"   Output: {result.stdout.strip()}")
            return True
        print(f"❌ {description} - FAILED")
        if result.stderr.strip():
            print(f"   Error: {result.stderr.strip()}")
        return False
    except subprocess.TimeoutExpired:
        print(f"⏰ {description} - TIMEOUT")
        return False
    except Exception as e:
        # Catch-all is intentional: this is a diagnostic script and any
        # failure should be reported, not crash the whole verification run.
        print(f"❌ {description} - ERROR: {e}")
        return False


def check_python_version():
    """Check that the running interpreter is Python 3.8 or newer.

    Returns:
        True if the interpreter satisfies the minimum version, else False.
    """
    version = sys.version_info
    print(f"🐍 Python version: {version.major}.{version.minor}.{version.micro}")
    # Tuple comparison is the correct test here. The previous check
    # (`major >= 3 and minor >= 8`) would wrongly reject e.g. Python 4.0.
    if version >= (3, 8):
        print("✅ Python version is compatible")
        return True
    print("❌ Python version should be 3.8 or higher")
    return False


def check_system_packages():
    """Check that the build toolchain needed to compile llama.cpp is on PATH.

    Returns:
        True only if every required tool (gcc, g++, cmake, make) is found.
    """
    packages = ["gcc", "g++", "cmake", "make"]
    # Materialize the list first so EVERY tool is checked and reported,
    # rather than letting all() short-circuit after the first failure.
    results = [
        run_command(f"which {package}", f"Checking {package}")
        for package in packages
    ]
    return all(results)


def install_and_test_llamacpp():
    """Install llama-cpp-python via pip and verify it can be imported.

    Returns:
        True if both the installation and the import test succeed.
    """
    print("\n📦 Installing llama-cpp-python...")

    # Building llama-cpp-python from source routinely takes several minutes,
    # so the default 30 s timeout would make this step fail spuriously.
    install_success = run_command(
        f"{sys.executable} -m pip install llama-cpp-python --verbose",
        "Installing llama-cpp-python",
        timeout=1800,
    )

    if not install_success:
        print("❌ Failed to install llama-cpp-python")
        return False

    # Importing loads the compiled shared library, which can be slow on a
    # cold cache — allow more than the default timeout.
    test_success = run_command(
        f"{sys.executable} -c 'from llama_cpp import Llama; print(\"Import successful\")'",
        "Testing llama-cpp-python import",
        timeout=120,
    )

    return test_success


def main():
    """Run all verification checks and print a summary.

    Returns:
        True if every check passed, False otherwise.
    """
    print("🚀 llama.cpp Installation Verification for Hugging Face Space")
    print("=" * 70)

    checks = [
        ("Python Version", check_python_version),
        ("System Packages", check_system_packages),
        ("llama-cpp-python Installation", install_and_test_llamacpp),
    ]

    results = []
    for check_name, check_func in checks:
        print(f"\n🧪 Running: {check_name}")
        print("-" * 40)
        result = check_func()
        results.append(result)
        print()

    print("=" * 70)
    print("📊 VERIFICATION SUMMARY:")
    for i, (check_name, _) in enumerate(checks):
        status = "✅ PASSED" if results[i] else "❌ FAILED"
        print(f"   {check_name}: {status}")

    if all(results):
        print("\n🎉 ALL CHECKS PASSED!")
        print("✅ llama.cpp is successfully installed and ready to use.")
        print("\n📝 Next steps:")
        print("   1. Run 'python test_llamacpp.py' to test the integration")
        print("   2. Start your Gradio app with 'python app.py'")
        print("   3. Upload a GGUF model file to enable full functionality")
    else:
        print("\n⚠️ SOME CHECKS FAILED!")
        print("❌ Please review the errors above and fix them before proceeding.")
        print("\n🔧 Common solutions:")
        print("   - Ensure build tools are installed (build-essential, cmake)")
        print("   - Check that you have sufficient memory and disk space")
        print("   - Try reinstalling with: pip install --force-reinstall llama-cpp-python")

    return all(results)


if __name__ == "__main__":
    success = main()
    sys.exit(0 if success else 1)