#!/usr/bin/env python3
"""Installation verification script for llama.cpp in a Hugging Face Space.

Run this to verify that llama.cpp is properly installed and configured.
"""
import subprocess
import sys
import os  # kept: may be relied on elsewhere in the deployment environment


def run_command(command, description):
    """Run *command* in a shell and return True iff it exits with code 0.

    Prints a progress line, then the command's stdout on success or stderr
    on failure. The command is killed after 30 seconds.

    Args:
        command: Shell command string (executed with shell=True — only ever
            called here with trusted, hard-coded commands).
        description: Human-readable label used in the printed status lines.

    Returns:
        bool: True on exit code 0, False on non-zero exit, timeout, or error.
    """
    print(f"[RUN] {description}...")
    try:
        result = subprocess.run(
            command, shell=True, capture_output=True, text=True, timeout=30
        )
        if result.returncode == 0:
            print(f"[OK] {description} - SUCCESS")
            if result.stdout.strip():
                print(f"   Output: {result.stdout.strip()}")
            return True
        print(f"[FAIL] {description} - FAILED")
        if result.stderr.strip():
            print(f"   Error: {result.stderr.strip()}")
        return False
    except subprocess.TimeoutExpired:
        print(f"[TIMEOUT] {description} - TIMEOUT")
        return False
    except Exception as e:  # best-effort diagnostic script: report, don't crash
        print(f"[ERROR] {description} - ERROR: {e}")
        return False


def check_python_version():
    """Check that the running interpreter is Python 3.8 or newer.

    Returns:
        bool: True when compatible, False otherwise.
    """
    version = sys.version_info
    print(f"[INFO] Python version: {version.major}.{version.minor}.{version.micro}")
    # Tuple comparison handles future major versions correctly; the previous
    # `major >= 3 and minor >= 8` check would wrongly reject e.g. Python 4.0.
    if sys.version_info >= (3, 8):
        print("[OK] Python version is compatible")
        return True
    print("[FAIL] Python version should be 3.8 or higher")
    return False


def check_system_packages():
    """Check that the build tools needed to compile llama.cpp are on PATH.

    Returns:
        bool: True only if every required tool (gcc, g++, cmake, make) is found.
    """
    packages = ["gcc", "g++", "cmake", "make"]
    results = [
        run_command(f"which {package}", f"Checking {package}")
        for package in packages
    ]
    return all(results)


def install_and_test_llamacpp():
    """Install llama-cpp-python via pip and verify it imports.

    Returns:
        bool: True if both the pip install and the import test succeed.
    """
    print("\n[STEP] Installing llama-cpp-python...")

    # Install llama-cpp-python (compiles native code; --verbose surfaces
    # build errors in the Space logs).
    install_success = run_command(
        f"{sys.executable} -m pip install llama-cpp-python --verbose",
        "Installing llama-cpp-python",
    )
    if not install_success:
        print("[FAIL] Failed to install llama-cpp-python")
        return False

    # Import in a fresh interpreter so we test the installed package, not
    # anything already loaded in this process.
    test_success = run_command(
        f"{sys.executable} -c 'from llama_cpp import Llama; print(\"Import successful\")'",
        "Testing llama-cpp-python import",
    )
    return test_success


def main():
    """Run all verification checks and print a summary.

    Returns:
        bool: True only if every check passed.
    """
    print("llama.cpp Installation Verification for Hugging Face Space")
    print("=" * 70)

    checks = [
        ("Python Version", check_python_version),
        ("System Packages", check_system_packages),
        ("llama-cpp-python Installation", install_and_test_llamacpp),
    ]

    results = []
    for check_name, check_func in checks:
        print(f"\n[TEST] Running: {check_name}")
        print("-" * 40)
        results.append(check_func())
        print()

    print("=" * 70)
    print("VERIFICATION SUMMARY:")
    for (check_name, _), passed in zip(checks, results):
        status = "PASSED" if passed else "FAILED"
        print(f"   {check_name}: {status}")

    if all(results):
        print("\nALL CHECKS PASSED!")
        print("llama.cpp is successfully installed and ready to use.")
        print("\nNext steps:")
        print("   1. Run 'python test_llamacpp.py' to test the integration")
        print("   2. Start your Gradio app with 'python app.py'")
        print("   3. Upload a GGUF model file to enable full functionality")
    else:
        print("\nSOME CHECKS FAILED!")
        print("Please review the errors above and fix them before proceeding.")
        print("\nCommon solutions:")
        print("   - Ensure build tools are installed (build-essential, cmake)")
        print("   - Check that you have sufficient memory and disk space")
        print("   - Try reinstalling with: pip install --force-reinstall llama-cpp-python")

    return all(results)


if __name__ == "__main__":
    success = main()
    sys.exit(0 if success else 1)