import os
import platform
import subprocess

MODEL_DIR = "models"
LLAMA_CPP_DIR = os.path.join(MODEL_DIR, "llama.cpp")
GGUF_MODEL = "llama-3-8b-instruct.Q4_K_M.gguf"
GGUF_URL = f"https://huggingface.co/TheBloke/Llama-3-8B-Instruct-GGUF/resolve/main/{GGUF_MODEL}"
GGUF_PATH = os.path.join(MODEL_DIR, GGUF_MODEL)


def run_cmd(command, cwd=None):
    """Run a shell command, raising if it fails."""
    try:
        print(f"šŸ”§ Executing: {command}")
        subprocess.run(command, shell=True, check=True, cwd=cwd)
    except subprocess.CalledProcessError:
        print(f"āŒ Command failed: {command}")
        raise


def install_tools():
    """Install required build tools based on the detected OS."""
    system = platform.system()
    print(f"šŸ–„ļø Detected OS: {system}")
    try:
        if system == "Linux":
            run_cmd("sudo apt update && sudo apt install -y git wget build-essential")
        elif system == "Darwin":  # macOS
            run_cmd("xcode-select --install || true")
            run_cmd("brew install git wget cmake make || true")
        elif system == "Windows":
            print("āš ļø On Windows, please run this script inside Git Bash or WSL.")
        else:
            print("āš ļø Unsupported OS. Please install git, wget, and make manually.")
    except Exception:
        print("āš ļø Could not install dependencies automatically. Please install them manually.")


def clone_llama_cpp():
    """Clone llama.cpp if it is not already present."""
    if not os.path.exists(LLAMA_CPP_DIR):
        run_cmd("git clone https://github.com/ggerganov/llama.cpp.git", cwd=MODEL_DIR)
    else:
        print("āœ… llama.cpp already exists.")


def build_llama_cpp():
    """Compile llama.cpp (skipped if no Makefile is found)."""
    makefile_path = os.path.join(LLAMA_CPP_DIR, "Makefile")
    if os.path.exists(makefile_path):
        try:
            run_cmd("make", cwd=LLAMA_CPP_DIR)
            print("āœ… llama.cpp built successfully.")
        except Exception:
            print("āš ļø Failed to compile llama.cpp. You may need build tools installed.")
    else:
        print("āš ļø Makefile not found, skipping build.")


def download_model():
    """Download the GGUF model if it has not been downloaded yet."""
    if not os.path.exists(GGUF_PATH):
        print("ā¬‡ļø Downloading LLaMA 3 model...")
        run_cmd(f"wget {GGUF_URL} -O {GGUF_PATH}")
        print("āœ… Model downloaded.")
    else:
        print("āœ… Model already exists.")


def setup():
    """Main setup logic: install tools, clone llama.cpp, build it, and fetch the model."""
    os.makedirs(MODEL_DIR, exist_ok=True)
    install_tools()
    clone_llama_cpp()
    build_llama_cpp()
    download_model()
    print("\nšŸŽ‰ Setup complete. You are ready to run your local LLaMA agent.")


if __name__ == "__main__":
    setup()
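
# --- Usage sketch (not executed by this script) ---
# A minimal way to smoke-test the result after setup() completes, under two
# assumptions that are NOT guaranteed by the script above: (1) the `make` step
# produced the legacy `main` binary inside models/llama.cpp (newer llama.cpp
# releases build a `llama-cli` binary via CMake instead), and (2) the standard
# llama.cpp flags -m (model path), -p (prompt), and -n (tokens to generate)
# are available in that build. The helper below is hypothetical and only
# illustrates how the existing run_cmd/GGUF_PATH pieces could be reused.
#
# def smoke_test(prompt="Hello"):
#     binary = os.path.join(LLAMA_CPP_DIR, "main")  # assumed binary name
#     run_cmd(f'{binary} -m {GGUF_PATH} -p "{prompt}" -n 64')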