FROM ubuntu:22.04

RUN apt-get update && apt-get install -y \
    unzip wget curl python3 python3-pip

# Copy the prebuilt LLaMA binaries
COPY llama_bin.zip /app/llama_bin.zip

WORKDIR /app

# Extract the binaries
RUN unzip llama_bin.zip && rm llama_bin.zip && chmod +x bin/*

# Set the environment variable so `libllama.so` can be found
ENV LD_LIBRARY_PATH=/app/bin:$LD_LIBRARY_PATH

# Download the model
RUN mkdir -p /models
RUN wget -O /models/qwen2.5-0.5b-instruct-q5_k_m.gguf \
    https://huggingface.co/Qwen/Qwen2.5-0.5B-Instruct-GGUF/resolve/main/qwen2.5-0.5b-instruct-q5_k_m.gguf

# Run the server (bind to 0.0.0.0 so it is reachable through the container's published port)
CMD ["./bin/llama-server", "-m", "/models/qwen2.5-0.5b-instruct-q5_k_m.gguf", "--host", "0.0.0.0", "--port", "8000"]
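A minimal usage sketch for this image (the tag `llama-server-qwen` and the example prompt are illustrative, not part of the Dockerfile above): build the image, publish port 8000, and query llama.cpp's llama-server completion endpoint with curl.

# Build the image and start the server on port 8000
docker build -t llama-server-qwen .
docker run --rm -p 8000:8000 llama-server-qwen

# In another terminal: request a short completion from the running server
curl http://localhost:8000/completion \
  -H "Content-Type: application/json" \
  -d '{"prompt": "Write a haiku about Docker.", "n_predict": 64}'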