# Use Ubuntu as base image for better Ollama compatibility
FROM ubuntu:22.04
# Prevent interactive prompts during package installation
ENV DEBIAN_FRONTEND=noninteractive
# Set working directory
WORKDIR /app
# Install system dependencies
RUN apt-get update && apt-get install -y \
    curl \
    python3 \
    python3-pip \
    python3-venv \
    wget \
    ca-certificates \
    sudo \
    dos2unix \
    && rm -rf /var/lib/apt/lists/*
# Create a virtual environment and put it on PATH (the build-time equivalent of activating it)
RUN python3 -m venv /opt/venv
ENV PATH="/opt/venv/bin:$PATH"
# Install Python dependencies
COPY requirements.txt .
RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir -r requirements.txt
# Install Ollama
RUN curl -fsSL https://ollama.ai/install.sh | sh
# Copy application files
COPY app.py .
COPY startup.sh .
# Fix line endings and make startup script executable
RUN dos2unix startup.sh && chmod +x startup.sh
# Set environment variables for Ollama (using /tmp for guaranteed write access)
ENV HOME=/tmp
ENV OLLAMA_HOST=0.0.0.0:11434
ENV OLLAMA_MODELS=/tmp/ollama/models
ENV OLLAMA_HOME=/tmp/ollama
# Expose ports
EXPOSE 7860 11434
# Health check
HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 \
    CMD curl -f http://localhost:7860/health || exit 1
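# Note: this health check assumes app.py (not shown here) exposes a GET /health
# route on port 7860; adjust the path if the application uses a different endpoint.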
# Start both Ollama server and FastAPI app
CMD ["/bin/bash", "./startup.sh"]