# Use Ubuntu as base image for better Ollama compatibility
FROM ubuntu:22.04
# Prevent interactive prompts during package installation
ENV DEBIAN_FRONTEND=noninteractive
# Set working directory
WORKDIR /app
# Install system dependencies
RUN apt-get update && apt-get install -y \
    curl \
    python3 \
    python3-pip \
    python3-venv \
    wget \
    ca-certificates \
    sudo \
    && rm -rf /var/lib/apt/lists/*
# Create a virtual environment and put it on PATH (the Docker equivalent of activating it)
RUN python3 -m venv /opt/venv
ENV PATH="/opt/venv/bin:$PATH"
# Install Python dependencies
COPY requirements.txt .
RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir -r requirements.txt
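# requirements.txt is not shown in this file; a minimal sketch of what it likely
# lists, assuming app.py is a FastAPI app served by uvicorn that calls the local
# Ollama HTTP API (package choices, including the HTTP client, are assumptions):
#   fastapi
#   uvicorn[standard]
#   httpx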
# Install Ollama
RUN curl -fsSL https://ollama.ai/install.sh | sh
# Create the Ollama data directory and set permissions
RUN mkdir -p /app/.ollama && \
    chmod 755 /app/.ollama
# Copy application files
COPY app.py .
COPY startup.sh .
# Make startup script executable
RUN chmod +x startup.sh
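# startup.sh itself is not included in this file; a minimal sketch of what it is
# expected to do, given the CMD and health check below (command names and flags
# are assumptions, not the actual script):
#   #!/bin/bash
#   ollama serve &                                      # run the Ollama server in the background
#   sleep 5                                             # give it a moment to bind 0.0.0.0:11434
#   exec uvicorn app:app --host 0.0.0.0 --port 7860     # run the FastAPI app in the foreground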
# Set environment variables for Ollama
ENV OLLAMA_HOST=0.0.0.0:11434
ENV OLLAMA_MODELS=/app/.ollama/models
ENV OLLAMA_HOME=/app/.ollama
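# OLLAMA_HOST binds the Ollama server to all interfaces on port 11434 so the
# FastAPI app (and the exposed port) can reach it; OLLAMA_MODELS points model
# storage at the writable /app/.ollama directory created above, and OLLAMA_HOME
# is presumably intended to keep the rest of Ollama's state there as well.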
# Expose ports
EXPOSE 7860 11434
# Health check
HEALTHCHECK --interval=30s --timeout=30s --start-period=60s --retries=3 \
    CMD curl -f http://localhost:7860/health || exit 1
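# The check above assumes app.py serves a GET /health route on port 7860; a
# minimal sketch of such a route (handler name and response body are assumptions):
#   @app.get("/health")
#   def health():
#       return {"status": "ok"}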
# Start both Ollama server and FastAPI app
CMD ["./startup.sh"]