# Use an official Python runtime as a parent image
FROM python:3.9-slim
# Set working directory
WORKDIR /app
# Install system dependencies
RUN apt-get update && apt-get install -y \
    build-essential \
    git \
    && rm -rf /var/lib/apt/lists/*
# Create a non-root user
RUN useradd -m -u 1000 appuser
# Copy requirements first to leverage Docker cache
COPY requirements.txt .
# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt
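# (Assumption: requirements.txt provides at least fastapi, uvicorn, and
# transformers, which the steps below rely on; exact pins are not shown here.)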
# Create necessary directories and set permissions
RUN mkdir -p /app/saved_models /app/tokenizer /app/predictions /app/.cache \
    && chown -R appuser:appuser /app
# Switch to non-root user
USER appuser
# Copy the application code
COPY --chown=appuser:appuser . .
# ✅ Pre-download the RoBERTa tokenizer (for offline or cached inference)
RUN python -c "from transformers import RobertaTokenizer; RobertaTokenizer.from_pretrained('roberta-base', cache_dir='/app/.cache')"
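# The pre-download only helps if app.py loads the tokenizer from the same cache.
# Minimal sketch of the assumed loading call (hypothetical; the real app.py is not shown here):
#   from transformers import RobertaTokenizer
#   tokenizer = RobertaTokenizer.from_pretrained("roberta-base", cache_dir="/app/.cache")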
# Expose the port the app runs on (7860 for Hugging Face Spaces)
EXPOSE 7860
# Command to run the FastAPI app
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]