Luigi committed on
Commit
4b95538
·
1 Parent(s): f7d60e0
Files changed (1) hide show
  1. Dockerfile +15 -14
Dockerfile CHANGED
@@ -5,22 +5,19 @@ FROM ubuntu:22.04
5
  ENV DEBIAN_FRONTEND=noninteractive
6
  ENV TZ=Etc/UTC
7
 
8
- # Configure Hugging Face cache to a writable directory
9
- ENV XDG_CACHE_HOME=/app/.cache
10
- ENV HF_HOME=/app/.cache/huggingface
11
 
12
  # Use bash with strict modes for debugging
13
  SHELL ["/bin/bash", "-euxo", "pipefail", "-c"]
14
 
15
- # Create cache directories
16
- RUN mkdir -p "$XDG_CACHE_HOME" "$HF_HOME"
17
-
18
  # Preseed tzdata to avoid interactive prompt
19
  RUN echo "tzdata tzdata/Areas select Etc" > /tmp/tzdata.seed && \
20
  echo "tzdata tzdata/Zones/Etc select UTC" >> /tmp/tzdata.seed && \
21
  debconf-set-selections /tmp/tzdata.seed
22
 
23
- # 1. Install OS-level dependencies, including pkg-config and git
24
  RUN echo "### STEP 1: Installing OS-level dependencies" && \
25
  apt-get update && \
26
  apt-get install -y --no-install-recommends \
@@ -35,24 +32,28 @@ RUN echo "### STEP 1: Installing OS-level dependencies" && \
35
  python3-opencv && \
36
  rm -rf /var/lib/apt/lists/*
37
 
38
- # 2. Prepare application directory and copy code
39
- RUN echo "### STEP 2: Preparing application directory"
40
  WORKDIR /app
41
  COPY requirements.txt ./
42
  COPY app.py ./
43
- # COPY any other source files or directories as needed
44
 
45
  # 3. Install Python dependencies (excluding llama-cpp-python)
46
- RUN echo "### STEP 3: Installing Python dependencies" && \
47
  pip3 install --upgrade pip && \
48
  pip3 install --no-cache-dir -r requirements.txt
49
 
50
- # 4. Build and install llama-cpp-python from source with OpenBLAS
51
- RUN echo "### STEP 4: Building and installing llama-cpp-python with OpenBLAS" && \
 
 
 
 
 
52
  export CMAKE_ARGS="-DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS" && \
53
  pip3 install --no-cache-dir --force-reinstall --no-binary llama-cpp-python llama-cpp-python
54
 
55
- # 5. Finalize and launch
56
  RUN echo "### STEP 5: Finalizing Docker image"
57
  EXPOSE 7860
58
  CMD ["python3", "app.py"]
 
5
  ENV DEBIAN_FRONTEND=noninteractive
6
  ENV TZ=Etc/UTC
7
 
8
+ # Configure Hugging Face and XDG cache to use a writable /tmp directory
9
+ ENV XDG_CACHE_HOME=/tmp/.cache
10
+ ENV HF_HOME=/tmp/.cache/huggingface
11
 
12
  # Use bash with strict modes for debugging
13
  SHELL ["/bin/bash", "-euxo", "pipefail", "-c"]
14
 
 
 
 
15
  # Preseed tzdata to avoid interactive prompt
16
  RUN echo "tzdata tzdata/Areas select Etc" > /tmp/tzdata.seed && \
17
  echo "tzdata tzdata/Zones/Etc select UTC" >> /tmp/tzdata.seed && \
18
  debconf-set-selections /tmp/tzdata.seed
19
 
20
+ # 1. Install OS-level dependencies (including pkg-config and git)
21
  RUN echo "### STEP 1: Installing OS-level dependencies" && \
22
  apt-get update && \
23
  apt-get install -y --no-install-recommends \
 
32
  python3-opencv && \
33
  rm -rf /var/lib/apt/lists/*
34
 
35
+ # 2. Prepare application directory
 
36
  WORKDIR /app
37
  COPY requirements.txt ./
38
  COPY app.py ./
39
+ # (Copy any other source files or directories needed)
40
 
41
  # 3. Install Python dependencies (excluding llama-cpp-python)
42
+ RUN echo "### STEP 2: Installing Python dependencies" && \
43
  pip3 install --upgrade pip && \
44
  pip3 install --no-cache-dir -r requirements.txt
45
 
46
+ # 4. Ensure cache directories are writable by runtime user
47
+ RUN echo "### STEP 3: Creating cache directories" && \
48
+ mkdir -p "$XDG_CACHE_HOME" "$HF_HOME" && \
49
+ chmod -R a+rwX "$XDG_CACHE_HOME"
50
+
51
+ # 5. Build and install llama-cpp-python from source with OpenBLAS
52
+ RUN echo "### STEP 4: Building llama-cpp-python with OpenBLAS" && \
53
  export CMAKE_ARGS="-DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS" && \
54
  pip3 install --no-cache-dir --force-reinstall --no-binary llama-cpp-python llama-cpp-python
55
 
56
+ # 6. Finalize and launch the app
57
  RUN echo "### STEP 5: Finalizing Docker image"
58
  EXPOSE 7860
59
  CMD ["python3", "app.py"]