Luigi committed on
Commit
47daff0
·
1 Parent(s): 957ece1

update dockerfile

Browse files
Files changed (1) hide show
  1. Dockerfile +16 -14
Dockerfile CHANGED
@@ -1,7 +1,7 @@
1
- # Use Ubuntu 22.04 as a public base image
2
  FROM ubuntu:22.04
3
 
4
- # Disable interactive prompts and preconfigure timezone
5
  ENV DEBIAN_FRONTEND=noninteractive
6
  ENV TZ=Etc/UTC
7
 
@@ -9,7 +9,7 @@ ENV TZ=Etc/UTC
9
  ENV XDG_CACHE_HOME=/tmp/.cache
10
  ENV HF_HOME=/tmp/.cache/huggingface
11
 
12
- # Use bash with strict modes for debugging
13
  SHELL ["/bin/bash", "-euxo", "pipefail", "-c"]
14
 
15
  # Preseed tzdata to avoid interactive prompt
@@ -17,7 +17,7 @@ RUN echo "tzdata tzdata/Areas select Etc" > /tmp/tzdata.seed && \
17
  echo "tzdata tzdata/Zones/Etc select UTC" >> /tmp/tzdata.seed && \
18
  debconf-set-selections /tmp/tzdata.seed
19
 
20
- # STEP 1: Install OS-level dependencies
21
  RUN echo "### STEP 1: Installing OS-level dependencies" && \
22
  apt-get update && \
23
  apt-get install -y --no-install-recommends \
@@ -29,6 +29,7 @@ RUN echo "### STEP 1: Installing OS-level dependencies" && \
29
  git \
30
  python3 \
31
  python3-pip \
 
32
  python3-opencv && \
33
  rm -rf /var/lib/apt/lists/*
34
 
@@ -36,24 +37,25 @@ RUN echo "### STEP 1: Installing OS-level dependencies" && \
36
  WORKDIR /app
37
  COPY requirements.txt ./
38
  COPY app.py ./
39
- # COPY any other source files or directories needed by your app
40
 
41
- # STEP 3: Install Python dependencies (ensure huggingface_hub is listed)
42
  RUN echo "### STEP 3: Installing Python dependencies" && \
43
- python3 -m pip install --upgrade pip && \
44
  pip install --no-cache-dir -r requirements.txt
45
 
46
  # STEP 4: Ensure cache directories are writable
47
- RUN echo "### STEP 4: Creating and permissioning cache directories" && \
48
  mkdir -p "$XDG_CACHE_HOME" "$HF_HOME" && \
49
  chmod -R a+rwX "$XDG_CACHE_HOME" "$HF_HOME"
50
 
51
- # STEP 5: Build and install llama-cpp-python from source with OpenBLAS
52
- RUN echo "### STEP 5: Building llama-cpp-python with OpenBLAS" && \
53
- export CMAKE_ARGS="-DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS" && \
54
- pip install --no-cache-dir --force-reinstall --no-binary llama-cpp-python llama-cpp-python==0.2.0
 
 
55
 
56
- # STEP 6: Finalize and launch the application
57
- RUN echo "### STEP 6: Finalizing Docker image"
58
  EXPOSE 7860
59
  CMD ["python3", "app.py"]
 
1
# Use Ubuntu 22.04 as a base image (always pin an explicit tag)
FROM ubuntu:22.04

# Disable interactive apt prompts for the BUILD only. Declared as ARG
# rather than ENV so the setting is not baked into the runtime
# environment of containers created from this image (Docker build-check
# guidance for DEBIAN_FRONTEND); it is still visible to every RUN step
# in this stage.
ARG DEBIAN_FRONTEND=noninteractive
# TZ stays ENV: tzdata and the application read it at runtime as well.
ENV TZ=Etc/UTC
7
 
 
9
# Redirect user-level caches (XDG and Hugging Face) into the writable
# /tmp tree; grouped in one ENV instruction as they are related.
ENV XDG_CACHE_HOME=/tmp/.cache \
    HF_HOME=/tmp/.cache/huggingface

# Run every RUN step under bash with strict modes (-e -u -x -o pipefail)
# so failures anywhere in a pipeline abort the build instead of being
# silently masked by the default /bin/sh -c.
SHELL ["/bin/bash", "-euxo", "pipefail", "-c"]
14
 
15
  # Preseed tzdata to avoid interactive prompt
 
17
  echo "tzdata tzdata/Zones/Etc select UTC" >> /tmp/tzdata.seed && \
18
  debconf-set-selections /tmp/tzdata.seed
19
 
20
+ # STEP 1: Install OS-level build and runtime dependencies
21
  RUN echo "### STEP 1: Installing OS-level dependencies" && \
22
  apt-get update && \
23
  apt-get install -y --no-install-recommends \
 
29
  git \
30
  python3 \
31
  python3-pip \
32
+ python3-dev \
33
  python3-opencv && \
34
  rm -rf /var/lib/apt/lists/*
35
 
 
37
# STEP 2: Set the working directory and copy the application inputs.
# WORKDIR creates /app if it does not exist — no mkdir needed.
WORKDIR /app
COPY requirements.txt ./
# NOTE(review): copying app.py here, before the pip-install layer,
# invalidates the dependency cache on every source change — consider
# moving this COPY below STEP 3 to keep dependency layers cached.
COPY app.py ./
# Copy additional source as needed
41
 
42
# STEP 3: Install Python dependencies.
# Upgrade pip plus build tooling first so requirements that compile from
# source (scikit-build-core based packages) install cleanly. Both pip
# invocations use `python3 -m pip` for consistency and `--no-cache-dir`
# (hadolint DL3042) so the wheel cache does not bloat the image layer.
RUN echo "### STEP 3: Installing Python dependencies" && \
    python3 -m pip install --no-cache-dir --upgrade pip setuptools wheel scikit-build-core && \
    python3 -m pip install --no-cache-dir -r requirements.txt
46
 
47
# STEP 4: Ensure cache directories are writable.
# a+rwX is intentional here: the container may run as an arbitrary
# non-root UID (Spaces-style runtimes) that still needs to write to the
# XDG/HF caches created at build time.
RUN echo "### STEP 4: Configuring cache directories" && \
    for cache_dir in "$XDG_CACHE_HOME" "$HF_HOME"; do \
        mkdir -p "$cache_dir" && chmod -R a+rwX "$cache_dir"; \
    done
51
 
52
# STEP 5: Build and install llama-cpp-python from Git with OpenBLAS support.
# The ref is parameterized so builds can be pinned to a release tag
# (e.g. --build-arg LLAMA_CPP_PYTHON_REF=v0.2.90) for reproducibility;
# the default "main" preserves the previous behavior of building the
# current default branch. Installing from the path avoids `cd` in RUN
# (hadolint DL3003), and --no-cache-dir keeps pip's cache out of the
# layer; the clone is removed in the same layer that created it.
ARG LLAMA_CPP_PYTHON_REF=main
RUN echo "### STEP 5: Building llama-cpp-python from source" && \
    git clone --depth 1 --branch "$LLAMA_CPP_PYTHON_REF" --recurse-submodules \
        https://github.com/abetlen/llama-cpp-python.git /tmp/llama-cpp-python && \
    CMAKE_ARGS="-DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS" \
        python3 -m pip install --no-cache-dir /tmp/llama-cpp-python && \
    rm -rf /tmp/llama-cpp-python
58
 
59
# Expose port and set default command
# EXPOSE is documentation only — it does not publish the port; the
# runtime/platform maps 7860 when starting the container.
EXPOSE 7860
# Exec-form CMD so python3 runs as PID 1 and receives SIGTERM directly
# on `docker stop` (no intermediate /bin/sh -c).
CMD ["python3", "app.py"]