Spaces: Running

back to base on hf spaces official image

Dockerfile CHANGED (+41 -55)
@@ -1,61 +1,47 @@
-#
-FROM
+# 1. Start from the official slim Python image
+FROM python:3.10-slim
 
-# Disable interactive prompts and set timezone to UTC
-ENV DEBIAN_FRONTEND=noninteractive
+# 2. Disable interactive prompts and set timezone to UTC
+ENV DEBIAN_FRONTEND=noninteractive \
+    TZ=Etc/UTC \
+    XDG_CACHE_HOME=/tmp/.cache \
+    HF_HOME=/tmp/.cache/huggingface
 
-#
-ENV XDG_CACHE_HOME=/tmp/.cache
-ENV HF_HOME=/tmp/.cache/huggingface
-# Use bash with strict modes for better debugging
+# 3. Use bash in strict mode for reliable builds
 SHELL ["/bin/bash", "-euxo", "pipefail", "-c"]
 
-# Preseed tzdata
-RUN echo "tzdata tzdata/Areas select Etc" > /tmp/tzdata.seed
-#
-#
-#
-RUN echo "### STEP 4: Configuring cache directories" && \
-    mkdir -p "$XDG_CACHE_HOME" "$HF_HOME" && \
-    chmod -R a+rwX "$XDG_CACHE_HOME" "$HF_HOME"
-
-# STEP 5: Build and install llama-cpp-python from Git with OpenBLAS support
-RUN echo "### STEP 5: Building llama-cpp-python from source" && \
-    git clone --depth 1 --recurse-submodules https://github.com/abetlen/llama-cpp-python.git /tmp/llama-cpp-python && \
-    cd /tmp/llama-cpp-python && \
-    CMAKE_ARGS="-DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS" pip install . && \
-    rm -rf /tmp/llama-cpp-python
-
-# Expose port and set default command
+# 4. Preseed tzdata, install system deps (git, cmake, build tools, OpenBLAS), then clean up
+RUN echo "tzdata tzdata/Areas select Etc" > /tmp/tzdata.seed \
+    && echo "tzdata tzdata/Zones/Etc select UTC" >> /tmp/tzdata.seed \
+    && apt-get update \
+    && apt-get install -y --no-install-recommends \
+        tzdata \
+        git \
+        cmake \
+        build-essential \
+        libopenblas-dev \
+    && rm -rf /var/lib/apt/lists/* \
+    && cp /usr/share/zoneinfo/Etc/UTC /etc/localtime \
+    && echo "Etc/UTC" > /etc/timezone
+
+# 5. Create cache directories with proper permissions
+RUN mkdir -p "$XDG_CACHE_HOME" "$HF_HOME" \
+    && chmod -R a+rwX "$XDG_CACHE_HOME" "$HF_HOME"
+
+# 6. Build and install llama-cpp-python from source with OpenBLAS support
+RUN git clone --depth 1 --recurse-submodules \
+        https://github.com/abetlen/llama-cpp-python.git /tmp/llama-cpp-python \
+    && cd /tmp/llama-cpp-python \
+    && CMAKE_ARGS="-DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS" pip install . \
+    && rm -rf /tmp/llama-cpp-python
+
+# 7. (Optional) Install any other Python deps your app needs
+# COPY requirements.txt .
+# RUN pip install --no-cache-dir -r requirements.txt
+
+# 8. Copy in your application code
+COPY . .
+
+# 9. Expose the HF Spaces port and set the default command
 EXPOSE 7860
 CMD ["python3", "app.py"]
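The CMAKE_ARGS in step 6 ask the llama.cpp build for an OpenBLAS backend. As a quick sanity check (a Python sketch, not part of this Space's code), the system-info string exposed by llama-cpp-python's low-level bindings can be printed inside the running container to confirm the installed wheel was actually linked against BLAS:

# Sketch: confirm the installed llama-cpp-python wheel picked up the BLAS backend.
# llama_print_system_info() is the low-level ctypes binding around llama.cpp's
# system-info string; the exact wording of the output varies by llama.cpp version.
from llama_cpp import llama_cpp

info = llama_cpp.llama_print_system_info()
text = info.decode("utf-8", errors="replace") if isinstance(info, bytes) else str(info)
print(text)  # an OpenBLAS build is expected to report BLAS support here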
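The image ends with EXPOSE 7860 and CMD ["python3", "app.py"]; 7860 is the port Hugging Face Spaces routes traffic to. app.py itself is not part of this diff. A minimal sketch of what it might look like, assuming a Gradio UI and a GGUF model pulled from the Hub (the model repo and file names are placeholders, and gradio plus huggingface_hub would have to come from the commented-out requirements.txt step), is:

# Hypothetical app.py sketch; the Space's real app.py is not shown in this commit.
import os

import gradio as gr                           # assumed to be in requirements.txt
from huggingface_hub import hf_hub_download   # assumed to be in requirements.txt
from llama_cpp import Llama

# Placeholder model choice; override via environment variables.
MODEL_REPO = os.getenv("MODEL_REPO", "TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF")
MODEL_FILE = os.getenv("MODEL_FILE", "tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf")

# HF_HOME=/tmp/.cache/huggingface (set in the Dockerfile) keeps this download
# inside a writable location, which matters on Spaces.
model_path = hf_hub_download(repo_id=MODEL_REPO, filename=MODEL_FILE)
llm = Llama(model_path=model_path, n_ctx=2048)

def generate(prompt: str) -> str:
    out = llm(prompt, max_tokens=256)
    return out["choices"][0]["text"]

demo = gr.Interface(fn=generate, inputs="text", outputs="text")

if __name__ == "__main__":
    # 7860 matches the EXPOSE line in the Dockerfile.
    demo.launch(server_name="0.0.0.0", server_port=7860)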