NitinBot001 commited on
Commit
d0d9e2f
·
verified ·
1 Parent(s): 19bdd67

Update Dockerfile

Browse files
Files changed (1) hide show
  1. Dockerfile +28 -19
Dockerfile CHANGED
@@ -1,28 +1,37 @@
1
- # Dockerfile for Hugging Face Spaces - Optimized for Performance
2
- FROM ollama/ollama:latest
3
 
4
- # Create ollama user and set up directories with proper permissions
5
- RUN useradd -m -u 1000 ollama && \
6
- mkdir -p /home/ollama/.ollama && \
7
- chown -R ollama:ollama /home/ollama/.ollama
 
 
8
 
9
  # Set environment variables for Hugging Face Spaces
10
- ENV OLLAMA_HOST=0.0.0.0:7860
11
- ENV OLLAMA_MODELS=/home/ollama/.ollama/models
 
 
 
12
 
13
  # Performance optimizations
14
- ENV OLLAMA_NUM_PARALLEL=4
15
- ENV OLLAMA_MAX_LOADED_MODELS=1
16
- ENV OLLAMA_CONTEXT_LENGTH=8196
17
- ENV OLLAMA_KEEP_ALIVE=1m
18
- ENV OLLAMA_CPU_THREADS=4
19
 
20
- # Switch to ollama user
21
- USER ollama
22
- WORKDIR /home/ollama
23
 
24
- # Hugging Face Spaces uses port 7860
25
  EXPOSE 7860
26
 
27
- # Start Ollama server
28
- CMD ["serve"]
 
 
 
 
 
# LocalAI Dockerfile for Hugging Face Spaces.
# Runs the LocalAI server as a non-root user on port 7860 (the port Spaces expects).

# Parameterize the base-image tag so builds can pin a release
# (e.g. `docker build --build-arg LOCALAI_TAG=v2.20.1 .`).
# Defaults to "latest" to preserve the existing behavior, but pinning a
# specific tag (or digest) is recommended for reproducible builds (hadolint DL3007).
ARG LOCALAI_TAG=latest
FROM quay.io/go-skynet/local-ai:${LOCALAI_TAG}

# Create a non-root user with a stable numeric UID so runtimes that enforce
# runAsNonRoot can verify it; UID 1000 is the conventional Spaces user.
RUN adduser --disabled-password --gecos "" --uid 1000 localai

# Create the model/gallery/config directories and hand ownership to the
# runtime user in the SAME layer — a chown in a later layer would duplicate
# the directory contents into a new layer.
RUN mkdir -p /build/models /build/galleries /build/config && \
    chown -R localai:localai /build

# Runtime configuration for Hugging Face Spaces: bind on all interfaces at
# port 7860 (the port Spaces routes external traffic to) and keep all
# writable state under /build, which the localai user owns.
ENV PORT=7860 \
    ADDRESS=0.0.0.0 \
    MODELS_PATH=/build/models \
    GALLERIES_PATH=/build/galleries \
    CONFIG_PATH=/build/config

# Performance tuning for constrained Spaces hardware: modest thread count and
# context window, a single active backend, no source rebuild at startup, and
# quiet logs.
ENV THREADS=4 \
    CONTEXT_SIZE=2048 \
    REBUILD=false \
    DEBUG=false \
    SINGLE_ACTIVE_BACKEND=true

# Drop privileges for everything from here on (no further root-only steps).
USER localai
WORKDIR /build

# Documentation only — Spaces publishes the port itself. 7860 > 1024, so the
# non-root user can bind it.
EXPOSE 7860

# Probe LocalAI's readiness endpoint so a wedged container is detected.
# NOTE(review): assumes `curl` is present in the base image — confirm, or
# replace with wget / a self-check subcommand if it is not.
HEALTHCHECK --interval=30s --timeout=10s --start-period=30s --retries=3 \
    CMD curl -f http://localhost:7860/readyz || exit 1

# Start the LocalAI server. Exec form so the process runs as PID 1 and
# receives SIGTERM from `docker stop`.
CMD ["local-ai"]