Spaces:
Sleeping
Sleeping
usmansafdarktk
committed on
Commit
·
bd77c32
1
Parent(s):
749dbd9
Fix TRANSFORMERS_CACHE permission issue
Browse files- Dockerfile +14 -2
- main.py +3 -0
- requirements.txt +3 -5
Dockerfile
CHANGED
@@ -1,12 +1,24 @@
|
|
1 |
FROM python:3.10-slim
|
2 |
|
|
|
3 |
WORKDIR /app
|
4 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
5 |
COPY requirements.txt .
|
6 |
RUN pip install --no-cache-dir -r requirements.txt
|
7 |
|
8 |
-
|
|
|
9 |
|
|
|
10 |
EXPOSE 8000
|
11 |
|
12 |
-
|
|
|
|
1 |
FROM python:3.10-slim
|
2 |
|
3 |
+
# Set working directory
|
4 |
WORKDIR /app
|
5 |
|
6 |
+
# Create a cache directory with appropriate permissions
|
7 |
+
RUN mkdir -p /app/cache && chmod -R 777 /app/cache
|
8 |
+
|
9 |
+
# Set environment variables for Hugging Face cache
|
10 |
+
ENV TRANSFORMERS_CACHE=/app/cache
|
11 |
+
ENV HF_HOME=/app/cache
|
12 |
+
|
13 |
+
# Copy and install requirements
|
14 |
COPY requirements.txt .
|
15 |
RUN pip install --no-cache-dir -r requirements.txt
|
16 |
|
17 |
+
# Copy application files
|
18 |
+
COPY . .
|
19 |
|
20 |
+
# Expose port
|
21 |
EXPOSE 8000
|
22 |
|
23 |
+
# Run the FastAPI application
|
24 |
+
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]
|
main.py
CHANGED
@@ -7,6 +7,9 @@ import logging
|
|
7 |
logging.basicConfig(level=logging.INFO)
|
8 |
logger = logging.getLogger(__name__)
|
9 |
|
|
|
|
|
|
|
10 |
app = FastAPI(title="LaMini-LM API",
|
11 |
description="API for text generation using LaMini-GPT-774M", version="1.0.0")
|
12 |
|
|
|
7 |
logging.basicConfig(level=logging.INFO)
|
8 |
logger = logging.getLogger(__name__)
|
9 |
|
10 |
+
# Log cache directory
|
11 |
+
logger.info(f"TRANSFORMERS_CACHE set to: {os.getenv('TRANSFORMERS_CACHE', '/.cache')}")
|
12 |
+
|
13 |
app = FastAPI(title="LaMini-LM API",
|
14 |
description="API for text generation using LaMini-GPT-774M", version="1.0.0")
|
15 |
|
requirements.txt
CHANGED
@@ -1,6 +1,4 @@
|
|
1 |
-
fastapi==0.115.
|
2 |
-
uvicorn==0.
|
3 |
-
transformers==4.
|
4 |
torch==2.4.1
|
5 |
-
python-multipart==0.0.9
|
6 |
-
|
|
|
1 |
+
fastapi==0.115.2
|
2 |
+
uvicorn==0.32.0
|
3 |
+
transformers==4.46.0
|
4 |
torch==2.4.1
|
|
|
|