manuel-l01 commited on
Commit
4da8ba1
·
1 Parent(s): 5949542

Initial commit

Browse files
Files changed (2) hide show
  1. Dockerfile +6 -12
  2. api.py +19 -8
Dockerfile CHANGED
@@ -1,31 +1,25 @@
1
  FROM python:3.9-slim
2
 
3
- # Create dedicated user with home directory
4
  RUN useradd -m -u 1000 user
5
 
6
- # Set Hugging Face cache to user's writable directory
7
  ENV HF_HOME=/home/user/.cache/huggingface
8
- ENV TRANSFORMERS_CACHE=/data/huggingface_cache
9
 
10
  # Create cache directory with proper permissions
11
  RUN mkdir -p ${HF_HOME} && chown -R user:user /home/user
12
 
13
- # Set working directory (app will live here)
14
  WORKDIR /app
15
 
16
- # Install dependencies as root
17
  COPY requirements.txt .
18
  RUN pip install --no-cache-dir -r requirements.txt gunicorn
19
 
20
- # Copy app files (maintain ownership)
21
  COPY --chown=user:user . .
22
 
23
- RUN rm -rf /root/.cache/pip
24
-
25
-
26
- # Switch to non-root user
27
  USER user
28
 
29
-
30
  EXPOSE 7860
31
- CMD ["gunicorn", "--workers", "1", "--timeout", "120", "--bind", "0.0.0.0:7860", "api:app"]
 
1
  FROM python:3.9-slim
2
 
3
+ # Create user with UID 1000
4
  RUN useradd -m -u 1000 user
5
 
6
+ # Use writable cache location in user's home directory
7
  ENV HF_HOME=/home/user/.cache/huggingface
8
+ ENV TRANSFORMERS_CACHE=/home/user/.cache/huggingface
9
 
10
  # Create cache directory with proper permissions
11
  RUN mkdir -p ${HF_HOME} && chown -R user:user /home/user
12
 
 
13
  WORKDIR /app
14
 
15
+ # Install dependencies
16
  COPY requirements.txt .
17
  RUN pip install --no-cache-dir -r requirements.txt gunicorn
18
 
19
+ # Copy application files
20
  COPY --chown=user:user . .
21
 
 
 
 
 
22
  USER user
23
 
 
24
  EXPOSE 7860
25
+ CMD ["gunicorn", "--workers", "1", "--timeout", "300", "--bind", "0.0.0.0:7860", "api:app"]
api.py CHANGED
@@ -50,14 +50,25 @@ def midi_to_musicxml(midi_file_path):
50
  raise
51
 
52
  def load_model():
53
- global MODEL
54
- with MODEL_LOCK:
55
- if MODEL is None:
56
- print("⏳ Loading music generation model...")
57
- MODEL = AutoModelForCausalLM.from_pretrained('stanford-crfm/music-small-800k',local_files_only=False, force_download=False) # Prevent re-downloads
58
- # Add .cuda() here if using GPU
59
- print("✅ Model loaded successfully!")
60
- return MODEL
 
 
 
 
 
 
 
 
 
 
 
61
 
62
  # Model loading setup
63
  MODEL = None
 
50
  raise
51
 
52
def load_model():
    """Load the music generation model, checking the HF cache first.

    Reads the cache location from the ``HF_HOME`` environment variable
    (falling back to ``/home/user/.cache/huggingface``, matching the
    Dockerfile), probes that the directory is writable so permission
    problems surface *before* the large model download starts, then
    loads ``stanford-crfm/music-small-800k`` via ``from_pretrained``.

    Returns:
        The loaded ``AutoModelForCausalLM`` instance.
    """
    cache_dir = os.environ.get('HF_HOME', '/home/user/.cache/huggingface')
    print(f"Using cache directory: {cache_dir}")

    # Verify permissions early: create the directory if missing (a missing
    # dir previously mis-reported as "not writable"), write a probe file,
    # and remove it again so we don't litter the cache.
    try:
        os.makedirs(cache_dir, exist_ok=True)
        test_file = os.path.join(cache_dir, "test.txt")
        with open(test_file, "w") as f:
            f.write("test")
        os.remove(test_file)  # fix: original left the probe file behind
        print("✅ Cache directory is writable")
    except Exception as e:
        # Best-effort diagnostic only; still attempt the load so HF's own
        # error (if any) is reported to the caller.
        print(f"❌ Cache directory not writable: {e}")

    # Load model (downloads on first run, reuses the cache afterwards).
    return AutoModelForCausalLM.from_pretrained(
        'stanford-crfm/music-small-800k',
        cache_dir=cache_dir,
        local_files_only=False,
        force_download=False
    )
72
 
73
  # Model loading setup
74
  MODEL = None