caohy666 committed
Commit d6a1373 · 1 Parent(s): 90790ea

<fix> using whl for installing flash-attn.

Files changed (3):
  1. README.md +1 -1
  2. app.sh +0 -9
  3. requirements.txt +1 -1
README.md CHANGED
@@ -5,7 +5,7 @@ colorFrom: green
  colorTo: gray
  sdk: gradio
  sdk_version: 5.33.2
- app_file: app.sh
+ app_file: app.py
  pinned: false
  license: apache-2.0
  short_description: huggingface space for DRA-Ctrl.
app.sh DELETED
@@ -1,9 +0,0 @@
- #!/bin/bash
- set -e
-
- apt-get update && apt-get install -y git build-essential python3-dev ninja-build
-
- pip install -r requirements.txt
- pip install flash-attn==2.7.3 --no-build-isolation
-
- python app.py
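
With the prebuilt wheel now pinned in requirements.txt, the launcher script is no longer needed: the apt toolchain install (build-essential, ninja-build) and the source build of flash-attn go away, and the Space's Gradio SDK starts app.py directly (see the README change above). As a rough illustration, not part of the commit, installing that wheel by hand reduces to a single pip call:

# Illustrative only: the direct wheel install that replaces the
# "pip install flash-attn==2.7.3 --no-build-isolation" source build from app.sh.
pip install "https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.3/flash_attn-2.7.3+cu12torch2.5cxx11abiFALSE-cp310-cp310-linux_x86_64.whl"
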
requirements.txt CHANGED
@@ -2,7 +2,7 @@ torch==2.5.1
  torchvision==0.20.1
  diffusers==0.33.1
  transformers==4.45.0
- # flash-attn==2.7.3
+ flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.3/flash_attn-2.7.3+cu12torch2.5cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
  gradio
  omegaconf
  peft
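
The replacement line uses pip's direct-URL requirement syntax (flash-attn @ <url>), so pip downloads a prebuilt wheel instead of compiling flash-attn from source at Space startup. The wheel's filename tags only fit a runtime with CPython 3.10, torch 2.5 built against CUDA 12 with the pre-cxx11 ABI, on linux x86_64, which matches the torch==2.5.1 pin above. A minimal sanity check along those lines, a sketch rather than part of the commit:

# Sketch: confirm the runtime matches the wheel tags
# cp310 / torch2.5 / cu12 / cxx11abiFALSE / linux_x86_64.
python --version                                           # expect Python 3.10.x (cp310)
python -c "import platform; print(platform.machine())"     # expect x86_64
python -c "import torch; print(torch.__version__, torch.version.cuda, torch.compiled_with_cxx11_abi())"
# expect output like: 2.5.1 12.4 False   (torch 2.5, CUDA 12, pre-cxx11 ABI)
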