ginipick committed (verified)
Commit 6c32331 · 1 Parent(s): 024290d

Update requirements.txt

Files changed (1):
  1. requirements.txt +22 -9
requirements.txt CHANGED
@@ -1,15 +1,28 @@
  torch==2.2.0
  torchaudio==2.2.0 --index-url https://download.pytorch.org/whl/cu118
- omegaconf
- einops
+ omegaconf>=2.3.0
+ einops>=0.6.1
  numpy<2
- transformers
- sentencepiece
- tqdm
- tensorboard
+ transformers>=4.31.0
+ sentencepiece>=0.1.99
+ tqdm>=4.65.0
+ tensorboard>=2.13.0
  descript-audiotools>=0.7.2
- descript-audio-codec
+ descript-audio-codec>=0.3.0
  scipy==1.10.1
  huggingface-hub==0.25.2
- wheel
- #https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.3/flash_attn-2.7.3+cu11torch2.2cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
+ wheel>=0.40.0
+ gradio>=4.0.0
+ pandas>=2.0.0
+ librosa>=0.10.0
+ soundfile>=0.12.1
+ ffmpeg-python>=0.2.0
+ accelerate>=0.21.0
+ bitsandbytes>=0.41.0
+ safetensors>=0.3.1
+ tokenizers>=0.13.3
+ pyyaml>=6.0
+ regex>=2023.6.3
+ requests>=2.31.0
+ typing_extensions>=4.5.0
+ #https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.3/flash_attn-2.7.3+cu11torch2.2cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
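For anyone reproducing this environment: after pip install -r requirements.txt (the per-line --index-url option points pip at the CUDA 11.8 wheel index for torchaudio, and the commented-out flash-attn wheel targets Python 3.10 / torch 2.2 / CUDA 11), the new pins can be sanity-checked against what actually got installed. A minimal sketch using importlib.metadata and packaging; the script name and approach are illustrative, not part of this commit:

# check_pins.py -- illustrative helper (not part of this repo):
# verify the active environment against the pins in requirements.txt.
from importlib.metadata import PackageNotFoundError, version
from packaging.requirements import Requirement

with open("requirements.txt") as fh:
    for raw in fh:
        # Drop per-line pip options such as "--index-url ..." before parsing.
        line = raw.split("--index-url")[0].strip()
        if not line or line.startswith("#"):
            continue  # skip blanks and the commented-out flash-attn wheel
        req = Requirement(line)
        try:
            installed = version(req.name)
        except PackageNotFoundError:
            print(f"MISSING   {req.name}")
            continue
        ok = req.specifier.contains(installed, prereleases=True)
        status = "OK" if ok else "MISMATCH"
        print(f"{status}  {req.name}: installed {installed}, pinned {req.specifier or 'any'}")

Note that packaging is its own distribution even though it ships alongside pip in most environments; pip install packaging covers it if the import fails.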