-
Notifications
You must be signed in to change notification settings - Fork 7
/
version_rec.text
34 lines (27 loc) · 1.27 KB
/
version_rec.text
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
DINOv2:
Should use train.py; these newer versions of transformers support Llama's flash attention.
dependencies = [
"torch==2.0.1", "torchvision==0.15.2",
"transformers==4.38.2", "tokenizers==0.15.2", "sentencepiece==0.1.99", "shortuuid",
"accelerate==0.27.2", "peft==0.4.0", "bitsandbytes==0.41.0",
"pydantic<2,>=1", "markdown2[all]", "numpy", "scikit-learn==1.2.2",
"gradio==3.35.2", "gradio_client==0.2.9",
"requests", "httpx==0.24.0", "uvicorn", "fastapi",
"einops==0.6.1", "einops-exts==0.0.4", "timm==0.6.13",
"diffusers==0.19.0", "xformers==0.0.22", "invisible-watermark==0.2.0"
]
[project.optional-dependencies]
train = ["deepspeed==0.12.6", "ninja"]
OpenCLIP:
dependencies = [
"torch==2.0.1", "torchvision==0.15.2",
"transformers==4.31.0", "tokenizers>=0.12.1,<0.14", "sentencepiece==0.1.99", "shortuuid",
"accelerate==0.21.0", "peft==0.4.0", "bitsandbytes==0.41.0",
"pydantic<2,>=1", "markdown2[all]", "numpy", "scikit-learn==1.2.2",
"gradio==3.35.2", "gradio_client==0.2.9",
"requests", "httpx==0.24.0", "uvicorn", "fastapi",
"einops==0.6.1", "einops-exts==0.0.4", "timm==0.6.13",
"diffusers==0.19.0", "xformers==0.0.22", "invisible-watermark==0.2.0"
]
[project.optional-dependencies]
train = ["deepspeed==0.9.5", "ninja"]