forked from LazyAGI/LazyLLM
-
Notifications
You must be signed in to change notification settings - Fork 0
/
pyproject.toml
140 lines (136 loc) · 3.57 KB
/
pyproject.toml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
# Package metadata for the Poetry build tool.
[tool.poetry]
name = "lazyllm"
version = "0.2.3"
description = "A Low-code Development Tool For Building Multi-agent LLMs Applications."
authors = ["wangzhihong <[email protected]>"]
# SPDX license identifier (was "Apache-2.0 license", which is not a valid identifier).
license = "Apache-2.0"
readme = "README.md"
# Ship the package's own pyproject.toml inside the wheel/sdist.
include = [
"lazyllm/pyproject.toml",
]
# Runtime dependencies. Keys marked `optional = true` are only installed when
# the corresponding extra in [tool.poetry.extras] is requested.
[tool.poetry.dependencies]
python = "^3.10"
appdirs = "*"
loralib = "*"
toml = "*"
fastapi = ">=0.111.0"
loguru = ">=0.7.2"
pydantic = ">=2.5.0"
requests = ">=2.32.2"
uvicorn = "<0.29.0"
cloudpickle = ">=3.0.0"
flake8 = ">=7.0.0"
gradio = ">=3.48.0"
gradio-client = ">=0.6.1"
protobuf = ">=3.20.1"
setuptools = "<70.0.0"
docstring-parser = "^0.16"
json5 = "^0.9.25"
tiktoken = "^0.7.0"
spacy = "<=3.7.5"
chromadb = "^0.5.5"
bm25s = "^0.1.10"
pystemmer = "^2.2.0.1"
nltk = "^3.8.1"
jieba = ">=0.42.1"
pyjwt = ">=2.8.0"
sentence-transformers = "^3.0.1"
sentencepiece = "^0.2.0"
modelscope = "^1.17.1"
psycopg2-binary = "^2.9.9"
sqlalchemy = "^2.0.34"
psutil = "^6.0.0"
pypdf = "^5.0.0"
pytest = "^8.3.3"
numpy = "==1.26.4"
# --- optional dependencies (selected via the extras below) ---
# Inline-table style normalized to `{ version = ..., optional = true }`
# throughout; version constraints are unchanged.
redis = { version = ">=5.0.4", optional = true }
huggingface-hub = { version = ">=0.23.1", optional = true }
pandas = { version = ">=2.2.2", optional = true }
rank-bm25 = { version = ">=0.2.2", optional = true }
redisvl = { version = ">=0.1.3", optional = true }
datasets = { version = ">=2.18.0", optional = true }
deepspeed = { version = ">=0.12.3", optional = true }
fire = { version = ">=0.6.0", optional = true }
peft = { version = ">=0.3.0", optional = true }
torch = { version = ">=2.1.2", optional = true }
transformers = { version = ">=4.41.1", optional = true }
collie-lm = { version = ">=1.0.7", optional = true }
faiss-cpu = { version = ">=1.8.0", optional = true }
google = { version = ">=3.0.0", optional = true }
scikit-learn = { version = ">=1.5.0", optional = true }
tensorboard = { version = ">=2.16.2", optional = true }
tensorboard-data-server = { version = ">=0.7.2", optional = true }
torchvision = { version = ">=0.16.2", optional = true }
vllm = { version = "==0.5.0", optional = true }
wandb = { version = ">=0.17.0", optional = true }
chattts = { version = "^0.1.1", optional = true }
funasr = { version = "^1.1.4", optional = true }
lmdeploy = { version = "^0.5.3", optional = true }
timm = { version = "^1.0.8", optional = true }
diffusers = { version = "^0.30.0", optional = true }
sortedcontainers = { version = "^2.4.0", optional = true }
flash-attn = { version = "<=2.6.2", optional = true }
lightllm = { version = "^0.0.1", optional = true }
lazyllm-llamafactory = { version = "^0.8.3.dev0", optional = true }
rotary-embedding-torch = { version = "^0.8.3", optional = true }
# Named extras groups. Each entry must match an `optional = true` dependency
# declared in [tool.poetry.dependencies]; installing an extra pulls in the
# listed packages (e.g. `pip install lazyllm[standard]`).
[tool.poetry.extras]
# Core training/inference/serving stack.
standard = [
"datasets",
"deepspeed",
"faiss-cpu",
"fire",
"google",
"pandas",
"peft",
"rank-bm25",
"scikit-learn",
"torch",
"torchvision",
"transformers",
"vllm",
"wandb",
"chattts",
"funasr",
"lmdeploy",
"timm",
"diffusers",
"lazyllm-llamafactory",
"rotary-embedding-torch"
]
# Everything in `standard` plus redis/redisvl, tensorboard, collie-lm,
# flash-attn, lightllm and other additional backends.
full = [
"datasets",
"deepspeed",
"faiss-cpu",
"fire",
"google",
"pandas",
"peft",
"rank-bm25",
"scikit-learn",
"torch",
"torchvision",
"transformers",
"vllm",
"wandb",
"chattts",
"funasr",
"lmdeploy",
"timm",
"diffusers",
"redis",
"huggingface-hub",
"redisvl",
"collie-lm",
"tensorboard",
"tensorboard-data-server",
"sortedcontainers",
"flash-attn",
"lazyllm-llamafactory",
"rotary-embedding-torch",
"lightllm"
]
# PEP 517 build backend: Poetry Core builds the wheel/sdist.
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
# Console entry point: installs a `lazyllm` command dispatching to lazyllm.cli.main:main.
[tool.poetry.scripts]
lazyllm = "lazyllm.cli.main:main"