# pyproject.toml
[tool.poetry]
name = "llm-evaluate"
version = "0.0.1"
description = ""
authors = ["Harsh Saini <[email protected]>"]
readme = "README.md"

[tool.poetry.dependencies]
python = ">=3.10,<3.12"
transformers = ">=4.41.0,<5.0.0"
evaluate = {extras = ["template"], version = ">=0.4.0,<1.0.0"}
yarl = ">=1.9.2,<2.0.0"
scikit-learn = ">=1.2.2,<2.0.0"
typer = ">=0.12.5,<1.0.0"
pyyaml = ">=6.0.1,<7.0.0"
nltk = ">=3.8.1,<4.0.0"
rouge-score = ">=0.1.2,<1.0.0"
accelerate = ">=0.25.0,<1.0.0"
openai = ">=1.0.0,<2.0.0"
tiktoken = ">=0.3.3,<1.0.0"
backoff = ">=2.2.1,<3.0.0"
datasets = ">=2.12.0,<3.0.0"
sentencepiece = ">=0.2.0,<1.0.0"
peft = ">=0.6.2,<1.0.0"
safetensors = ">=0.4.1,<1.0.0"
google-cloud-aiplatform = ">=1.26.0,<2.0.0"
pandas = ">=1.5.0,<3.0.0"
numpy = ">=1.24.0,<2.0.0"
fuzzywuzzy = ">=0.18.0,<1.0.0"
hydra-core = ">=1.3.2,<2.0.0"
pynvml = ">=11.5.0,<12.0.0"
llama-cpp-python = ">=0.2.79,<0.3.0"
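# The quantization backends below (auto-gptq, autoawq, bitsandbytes) use Poetry's
# multiple-constraints list syntax and are restricted to Linux via `platform = "linux"`,
# presumably because their prebuilt wheels target Linux/CUDA environments.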
auto-gptq = [
    { platform = "linux", version = ">=0.6.0,<1.0.0" }
]
autoawq = [
    { platform = "linux", version = ">=0.1.7,<1.0.0" }
]
bitsandbytes = [
    { platform = "linux", version = ">=0.41.3,<1.0.0" }
]
optimum = ">=1.13.0,<2.0.0"
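# flash-attn is installed from prebuilt wheels (one per supported CPython version) rather
# than built from source. The `+cu122torch2.2` tag in the wheel filenames indicates
# CUDA 12.2 / torch 2.2 builds, which lines up with the torch pin further below.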
flash-attn = [
    {platform = "linux", url = "https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.8/flash_attn-2.5.8+cu122torch2.2cxx11abiFALSE-cp310-cp310-linux_x86_64.whl", python = ">=3.10,<3.11"},
    {platform = "linux", url = "https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.8/flash_attn-2.5.8+cu122torch2.2cxx11abiFALSE-cp311-cp311-linux_x86_64.whl", python = ">=3.11,<3.12"}
]
torch = ">=2.2.0,<2.3.0"
anthropic = {extras = ["vertex"], version = ">=0.26.0,<1.0.0"}

[tool.poetry.group.dev.dependencies]
pytest = ">=7.3.1,<8.0.0"
jupyterlab = ">=4.0.5,<5.0.0"
ipywidgets = ">=8.1.2,<9.0.0"

[tool.pytest.ini_options]
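# Fail the test run on any Python warning (pytest passes -W error to the warnings filter).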
addopts = "-Werror"

[tool.poetry.scripts]
llm-evaluate = 'llm_evaluate.cli:main'
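# After `poetry install`, the entry point is available as a console script, e.g.
# `llm-evaluate --help` (assuming `llm_evaluate.cli:main` is a Typer app, which
# exposes --help by default).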

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
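# Standard PEP 517 setup with poetry-core as the build backend; `poetry build` produces
# the sdist and wheel for this package.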

[[tool.poetry.source]]
name = "pypi"
priority = "primary"

[[tool.poetry.source]]
name = "nvidia-pypi"
url = "https://pypi.nvidia.com"
priority = "supplemental"

[[tool.poetry.source]]
name = "torch-cpu"
url = "https://download.pytorch.org/whl/cpu"
priority = "supplemental"
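# "pypi" is the primary index; "nvidia-pypi" and "torch-cpu" are supplemental
# (lower-priority) indexes, presumably included so NVIDIA-hosted packages and
# CPU-only torch wheels can be resolved when needed.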