# Cargo manifest for the rust-bert crate.

[package]
name = "rust-bert"
version = "0.23.0"
authors = ["Guillaume Becquin <guillaume.becquin@gmail.com>"]
build = "build.rs"
documentation = "https://docs.rs/rust-bert"
edition = "2018"
keywords = [
    "nlp",
    "deep-learning",
    "machine-learning",
    "transformers",
    "translation",
]
license = "Apache-2.0"
readme = "README.md"
repository = "https://github.com/guillaume-be/rust-bert"
description = "Ready-to-use NLP pipelines and language models"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[lib]
name = "rust_bert"
path = "src/lib.rs"
crate-type = ["lib"]

# Auxiliary tensor-conversion binary; excluded from generated docs (`doc = false`).
[[bin]]
name = "convert-tensor"
path = "src/convert-tensor.rs"
doc = false

# Benchmarks use Criterion (see [dev-dependencies]); `harness = false` hands
# `main` over to Criterion instead of the built-in libtest bench harness.
[[bench]]
name = "sst2_benchmark"
harness = false

[[bench]]
name = "squad_benchmark"
harness = false

[[bench]]
name = "summarization_benchmark"
harness = false

[[bench]]
name = "translation_benchmark"
harness = false

[[bench]]
name = "generation_benchmark"
harness = false

[[bench]]
name = "tensor_operations_benchmark"
harness = false

[[bench]]
name = "token_classification_benchmark"
harness = false

[profile.bench]
# Full optimizations when benchmarking.
opt-level = 3

[features]
default = ["remote", "default-tls"]
all-tests = []
default-tls = ["cached-path/default-tls"]
# `doc-only` forwards to tch's stub mode so docs build without libtorch;
# docs.rs enables it via the metadata table below.
doc-only = ["tch/doc-only"]
# Opt-in: let the tch build script fetch libtorch automatically.
download-libtorch = ["tch/download-libtorch"]
hf-tokenizers = ["tokenizers"]
onnx = ["ort", "ndarray"]
# Pulls in the optional dependencies used for remote model retrieval.
remote = ["cached-path", "dirs", "lazy_static"]
rustls-tls = ["cached-path/rustls-tls"]

[package.metadata.docs.rs]
features = ["doc-only"]

[dependencies]
# Keys sorted alphabetically per Cargo convention.
cached-path = { version = "0.6", default-features = false, optional = true }
dirs = { version = "5", optional = true }
half = "2"
lazy_static = { version = "1", optional = true }
ndarray = { version = "0.15", optional = true }
ordered-float = "4.2.0"
ort = { version = "1.16.3", optional = true, default-features = false, features = [
    "half",
] }
regex = "1.10"
rust_tokenizers = "8.1.1"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
# Do NOT enable `download-libtorch` unconditionally here: doing so makes the
# opt-in `download-libtorch` cargo feature (see [features]) a no-op and forces
# a libtorch download on every consumer. The download stays behind the gate.
tch = "0.17.0"
thiserror = "1"
tokenizers = { version = "0.20", optional = true, default-features = false, features = [
    "onig",
] }
uuid = { version = "1", features = ["v4"] }

[dev-dependencies]
# Keys sorted alphabetically per Cargo convention.
anyhow = "1"
criterion = "0.5"
csv = "1"
itertools = "0.13.0"
# Tests/examples load the ONNX Runtime library dynamically at runtime.
ort = { version = "1.16.3", features = ["load-dynamic"] }
tempfile = "3"
tokio = { version = "1.35", features = ["sync", "rt-multi-thread", "macros"] }
tracing-subscriber = { version = "0.3", default-features = false, features = [
    "env-filter",
    "fmt",
] }

# Feature-gated examples: `required-features` keeps them out of default builds
# and `cargo run --example` prompts for the missing feature instead of failing.
[[example]]
name = "onnx-masked-lm"
required-features = ["onnx"]

[[example]]
name = "onnx-question-answering"
required-features = ["onnx"]

[[example]]
name = "onnx-sequence-classification"
required-features = ["onnx"]

[[example]]
name = "onnx-text-generation"
required-features = ["onnx"]

[[example]]
name = "onnx-token-classification"
required-features = ["onnx"]

[[example]]
name = "onnx-translation"
required-features = ["onnx"]

[[example]]
name = "generation_gpt2_hf_tokenizers"
required-features = ["hf-tokenizers"]