-
Notifications
You must be signed in to change notification settings - Fork 112
/
setup.py
104 lines (96 loc) · 3.36 KB
/
setup.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
import os
import re
import subprocess
from setuptools import find_namespace_packages, setup
# Ensure we match the version set in optimum/intel/version.py
try:
    filepath = "optimum/intel/version.py"
    with open(filepath) as version_file:
        # version.py contains a line like: __version__ = "1.21.0.dev0"
        (__version__,) = re.findall('__version__ = "(.*)"', version_file.read())
    if __version__.endswith(".dev0"):
        # Dev builds get a PEP 440 local-version suffix with the current git
        # commit (e.g. "1.21.0.dev0+abc1234") so installed dev wheels are traceable.
        dev_version_id = ""
        try:
            repo_root = os.path.dirname(os.path.realpath(__file__))
            dev_version_id = (
                subprocess.check_output(["git", "rev-parse", "--short", "HEAD"], cwd=repo_root)  # nosec
                .strip()
                .decode()
            )
            dev_version_id = "+" + dev_version_id
        except (subprocess.CalledProcessError, OSError):
            # Not a git checkout, or git is not installed (OSError covers
            # FileNotFoundError when building from an sdist on a machine
            # without git): keep the plain ".dev0" version.
            pass
        __version__ = __version__ + dev_version_id
except Exception as error:
    # An `assert False, msg` here would be silently stripped under `python -O`,
    # hiding the failure; raise explicitly instead. AssertionError is kept so
    # the observable failure type matches the previous behavior.
    raise AssertionError(f"Error: Could not open '{filepath}' due to {error}\n") from error
# Core runtime dependencies required by every optimum-intel install.
INSTALL_REQUIRE = [
    "torch>=1.11",
    "optimum~=1.23",
    "transformers>=4.36,<4.47",
    "datasets>=1.4.0",
    "sentencepiece",
    "setuptools",
    "scipy",
    "onnx",
]

# Extra packages needed only to run the test suite (`pip install .[tests]`).
TESTS_REQUIRE = [
    "accelerate",
    "pytest>=7.2.0,<8.0.0",
    "parameterized",
    "Pillow",
    "evaluate",
    "diffusers",
    "py-cpuinfo",
    "sacremoses",
    "torchaudio",
    "rjieba",
    "timm",
    "invisible-watermark>=0.2.0",
    "transformers_stream_generator",
    "einops",
    "tiktoken",
    "sentence-transformers",
    "open_clip_torch>=2.26.1",
    "peft",
]

# Code-style and lint tooling (`pip install .[quality]`).
QUALITY_REQUIRE = ["black~=23.1", "ruff==0.4.4"]

# Optional feature sets exposed as pip extras,
# e.g. `pip install optimum-intel[openvino]` or `[neural-compressor]`.
# Note the pinned/bounded transformers versions per backend: each Intel
# backend is validated only against a specific transformers range.
EXTRAS_REQUIRE = {
    "nncf": ["nncf>=2.11.0"],
    "openvino": ["nncf>=2.11.0", "openvino==2024.5.0", "openvino-tokenizers==2024.5.0"],
    "neural-compressor": ["neural-compressor[pt]>3.0", "accelerate", "transformers<4.46"],
    "ipex": ["intel-extension-for-pytorch", "transformers>=4.39,<4.45"],
    "diffusers": ["diffusers"],
    "quality": QUALITY_REQUIRE,
    "tests": TESTS_REQUIRE,
}
# Read the long description up front with a context manager so the README
# file handle is closed deterministically — the previous inline
# `open("README.md", ...).read()` leaked the handle until garbage collection
# (ResourceWarning under `python -W error`).
with open("README.md", "r", encoding="utf-8") as readme_file:
    long_description = readme_file.read()

setup(
    name="optimum-intel",
    version=__version__,
    description="Optimum Library is an extension of the Hugging Face Transformers library, providing a framework to "
    "integrate third-party libraries from Hardware Partners and interface with their specific "
    "functionality.",
    long_description=long_description,
    long_description_content_type="text/markdown",
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "License :: OSI Approved :: Apache Software License",
        "Intended Audience :: Developers",
        "Intended Audience :: Education",
        "Intended Audience :: Science/Research",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
    ],
    keywords="transformers, quantization, pruning, knowledge distillation, optimization, training",
    url="https://www.intel.com",
    author="HuggingFace Inc. Special Ops Team",
    author_email="hardware@huggingface.co",
    license="Apache",
    # Namespace packages: ship everything under "optimum.*" without an
    # optimum/__init__.py, so optimum-intel coexists with the base optimum package.
    packages=find_namespace_packages(include=["optimum*"]),
    install_requires=INSTALL_REQUIRE,
    extras_require=EXTRAS_REQUIRE,
    include_package_data=True,
    zip_safe=False,
    entry_points={"console_scripts": ["optimum-cli=optimum.commands.optimum_cli:main"]},
)