setup.py
from collections import defaultdict
from setuptools import find_packages, setup
# PEP0440 compatible formatted version, see:
# https://www.python.org/dev/peps/pep-0440/
#
# release markers:
#   X.Y
#   X.Y.Z   # For bugfix releases
#
# pre-release markers:
#   X.YaN   # Alpha release
#   X.YbN   # Beta release
#   X.YrcN  # Release Candidate
#   X.Y     # Final release
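#
# For illustration only (these are not actual releases of this package):
# "2.0", "2.0.1", "2.0a1", "2.0b2", and "2.0rc1" all fit the scheme above.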
def parse_requirements_file(path, allowed_extras: set = None, include_all_extra: bool = True):
    requirements = []
    extras = defaultdict(list)
    with open(path) as requirements_file:
        import re

        def fix_url_dependencies(req: str) -> str:
            """Pip and setuptools disagree about how URL dependencies should be handled."""
            m = re.match(
                r"^(git\+)?(https|ssh)://(git@)?github\.com/([\w-]+)/(?P<name>[\w-]+)\.git", req
            )
            if m is None:
                return req
            else:
                return f"{m.group('name')} @ {req}"

        for line in requirements_file:
            line = line.strip()
            if line.startswith("#") or len(line) <= 0:
                continue
            req, *needed_by = line.split("# needed by:")
            req = fix_url_dependencies(req.strip())
            if needed_by:
                for extra in needed_by[0].strip().split(","):
                    extra = extra.strip()
                    if allowed_extras is not None and extra not in allowed_extras:
                        raise ValueError(f"invalid extra '{extra}' in {path}")
                    extras[extra].append(req)
                if include_all_extra and req not in extras["all"]:
                    extras["all"].append(req)
            else:
                requirements.append(req)
    return requirements, extras
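
# Illustrative sketch of the "# needed by:" convention this parser expects
# (hypothetical lines, not taken from the real requirements files):
#
#     somepkg>=1.0  # needed by: checklist
#
# lands in extras["checklist"] (and in extras["all"] when include_all_extra is
# True), while an untagged line goes into the base requirements list. A GitHub
# URL requirement such as
#
#     git+https://github.com/someorg/somepkg.git
#
# is first rewritten by fix_url_dependencies to
#
#     somepkg @ git+https://github.com/someorg/somepkg.git
#
# so that setuptools accepts it as a named dependency.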

integrations = {"checklist"}

# Load requirements.
install_requirements, extras = parse_requirements_file(
    "requirements.txt", allowed_extras=integrations
)
dev_requirements, dev_extras = parse_requirements_file(
    "dev-requirements.txt", allowed_extras={"examples"}, include_all_extra=False
)
extras["dev"] = dev_requirements
extras.update(dev_extras)
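
# At this point `extras` maps extra names to requirement lists, roughly of the
# shape below (contents hypothetical, shown for illustration only):
#
#     {
#         "checklist": [...],  # requirements tagged "# needed by: checklist"
#         "all": [...],        # every tagged requirement from requirements.txt
#         "dev": [...],        # untagged entries of dev-requirements.txt
#         "examples": [...],   # dev requirements tagged "# needed by: examples"
#     }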

# version.py defines the VERSION and VERSION_SHORT variables.
# We use exec here so we don't import allennlp whilst setting up.
VERSION = {}  # type: ignore
with open("allennlp/version.py", "r") as version_file:
    exec(version_file.read(), VERSION)
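
# For reference, any allennlp/version.py that simply assigns those two
# variables satisfies the exec pattern above, e.g. (placeholder values,
# not the real file):
#
#     VERSION_SHORT = "X.Y"
#     VERSION = "X.Y.Z"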

setup(
    name="allennlp",
    version=VERSION["VERSION"],
    description="An open-source NLP research library, built on PyTorch.",
    long_description=open("README.md", encoding="utf-8").read(),
    long_description_content_type="text/markdown",
    classifiers=[
        "Intended Audience :: Science/Research",
        "Development Status :: 3 - Alpha",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python :: 3",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
    ],
    keywords="allennlp NLP deep learning machine reading",
    url="https://github.com/allenai/allennlp",
    author="Allen Institute for Artificial Intelligence",
    author_email="[email protected]",
    license="Apache",
    packages=find_packages(
        exclude=[
            "*.tests",
            "*.tests.*",
            "tests.*",
            "tests",
            "test_fixtures",
            "test_fixtures.*",
            "benchmarks",
            "benchmarks.*",
        ]
    ),
    install_requires=install_requirements,
    extras_require=extras,
    entry_points={"console_scripts": ["allennlp=allennlp.__main__:run"]},
    include_package_data=True,
    python_requires=">=3.7.1",
    zip_safe=False,
)
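
# Usage sketch (illustrative): with this metadata, the extras collected above
# can be selected at install time, e.g.
#
#     pip install allennlp                   # core library only
#     pip install allennlp[checklist]        # core library plus the checklist integration
#     pip install --editable ".[dev,all]"    # local checkout with dev tools and all extras
#
# and the console_scripts entry point exposes the `allennlp` command, which
# dispatches to allennlp.__main__:run.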