# setup.cfg (forked from explosion/spacy-transformers)

[metadata]
version = 1.1.7
description = spaCy pipelines for pre-trained BERT and other transformers
url = https://spacy.io
author = Explosion
author_email = [email protected]
license = MIT
long_description = file: README.md
long_description_content_type = text/markdown
classifiers =
    Development Status :: 5 - Production/Stable
    Environment :: Console
    Intended Audience :: Developers
    Intended Audience :: Science/Research
    Topic :: Scientific/Engineering
    Topic :: Scientific/Engineering :: Artificial Intelligence
    License :: OSI Approved :: MIT License
    Operating System :: POSIX :: Linux
    Operating System :: MacOS :: MacOS X
    Operating System :: Microsoft :: Windows
    Programming Language :: Python :: 3
    Programming Language :: Python :: 3.6
    Programming Language :: Python :: 3.7

[options]
zip_safe = false
include_package_data = true
python_requires = >=3.6
install_requires =
    spacy>=3.1.3,<4.0.0
    transformers>=3.4.0,<4.21.0
    torch>=1.6.0
    srsly>=2.4.0,<3.0.0
    dataclasses>=0.6,<1.0; python_version < "3.7"
    spacy-alignments>=0.7.2,<1.0.0
setup_requires =
    setuptools
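
# The environment marker on the dataclasses pin restricts that backport to
# Python 3.6, the only supported version where dataclasses are missing from
# the standard library. Installing the package pulls in all of these pins; a
# plain install of the upstream release this config corresponds to would look
# like (illustrative only, assuming the package is taken from PyPI under its
# upstream name):
#
#     pip install spacy-transformers==1.1.7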

[options.extras_require]
cuda =
    cupy>=5.0.0b4
cuda80 =
    cupy-cuda80>=5.0.0b4
cuda90 =
    cupy-cuda90>=5.0.0b4
cuda91 =
    cupy-cuda91>=5.0.0b4
cuda92 =
    cupy-cuda92>=5.0.0b4
cuda100 =
    cupy-cuda100>=5.0.0b4
cuda101 =
    cupy-cuda101>=5.0.0b4
cuda102 =
    cupy-cuda102>=5.0.0b4
cuda110 =
    cupy-cuda110>=5.0.0b4
cuda111 =
    cupy-cuda111>=5.0.0b4
cuda112 =
    cupy-cuda112>=5.0.0b4
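
# Each cudaXXX extra simply selects the prebuilt cupy wheel matching a given
# CUDA toolkit version; only one of them should be installed at a time. For
# example, on a machine with CUDA 11.0 (illustrative, not part of this config):
#
#     pip install spacy-transformers[cuda110]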

[options.entry_points]
spacy_factories =
    transformer = spacy_transformers.pipeline_component:make_transformer
spacy_architectures =
    spacy-transformers.TransformerListener.v1 = spacy_transformers:architectures.transformer_listener_tok2vec_v1
    spacy-transformers.Tok2VecTransformer.v1 = spacy_transformers:architectures.transformer_tok2vec_v1
    spacy-transformers.Tok2VecTransformer.v2 = spacy_transformers:architectures.transformer_tok2vec_v2
    spacy-transformers.Tok2VecTransformer.v3 = spacy_transformers:architectures.transformer_tok2vec_v3
    spacy-transformers.TransformerModel.v1 = spacy_transformers:architectures.create_TransformerModel_v1
    spacy-transformers.TransformerModel.v2 = spacy_transformers:architectures.create_TransformerModel_v2
    spacy-transformers.TransformerModel.v3 = spacy_transformers:architectures.create_TransformerModel_v3
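
# These entry points register the "transformer" pipeline factory and the model
# architectures with spaCy's registries when the package is installed, so a
# pipeline config can refer to them by name. A minimal sketch of how the v3
# model architecture is typically referenced (the model name and span-getter
# settings below are illustrative values, not defined in this file):
#
#     [components.transformer]
#     factory = "transformer"
#
#     [components.transformer.model]
#     @architectures = "spacy-transformers.TransformerModel.v3"
#     name = "roberta-base"
#
#     [components.transformer.model.get_spans]
#     @span_getters = "spacy-transformers.strided_spans.v1"
#     window = 128
#     stride = 96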

[bdist_wheel]
universal = true

[sdist]
formats = gztar

[flake8]
ignore = E203, E266, E501, E731, W503
max-line-length = 80
select = B,C,E,F,W,T4,B9
exclude =
    .env,
    .git,
    __pycache__,
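
# flake8 reads this section automatically when invoked from the project root,
# e.g. (illustrative):
#
#     flake8 spacy_transformers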

[mypy]
ignore_missing_imports = True
no_implicit_optional = True
plugins = pydantic.mypy, thinc.mypy

[coverage:run]

[coverage:report]
omit =
    **/tests/*
    **/_vendorized/*
    **/about.py
exclude_lines =
    pragma: no cover
    # Don't complain about missing debug-only code:
    def __unicode__
    def __repr__
    if self\.debug
    # Don't complain if tests don't hit defensive assertion code:
    raise AssertionError
    raise NotImplementedError
    # Don't complain if non-runnable code isn't run:
    if 0:
    if __name__ == .__main__.:
show_missing = True
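
# coverage.py picks up the coverage:run / coverage:report sections above from
# setup.cfg; a typical invocation during testing would be (illustrative):
#
#     coverage run -m pytest
#     coverage report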