# Library dependencies for the Python code. You need to install these with
# `pip install -r requirements.txt` before you can run the code.
#### ESSENTIAL LIBRARIES FOR MAIN FUNCTIONALITY ####
# This installs PyTorch for CUDA 8 only. If you are using a newer CUDA version,
# please visit http://pytorch.org/ and install the relevant version.
torch>=0.4.0,<0.5.0
# Used to parse Jsonnet-formatted configuration files (skipped on Windows, where the package is not installed).
jsonnet==0.10.0 ; sys.platform != 'win32'
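# A minimal usage sketch (the bindings install a `_jsonnet` module; the file name
# `config.jsonnet` below is just a placeholder):
#   import json
#   import _jsonnet
#   config = json.loads(_jsonnet.evaluate_file("config.jsonnet"))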
# Adds an @overrides decorator for better documentation and error checking when using subclasses.
overrides
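# A minimal usage sketch (the classes here are invented for illustration):
#   from overrides import overrides
#   class Tokenizer:
#       def tokenize(self, text):
#           raise NotImplementedError
#   class WhitespaceTokenizer(Tokenizer):
#       @overrides
#       def tokenize(self, text):  # errors at class definition if Tokenizer lacks `tokenize`
#           return text.split()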
# Used by some older code. We moved away from it because it's too slow, but a few
# modules still import it.
nltk
# Mainly used for the faster tokenizer.
spacy>=2.0,<2.1
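# A minimal tokenization sketch (spacy.blank builds a pipeline with only the tokenizer):
#   import spacy
#   nlp = spacy.blank("en")
#   tokens = [token.text for token in nlp("AllenNLP is built on PyTorch.")]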
# Used by span prediction models.
numpy
# Used for reading configuration info out of numpy-style docstrings.
numpydoc==0.8.0
# Used in coreference resolution evaluation metrics.
scipy
scikit-learn
# Writes logs for training visualisation with the TensorBoard application.
# Install TensorBoard separately (it is part of TensorFlow) to view them.
tensorboardX==1.2
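# A minimal usage sketch (the log directory name is arbitrary):
#   from tensorboardX import SummaryWriter
#   writer = SummaryWriter(log_dir="logs")
#   writer.add_scalar("train/loss", 0.42, global_step=1)
#   writer.close()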
# Required by torch.utils.ffi
cffi==1.11.2
# AWS command-line tools for running on Docker remotely.
# The second requirement pins botocore < 1.11, to avoid a known botocore bug.
awscli>=1.11.91
botocore<1.11
# Accessing files from S3 directly.
boto3
# REST interface for models
flask==0.12.4
flask-cors==3.0.3
gevent==1.3.5
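# A minimal serving sketch (the route and handler below are hypothetical, not the
# project's actual server code):
#   from flask import Flask, jsonify, request
#   from flask_cors import CORS
#   from gevent.pywsgi import WSGIServer
#   app = Flask(__name__)
#   CORS(app)  # allow cross-origin requests from demo front-ends
#   @app.route("/predict", methods=["POST"])
#   def predict():
#       return jsonify({"input": request.get_json()})
#   WSGIServer(("0.0.0.0", 8000), app).serve_forever()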
# Used by semantic parsing code to strip diacritics from unicode strings.
unidecode
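# For example:
#   from unidecode import unidecode
#   unidecode("café naïve")  # -> "cafe naive"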
# Used by semantic parsing code to parse SQL
parsimonious==0.8.0
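# A toy PEG-grammar sketch (this is not the project's actual SQL grammar):
#   from parsimonious.grammar import Grammar
#   grammar = Grammar(r'''
#       greeting = "hello" ws name
#       ws       = " "
#       name     = ~"[a-z]+"
#   ''')
#   tree = grammar.parse("hello world")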
#### LIBRARIES USED IN SCRIPTS ####
# Used for downloading datasets over HTTP
requests>=2.18
# Progress bars in data cleaning scripts.
tqdm>=4.19
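# A download-with-progress-bar sketch combining requests and tqdm (the URL and file
# name are placeholders):
#   import requests
#   from tqdm import tqdm
#   response = requests.get("https://example.com/data.zip", stream=True)
#   with open("data.zip", "wb") as f:
#       for chunk in tqdm(response.iter_content(chunk_size=1024)):
#           f.write(chunk)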
# In the SQuAD eval script, we use this to check whether we likely have a tokenization problem.
editdistance
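# For example:
#   import editdistance
#   editdistance.eval("kitten", "sitting")  # -> 3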
# For pretrained model weights
h5py
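# A minimal reading sketch (the file path and dataset name are placeholders):
#   import h5py
#   with h5py.File("weights.h5", "r") as f:
#       embeddings = f["embeddings"][...]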
# For timezone utilities
pytz==2017.3
#### ESSENTIAL TESTING-RELATED PACKAGES ####
# We use pytest to run our tests; it isn't needed to run the code itself, only the tests.
# With this installed, you can run the tests with `py.test` from the base directory.
pytest
# Allows marking tests as flaky, to be rerun if they fail
flaky
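# For example, rerun a test up to three times before reporting failure:
#   from flaky import flaky
#   @flaky(max_runs=3, min_passes=1)
#   def test_occasionally_unstable():
#       assert True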
# Required to mock out `requests` calls
responses>=0.7
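# A minimal mocking sketch (the URL is a placeholder):
#   import requests
#   import responses
#   @responses.activate
#   def test_fetch():
#       responses.add(responses.GET, "http://example.com/api", json={"ok": True}, status=200)
#       assert requests.get("http://example.com/api").json() == {"ok": True}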
# Reads Universal Dependencies files.
conllu==0.11
#### TESTING-RELATED PACKAGES ####
# Checks style, syntax, and other common errors.
pylint==1.8.1
# Tutorial notebooks
jupyter
# Static type checking
mypy==0.521
# Allows generation of coverage reports with pytest.
pytest-cov
# Allows codecov to generate coverage reports
coverage
codecov
# Required to run sanic tests
aiohttp
# For mocking S3.
moto==1.3.4
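# A minimal S3-mocking sketch (the bucket name is arbitrary):
#   import boto3
#   from moto import mock_s3
#   @mock_s3
#   def test_bucket_creation():
#       s3 = boto3.client("s3", region_name="us-east-1")
#       s3.create_bucket(Bucket="my-test-bucket")
#       assert "my-test-bucket" in [b["Name"] for b in s3.list_buckets()["Buckets"]]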
#### DOC-RELATED PACKAGES ####
# Builds our documentation.
sphinx==1.5.3
# Watches the documentation directory and rebuilds on changes.
sphinx-autobuild
# doc theme
sphinx_rtd_theme
# Only used to convert our README to reStructuredText for PyPI.
pypandoc
# Uploads packages to PyPI.
twine==1.11.0