# Library dependencies for the python code. You need to install these with
# `pip install -r requirements.txt` before you can run this.
# NOTE: all essential packages must be placed under a section named 'ESSENTIAL ...'
# so that the script `./scripts/check_requirements_and_setup.py` can find them.
#### ESSENTIAL LIBRARIES FOR MAIN FUNCTIONALITY ####
# This installs PyTorch built for CUDA 8 only. If you are using a newer CUDA
# version, please visit https://pytorch.org/ and install the matching build.
# For now AllenNLP works with both PyTorch 1.0 and 0.4.1; expect that in
# the future only >=1.0 will be supported.
torch>=0.4.1,<1.2
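# Illustrative sketch only (the exact command and wheel index depend on your
# platform and CUDA version; https://pytorch.org/ shows the right one for your
# setup): a specific build can be installed with pip's --find-links option, e.g.
#   pip install torch==1.1.0 -f https://download.pytorch.org/whl/torch_stable.html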
# Parameter parsing (but not on Windows).
jsonnet>=0.10.0 ; sys_platform != 'win32'
# Adds an @overrides decorator for better documentation and error checking when using subclasses.
overrides
# Used by some older code. We moved away from it because it's too slow, but a
# few modules still import it.
nltk
# Mainly used for its fast tokenizer.
spacy>=2.0.18,<2.2
# Used by span prediction models.
numpy
# Used for reading configuration info out of numpy-style docstrings.
numpydoc>=0.8.0
# Used in coreference resolution evaluation metrics.
scipy
scikit-learn
# Writes logs for visualising training with the TensorBoard application.
# Install TensorBoard separately (it is part of tensorflow) to view them.
tensorboardX>=1.2
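# As a rough sketch (the log directory depends on your training configuration),
# viewing the logs looks something like:
#   pip install tensorflow   # provides the `tensorboard` command
#   tensorboard --logdir <serialization_dir>/log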
# Accessing files from S3 directly.
boto3
# REST interface for models
flask>=1.0.2
flask-cors>=3.0.7
gevent>=1.3.6
# Used by semantic parsing code to strip diacritics from unicode strings.
unidecode
# Used by semantic parsing code to parse SQL
parsimonious>=0.8.0
# Used by semantic parsing code to format and postprocess SQL
sqlparse>=0.2.4
# For text normalization
ftfy
word2number>=1.1
# To use the BERT model
pytorch-pretrained-bert>=0.6.0
# For caching processed data
jsonpickle
#### ESSENTIAL LIBRARIES USED IN SCRIPTS ####
# Plots graphs for the learning rate finder.
matplotlib>=2.2.3
# Used for downloading datasets over HTTP
requests>=2.18
# Progress bars in data cleaning scripts.
tqdm>=4.19
# In SQuAD eval script, we use this to see if we likely have some tokenization problem.
editdistance
# For reading pretrained model weights stored in HDF5 format.
h5py
# For timezone utilities
pytz>=2017.3
# Reads Universal Dependencies files.
conllu==1.3.1
#### ESSENTIAL TESTING-RELATED PACKAGES ####
# We'll use pytest to run our tests; this isn't really necessary to run the code, but it is to run
# the tests. With this here, you can run the tests with `py.test` from the base directory.
pytest
# Allows marking tests as flaky, to be rerun if they fail
flaky
# Required to mock out `requests` calls
responses>=0.7
#### TESTING-RELATED PACKAGES ####
# Checks style, syntax, and other useful errors.
pylint==1.9.4
# Static type checking
mypy==0.521
# Allows generation of coverage reports with pytest.
pytest-cov
# Allows codecov to generate coverage reports
coverage
codecov
# Required to run sanic tests
aiohttp
#### DOC-RELATED PACKAGES ####
# Builds our documentation.
sphinx>=2.1.1
# Watches the documentation directory and rebuilds on changes.
sphinx-autobuild
# Read the Docs theme for our Sphinx documentation.
sphinx_rtd_theme
# Only used to convert our README to reStructuredText for PyPI.
pypandoc
# PyPI uploads.
twine>=1.11.0