Skip to content

Commit 46eb863

Browse files
authored
Merge pull request #1 from EducationalTestingService/release/2.11.0
Release/2.11.0
2 parents 80fb606 + 34c53d0 commit 46eb863

File tree

2 files changed

+143
-2
lines changed

2 files changed

+143
-2
lines changed

.flake8

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,10 @@ max-line-length = 115
33

44
ignore =
55
# these rules don't play well with black
6-
E203 # whitespace before :
7-
W503 # line break before binary operator
6+
# whitespace before :
7+
E203,
8+
# line break before binary operator
9+
W503,
810

911
exclude =
1012
build/**

conda-recipe/allennlp/meta.yaml

Lines changed: 139 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,139 @@
1+
{% set tests_to_skip = "_not_a_real_test" %}
2+
{% set version = "2.11.0" %}
3+
4+
{% set spacy_model = "en_core_web_sm" %}
5+
6+
package:
7+
name: allennlp-split
8+
version: {{ version }}
9+
10+
source:
11+
path: ../../../allennlp
12+
13+
build:
14+
number: 0
15+
skip: true # [win or py<37]
16+
17+
requirements:
18+
build:
19+
- python # [build_platform != target_platform]
20+
- cross-python_{{ target_platform }} # [build_platform != target_platform]
21+
- {{ compiler('c') }}
22+
- sysroot_linux-64 ==2.17 # [linux64]
23+
24+
outputs:
25+
- name: allennlp
26+
build:
27+
script: python -m pip install . --no-deps -vv
28+
entry_points:
29+
- allennlp=allennlp.__main__:run
30+
requirements:
31+
build:
32+
- python # [build_platform != target_platform]
33+
- cross-python_{{ target_platform }} # [build_platform != target_platform]
34+
- {{ compiler('c') }}
35+
host:
36+
- python
37+
- pip
38+
run:
39+
- python
40+
# see https://github.com/allenai/allennlp/blame/v{{ version }}/setup.py
41+
# (as of 2.10.0, removed pytest & obsolete dataclasses compared
42+
# to upstream, plus all their pins to get working versions)
43+
- base58
44+
- cached_path >=1.1.3
45+
- dill
46+
- fairscale ==0.4.6
47+
- filelock >=3.3
48+
- h5py >=3.6
49+
- huggingface_hub >=0.0.16
50+
- jsonnet >=0.10.0 # [not win]
51+
- python-lmdb >=1.2.1
52+
- more-itertools >=8.12
53+
- nltk >=3.6.5
54+
- numpy
55+
- pytorch >=1.8.0
56+
- requests >=2.28
57+
- scikit-learn >=1.0.1
58+
- scipy >=1.7.3
59+
- sentencepiece >=0.1.96
60+
- spacy >=2.1.0
61+
- tensorboardx >=1.2
62+
- termcolor =1.1.*
63+
- torchvision >=0.8.1
64+
- tqdm >=4.62
65+
- transformers >=4.1
66+
- wandb >=0.10
67+
# transitive spacy-dep; make sure it's high enough to not be broken
68+
- typer >=0.4.1
69+
70+
# checklist-specific tests in separate output
71+
{% set tests_to_skip = "_not_a_real_test" %}
72+
# some issue with too much forking (but is flaky, doesn't always appear):
73+
# `The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...`
74+
{% set tests_to_skip = tests_to_skip + " or test_multiprocess_data_loader" %} # [osx]
75+
{% set tests_to_skip = tests_to_skip + " or test_with_multi_process_loading" %} # [osx]
76+
# fails on osx with `process 0 terminated with signal SIGSEGV` in torch.multiprocess
77+
{% set tests_to_skip = tests_to_skip + " or test_train_model_distributed_without_sharded_reader" %} # [osx]
78+
79+
test:
80+
requires:
81+
- flaky
82+
- git
83+
- git-lfs
84+
- matplotlib
85+
- pytest
86+
- responses >=0.12.1
87+
- spacy-model-{{ spacy_model }}
88+
source_files:
89+
- tests/
90+
- test_fixtures/
91+
# needed by one of the tests
92+
- README.md
93+
commands:
94+
- python -m spacy link {{ spacy_model }} {{ spacy_model }}
95+
# test that dummy command works even without checklist package
96+
- allennlp checklist
97+
- allennlp test-install
98+
- pytest tests/ -v -k "not ({{ tests_to_skip }})" --ignore-glob=*checklist*
99+
imports:
100+
- allennlp
101+
- allennlp.commands
102+
- allennlp.common
103+
- allennlp.common.testing
104+
- allennlp.data
105+
- allennlp.data.dataset_readers
106+
- allennlp.data.dataset_readers.dataset_utils
107+
- allennlp.data.fields
108+
- allennlp.data.samplers
109+
- allennlp.data.token_indexers
110+
- allennlp.data.tokenizers
111+
- allennlp.models
112+
- allennlp.modules
113+
- allennlp.modules.attention
114+
- allennlp.modules.matrix_attention
115+
- allennlp.modules.seq2seq_encoders
116+
- allennlp.modules.seq2vec_encoders
117+
- allennlp.modules.span_extractors
118+
- allennlp.modules.text_field_embedders
119+
- allennlp.modules.token_embedders
120+
- allennlp.nn
121+
- allennlp.nn.regularizers
122+
- allennlp.predictors
123+
- allennlp.tools
124+
- allennlp.training
125+
- allennlp.training.learning_rate_schedulers
126+
- allennlp.training.metrics
127+
- allennlp.training.momentum_schedulers
128+
129+
about:
130+
home: https://allennlp.org/
131+
license: Apache-2.0
132+
license_family: APACHE
133+
license_file: LICENSE
134+
summary: An open-source NLP research library, built on PyTorch.
135+
description: |
136+
An Apache 2.0 NLP research library, built on PyTorch, for developing state-of-the-art
137+
deep learning models on a wide variety of linguistic tasks.
138+
doc_url: https://allenai.github.io/allennlp-docs/
139+
dev_url: https://github.com/allenai/allennlp

0 commit comments

Comments (0)