Run some slow tests on the self-hosted runner (#5161)
* make checklist test run faster

* mark some more tests as GPU

* move t5 download
epwalsh authored Apr 27, 2021
1 parent 9091580 commit 12f5b0f
Showing 4 changed files with 15 additions and 5 deletions.
8 changes: 7 additions & 1 deletion tests/commands/checklist_test.py
@@ -3,7 +3,7 @@
 
 from allennlp.commands import main
 from allennlp.commands.checklist import CheckList
-from allennlp.common.testing import AllenNlpTestCase
+from allennlp.common.testing import AllenNlpTestCase, requires_gpu
 
 
 class TestCheckList(AllenNlpTestCase):
@@ -39,6 +39,8 @@ def test_add_checklist_subparser(self):
         assert args.output_file == "/dev/null"
         assert args.cuda_device == 0
 
+    # Mark this as GPU so it runs on a self-hosted runner, which will be a lot faster.
+    @requires_gpu
     def test_works_with_known_model(self):
 
         sys.argv = [
@@ -48,6 +50,10 @@ def test_works_with_known_model(self):
             str(self.task),
             "--task-suite-args",
             '{"positive": 1, "negative": 0}',
+            "--max-examples",
+            "1",
+            "--cuda-device",
+            "0",
         ]
 
         main()
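
The @requires_gpu marker imported and applied above is what lets CI route this test to the self-hosted GPU runner. Its definition is not part of this diff; the following is a minimal sketch of such a marker, assuming it combines a "gpu" pytest mark (so the runner can collect tests with pytest -m gpu) with a skip when no CUDA device is available.

import pytest
import torch


def requires_gpu(test_fn):
    # Hypothetical sketch, not the actual allennlp.common.testing implementation.
    # Tag the test as "gpu" so CI on the self-hosted runner can collect it with
    # `pytest -m gpu`, and skip it when no CUDA device is available.
    test_fn = pytest.mark.gpu(test_fn)
    return pytest.mark.skipif(
        torch.cuda.device_count() < 1, reason="1 or more GPUs required."
    )(test_fn)
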
3 changes: 1 addition & 2 deletions tests/common/file_utils_test.py
@@ -599,9 +599,8 @@ def test_cached_download_no_user_or_org(self):
         assert meta.resource == "hf://t5-small/config.json"
 
     def test_snapshot_download_no_user_or_org(self):
-        path = cached_path("hf://t5-small", cache_dir=self.TEST_DIR)
+        path = cached_path("hf://t5-small")
         assert os.path.isdir(path)
-        assert pathlib.Path(os.path.dirname(path)) == self.TEST_DIR
         assert os.path.isfile(path + ".json")
         meta = _Meta.from_path(path + ".json")
         assert meta.resource == "hf://t5-small"
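
Dropping cache_dir=self.TEST_DIR means the t5-small snapshot goes to the default AllenNLP cache instead of a per-test temporary directory, so the slow download can be reused across test runs. A rough usage sketch of cached_path with hf:// URLs, based on the calls visible in this test:

from allennlp.common.file_utils import cached_path

# Download (or reuse) a snapshot of the t5-small repo from the Hugging Face Hub;
# with no cache_dir argument it lands in the default AllenNLP cache, so repeated
# runs can reuse it. Returns the local directory of the snapshot.
local_dir = cached_path("hf://t5-small")

# A single file from the same repo can be fetched the same way.
config_path = cached_path("hf://t5-small/config.json")
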
3 changes: 3 additions & 0 deletions tests/modules/transformer/t5_test.py
@@ -2,8 +2,11 @@
 from transformers.models import t5 as hf_t5
 
 from allennlp.modules.transformer.t5 import T5
+from allennlp.common.testing import requires_gpu
 
 
+# Mark this as GPU so it runs on a self-hosted runner, which will be a lot faster.
+@requires_gpu
 @pytest.mark.parametrize(
     "pretrained_model_name",
     [
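
The body of this parametrized test is not shown in the diff. As a rough illustration only, a GPU-marked test parametrized over pretrained checkpoints could look like the sketch below; T5.from_pretrained_module and the "t5-small" parameter are assumptions, not taken from the actual test.

import pytest

from allennlp.common.testing import requires_gpu
from allennlp.modules.transformer.t5 import T5


@requires_gpu
@pytest.mark.parametrize("pretrained_model_name", ["t5-small"])
def test_t5_loads_pretrained_weights(pretrained_model_name):
    # `from_pretrained_module` is an assumed constructor here: build the
    # AllenNLP T5 module from the named pretrained checkpoint.
    model = T5.from_pretrained_module(pretrained_model_name)
    assert model is not None
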
@@ -1,5 +1,5 @@
 from allennlp.sanity_checks.task_checklists.sentiment_analysis_suite import SentimentAnalysisSuite
-from allennlp.common.testing import AllenNlpTestCase
+from allennlp.common.testing import AllenNlpTestCase, requires_gpu
 from allennlp.models.archival import load_archive
 from allennlp.predictors import Predictor
 
@@ -12,6 +12,8 @@ def setup_method(self):
         )
         self.predictor = Predictor.from_archive(archive)
 
+    # Mark this as GPU so it runs on a self-hosted runner, which will be a lot faster.
+    @requires_gpu
     def test_run(self):
         data = [
             "This is really good",
@@ -22,4 +24,4 @@
             "I have visited the place for 3 years; great food!",
         ]
         suite = SentimentAnalysisSuite(add_default_tests=True, data=data)
-        suite.run(self.predictor, max_examples=10)
+        suite.run(self.predictor, max_examples=1)
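
Lowering max_examples from 10 to 1 caps how many examples each checklist test runs on, which is what makes this test faster. A standalone sketch of the same flow, using only the calls visible in this diff (the archive path is a placeholder):

from allennlp.models.archival import load_archive
from allennlp.predictors import Predictor
from allennlp.sanity_checks.task_checklists.sentiment_analysis_suite import SentimentAnalysisSuite

# Placeholder archive path; the test above loads one from its fixtures directory.
archive = load_archive("/path/to/model.tar.gz")
predictor = Predictor.from_archive(archive)

suite = SentimentAnalysisSuite(
    add_default_tests=True,
    data=["This is really good", "I have visited the place for 3 years; great food!"],
)
# max_examples bounds the number of examples per checklist test,
# trading coverage for speed.
suite.run(predictor, max_examples=1)
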
