From cab8b81348d384704978cc9117457c642ba7c9d5 Mon Sep 17 00:00:00 2001
From: Lysandre Debut
Date: Mon, 11 Apr 2022 14:59:47 -0300
Subject: [PATCH] Run the scheduled tests

---
 .github/workflows/self-scheduled.yml          | 35 +++++++++++++++----
 setup.py                                      |  5 ++-
 src/transformers/dependency_versions_table.py |  2 +-
 3 files changed, 31 insertions(+), 11 deletions(-)

diff --git a/.github/workflows/self-scheduled.yml b/.github/workflows/self-scheduled.yml
index 491169be808d4e..4550cfe5759d71 100644
--- a/.github/workflows/self-scheduled.yml
+++ b/.github/workflows/self-scheduled.yml
@@ -1,6 +1,9 @@
 name: Self-hosted runner (scheduled)
 
 on:
+  push:
+    branches:
+      - post-action-build-test-tokenizers-*
   repository_dispatch:
   schedule:
     - cron: "0 2 * * *"
@@ -23,8 +26,11 @@ jobs:
       machines: [multi-gpu-docker, single-gpu-docker]
     runs-on: ${{ matrix.machines }}
     container:
-      image: huggingface/transformers-all-latest-gpu
+      image: huggingface/internal-transformers-all-latest-gpu-tokenizers-main
       options: --gpus 0 --shm-size "16gb" --ipc host -v /mnt/cache/.cache/huggingface:/mnt/cache/
+      credentials:
+        username: ${{ secrets.DOCKERHUB_USERNAME }}
+        password: ${{ secrets.DOCKERHUB_PASSWORD }}
     outputs:
       matrix: ${{ steps.set-matrix.outputs.matrix }}
     steps:
@@ -65,8 +71,11 @@
       machines: [multi-gpu-docker, single-gpu-docker]
     runs-on: ${{ matrix.machines }}
     container:
-      image: huggingface/transformers-all-latest-gpu
+      image: huggingface/internal-transformers-all-latest-gpu-tokenizers-main
       options: --gpus 0 --shm-size "16gb" --ipc host -v /mnt/cache/.cache/huggingface:/mnt/cache/
+      credentials:
+        username: ${{ secrets.DOCKERHUB_USERNAME }}
+        password: ${{ secrets.DOCKERHUB_PASSWORD }}
     needs: setup
     steps:
       - name: Echo folder ${{ matrix.folders }}
@@ -96,8 +105,11 @@
     name: Examples directory
     runs-on: [self-hosted, single-gpu-docker]
     container:
-      image: huggingface/transformers-all-latest-gpu
+      image: huggingface/internal-transformers-all-latest-gpu-tokenizers-main
       options: --gpus 0 --shm-size "16gb" --ipc host -v /mnt/cache/.cache/huggingface:/mnt/cache/
+      credentials:
+        username: ${{ secrets.DOCKERHUB_USERNAME }}
+        password: ${{ secrets.DOCKERHUB_PASSWORD }}
     needs: setup
     steps:
       - name: Update clone
@@ -130,8 +142,11 @@
       machines: [multi-gpu-docker, single-gpu-docker]
     runs-on: ${{ matrix.machines }}
     container:
-      image: huggingface/transformers-pytorch-gpu
+      image: huggingface/internal-transformers-pytorch-gpu-tokenizers-main
       options: --gpus 0 --shm-size "16gb" --ipc host -v /mnt/cache/.cache/huggingface:/mnt/cache/
+      credentials:
+        username: ${{ secrets.DOCKERHUB_USERNAME }}
+        password: ${{ secrets.DOCKERHUB_PASSWORD }}
     needs: setup
     steps:
       - name: Update clone
@@ -165,8 +180,11 @@
       machines: [multi-gpu-docker, single-gpu-docker]
     runs-on: ${{ matrix.machines }}
     container:
-      image: huggingface/transformers-tensorflow-gpu
+      image: huggingface/internal-transformers-tensorflow-gpu-tokenizers-main
       options: --gpus 0 --shm-size "16gb" --ipc host -v /mnt/cache/.cache/huggingface:/mnt/cache/
+      credentials:
+        username: ${{ secrets.DOCKERHUB_USERNAME }}
+        password: ${{ secrets.DOCKERHUB_PASSWORD }}
     needs: setup
     steps:
       - name: Update clone
@@ -202,8 +220,11 @@
     runs-on: ${{ matrix.machines }}
     needs: setup
     container:
-      image: huggingface/transformers-pytorch-deepspeed-latest-gpu
+      image: huggingface/internal-transformers-pytorch-deepspeed-latest-gpu-tokenizers-main
       options: --gpus all --shm-size "16gb" --ipc host -v /mnt/cache/.cache/huggingface:/mnt/cache/
+      credentials:
+        username: ${{ secrets.DOCKERHUB_USERNAME }}
+        password: ${{ secrets.DOCKERHUB_PASSWORD }}
     steps:
       - name: Update clone
         working-directory: /workspace/transformers
@@ -247,7 +268,7 @@
         env:
           CI_SLACK_BOT_TOKEN: ${{ secrets.CI_SLACK_BOT_TOKEN }}
           CI_SLACK_CHANNEL_ID: ${{ secrets.CI_SLACK_CHANNEL_ID }}
-          CI_SLACK_CHANNEL_ID_DAILY: ${{ secrets.CI_SLACK_CHANNEL_ID_DAILY }}
+          CI_SLACK_CHANNEL_ID_DAILY: ${{ secrets.CI_SLACK_CHANNEL_DUMMY_TESTS }}
           CI_SLACK_CHANNEL_DUMMY_TESTS: ${{ secrets.CI_SLACK_CHANNEL_DUMMY_TESTS }}
         run: |
           pip install slack_sdk
diff --git a/setup.py b/setup.py
index 4d386ae00825c0..ac459832449acc 100644
--- a/setup.py
+++ b/setup.py
@@ -151,7 +151,7 @@
     "tf2onnx",
     "timeout-decorator",
     "timm",
-    "tokenizers>=0.11.1,!=0.11.3,<0.13",
+    "tokenizers",
     "torch>=1.0",
     "torchaudio",
     "pyctcdecode>=0.3.0",
@@ -315,8 +315,7 @@ def run(self):
     extras['testing']
     + extras['torch']
     + extras["sentencepiece"]
-    + extras["tokenizers"]
-    + extras["torch-speech"]
+    + extras["tokenizers"] + extras["torch-speech"]
     + extras["vision"]
     + extras["integrations"]
     + extras["timm"]
diff --git a/src/transformers/dependency_versions_table.py b/src/transformers/dependency_versions_table.py
index 334103c20a560d..011b59d94f0cae 100644
--- a/src/transformers/dependency_versions_table.py
+++ b/src/transformers/dependency_versions_table.py
@@ -61,7 +61,7 @@
     "tf2onnx": "tf2onnx",
     "timeout-decorator": "timeout-decorator",
     "timm": "timm",
-    "tokenizers": "tokenizers>=0.11.1,!=0.11.3,<0.13",
+    "tokenizers": "tokenizers",
     "torch": "torch>=1.0",
     "torchaudio": "torchaudio",
     "pyctcdecode": "pyctcdecode>=0.3.0",