name: nv-v100-legacy
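
# Trigger on manual dispatch, a nightly schedule (00:00 UTC), and pull requests
# that touch the legacy MII code, its tests, this workflow file, the
# requirements files, or setup.py.
# (A manual run can be started with `gh workflow run nv-v100-legacy.yml` via the GitHub CLI.)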
on:
  workflow_dispatch:
  schedule:
    - cron: "0 0 * * *"
  pull_request:
    paths:
      - 'mii/__init__.py'
      - 'mii/legacy/**'
      - 'tests/legacy/**'
      - '.github/workflows/nv-v100-legacy.yml'
      - 'requirements/**'
      - 'setup.py'
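
# Cancel any in-flight run of this workflow for the same ref when a new run starts.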
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
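
# Single job: run the legacy unit tests on a self-hosted NVIDIA V100 runner
# (labeled for CUDA 11.7).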
jobs:
  unit-tests:
    runs-on: [self-hosted, nvidia, cu117, v100]

    steps:
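      # Check out the repository and create an isolated Python environment via
      # the repository's local setup-venv action.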
      - uses: actions/checkout@v4

      - id: setup-venv
        uses: ./.github/workflows/setup-venv
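
      # Install PyTorch from the CUDA 11.8 wheel index, caching downloads under
      # /blob/torch_cache/, then print the torch version and CUDA availability.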
      - name: Install pytorch
        run: |
          pip3 install -U --cache-dir /blob/torch_cache/ torch --index-url https://download.pytorch.org/whl/cu118
          python -c "import torch; print('torch:', torch.__version__, torch)"
          python -c "import torch; print('CUDA available:', torch.cuda.is_available())"
      - name: Install dependencies
        run: |
          pip install git+https://github.com/microsoft/DeepSpeed.git
          pip install git+https://github.com/huggingface/[email protected]
          pip install -U accelerate
          ds_report
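
      # Record the full list of installed packages in the job log.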
      - name: Python environment
        run: |
          pip list
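
      # Install MII from the checked-out source, including the dev extras used
      # by the test suite.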
      - name: Install MII
        run: |
          pip install .[dev]
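
      # Run the legacy test suite. TORCH_CUDA_ARCH_LIST is unset so custom ops
      # JIT-compile only for the runner's GPU architecture, stale torch
      # extensions are removed, the Hugging Face model cache points at the
      # shared /blob mount, and extensions build into a local directory.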
      - name: Unit tests
        run: |
          unset TORCH_CUDA_ARCH_LIST # only jit compile for current arch
          if [[ -d ./torch-extensions ]]; then rm -rf ./torch-extensions; fi
          cd tests/legacy
          TRANSFORMERS_CACHE=/blob/transformers_cache/ TORCH_EXTENSIONS_DIR=./torch-extensions pytest --color=yes --durations=0 --verbose ./