(feat): io submodule #2890

Workflow file for this run

name: AWS GPU
on:
  push:
    branches: [main, "[0-9]+.[0-9]+.x"]
  pull_request:
    types:
      - labeled
      - opened
      - synchronize
# Cancel the job if new commits are pushed
# https://stackoverflow.com/questions/66335225/how-to-cancel-previous-runs-in-the-pr-when-you-push-new-commitsupdate-the-curre
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
# There are two jobs:
# 1. `check` determines if the second job (`test`) will be run (through a job dependency).
# 2. `test` runs on an AWS runner and executes the GPU tests.
jobs:
  # If the `skip-gpu-ci` label is set, this job is skipped, and consequently the `test` job too.
  # If the `run-gpu-ci` label is set or we reacted to a `push` event, this job succeeds (and `test` is run).
  # If neither is set, this job fails, `test` is skipped, and the whole workflow fails.
  check:
    name: "Triage: Check if GPU tests are allowed to run"
    if: (!contains(github.event.pull_request.labels.*.name, 'skip-gpu-ci'))
    runs-on: ubuntu-latest
    steps:
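      # flying-sheep/check simply makes this job pass or fail based on the `success` expression below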
      - uses: flying-sheep/check@v1
        with:
          success: ${{ github.event_name == 'push' || contains(github.event.pull_request.labels.*.name, 'run-gpu-ci') }}
  # If `check` was not skipped and did not fail, start an AWS runner and run the GPU tests on it.
  test:
    name: GPU Tests
    needs: check
    runs-on: "cirun-aws-gpu--${{ github.run_id }}"
    # Set a timeout of 30 minutes, as the AWS runner costs money.
    # At the time of writing, a typical run takes about 5 minutes.
    timeout-minutes: 30
    defaults:
      run:
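        # -e: abort on the first failing command; -l: run as a login shell so the runner's profile is sourced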
        shell: bash -el {0}
    steps:
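      # Check out the full history (fetch-depth: 0), presumably so the package version can be derived from git tags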
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
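      # nvidia-smi errors out if no NVIDIA driver/GPU is visible, catching a misconfigured runner early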
      - name: Nvidia SMI sanity check
        run: nvidia-smi
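      # python-version "3.x" resolves to the latest stable Python 3 release available to setup-python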
      - name: Install Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.x"
      - name: Install UV
        uses: hynek/setup-cached-uv@v2
        with:
          cache-dependency-path: pyproject.toml
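      # Editable install into the system interpreter (--system skips uv's virtualenv requirement);
      # the cu12 extra presumably pulls in the CUDA 12 GPU dependencies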
      - name: Install AnnData
        # Quote the version specifier so the shell does not treat `>=` as a redirect
        run: uv pip install --system -e ".[dev,test,cu12]" "llvmlite>=0.43"
      - name: Env list
        run: pip list
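      # -m gpu selects only tests marked with the `gpu` pytest marker;
      # --cov-context=test records which test covered each line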
      - name: Run test
        run: pytest -m gpu --cov --cov-report=xml --cov-context=test
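      # Upload coverage to Codecov; fail_ci_if_error makes the workflow fail if the upload itself fails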
      - uses: codecov/codecov-action@v4
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          fail_ci_if_error: true
          verbose: true