[pre-commit.ci] pre-commit suggestions #1180
Workflow file for this run

name: UnitTests

on:
  push:
    branches: [main, "release/*"]
  pull_request:
    branches: [main, "release/*"]

defaults:
  run:
    shell: bash

jobs:
  pytester:
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: ["ubuntu-22.04", "macos-13", "windows-2022"]
        python-version: ["3.8", "3.10", "3.12"]
        requires: ["oldest", "latest"]
        exclude:
          - { requires: "oldest", python-version: "3.12" }
    timeout-minutes: 35

    steps:
      - name: Checkout 🛎
        uses: actions/checkout@v4
        with:
          submodules: recursive

      - name: Set up Python 🐍 ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
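
      # The step below installs this package with its [cli] extra and presumably uses its own
      # `requirements set-oldest` command to rewrite the requirement files to their lowest supported pins.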
      - name: Set oldest dependencies
        # todo: this is strange to use itself :/
        if: matrix.requires == 'oldest'
        timeout-minutes: 20
        run: |
          pip install -e '.[cli]'
          python -m lightning_utilities.cli requirements set-oldest
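
      # `./.github/actions/cache` is a local composite action; presumably it restores pip/download
      # caches keyed on the Python version passed in below.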
      - name: Complex 💽 caching
        uses: ./.github/actions/cache
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install dependencies
        timeout-minutes: 20
        run: |
          pip install -e . -U -r requirements/_tests.txt \
            -f https://download.pytorch.org/whl/cpu/torch_stable.html
          pip --version
          pip list

      - name: Print 🖨️ dependencies
        uses: ./.github/actions/pip-list
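
      # `./.github/actions/unittesting` is another local composite action; given its inputs it
      # presumably runs pytest with coverage over `dirs` for the `lightning_utilities` package.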
      - name: Unittest and coverage
        uses: ./.github/actions/unittesting
        with:
          python-version: ${{ matrix.python-version }}
          dirs: "unittests"
          pkg-name: "lightning_utilities"
          pytest-args: "--timeout=120"

      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3 # assumed action/version; the original pin was obscured
        continue-on-error: true
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          file: ./coverage.xml
          flags: unittests
          env_vars: OS,PYTHON
          name: codecov-umbrella
          fail_ci_if_error: false

      - name: test Scripts
        working-directory: ./tests
        run: python -m pytest scripts --durations=50 --timeout=120
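
  # `testing-guardian` collapses the whole `pytester` matrix into a single status check: it fails
  # when any matrix job failed, and on a cancelled/skipped matrix it sleeps past its 1-minute
  # timeout so the check fails instead of passing silently.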
  testing-guardian:
    runs-on: ubuntu-latest
    needs: pytester
    if: always()
    steps:
      - run: echo "${{ needs.pytester.result }}"
      - name: failing...
        if: needs.pytester.result == 'failure'
        run: exit 1
      - name: cancelled or skipped...
        if: contains(fromJSON('["cancelled", "skipped"]'), needs.pytester.result)
        timeout-minutes: 1
        run: sleep 90