Skip to content

chore(dependencies): update dependency pillow to v10 - autoclosed #727

chore(dependencies): update dependency pillow to v10 - autoclosed

chore(dependencies): update dependency pillow to v10 - autoclosed #727

Workflow file for this run

# CI workflow for the pixelmatch project.
# Jobs: plain pip install check, twine packaging check, black formatting
# check, pytest (with mypy + coverage) across Python 3.7–3.10, and a
# benchmark run with regression alerts on Python 3.10.
#
# NOTE(review): indentation was lost in the source capture; reconstructed
# per the GitHub Actions workflow schema (jobs -> job id -> steps).
name: Main workflow
on: [push, pull_request]
jobs:
  pip-install:
    name: pip install
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"
      - run: python -m pip install -U pip
      - run: pip install .
  twine-check:
    name: twine check
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"
      - run: pip install poetry twine
      - run: poetry install
      - run: poetry build
      # Validate the built sdist/wheel metadata before any release.
      - run: twine check dist/pixelmatch*
  black:
    name: black
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"
      - run: pip install poetry
      - run: poetry install
      - run: poetry run black --check .
  pytest:
    name: Test on Python ${{ matrix.python-version }}
    runs-on: ubuntu-latest
    strategy:
      matrix:
        # Quoted so "3.10" is not parsed as the float 3.1.
        python-version: ["3.7", "3.8", "3.9", "3.10"]
    steps:
      - uses: actions/checkout@v3
        with:
          # Test fixture images are stored in Git LFS.
          lfs: true
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - run: pip install poetry
      - run: poetry install
      - run: poetry run pytest --mypy --cov=pixelmatch --cov-report xml --cov-report term-missing --benchmark-disable
      # NOTE(review): action ref was garbled in the source capture
      # ("codecov/[email protected]" — email-obfuscation artifact); presumably
      # codecov/codecov-action. Confirm the exact pinned version against
      # the repository history before merging.
      - uses: codecov/codecov-action@v3
  bench:
    name: Run Benchmark on Python 3.10
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          lfs: true
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"
      - run: pip install poetry
      - run: poetry install
      - run: poetry run pytest --benchmark-json output.json
      - name: Download previous benchmark data
        uses: actions/cache@v3
        with:
          path: ./cache
          key: ${{ runner.os }}-benchmark
      - name: Store benchmark result
        uses: rhysd/github-action-benchmark@v1
        with:
          tool: "pytest"
          output-file-path: output.json
          external-data-json-path: ./cache/benchmark-data.json
          # Fail the job if the benchmark regresses beyond the threshold.
          fail-on-alert: true