Implement heapq for cookie expire times #5969
Workflow file for this run
name: CI

on:
  merge_group:
  push:
    branches:
      - 'master'
      - '[0-9].[0-9]+' # matches backport branches, e.g. 3.6
    tags: [ 'v*' ]
  pull_request:
    branches:
      - 'master'
      - '[0-9].[0-9]+'
  schedule:
    - cron: '0 6 * * *' # Daily 6AM UTC build

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}
  cancel-in-progress: true
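# Cancellation is scoped per pull request: for PR events the group key uses the
# PR number, while pushes and scheduled runs fall back to the commit SHA, so a
# new push to the same PR cancels the run still in progress.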
env:
  COLOR: yes
  FORCE_COLOR: 1 # Request colored output from CLI tools supporting it
  MYPY_FORCE_COLOR: 1
  PY_COLORS: 1

permissions: {}
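# The default GITHUB_TOKEN permissions are dropped entirely here; each job
# below re-grants only what it actually needs (e.g. `contents: read` for
# checkout, and `contents: write`/`id-token: write` in the deploy job).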
jobs:
  lint:
    permissions:
      contents: read # to fetch code (actions/checkout)
    name: Linter
    runs-on: ubuntu-latest
    timeout-minutes: 5
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: true
      - name: >-
          Verify that `requirements/runtime-deps.in`
          is in sync with `setup.cfg`
        run: |
          set -eEuo pipefail
          make sync-direct-runtime-deps
          git diff --exit-code -- requirements/runtime-deps.in
      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: 3.11
      - name: Cache PyPI
        uses: actions/cache@v4
        with:
          key: pip-lint-${{ hashFiles('requirements/*.txt') }}
          path: ~/.cache/pip
          restore-keys: |
            pip-lint-
      - name: Update pip, wheel, setuptools, build, twine
        run: |
          python -m pip install -U pip wheel setuptools build twine
      - name: Install dependencies
        run: |
          python -m pip install -r requirements/lint.in -c requirements/lint.txt
      - name: Install self
        run: |
          python -m pip install . -c requirements/runtime-deps.txt
        env:
          AIOHTTP_NO_EXTENSIONS: 1
      - name: Run mypy
        run: |
          make mypy
      - name: Run slotscheck
        run: |
          # Some extra requirements are needed to ensure all modules
          # can be scanned by slotscheck.
          pip install -r requirements/base.in -c requirements/base.txt
          slotscheck -v -m aiohttp
      - name: Install libenchant
        run: |
          sudo apt install libenchant-2-dev
      - name: Install spell checker
        run: |
          pip install -r requirements/doc-spelling.in -c requirements/doc-spelling.txt
      - name: Run docs spelling
        run: |
          # towncrier --yes # uncomment me after publishing a release
          make doc-spelling
      - name: Build package
        run: |
          python -m build
        env:
          AIOHTTP_NO_EXTENSIONS: 1
      - name: Run twine checker
        run: |
          twine check --strict dist/*
      - name: Making sure that CONTRIBUTORS.txt remains sorted
        run: |
          LC_ALL=C sort --check --ignore-case CONTRIBUTORS.txt
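  # Generates the vendored llhttp C sources once (the only place Node.js is
  # needed), keyed by a cache of the vendor/llhttp inputs, and publishes them
  # as the `llhttp` artifact that the test and packaging jobs download before
  # cythonizing.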
  gen_llhttp:
    permissions:
      contents: read # to fetch code (actions/checkout)
    name: Generate llhttp sources
    runs-on: ubuntu-latest
    timeout-minutes: 5
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: true
      - name: Cache llhttp generated files
        uses: actions/cache@v4
        id: cache
        with:
          key: llhttp-${{ hashFiles('vendor/llhttp/package*.json', 'vendor/llhttp/src/**/*') }}
          path: vendor/llhttp/build
      - name: Setup NodeJS
        if: steps.cache.outputs.cache-hit != 'true'
        uses: actions/setup-node@v4
        with:
          node-version: 18
      - name: Generate llhttp sources
        if: steps.cache.outputs.cache-hit != 'true'
        run: |
          make generate-llhttp
      - name: Upload llhttp generated files
        uses: actions/upload-artifact@v3
        with:
          name: llhttp
          path: vendor/llhttp/build
          if-no-files-found: error
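  # The test matrix covers CPython 3.9-3.12 on Ubuntu, macOS and Windows with
  # the C extensions, plus pure-Python (AIOHTTP_NO_EXTENSIONS) runs and
  # PyPy 3.9 on Ubuntu only; CPython 3.13 is a pre-release entry marked
  # experimental, so its failures do not fail the workflow thanks to
  # `continue-on-error`.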
  test:
    permissions:
      contents: read # to fetch code (actions/checkout)
    name: Test
    needs: gen_llhttp
    strategy:
      matrix:
        pyver: [3.9, '3.10', '3.11', '3.12']
        no-extensions: ['', 'Y']
        os: [ubuntu, macos, windows]
        experimental: [false]
        exclude:
          - os: macos
            no-extensions: 'Y'
          - os: windows
            no-extensions: 'Y'
        include:
          - pyver: pypy-3.9
            no-extensions: 'Y'
            os: ubuntu
            experimental: false
          - os: ubuntu
            pyver: "3.13"
            experimental: true
            no-extensions: 'Y'
      fail-fast: true
    runs-on: ${{ matrix.os }}-latest
    continue-on-error: ${{ matrix.experimental }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: true
      - name: Setup Python ${{ matrix.pyver }}
        id: python-install
        uses: actions/setup-python@v5
        with:
          allow-prereleases: true
          python-version: ${{ matrix.pyver }}
      - name: Get pip cache dir
        id: pip-cache
        run: |
          echo "dir=$(pip cache dir)" >> "${GITHUB_OUTPUT}"
        shell: bash
      - name: Cache PyPI
        uses: actions/cache@v4
        with:
          key: pip-ci-${{ runner.os }}-${{ matrix.pyver }}-${{ matrix.no-extensions }}-${{ hashFiles('requirements/*.txt') }}
          path: ${{ steps.pip-cache.outputs.dir }}
          restore-keys: |
            pip-ci-${{ runner.os }}-${{ matrix.pyver }}-${{ matrix.no-extensions }}-
      - name: Update pip, wheel, setuptools, build, twine
        run: |
          python -m pip install -U pip wheel setuptools build twine
      - name: Install dependencies
        run: |
          python -m pip install -r requirements/test.in -c requirements/test.txt
      - name: Restore llhttp generated files
        if: ${{ matrix.no-extensions == '' }}
        uses: actions/download-artifact@v3
        with:
          name: llhttp
          path: vendor/llhttp/build/
      - name: Cythonize
        if: ${{ matrix.no-extensions == '' }}
        run: |
          make cythonize
      - name: Install self
        env:
          AIOHTTP_NO_EXTENSIONS: ${{ matrix.no-extensions }}
        run: python -m pip install -e .
      - name: Run unittests
        env:
          COLOR: yes
          AIOHTTP_NO_EXTENSIONS: ${{ matrix.no-extensions }}
          PIP_USER: 1
        run: >-
          PATH="${HOME}/Library/Python/3.11/bin:${HOME}/.local/bin:${PATH}"
          pytest --junitxml=junit.xml
        shell: bash
      - name: Re-run the failing tests with maximum verbosity
        if: failure()
        env:
          COLOR: yes
          AIOHTTP_NO_EXTENSIONS: ${{ matrix.no-extensions }}
        run: >- # `exit 1` makes sure that the job remains red with flaky runs
          pytest --no-cov -vvvvv --lf && exit 1
        shell: bash
      - name: Run dev_mode tests
        env:
          COLOR: yes
          AIOHTTP_NO_EXTENSIONS: ${{ matrix.no-extensions }}
          PIP_USER: 1
        run: python -X dev -m pytest -m dev_mode --cov-append
        shell: bash
      - name: Turn coverage into xml
        env:
          COLOR: 'yes'
          PIP_USER: 1
        run: |
          python -m coverage xml
      - name: Upload coverage
        uses: codecov/codecov-action@v4
        with:
          file: ./coverage.xml
          flags: >-
            CI-GHA,OS-${{
            runner.os
            }},VM-${{
            matrix.os
            }},Py-${{
            steps.python-install.outputs.python-version
            }}
          token: ${{ secrets.CODECOV_TOKEN }}
      - name: Upload test results to Codecov
        if: ${{ !cancelled() }}
        uses: codecov/test-results-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }}

  check: # This job does nothing and is only used for the branch protection
    if: always()
    needs:
      - lint
      - test
    runs-on: ubuntu-latest
    steps:
      - name: Decide whether the needed jobs succeeded or failed
        uses: re-actors/alls-green@release/v1
        with:
          jobs: ${{ toJSON(needs) }}

  pre-deploy:
    name: Pre-Deploy
    runs-on: ubuntu-latest
    needs: check
    # Run only on pushing a tag
    if: github.event_name == 'push' && contains(github.ref, 'refs/tags/')
    steps:
      - name: Dummy
        run: |
          echo "Predeploy step"
  build-tarball:
    permissions:
      contents: read # to fetch code (actions/checkout)
    name: Tarball
    runs-on: ubuntu-latest
    needs: pre-deploy
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: true
      - name: Setup Python
        uses: actions/setup-python@v5
      - name: Update pip, wheel, setuptools, build, twine
        run: |
          python -m pip install -U pip wheel setuptools build twine
      - name: Install cython
        run: >-
          python -m
          pip install -r requirements/cython.in -c requirements/cython.txt
      - name: Restore llhttp generated files
        uses: actions/download-artifact@v3
        with:
          name: llhttp
          path: vendor/llhttp/build/
      - name: Cythonize
        run: |
          make cythonize
      - name: Make sdist
        run: |
          python -m build --sdist
      - name: Upload artifacts
        uses: actions/upload-artifact@v3
        with:
          name: dist
          path: dist

  build-wheels:
    permissions:
      contents: read # to fetch code (actions/checkout)
    name: Build wheels on ${{ matrix.os }} ${{ matrix.qemu }}
    runs-on: ${{ matrix.os }}-latest
    needs: pre-deploy
    strategy:
      matrix:
        os: [ubuntu, windows, macos]
        qemu: ['']
        include:
          # Split ubuntu job for the sake of speed-up
          - os: ubuntu
            qemu: aarch64
          - os: ubuntu
            qemu: ppc64le
          - os: ubuntu
            qemu: s390x
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: true
      - name: Set up QEMU
        if: ${{ matrix.qemu }}
        uses: docker/setup-qemu-action@v3
        with:
          platforms: all
        id: qemu
      - name: Prepare emulation
        run: |
          if [[ -n "${{ matrix.qemu }}" ]]; then
            # Build emulated architectures only if QEMU is set,
            # use default "auto" otherwise
            echo "CIBW_ARCHS_LINUX=${{ matrix.qemu }}" >> $GITHUB_ENV
          fi
        shell: bash
      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: 3.x
      - name: Update pip, wheel, setuptools, build, twine
        run: |
          python -m pip install -U pip wheel setuptools build twine
      - name: Install cython
        run: >-
          python -m
          pip install -r requirements/cython.in -c requirements/cython.txt
      - name: Restore llhttp generated files
        uses: actions/download-artifact@v3
        with:
          name: llhttp
          path: vendor/llhttp/build/
      - name: Cythonize
        run: |
          make cythonize
      - name: Build wheels
        uses: pypa/cibuildwheel@v2.16.5
        env:
          CIBW_ARCHS_MACOS: x86_64 arm64 universal2
      - uses: actions/upload-artifact@v3
        with:
          name: dist
          path: ./wheelhouse/*.whl
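  # Publishes the collected `dist` artifact (sdist + wheels): a GitHub release
  # is created from CHANGES.rst, the packages are uploaded to PyPI via trusted
  # publishing (hence `id-token: write`), signed with Sigstore, and the
  # resulting files are attached to the release.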
  deploy:
    name: Deploy
    needs: [build-tarball, build-wheels]
    runs-on: ubuntu-latest
    permissions:
      contents: write # IMPORTANT: mandatory for making GitHub Releases
      id-token: write # IMPORTANT: mandatory for trusted publishing & sigstore
    environment:
      name: pypi
      url: https://pypi.org/p/aiohttp
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: true
      - name: Login
        run: |
          echo "${{ secrets.GITHUB_TOKEN }}" | gh auth login --with-token
      - name: Download distributions
        uses: actions/download-artifact@v3
        with:
          name: dist
          path: dist
      - name: Collected dists
        run: |
          tree dist
      - name: Make Release
        uses: aio-libs/create-release@v1.6.6
        with:
          changes_file: CHANGES.rst
          name: aiohttp
          version_file: aiohttp/__init__.py
          github_token: ${{ secrets.GITHUB_TOKEN }}
          dist_dir: dist
          fix_issue_regex: >-
            :issue:`(\d+)`
          fix_issue_repl: >-
            #\1
      - name: >-
          Publish 🐍📦 to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
      - name: Sign the dists with Sigstore
        uses: sigstore/gh-action-sigstore-python@v2.1.1
        with:
          inputs: >-
            ./dist/*.tar.gz
            ./dist/*.whl
      - name: Upload artifact signatures to GitHub Release
        # Confusingly, this action also supports updating releases, not
        # just creating them. This is what we want here, since we've manually
        # created the release above.
        uses: softprops/action-gh-release@v2
        with:
          # dist/ contains the built packages, while smoketest-artifacts/
          # contains the signatures and certificates.
          files: dist/**