[Snyk] Fix for 2 vulnerabilities #10

Workflow file for this run

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
---
name: "Build Images"
on:  # yamllint disable-line rule:truthy
  pull_request_target:
permissions:
  # all other permissions are set to none
  contents: read
env:
  MOUNT_SELECTED_LOCAL_SOURCES: "false"
  ANSWER: "yes"
  CHECK_IMAGE_FOR_REBUILD: "true"
  SKIP_CHECK_REMOTE_IMAGE: "true"
  DEBIAN_VERSION: "bullseye"
  DB_RESET: "true"
  VERBOSE: "true"
  GITHUB_REPOSITORY: ${{ github.repository }}
  GITHUB_USERNAME: ${{ github.actor }}
  # You can override CONSTRAINTS_GITHUB_REPOSITORY by setting a secret in your repo, but by default
  # the Airflow one is used
  CONSTRAINTS_GITHUB_REPOSITORY: >-
    ${{ secrets.CONSTRAINTS_GITHUB_REPOSITORY != '' &&
    secrets.CONSTRAINTS_GITHUB_REPOSITORY || 'apache/airflow' }}
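  # Note (general GitHub Actions expression behaviour, not specific to this repo): expressions have
  # no ternary operator, so the "cond && value || fallback" pattern above is the usual substitute -
  # it resolves to the secret when it is set and non-empty, and to 'apache/airflow' otherwise.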
  # This token is a WRITE one - pull_request_target type of events always have the WRITE token
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  IMAGE_TAG_FOR_THE_BUILD: "${{ github.event.pull_request.head.sha || github.sha }}"
concurrency:
  group: build-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
jobs:
  build-info:
    timeout-minutes: 10
    name: "Build Info"
    runs-on: ${{ github.repository == 'apache/airflow' && 'self-hosted' || 'ubuntu-20.04' }}
    env:
      targetBranch: ${{ github.event.pull_request.base.ref }}
    outputs:
      runsOn: ${{ github.repository == 'apache/airflow' && '["self-hosted"]' || '["ubuntu-20.04"]' }}
      pythonVersions: "${{ steps.selective-checks.outputs.python-versions }}"
      upgradeToNewerDependencies: ${{ steps.selective-checks.outputs.upgrade-to-newer-dependencies }}
      allPythonVersions: ${{ steps.selective-checks.outputs.all-python-versions }}
      allPythonVersionsListAsString: ${{ steps.selective-checks.outputs.all-python-versions-list-as-string }}
      defaultPythonVersion: ${{ steps.selective-checks.outputs.default-python-version }}
      run-tests: ${{ steps.selective-checks.outputs.run-tests }}
      run-kubernetes-tests: ${{ steps.selective-checks.outputs.run-kubernetes-tests }}
      image-build: ${{ steps.dynamic-outputs.outputs.image-build }}
      cacheDirective: ${{ steps.dynamic-outputs.outputs.cacheDirective }}
      targetBranch: ${{ steps.dynamic-outputs.outputs.targetBranch }}
      defaultBranch: ${{ steps.selective-checks.outputs.default-branch }}
      targetCommitSha: "${{ steps.discover-pr-merge-commit.outputs.targetCommitSha ||
        github.event.pull_request.head.sha ||
        github.sha
        }}"
    steps:
      - name: Discover PR merge commit
        id: discover-pr-merge-commit
        run: |
          TARGET_COMMIT_SHA="$(gh api '${{ github.event.pull_request.url }}' --jq .merge_commit_sha)"
          echo "TARGET_COMMIT_SHA=$TARGET_COMMIT_SHA" >> $GITHUB_ENV
          echo "::set-output name=targetCommitSha::${TARGET_COMMIT_SHA}"
        if: github.event_name == 'pull_request_target'
      # The labels in the event aren't updated when re-triggering the job, so let's hit the API to get
      # up-to-date values
      - name: Get latest PR labels
        id: get-latest-pr-labels
        run: |
          echo -n "::set-output name=pullRequestLabels::"
          gh api graphql --paginate -F node_id=${{github.event.pull_request.node_id}} -f query='
            query($node_id: ID!, $endCursor: String) {
              node(id:$node_id) {
                ... on PullRequest {
                  labels(first: 100, after: $endCursor) {
                    nodes { name }
                    pageInfo { hasNextPage endCursor }
                  }
                }
              }
            }' --jq '.data.node.labels.nodes[]' | jq --slurp -c '[.[].name]'
        if: github.event_name == 'pull_request_target'
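      # Illustration (hypothetical labels): the gh --jq / jq pipeline above prints a compact JSON
      # array of label names, e.g. ["full tests needed", "okay to merge"], which later steps read
      # through steps.get-latest-pr-labels.outputs.pullRequestLabels.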
      # Retrieve it to be able to determine which files have changed in the incoming commit of the PR.
      # We checkout the target commit and its parent to be able to compare them.
      - name: Cleanup repo
        run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
      - uses: actions/checkout@v2
        with:
          ref: ${{ env.TARGET_COMMIT_SHA }}
          persist-credentials: false
          fetch-depth: 2
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@v2
        with:
          persist-credentials: false
          submodules: recursive
      - name: Selective checks
        id: selective-checks
        env:
          PR_LABELS: ${{ steps.get-latest-pr-labels.outputs.pullRequestLabels }}
        run: |
          if [[ ${GITHUB_EVENT_NAME} == "pull_request_target" ]]; then
            # Run selective checks
            ./scripts/ci/selective_ci_checks.sh "${TARGET_COMMIT_SHA}"
          else
            # Run all checks
            ./scripts/ci/selective_ci_checks.sh
          fi
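      # Note (assumption about the script's interface): selective_ci_checks.sh is expected to emit
      # "::set-output" lines whose values surface as steps.selective-checks.outputs.* and feed the
      # job outputs declared above.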
      - name: Compute dynamic outputs
        id: dynamic-outputs
        run: |
          set -x
          if [[ "${{ github.event_name }}" == 'pull_request_target' ]]; then
            echo "::set-output name=targetBranch::${targetBranch}"
          else
            # Direct push to branch, or scheduled build
            echo "::set-output name=targetBranch::${GITHUB_REF#refs/heads/}"
          fi
          if [[ "${{ github.event_name }}" == 'schedule' ]]; then
            echo "::set-output name=cacheDirective::disabled"
          else
            echo "::set-output name=cacheDirective::registry"
          fi
          if [[ "$SELECTIVE_CHECKS_IMAGE_BUILD" == "true" ]]; then
            echo "::set-output name=image-build::true"
          else
            echo "::set-output name=image-build::false"
          fi
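          # Context (general ::set-output behaviour, not specific to this workflow): every
          # "::set-output name=X::value" line registers X as a step output, which later steps and
          # dependent jobs read as steps.dynamic-outputs.outputs.X.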
        env:
          SELECTIVE_CHECKS_IMAGE_BUILD: ${{ steps.selective-checks.outputs.image-build }}
      - name: env
        run: printenv
        env:
          dynamicOutputs: ${{ toJSON(steps.dynamic-outputs.outputs) }}
          PR_LABELS: ${{ steps.get-latest-pr-labels.outputs.pullRequestLabels }}
          GITHUB_CONTEXT: ${{ toJson(github) }}
  build-ci-images:
    permissions:
      packages: write
    timeout-minutes: 80
    name: "Build CI images ${{ needs.build-info.outputs.allPythonVersionsListAsString }}"
    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
    needs: [build-info]
    if: |
      needs.build-info.outputs.image-build == 'true' &&
      github.event.pull_request.head.repo.full_name != 'apache/airflow'
    env:
      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn)[0] }}
      BACKEND: sqlite
    outputs: ${{toJSON(needs.build-info.outputs) }}
    steps:
      - name: Cleanup repo
        run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
      - uses: actions/checkout@v2
        with:
          ref: ${{ needs.build-info.outputs.targetCommitSha }}
          persist-credentials: false
          submodules: recursive
      - name: "Retrieve DEFAULTS from the _initialization.sh"
        # We cannot "source" the script here because that would be a security problem (we cannot run
        # any code that comes from the sources of the PR). Therefore, we extract the
        # DEFAULT_BRANCH, DEFAULT_CONSTRAINTS_BRANCH and DEBIAN_VERSION via custom grep/awk/sed commands.
        id: defaults
        run: |
          DEFAULT_BRANCH=$(grep "export DEFAULT_BRANCH" scripts/ci/libraries/_initialization.sh | \
            awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g')
          echo "DEFAULT_BRANCH=${DEFAULT_BRANCH}" >> $GITHUB_ENV
          DEFAULT_CONSTRAINTS_BRANCH=$(grep "export DEFAULT_CONSTRAINTS_BRANCH" \
            scripts/ci/libraries/_initialization.sh | \
            awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g')
          echo "DEFAULT_CONSTRAINTS_BRANCH=${DEFAULT_CONSTRAINTS_BRANCH}" >> $GITHUB_ENV
          DEBIAN_VERSION=$(grep "export DEBIAN_VERSION" scripts/ci/libraries/_initialization.sh | \
            awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g')
          echo "DEBIAN_VERSION=${DEBIAN_VERSION}" >> $GITHUB_ENV
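      # Illustration (assumed line format in _initialization.sh): for a line such as
      #   export DEFAULT_BRANCH=${DEFAULT_BRANCH:="main"}
      # splitting on "=" makes the third field '"main"}', and the sed strips the quotes and brace,
      # leaving just 'main'.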
      - name: >
          Checkout "${{ needs.build-info.outputs.targetBranch }}" branch to 'main-airflow' folder
          to use scripts/ci from there.
        uses: actions/checkout@v2
        with:
          path: "main-airflow"
          ref: "${{ needs.build-info.outputs.targetBranch }}"
          persist-credentials: false
          submodules: recursive
      - name: "Setup python"
        uses: actions/setup-python@v2
        with:
          python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
      - name: >
          Override "scripts/ci" with the "${{ needs.build-info.outputs.targetBranch }}" branch
          so that the PR does not override it
        # We should not override the scripts that become part of the image, because then the PR
        # changes would not be reflected in the image being built - we only override the scripts
        # that are executed to build the image.
        run: |
          rm -rfv "scripts/ci"
          rm -rfv "dev"
          mv -v "main-airflow/scripts/ci" "scripts"
          mv -v "main-airflow/dev" "."
      - uses: actions/setup-python@v2
        with:
          python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
          cache: 'pip'
          cache-dependency-path: ./dev/breeze/setup*
      - run: ./scripts/ci/install_breeze.sh
      - name: "Free space"
        run: breeze free-space
      - name: >
          Build & Push CI images ${{ env.IMAGE_TAG_FOR_THE_BUILD }}
          ${{ needs.build-info.outputs.allPythonVersionsListAsString }}
        run: breeze build-image --push-image --tag-as-latest --run-in-parallel
        env:
          UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }}
          DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }}
          IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }}
          PYTHON_VERSIONS: ${{ needs.build-info.outputs.allPythonVersionsListAsString }}
      - name: Push empty CI image ${{ env.PYTHON_MAJOR_MINOR_VERSION }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }}
        if: failure() || cancelled()
        run: breeze build-image --push-image --empty-image --run-in-parallel
        env:
          IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }}
      - name: "Candidates for pip resolver backtrack triggers"
        if: failure() || cancelled()
        run: >
          breeze find-newer-dependencies --max-age 1
          --python "${{ needs.build-info.outputs.defaultPythonVersion }}"
      - name: "Fix ownership"
        run: breeze fix-ownership
        if: always()
  build-prod-images:
    permissions:
      packages: write
    timeout-minutes: 80
    name: "Build PROD images ${{ needs.build-info.outputs.allPythonVersionsListAsString }}"
    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
    needs: [build-info, build-ci-images]
    if: |
      needs.build-info.outputs.image-build == 'true' &&
      github.event.pull_request.head.repo.full_name != 'apache/airflow'
    env:
      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn)[0] }}
      BACKEND: sqlite
    steps:
      - name: Cleanup repo
        run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
      - uses: actions/checkout@v2
        with:
          ref: ${{ needs.build-info.outputs.targetCommitSha }}
          persist-credentials: false
          submodules: recursive
      - name: "Retrieve DEFAULTS from the _initialization.sh"
        # We cannot "source" the script here because that would be a security problem (we cannot run
        # any code that comes from the sources of the PR). Therefore, we extract the
        # DEFAULT_BRANCH, DEFAULT_CONSTRAINTS_BRANCH and DEBIAN_VERSION via custom grep/awk/sed commands.
        id: defaults
        run: |
          DEFAULT_BRANCH=$(grep "export DEFAULT_BRANCH" scripts/ci/libraries/_initialization.sh | \
            awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g')
          echo "DEFAULT_BRANCH=${DEFAULT_BRANCH}" >> $GITHUB_ENV
          DEFAULT_CONSTRAINTS_BRANCH=$(grep "export DEFAULT_CONSTRAINTS_BRANCH" \
            scripts/ci/libraries/_initialization.sh | \
            awk 'BEGIN{FS="="} {print $3}' | sed s'/["}]//g')
          echo "DEFAULT_CONSTRAINTS_BRANCH=${DEFAULT_CONSTRAINTS_BRANCH}" >> $GITHUB_ENV
          DEBIAN_VERSION=$(grep "export DEBIAN_VERSION" scripts/ci/libraries/_initialization.sh | \
            cut -d "=" -f 3 | sed s'/["}]//g')
          echo "DEBIAN_VERSION=${DEBIAN_VERSION}" >> $GITHUB_ENV
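      # Note: 'cut -d "=" -f 3' above selects the same third "="-separated field as the awk command
      # used in the CI-image job, so both jobs derive DEBIAN_VERSION the same way.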
      - name: >
          Checkout "${{ needs.build-info.outputs.targetBranch }}" branch to 'main-airflow' folder
          to use scripts/ci from there.
        uses: actions/checkout@v2
        with:
          path: "main-airflow"
          ref: "${{ needs.build-info.outputs.targetBranch }}"
          persist-credentials: false
          submodules: recursive
      - name: "Setup python"
        uses: actions/setup-python@v2
        with:
          python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
      - name: >
          Override "scripts/ci" with the "${{ needs.build-info.outputs.targetBranch }}" branch
          so that the PR does not override it
        # We should not override the scripts that become part of the image, because then the PR
        # changes would not be reflected in the image being built - we only override the scripts
        # that are executed to build the image.
        run: |
          rm -rfv "scripts/ci"
          rm -rfv "dev"
          mv -v "main-airflow/scripts/ci" "scripts"
          mv -v "main-airflow/dev" "."
      - uses: actions/setup-python@v2
        with:
          python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
          cache: 'pip'
          cache-dependency-path: ./dev/breeze/setup*
      - run: ./scripts/ci/install_breeze.sh
      - name: "Free space"
        run: breeze free-space
      - name: >
          Pull CI image for PROD build:
          ${{ needs.build-info.outputs.defaultPythonVersion }}:${{ env.IMAGE_TAG_FOR_THE_BUILD }}
        run: breeze pull-image --tag-as-latest
        env:
          # Always use default Python version of CI image for preparing packages
          PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.defaultPythonVersion }}
          IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }}
      - name: "Cleanup dist and context files"
        run: rm -fv ./dist/* ./docker-context-files/*
      - name: "Prepare providers packages"
        run: >
          breeze prepare-provider-packages
          --package-list-file ./scripts/ci/installed_providers.txt
          --package-format wheel
          --version-suffix-for-pypi dev0
      - name: "Prepare airflow package"
        run: breeze prepare-airflow-package --package-format wheel --version-suffix-for-pypi dev0
      - name: "Move dist packages to docker-context files"
        run: mv -v ./dist/*.whl ./docker-context-files
      - name: >
          Build & Push PROD images ${{ env.IMAGE_TAG_FOR_THE_BUILD }}
          ${{ needs.build-info.outputs.allPythonVersionsListAsString }}
        run: >
          breeze build-prod-image
          --run-in-parallel
          --tag-as-latest
          --push-image
          --install-packages-from-context
          --disable-airflow-repo-cache
          --airflow-is-in-context
        env:
          UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }}
          DOCKER_CACHE: ${{ needs.build-info.outputs.cacheDirective }}
          IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }}
          PYTHON_VERSIONS: ${{ needs.build-info.outputs.allPythonVersionsListAsString }}
      - name: Push empty PROD images ${{ env.IMAGE_TAG_FOR_THE_BUILD }}
        if: failure() || cancelled()
        run: breeze build-prod-image --cleanup-context --push-image --empty-image --run-in-parallel
        env:
          IMAGE_TAG: ${{ env.IMAGE_TAG_FOR_THE_BUILD }}
      - name: "Fix ownership"
        run: breeze fix-ownership
        if: always()