From 6e43327ff1662175c71b31885d78eb36a9c477f5 Mon Sep 17 00:00:00 2001 From: Ian Knox <81931810+iknox-fa@users.noreply.github.com> Date: Tue, 1 Feb 2022 16:49:33 -0600 Subject: [PATCH 001/933] Docker release CT-3 (#4616) * new docker setup * formatting * Updated spark: support for extras * Added third-party adapter support * More selective lib installs for spark * added docker to bumpversion * Updated refs to be tag-based because bumpversion doesn't understand 'latest' * Updated docs per PR feedback * reducing RUNs and formatting/pip best practices changes * Added multi-architecture support and small test script, updated docs * typo * Added a few more tests * fixed tests output, clarified dbt-postgres special case-ness * Fix merge conflicts * formatting * Updated spark: support for extras * Added third-party adapter support * More selective lib installs for spark * added docker to bumpversion * Updated refs to be tag-based because bumpversion doesn't understand 'latest' * Updated docs per PR feedback * reducing RUNs and formatting/pip best practices changes * Added multi-architecture support and small test script, updated docs * typo * Added a few more tests * fixed tests output, clarified dbt-postgres special case-ness * changelog * basic framework * PR ready excepts docs * PR feedback automatic commit by git-black, original commits: d6cc8b30428127dd4050b05eb3ae4ba445c97d69 --- .github/actions/latest-wrangler/README.md | 6 +++--- .github/actions/latest-wrangler/action.yml | 2 +- .../latest-wrangler/examples/example_workflow.yml | 4 ++-- .../examples/example_workflow_dispatch.json | 2 +- .github/actions/latest-wrangler/main.py | 15 ++++++--------- .github/workflows/release_docker.yml | 6 +++--- .github/workflows/test/.actrc | 2 +- .github/workflows/test/.gitignore | 2 +- .github/workflows/test/.secrets.EXAMPLE | 2 +- .github/workflows/test/inputs/release_docker.json | 2 +- 10 files changed, 20 insertions(+), 23 deletions(-) diff --git 
a/.github/actions/latest-wrangler/README.md b/.github/actions/latest-wrangler/README.md index 867247b641d..9d5033259c0 100644 --- a/.github/actions/latest-wrangler/README.md +++ b/.github/actions/latest-wrangler/README.md @@ -9,7 +9,7 @@ Plug in the necessary inputs to determine if the container being built should be | `package` | Name of the GH package to check against | | `new_version` | Semver of new container | | `gh_token` | GH token with package read scope| -| `halt_on_missing` | Return non-zero exit code if requested package does not exist. (defaults to false)| +| `halt_on_missing` | Return non-zero exit code if requested package does not exist. (defaults to false)| ## Outputs @@ -21,7 +21,7 @@ Plug in the necessary inputs to determine if the container being built should be ## Example workflow ```yaml name: Ship it! -on: +on: workflow_dispatch: inputs: package: @@ -47,4 +47,4 @@ jobs: run: | echo "Is it latest? Survey says: ${{ steps.is_latest.outputs.latest }} !" echo "Is it minor.latest? Survey says: ${{ steps.is_latest.outputs.minor_latest }} !" -``` \ No newline at end of file +``` diff --git a/.github/actions/latest-wrangler/action.yml b/.github/actions/latest-wrangler/action.yml index ca82956cbd8..d712eecf64e 100644 --- a/.github/actions/latest-wrangler/action.yml +++ b/.github/actions/latest-wrangler/action.yml @@ -7,7 +7,7 @@ inputs: new_version: description: "Semver of the container being built (I.E. 1.0.4)" required: true - gh_token: + gh_token: description: "Auth token for github (must have view packages scope)" required: true outputs: diff --git a/.github/actions/latest-wrangler/examples/example_workflow.yml b/.github/actions/latest-wrangler/examples/example_workflow.yml index 2a08fb54e49..66b171c434b 100644 --- a/.github/actions/latest-wrangler/examples/example_workflow.yml +++ b/.github/actions/latest-wrangler/examples/example_workflow.yml @@ -1,5 +1,5 @@ name: Ship it! 
-on: +on: workflow_dispatch: inputs: package: @@ -23,4 +23,4 @@ jobs: gh_token: ${{ secrets.GITHUB_TOKEN }} - name: Print the results run: | - echo "Is it latest? Survey says: ${{ steps.is_latest.outputs.latest }} !" \ No newline at end of file + echo "Is it latest? Survey says: ${{ steps.is_latest.outputs.latest }} !" diff --git a/.github/actions/latest-wrangler/examples/example_workflow_dispatch.json b/.github/actions/latest-wrangler/examples/example_workflow_dispatch.json index 6330dfa91a9..29667a4a167 100644 --- a/.github/actions/latest-wrangler/examples/example_workflow_dispatch.json +++ b/.github/actions/latest-wrangler/examples/example_workflow_dispatch.json @@ -3,4 +3,4 @@ "version_number": "1.0.1", "package": "dbt-redshift" } -} \ No newline at end of file +} diff --git a/.github/actions/latest-wrangler/main.py b/.github/actions/latest-wrangler/main.py index 549ec384631..23e14cf5abe 100644 --- a/.github/actions/latest-wrangler/main.py +++ b/.github/actions/latest-wrangler/main.py @@ -23,7 +23,7 @@ # Log info if we don't get a 200 if package_request.status_code != 200: print(f"Call to GH API failed: {package_request.status_code} {package_meta['message']}") - + # Make an early exit if there is no matching package in github if package_request.status_code == 404: if halt_on_missing: @@ -35,12 +35,11 @@ sys.exit(0) # TODO: verify package meta is "correct" - # https://github.com/dbt-labs/dbt-core/issues/4640 + # https://github.com/dbt-labs/dbt-core/issues/4640 # map versions and tags version_tag_map = { - version["id"]: version["metadata"]["container"]["tags"] - for version in package_meta + version["id"]: version["metadata"]["container"]["tags"] for version in package_meta } # is pre-release @@ -63,9 +62,7 @@ if f"{new_version.major}.{new_version.minor}.latest" in tags: # Similar to above, only now we expect exactly two tags: # major.minor.patch and major.minor.latest - current_minor_latest = parse( - [tag for tag in tags if "latest" not in tag][0] - ) + 
current_minor_latest = parse([tag for tag in tags if "latest" not in tag][0]) else: current_minor_latest = False @@ -79,7 +76,8 @@ def is_latest( :param pre_rel: Wether or not the version of the new container is a pre-release :param new_version: The version of the new container - :param remote_latest: The version of the previously identified container that's already tagged latest or False + :param remote_latest: The version of the previously identified container that's + already tagged latest or False """ # is a pre-release = not latest if pre_rel: @@ -95,4 +93,3 @@ def is_latest( print(f"::set-output name=latest::{latest}") print(f"::set-output name=minor_latest::{minor_latest}") - diff --git a/.github/workflows/release_docker.yml b/.github/workflows/release_docker.yml index 67f121f1bd0..1a955e3a57e 100644 --- a/.github/workflows/release_docker.yml +++ b/.github/workflows/release_docker.yml @@ -5,7 +5,7 @@ # Docker images for dbt are used in a number of important places throughout the dbt ecosystem. This is how we keep those images up-to-date. 
# **when?** -# This is triggered manually +# This is triggered manually # **next steps** # - build this into the release workflow (or conversly, break out the different release methods into their own workflow files) @@ -55,7 +55,7 @@ jobs: name: Set up docker image builder runs-on: ubuntu-latest needs: [get_version_meta] - steps: + steps: - name: Set up Docker Buildx uses: docker/setup-buildx-action@v1 @@ -110,4 +110,4 @@ jobs: build-args: | ${{ steps.build_arg.outputs.build_arg_name }}_ref=${{ steps.build_arg.outputs.build_arg_value }}@v${{ github.event.inputs.version_number }} tags: | - ghcr.io/dbt-labs/${{ github.event.inputs.package }}:latest \ No newline at end of file + ghcr.io/dbt-labs/${{ github.event.inputs.package }}:latest diff --git a/.github/workflows/test/.actrc b/.github/workflows/test/.actrc index 21448263dc1..027d95f14ff 100644 --- a/.github/workflows/test/.actrc +++ b/.github/workflows/test/.actrc @@ -1 +1 @@ --P ubuntu-latest=ghcr.io/catthehacker/ubuntu:act-latest \ No newline at end of file +-P ubuntu-latest=ghcr.io/catthehacker/ubuntu:act-latest diff --git a/.github/workflows/test/.gitignore b/.github/workflows/test/.gitignore index b4ddc884c6b..1233aaed111 100644 --- a/.github/workflows/test/.gitignore +++ b/.github/workflows/test/.gitignore @@ -1 +1 @@ -.secrets \ No newline at end of file +.secrets diff --git a/.github/workflows/test/.secrets.EXAMPLE b/.github/workflows/test/.secrets.EXAMPLE index 3e790800a7e..9b3e0acc9c1 100644 --- a/.github/workflows/test/.secrets.EXAMPLE +++ b/.github/workflows/test/.secrets.EXAMPLE @@ -1 +1 @@ -GITHUB_TOKEN=GH_PERSONAL_ACCESS_TOKEN_GOES_HERE \ No newline at end of file +GITHUB_TOKEN=GH_PERSONAL_ACCESS_TOKEN_GOES_HERE diff --git a/.github/workflows/test/inputs/release_docker.json b/.github/workflows/test/inputs/release_docker.json index a219c00de6e..f5bbcb176ba 100644 --- a/.github/workflows/test/inputs/release_docker.json +++ b/.github/workflows/test/inputs/release_docker.json @@ -3,4 +3,4 @@ 
"version_number": "1.0.1", "package": "dbt-postgres" } -} \ No newline at end of file +} From 11401a9a75ce6764117f5997ef5afb5f07c0721c Mon Sep 17 00:00:00 2001 From: leahwicz <60146280+leahwicz@users.noreply.github.com> Date: Thu, 27 Jan 2022 16:23:55 -0500 Subject: [PATCH 002/933] Initial addition of CODEOWNERS file (#4620) * Initial addition of CODEOWNERS file * Proposed sub-team ownership (#4632) * Updating for the events module to be both language and execution * Adding more comment details Co-authored-by: Jeremy Cohen automatic commit by git-black, original commits: 13571435a319d6237bfc414ef15f27e23f0729db --- .github/CODEOWNERS | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index ecfe53dd4fb..750e985ee08 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,5 +1,5 @@ # This file contains the code owners for the dbt-core repo. -# PRs will be automatically assigned for review to the associated +# PRs will be automatically assigned for review to the associated # team(s) or person(s) that touches any files that are mapped to them. 
# # A statement takes precedence over the statements above it so more general @@ -9,7 +9,7 @@ # Consult GitHub documentation for formatting guidelines: # https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners#example-of-a-codeowners-file -# As a default for areas with no assignment, +# As a default for areas with no assignment, # the core team as a whole will be assigned * @dbt-labs/core @@ -37,7 +37,7 @@ /core/dbt/include/global_project @dbt-labs/core-execution @dbt-labs/core-adapters # Perf regression testing framework -# This excludes the test project files itself since those aren't specific +# This excludes the test project files itself since those aren't specific # framework changes (excluded by not setting an owner next to it- no owner) /performance @nathaniel-may /performance/projects From 953e8b2f077bbd7c2d16f8309eed369e1801e9ba Mon Sep 17 00:00:00 2001 From: leahwicz <60146280+leahwicz@users.noreply.github.com> Date: Fri, 21 Jan 2022 12:40:55 -0500 Subject: [PATCH 003/933] Add Backport Action (#4605) automatic commit by git-black, original commits: 7798f932a09a12f1993ea9cb781a20707383df96 --- .github/workflows/backport.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 21c42fa7062..f0fa4dceaaf 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -1,5 +1,5 @@ # **what?** -# When a PR is merged, if it has the backport label, it will create +# When a PR is merged, if it has the backport label, it will create # a new PR to backport those changes to the given branch. If it can't # cleanly do a backport, it will comment on the merged PR of the failure. 
# From 6039f5f810a0156ef9501e76daacb9de35adcf19 Mon Sep 17 00:00:00 2001 From: leahwicz <60146280+leahwicz@users.noreply.github.com> Date: Tue, 4 Jan 2022 17:00:03 -0500 Subject: [PATCH 004/933] Mirroring issues to Jira (#4548) * Adding issue creation Jira Action * Adding issue closing Jira Action * Add labeling logic automatic commit by git-black, original commits: 892426eecb57e9d91bec92ae26995e9bf0155267 --- .github/workflows/jira-creation.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/jira-creation.yml b/.github/workflows/jira-creation.yml index c84e106a75d..b4016befce0 100644 --- a/.github/workflows/jira-creation.yml +++ b/.github/workflows/jira-creation.yml @@ -13,7 +13,7 @@ name: Jira Issue Creation on: issues: types: [opened, labeled] - + permissions: issues: write From 342324a77b8029542c8876bf504d6faee57e851b Mon Sep 17 00:00:00 2001 From: leahwicz <60146280+leahwicz@users.noreply.github.com> Date: Mon, 29 Nov 2021 10:37:14 -0500 Subject: [PATCH 005/933] Adding release workflow (#4288) automatic commit by git-black, original commits: f72b603196364084614e327bf7b9bf748fb4396a --- .github/workflows/release.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 26f7d409ec5..842cedf6ffb 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,5 +1,5 @@ # **what?** -# Take the given commit, run unit tests specifically on that sha, build and +# Take the given commit, run unit tests specifically on that sha, build and # package it, and then release to GitHub and PyPi with that specific build # **why?** @@ -144,7 +144,7 @@ jobs: github-release: - name: GitHub Release + name: GitHub Release needs: test-build @@ -155,7 +155,7 @@ jobs: with: name: dist path: '.' 
- + # Need to set an output variable because env variables can't be taken as input # This is needed for the next step with releasing to GitHub - name: Find release type @@ -179,7 +179,7 @@ jobs: dbt_core-${{github.event.inputs.version_number}}-py3-none-any.whl dbt-postgres-${{github.event.inputs.version_number}}.tar.gz dbt-core-${{github.event.inputs.version_number}}.tar.gz - + pypi-release: name: Pypi release @@ -188,12 +188,12 @@ jobs: needs: github-release environment: PypiProd - steps: + steps: - uses: actions/download-artifact@v2 with: name: dist path: 'dist' - + - name: Publish distribution to PyPI uses: pypa/gh-action-pypi-publish@v1.4.2 with: From c85c112627659f5c628916ff0603d46ee8b22bca Mon Sep 17 00:00:00 2001 From: leahwicz <60146280+leahwicz@users.noreply.github.com> Date: Mon, 24 Jan 2022 12:20:12 -0500 Subject: [PATCH 006/933] Changing Jira mirroring workflows to point to shared Actions (#4615) automatic commit by git-black, original commits: f467fba151912f3cb14413173b946e875894d098 --- .github/workflows/jira-label.yml | 1 - .github/workflows/jira-transition.yml | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/jira-label.yml b/.github/workflows/jira-label.yml index fd533a170fe..f01a38f57e9 100644 --- a/.github/workflows/jira-label.yml +++ b/.github/workflows/jira-label.yml @@ -24,4 +24,3 @@ jobs: JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} - diff --git a/.github/workflows/jira-transition.yml b/.github/workflows/jira-transition.yml index 71273c7a9c9..ed9f9cd4fc7 100644 --- a/.github/workflows/jira-transition.yml +++ b/.github/workflows/jira-transition.yml @@ -21,4 +21,4 @@ jobs: secrets: JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} - JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} \ No newline at end of file + JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} From 
4ce771cb0d19f2f04e89d4ef4defd1d46eefd3fa Mon Sep 17 00:00:00 2001 From: leahwicz <60146280+leahwicz@users.noreply.github.com> Date: Thu, 14 Oct 2021 10:12:51 -0400 Subject: [PATCH 007/933] Schema check testing (#3870) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Create schema-check.yml * Adding PR trigger to test * Adding branch name * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update schema-check.yml * Update .github/workflows/schema-check.yml Co-authored-by: Kamil Breguła * Revert "Update .github/workflows/schema-check.yml" This reverts commit a2f1fa81ef7448917b430f055cd2989ba1912eb1. 
Reverting b/c this broke something in the workflow Co-authored-by: Kamil Breguła automatic commit by git-black, original commits: 04a9195297a9f97a8c6d426422bb02dc0f47d82d --- .github/workflows/schema-check.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/schema-check.yml b/.github/workflows/schema-check.yml index c647cc2d201..ee65ff71296 100644 --- a/.github/workflows/schema-check.yml +++ b/.github/workflows/schema-check.yml @@ -1,5 +1,5 @@ # **what?** -# Compares the schema of the dbt version of the given ref vs +# Compares the schema of the dbt version of the given ref vs # the latest official schema releases found in schemas.getdbt.com. # If there are differences, the workflow will fail and upload the # diff as an artifact. The metadata team should be alerted to the change. @@ -37,20 +37,20 @@ jobs: uses: actions/setup-python@v2 with: python-version: 3.8 - + - name: Checkout dbt repo uses: actions/checkout@v2.3.4 with: path: ${{ env.DBT_REPO_DIRECTORY }} - + - name: Checkout schemas.getdbt.com repo - uses: actions/checkout@v2.3.4 - with: + uses: actions/checkout@v2.3.4 + with: repository: dbt-labs/schemas.getdbt.com ref: 'main' ssh-key: ${{ secrets.SCHEMA_SSH_PRIVATE_KEY }} path: ${{ env.SCHEMA_REPO_DIRECTORY }} - + - name: Generate current schema run: | cd ${{ env.DBT_REPO_DIRECTORY }} @@ -59,7 +59,7 @@ jobs: pip install --upgrade pip pip install -r dev-requirements.txt -r editable-requirements.txt python scripts/collect-artifact-schema.py --path ${{ env.LATEST_SCHEMA_PATH }} - + # Copy generated schema files into the schemas.getdbt.com repo # Do a git diff to find any changes # Ignore any date or version changes though From 94d1f4695bf21ae7a06863b8b7835e144e649a25 Mon Sep 17 00:00:00 2001 From: leahwicz <60146280+leahwicz@users.noreply.github.com> Date: Wed, 5 Jan 2022 09:29:52 -0500 Subject: [PATCH 008/933] Mirror labels to Jira (#4550) * Adding Jira label mirroring * Fixing bad step name automatic commit 
by git-black, original commits: e943b9fc842535e958ef4fd0b8703adc91556bc6 --- .github/workflows/jira-label.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/jira-label.yml b/.github/workflows/jira-label.yml index f01a38f57e9..3da2e3a3867 100644 --- a/.github/workflows/jira-label.yml +++ b/.github/workflows/jira-label.yml @@ -13,7 +13,7 @@ name: Jira Label Mirroring on: issues: types: [labeled, unlabeled] - + permissions: issues: read From e8c126be96fce308cd7478c5349bd9a1b5e5d8d9 Mon Sep 17 00:00:00 2001 From: Nathaniel May Date: Fri, 3 Dec 2021 12:51:28 -0500 Subject: [PATCH 009/933] add new interop tests for black-box json log schema testing (#4327) automatic commit by git-black, original commits: 53104986476231280cb2c7bc20cc36ef5790f222 --- .github/workflows/structured-logging-schema-check.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/structured-logging-schema-check.yml b/.github/workflows/structured-logging-schema-check.yml index 96b613764a1..cfdb479f212 100644 --- a/.github/workflows/structured-logging-schema-check.yml +++ b/.github/workflows/structured-logging-schema-check.yml @@ -1,6 +1,6 @@ # This Action checks makes a dbt run to sample json structured logs # and checks that they conform to the currently documented schema. 
-# +# # If this action fails it either means we have unintentionally deviated # from our documented structured logging schema, or we need to bump the # version of our structured logging and add new documentation to From 33432bd2f0d809735cf05072d52558bc1ac5d138 Mon Sep 17 00:00:00 2001 From: leahwicz <60146280+leahwicz@users.noreply.github.com> Date: Thu, 7 Oct 2021 22:10:47 -0400 Subject: [PATCH 010/933] Creating version bump action (#4015) * Creating version bump action * Adding workflow dispatch inputs automatic commit by git-black, original commits: 92a0930634b5ee3bb6592b00ef5a28fd004f2d15 --- .github/workflows/version-bump.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/version-bump.yml b/.github/workflows/version-bump.yml index 9860fe3a192..62a55875592 100644 --- a/.github/workflows/version-bump.yml +++ b/.github/workflows/version-bump.yml @@ -1,16 +1,16 @@ # **what?** # This workflow will take a version number and a dry run flag. With that -# it will run versionbump to update the version number everywhere in the +# it will run versionbump to update the version number everywhere in the # code base and then generate an update Docker requirements file. If this # is a dry run, a draft PR will open with the changes. If this isn't a dry # run, the changes will be committed to the branch this is run on. # **why?** -# This is to aid in releasing dbt and making sure we have updated +# This is to aid in releasing dbt and making sure we have updated # the versions and Docker requirements in all places. 
# **when?** -# This is triggered either manually OR +# This is triggered either manually OR # from the repository_dispatch event "version-bump" which is sent from # the dbt-release repo Action @@ -25,10 +25,10 @@ on: is_dry_run: description: 'Creates a draft PR to allow testing instead of committing to a branch' required: true - default: 'true' + default: 'true' repository_dispatch: types: [version-bump] - + jobs: bump: runs-on: ubuntu-latest @@ -57,15 +57,15 @@ jobs: run: | python3 -m venv env source env/bin/activate - pip install --upgrade pip - + pip install --upgrade pip + - name: Create PR branch if: ${{ steps.variables.outputs.IS_DRY_RUN == 'true' }} run: | git checkout -b bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_$GITHUB_RUN_ID git push origin bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_$GITHUB_RUN_ID git branch --set-upstream-to=origin/bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_$GITHUB_RUN_ID bumping-version/${{steps.variables.outputs.VERSION_NUMBER}}_$GITHUB_RUN_ID - + # - name: Generate Docker requirements # run: | # source env/bin/activate @@ -76,7 +76,7 @@ jobs: - name: Bump version run: | source env/bin/activate - pip install -r dev-requirements.txt + pip install -r dev-requirements.txt env/bin/bumpversion --allow-dirty --new-version ${{steps.variables.outputs.VERSION_NUMBER}} major git status From 6a480c58bb50d805d6c694ab5b0fd77d7f4cbbb4 Mon Sep 17 00:00:00 2001 From: leahwicz <60146280+leahwicz@users.noreply.github.com> Date: Mon, 3 Jan 2022 12:37:20 -0500 Subject: [PATCH 011/933] Commenting our Docker portion of Version Bump (#4541) automatic commit by git-black, original commits: fc8b8c11d5feae4a85900dfd776dfc5a3b10ec55 --- .github/workflows/version-bump.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/version-bump.yml b/.github/workflows/version-bump.yml index 62a55875592..0bfce106b6b 100644 --- a/.github/workflows/version-bump.yml +++ 
b/.github/workflows/version-bump.yml @@ -69,7 +69,7 @@ jobs: # - name: Generate Docker requirements # run: | # source env/bin/activate - # pip install -r requirements.txt + # pip install -r requirements.txt # pip freeze -l > docker/requirements/requirements.txt # git status From 0f97ea60e5ef8bb0a81af1795746d52a33bbe6bd Mon Sep 17 00:00:00 2001 From: Connor McArthur Date: Wed, 6 Mar 2019 17:44:57 -0500 Subject: [PATCH 012/933] =?UTF-8?q?Bump=20version:=200.13.0a2=20=E2=86=92?= =?UTF-8?q?=200.13.0rc1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit automatic commit by git-black, original commits: 74152562fe6ef7eb826ff2f6b28c6533b72e6d80 --- .bumpversion.cfg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 8ea00aaba93..efff5abecad 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -6,7 +6,7 @@ parse = (?P\d+) ((?Pa|b|rc) (?P
\d+)  # pre-release version num
 	)?
-serialize = 
+serialize =
 	{major}.{minor}.{patch}{prekind}{pre}
 	{major}.{minor}.{patch}
 commit = False
@@ -15,7 +15,7 @@ tag = False
 [bumpversion:part:prekind]
 first_value = a
 optional_value = final
-values = 
+values =
 	a
 	b
 	rc

From 0c9e270c923274428e366f15f4c2374e8ba026b4 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Tue, 24 Sep 2019 09:40:55 -0600
Subject: [PATCH 013/933] Convert Relation types to hologram.JsonSchemaMixin

Fix a lot of mypy things, add a number of adapter-ish modules to it
Split relations and columns into separate files
split context.common into base + common
 - base is all that's required for the config renderer
Move Credentials into connection contracts since that's what they really are
Removed model_name/table_name -> consolidated to identifier
 - I hope I did not break seeds, which claimed to care about render(False)
Unify shared 'external' relation type with bigquery's own
hack workarounds for some import cycles with plugin registration and config p
arsing
Assorted backwards compatibility fixes around types, deep_merge vs shallow merge
Remove APIObject


automatic commit by git-black, original commits:
  eb9bfcda4ab314e4ee5bf548fab07ee4fadbee4d
---
 core/dbt/adapters/base/column.py      | 16 ++++-----
 core/dbt/adapters/base/connections.py |  4 +--
 core/dbt/adapters/base/plugin.py      |  2 +-
 core/dbt/adapters/base/relation.py    | 48 +++++++++++++--------------
 core/dbt/adapters/sql/connections.py  |  6 ++--
 core/dbt/context/base.py              |  3 +-
 core/dbt/contracts/connection.py      | 10 +++---
 core/dbt/utils.py                     |  1 +
 8 files changed, 43 insertions(+), 47 deletions(-)

diff --git a/core/dbt/adapters/base/column.py b/core/dbt/adapters/base/column.py
index df0319c3d60..af9d295bc53 100644
--- a/core/dbt/adapters/base/column.py
+++ b/core/dbt/adapters/base/column.py
@@ -8,10 +8,10 @@
 @dataclass
 class Column:
     TYPE_LABELS: ClassVar[Dict[str, str]] = {
-        'STRING': 'TEXT',
-        'TIMESTAMP': 'TIMESTAMP',
-        'FLOAT': 'FLOAT',
-        'INTEGER': 'INT'
+        "STRING": "TEXT",
+        "TIMESTAMP": "TIMESTAMP",
+        "FLOAT": "FLOAT",
+        "INTEGER": "INT",
     }
     column: str
     dtype: str
@@ -41,14 +41,12 @@ def data_type(self) -> str:
         if self.is_string():
             return Column.string_type(self.string_size())
         elif self.is_numeric():
-            return Column.numeric_type(self.dtype, self.numeric_precision,
-                                       self.numeric_scale)
+            return Column.numeric_type(self.dtype, self.numeric_precision, self.numeric_scale)
         else:
             return self.dtype
 
     def is_string(self) -> bool:
-        return self.dtype.lower() in ['text', 'character varying', 'character',
-                                      'varchar']
+        return self.dtype.lower() in ["text", "character varying", "character", "varchar"]
 
     def is_number(self):
         return any([self.is_integer(), self.is_numeric(), self.is_float()])
@@ -76,7 +74,7 @@ def string_size(self) -> int:
         if not self.is_string():
             raise RuntimeException("Called string_size() on non-string field!")
 
-        if self.dtype == 'text' or self.char_size is None:
+        if self.dtype == "text" or self.char_size is None:
             # char_size should never be None. Handle it reasonably just in case
             return 256
         else:
diff --git a/core/dbt/adapters/base/connections.py b/core/dbt/adapters/base/connections.py
index 218aa287bf9..73fca9643d0 100644
--- a/core/dbt/adapters/base/connections.py
+++ b/core/dbt/adapters/base/connections.py
@@ -122,7 +122,7 @@ def set_connection_name(self, name: Optional[str] = None) -> Connection:
         if name is None:
             # if a name isn't specified, we'll re-use a single handle
             # named 'master'
-            conn_name = 'master'
+            conn_name = "master"
         else:
             if not isinstance(name, str):
                 raise dbt.exceptions.CompilerException(
@@ -143,7 +143,7 @@ def set_connection_name(self, name: Optional[str] = None) -> Connection:
             )
             self.set_thread_connection(conn)
 
-        if conn.name == conn_name and conn.state == 'open':
+        if conn.name == conn_name and conn.state == "open":
             return conn
 
         fire_event(NewConnection(conn_name=conn_name, conn_type=self.TYPE))
diff --git a/core/dbt/adapters/base/plugin.py b/core/dbt/adapters/base/plugin.py
index c87b2a26a91..14d4edda69d 100644
--- a/core/dbt/adapters/base/plugin.py
+++ b/core/dbt/adapters/base/plugin.py
@@ -28,7 +28,7 @@ def __init__(
         adapter: Type[AdapterProtocol],
         credentials: Type[Credentials],
         include_path: str,
-        dependencies: Optional[List[str]] = None
+        dependencies: Optional[List[str]] = None,
     ):
 
         self.adapter: Type[AdapterProtocol] = adapter
diff --git a/core/dbt/adapters/base/relation.py b/core/dbt/adapters/base/relation.py
index 672348f4285..65d0679616f 100644
--- a/core/dbt/adapters/base/relation.py
+++ b/core/dbt/adapters/base/relation.py
@@ -16,7 +16,7 @@
 import dbt.exceptions
 
 
-Self = TypeVar('Self', bound='BaseRelation')
+Self = TypeVar("Self", bound="BaseRelation")
 
 
 @dataclass(frozen=True, eq=False, repr=False)
@@ -40,7 +40,7 @@ def _get_field_named(cls, field_name):
             if field.name == field_name:
                 return field
         # this should be unreachable
-        raise ValueError(f'BaseRelation has no {field_name} field!')
+        raise ValueError(f"BaseRelation has no {field_name} field!")
 
     def __eq__(self, other):
         if not isinstance(other, self.__class__):
@@ -49,7 +49,7 @@ def __eq__(self, other):
 
     @classmethod
     def get_default_quote_policy(cls) -> Policy:
-        return cls._get_field_named('quote_policy').default
+        return cls._get_field_named("quote_policy").default
 
     @classmethod
     def get_default_include_policy(cls) -> Policy:
@@ -72,9 +72,9 @@ def matches(
         identifier: Optional[str] = None,
     ) -> bool:
         search = filter_null_values({
-            ComponentName.Database: database,
-            ComponentName.Schema: schema,
-            ComponentName.Identifier: identifier
+            {
+                ComponentName.Database: database,
+                ComponentName.Schema: schema,
         })
 
         if not search:
@@ -113,9 +113,9 @@ def quote(
         identifier: Optional[bool] = None,
     ) -> Self:
         policy = filter_null_values({
-            ComponentName.Database: database,
-            ComponentName.Schema: schema,
-            ComponentName.Identifier: identifier
+            {
+                ComponentName.Database: database,
+                ComponentName.Schema: schema,
         })
 
         new_quote_policy = self.quote_policy.replace_dict(policy)
@@ -128,9 +128,11 @@ def include(
         identifier: Optional[bool] = None,
     ) -> Self:
-        policy = filter_null_values({
-            ComponentName.Database: database,
-            ComponentName.Schema: schema,
-            ComponentName.Identifier: identifier
-        })
+        policy = filter_null_values(
+            {
+                ComponentName.Database: database,
+                ComponentName.Schema: schema,
+                ComponentName.Identifier: identifier,
+            }
+        )
 
         new_include_policy = self.include_policy.replace_dict(policy)
@@ -185,15 +185,13 @@ def quoted(self, identifier):
         )
 
     @classmethod
-    def create_from_source(
-        cls: Type[Self], source: ParsedSourceDefinition, **kwargs: Any
-    ) -> Self:
+    def create_from_source(cls: Type[Self], source: ParsedSourceDefinition, **kwargs: Any) -> Self:
         source_quoting = source.quoting.to_dict(omit_none=True)
         source_quoting.pop('column', None)
         quote_policy = deep_merge(
             cls.get_default_quote_policy().to_dict(omit_none=True),
             source_quoting,
-            kwargs.get('quote_policy', {}),
+            kwargs.get("quote_policy", {}),
         )
 
         return cls.create(
@@ -272,14 +270,16 @@ def create(
         type: Optional[RelationType] = None,
         **kwargs,
     ) -> Self:
-        kwargs.update({
-            'path': {
-                'database': database,
-                'schema': schema,
-                'identifier': identifier,
-            },
-            'type': type,
-        })
+        kwargs.update(
+            {
+                "path": {
+                    "database": database,
+                    "schema": schema,
+                    "identifier": identifier,
+                },
+                "type": type,
+            }
+        )
         return cls.from_dict(kwargs)
 
     def __repr__(self) -> str:
diff --git a/core/dbt/adapters/sql/connections.py b/core/dbt/adapters/sql/connections.py
index b3984e04253..891e3b90d69 100644
--- a/core/dbt/adapters/sql/connections.py
+++ b/core/dbt/adapters/sql/connections.py
@@ -54,7 +54,7 @@ def add_query(
         sql: str,
         auto_begin: bool = True,
         bindings: Optional[Any] = None,
-        abridge_sql_log: bool = False
+        abridge_sql_log: bool = False,
     ) -> Tuple[Connection, Any]:
         connection = self.get_thread_connection()
         if auto_begin and connection.transaction_open is False:
@@ -91,9 +91,7 @@ def get_response(cls, cursor: Any) -> Union[AdapterResponse, str]:
 
     @classmethod
     def process_results(
-        cls,
-        column_names: Iterable[str],
-        rows: Iterable[Any]
+        cls, column_names: Iterable[str], rows: Iterable[Any]
     ) -> List[Dict[str, Any]]:
         unique_col_names = dict()
         for idx in range(len(column_names)):
diff --git a/core/dbt/context/base.py b/core/dbt/context/base.py
index 616bd331d4d..b1f3c5e3537 100644
--- a/core/dbt/context/base.py
+++ b/core/dbt/context/base.py
@@ -143,8 +143,7 @@ def __new__(mcls, name, bases, dct):
 
 
 class Var:
-    UndefinedVarError = "Required var '{}' not found in config:\nVars "\
-                        "supplied to {} = {}"
+    UndefinedVarError = "Required var '{}' not found in config:\nVars " "supplied to {} = {}"
     _VAR_NOTSET = object()
 
     def __init__(
diff --git a/core/dbt/contracts/connection.py b/core/dbt/contracts/connection.py
index 4485884a022..8ea4a91028e 100644
--- a/core/dbt/contracts/connection.py
+++ b/core/dbt/contracts/connection.py
@@ -122,9 +122,7 @@ class Credentials(
 
     @abc.abstractproperty
     def type(self) -> str:
-        raise NotImplementedError(
-            'type not implemented for base credentials class'
-        )
+        raise NotImplementedError("type not implemented for base credentials class")
 
     @property
     def unique_field(self) -> str:
@@ -176,9 +174,11 @@ def __post_serialize__(self, dct):
         if self._ALIASES:
-            dct.update({
-                new_name: dct[canonical_name]
-                for new_name, canonical_name in self._ALIASES.items()
-                if canonical_name in dct
-            })
+            dct.update(
+                {
+                    new_name: dct[canonical_name]
+                    for new_name, canonical_name in self._ALIASES.items()
+                    if canonical_name in dct
+                }
+            )
         return dct
 
 
diff --git a/core/dbt/utils.py b/core/dbt/utils.py
index e1f3fe537c1..4601b6d5c2e 100644
--- a/core/dbt/utils.py
+++ b/core/dbt/utils.py
@@ -437,6 +437,7 @@ def lowercase(value: Optional[str]) -> Optional[str]:
 # attributes, and regular properties only work with objects. maybe this should
 # be handled by the RelationProxy?
 
+
 class classproperty(object):
     def __init__(self, func):
         self.func = func

From 0cdac791b189f25160c0c557c170c6e4ab24c7ac Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 13 Jan 2020 14:23:41 -0700
Subject: [PATCH 014/933] add "is_number" and "is_float" Column methods

Split out snowflake column type
added column type test integration tests


automatic commit by git-black, original commits:
  54b64f89226da556ff252356071496b5c1ce3a2e
---
 core/dbt/adapters/base/column.py | 28 ++++++++++++++++++++--------
 1 file changed, 20 insertions(+), 8 deletions(-)

diff --git a/core/dbt/adapters/base/column.py b/core/dbt/adapters/base/column.py
index af9d295bc53..c65b9869c55 100644
--- a/core/dbt/adapters/base/column.py
+++ b/core/dbt/adapters/base/column.py
@@ -24,7 +24,7 @@ def translate_type(cls, dtype: str) -> str:
         return cls.TYPE_LABELS.get(dtype.upper(), dtype)
 
     @classmethod
-    def create(cls, name, label_or_dtype: str) -> 'Column':
+    def create(cls, name, label_or_dtype: str) -> "Column":
         column_type = cls.translate_type(label_or_dtype)
         return cls(name, column_type)
 
@@ -54,21 +54,33 @@ def is_number(self):
     def is_float(self):
         return self.dtype.lower() in [
             # floats
-            'real', 'float4', 'float', 'double precision', 'float8'
+            "real",
+            "float4",
+            "float",
+            "double precision",
+            "float8",
         ]
 
     def is_integer(self) -> bool:
         return self.dtype.lower() in [
             # real types
-            'smallint', 'integer', 'bigint',
-            'smallserial', 'serial', 'bigserial',
+            "smallint",
+            "integer",
+            "bigint",
+            "smallserial",
+            "serial",
+            "bigserial",
             # aliases
-            'int2', 'int4', 'int8',
-            'serial2', 'serial4', 'serial8',
+            "int2",
+            "int4",
+            "int8",
+            "serial2",
+            "serial4",
+            "serial8",
         ]
 
     def is_numeric(self) -> bool:
-        return self.dtype.lower() in ['numeric', 'decimal']
+        return self.dtype.lower() in ["numeric", "decimal"]
 
     def string_size(self) -> int:
         if not self.is_string():
@@ -80,7 +92,7 @@ def string_size(self) -> int:
         else:
             return int(self.char_size)
 
-    def can_expand_to(self, other_column: 'Column') -> bool:
+    def can_expand_to(self, other_column: "Column") -> bool:
         """returns True if this column can be expanded to the size of the
         other column"""
         if not self.is_string() or not other_column.is_string():

From 629abaa48c9e10cafe78a26a9b44c033eebf8562 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Tue, 14 Apr 2020 13:23:12 -0600
Subject: [PATCH 015/933] Use "describe table" to get the columns in a relation
 on snowflake

automatic commit by git-black, original commits:
  107bc5c2170472e2a619ad17bd408f881483ac56
---
 core/dbt/adapters/base/column.py | 14 +++++---------
 1 file changed, 5 insertions(+), 9 deletions(-)

diff --git a/core/dbt/adapters/base/column.py b/core/dbt/adapters/base/column.py
index c65b9869c55..46cfbaa4a49 100644
--- a/core/dbt/adapters/base/column.py
+++ b/core/dbt/adapters/base/column.py
@@ -120,12 +120,10 @@ def __repr__(self) -> str:
         return "<Column {} ({})>".format(self.name, self.data_type)
 
     @classmethod
-    def from_description(cls, name: str, raw_data_type: str) -> 'Column':
-        match = re.match(r'([^(]+)(\([^)]+\))?', raw_data_type)
+    def from_description(cls, name: str, raw_data_type: str) -> "Column":
+        match = re.match(r"([^(]+)(\([^)]+\))?", raw_data_type)
         if match is None:
-            raise RuntimeException(
-                f'Could not interpret data type "{raw_data_type}"'
-            )
+            raise RuntimeException(f'Could not interpret data type "{raw_data_type}"')
         data_type, size_info = match.groups()
         char_size = None
         numeric_precision = None
@@ -133,7 +131,7 @@ def from_description(cls, name: str, raw_data_type: str) -> 'Column':
         if size_info is not None:
             # strip out the parentheses
             size_info = size_info[1:-1]
-            parts = size_info.split(',')
+            parts = size_info.split(",")
             if len(parts) == 1:
                 try:
                     char_size = int(parts[0])
@@ -158,6 +156,4 @@ def from_description(cls, name: str, raw_data_type: str) -> 'Column':
                         f'could not convert "{parts[1]}" to an integer'
                     )
 
-        return cls(
-            name, data_type, char_size, numeric_precision, numeric_scale
-        )
+        return cls(name, data_type, char_size, numeric_precision, numeric_scale)

From 436c28c664c726570987dcc144caac69f176567c Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 23 Sep 2019 11:30:56 -0600
Subject: [PATCH 016/933] Mypy fixes

Added some type annotations
Clean up some mypy issues around the "available" decorators


automatic commit by git-black, original commits:
  2799a8c34d02c14f2384d875fb57abb690fed8f6
---
 core/dbt/adapters/base/impl.py       | 42 ++++++++--------------------
 core/dbt/adapters/base/meta.py       |  6 +++-
 core/dbt/utils.py                    |  4 +--
 third-party-stubs/agate/__init__.pyi |  4 +--
 4 files changed, 19 insertions(+), 37 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index d55de305d7a..c5c41538acd 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -488,9 +488,7 @@ def truncate_relation(self, relation: BaseRelation) -> None:
 
     @abc.abstractmethod
     @available.parse_none
-    def rename_relation(
-        self, from_relation: BaseRelation, to_relation: BaseRelation
-    ) -> None:
+    def rename_relation(self, from_relation: BaseRelation, to_relation: BaseRelation) -> None:
         """Rename the relation from from_relation to to_relation.
 
         Implementors must call self.cache.rename() to preserve cache state.
@@ -501,7 +499,5 @@ def rename_relation(
 
     @abc.abstractmethod
     @available.parse_list
-    def get_columns_in_relation(
-        self, relation: BaseRelation
-    ) -> List[BaseColumn]:
+    def get_columns_in_relation(self, relation: BaseRelation) -> List[BaseColumn]:
         """Get a list of the columns in the given Relation. """
@@ -510,9 +508,7 @@ def get_columns_in_relation(
         )
 
     @available.deprecated('get_columns_in_relation', lambda *a, **k: [])
-    def get_columns_in_table(
-        self, schema: str, identifier: str
-    ) -> List[BaseColumn]:
+    def get_columns_in_table(self, schema: str, identifier: str) -> List[BaseColumn]:
         """DEPRECATED: Get a list of the columns in the given table."""
         relation = self.Relation.create(
             database=self.config.credentials.database,
@@ -523,9 +519,7 @@ def get_columns_in_table(
         return self.get_columns_in_relation(relation)
 
     @abc.abstractmethod
-    def expand_column_types(
-        self, goal: BaseRelation, current: BaseRelation
-    ) -> None:
+    def expand_column_types(self, goal: BaseRelation, current: BaseRelation) -> None:
         """Expand the current table's types to match the goal table. (passable)
 
         :param self.Relation goal: A relation that currently exists in the
@@ -684,9 +678,7 @@ def list_relations(
 
         return relations
 
-    def _make_match_kwargs(
-        self, database: str, schema: str, identifier: str
-    ) -> Dict[str, str]:
+    def _make_match_kwargs(self, database: str, schema: str, identifier: str) -> Dict[str, str]:
         quoting = self.config.quoting
         if identifier is not None and quoting['identifier'] is False:
             identifier = identifier.lower()
@@ -722,9 +714,7 @@ def _make_match(
         return matches
 
     @available.parse_none
-    def get_relation(
-        self, database: str, schema: str, identifier: str
-    ) -> Optional[BaseRelation]:
+    def get_relation(self, database: str, schema: str, identifier: str) -> Optional[BaseRelation]:
         relations_list = self.list_relations(database, schema)
 
         matches = self._make_match(relations_list, database, schema,
@@ -824,9 +814,7 @@ def quote_seed_column(
     # converting agate types into their sql equivalents.
     ###
     @abc.abstractclassmethod
-    def convert_text_type(
-        cls, agate_table: agate.Table, col_idx: int
-    ) -> str:
+    def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str:
         """Return the type in the database that best maps to the agate.Text
         type for the given agate table and column index.
 
@@ -838,9 +826,7 @@ def convert_text_type(
             '`convert_text_type` is not implemented for this adapter!')
 
     @abc.abstractclassmethod
-    def convert_number_type(
-        cls, agate_table: agate.Table, col_idx: int
-    ) -> str:
+    def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str:
         """Return the type in the database that best maps to the agate.Number
         type for the given agate table and column index.
 
@@ -852,9 +838,7 @@ def convert_number_type(
             '`convert_number_type` is not implemented for this adapter!')
 
     @abc.abstractclassmethod
-    def convert_boolean_type(
-        cls, agate_table: agate.Table, col_idx: int
-    ) -> str:
+    def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str:
         """Return the type in the database that best maps to the agate.Boolean
         type for the given agate table and column index.
 
@@ -866,9 +850,7 @@ def convert_boolean_type(
             '`convert_boolean_type` is not implemented for this adapter!')
 
     @abc.abstractclassmethod
-    def convert_datetime_type(
-        cls, agate_table: agate.Table, col_idx: int
-    ) -> str:
+    def convert_datetime_type(cls, agate_table: agate.Table, col_idx: int) -> str:
         """Return the type in the database that best maps to the agate.DateTime
         type for the given agate table and column index.
 
@@ -993,9 +975,7 @@ def execute_macro(
         return result
 
     @classmethod
-    def _catalog_filter_table(
-        cls, table: agate.Table, manifest: Manifest
-    ) -> agate.Table:
+    def _catalog_filter_table(cls, table: agate.Table, manifest: Manifest) -> agate.Table:
         """Filter the table as appropriate for catalog entries. Subclasses can
         override this to change filtering rules on a per-adapter basis.
         """
diff --git a/core/dbt/adapters/base/meta.py b/core/dbt/adapters/base/meta.py
index 209240c0de7..602bf84dfeb 100644
--- a/core/dbt/adapters/base/meta.py
+++ b/core/dbt/adapters/base/meta.py
@@ -30,9 +30,11 @@ def my_other_method(self, a, b):
             x.update(big_expensive_db_query())
             return x
         """
+
         def inner(func):
             func._parse_replacement_ = parse_replacement
             return self(func)
+
         return inner
 
     def deprecated(
@@ -57,13 +59,14 @@ def my_old_slow_method(self, arg):
         The optional parse_replacement, if provided, will provide a parse-time
         replacement for the actual method (see `available.parse`).
         """
+
         def wrapper(func):
             func_name = func.__name__
             renamed_method(func_name, supported_name)
 
             @wraps(func)
             def inner(*args, **kwargs):
-                warn('adapter:{}'.format(func_name))
+                warn("adapter:{}".format(func_name))
                 return func(*args, **kwargs)
 
             if parse_replacement:
@@ -71,6 +74,7 @@ def inner(*args, **kwargs):
             else:
                 available_function = self
             return available_function(inner)
+
         return wrapper
 
     def parse_none(self, func: Callable) -> Callable:
diff --git a/core/dbt/utils.py b/core/dbt/utils.py
index 4601b6d5c2e..d56b0bdd062 100644
--- a/core/dbt/utils.py
+++ b/core/dbt/utils.py
@@ -298,8 +298,8 @@ def __get__(self, obj, objtype):
         return functools.partial(self.__call__, obj)
 
 
-K_T = TypeVar('K_T')
-V_T = TypeVar('V_T')
+K_T = TypeVar("K_T")
+V_T = TypeVar("V_T")
 
 
 def filter_null_values(input: Dict[K_T, Optional[V_T]]) -> Dict[K_T, V_T]:
diff --git a/third-party-stubs/agate/__init__.pyi b/third-party-stubs/agate/__init__.pyi
index ce27e1fb175..54b020754d5 100644
--- a/third-party-stubs/agate/__init__.pyi
+++ b/third-party-stubs/agate/__init__.pyi
@@ -12,7 +12,6 @@ from .data_types import (
     TimeDelta as TimeDelta,
 )
 
-
 class MappedSequence(Sequence):
     def __init__(self, values: Any, keys: Optional[Any] = ...) -> None: ...
     def __unicode__(self): ...
@@ -29,7 +28,6 @@ class MappedSequence(Sequence):
     def get(self, key: Any, default: Optional[Any] = ...): ...
     def dict(self): ...
 
-
 class Row(MappedSequence): ...
 
 
@@ -50,7 +48,7 @@ class Table:
     def rows(self): ...
     def print_csv(self, **kwargs: Any) -> None: ...
     def print_json(self, **kwargs: Any) -> None: ...
-    def where(self, test: Callable[[Row], bool]) -> 'Table': ...
+    def where(self, test: Callable[[Row], bool]) -> "Table": ...
     def select(self, key: Union[Iterable[str], str]) -> 'Table': ...
     # these definitions are much narrower than what's actually accepted
     @classmethod

From 3d33d45bc626bd1038136769ead155da22127248 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Tue, 17 Dec 2019 11:13:33 -0700
Subject: [PATCH 017/933] Fix mypy checking

Make mypy check our nested namespace packages by putting dbt in the mypy_path.
Fix a number of exposed mypy/type checker complaints. The checker mostly
passes now even if you add `--check-untyped-defs`, though there are a couple lingering issues so I'll leave that out of CI
Change the return type of RunOperation a bit - adds a couple fields to appease mypy

Also, bump the mypy version (it catches a few more issues).


automatic commit by git-black, original commits:
  9cc7a7a87fcfdc2c558b91c9316efb22bf3fdb27
---
 core/dbt/adapters/base/impl.py                |  8 ++-----
 core/dbt/adapters/base/meta.py                |  4 +---
 core/dbt/adapters/base/query_headers.py       |  4 ++--
 core/dbt/adapters/base/relation.py            |  2 +-
 core/dbt/adapters/cache.py                    | 12 ++++++----
 core/dbt/adapters/factory.py                  |  2 +-
 core/dbt/clients/jinja.py                     |  4 ++--
 core/dbt/clients/system.py                    | 24 ++++++-------------
 core/dbt/logger.py                            |  7 ++----
 core/dbt/main.py                              |  4 ++--
 core/dbt/semver.py                            | 18 ++++++--------
 core/dbt/task/generate.py                     |  8 ++-----
 core/dbt/task/list.py                         |  4 +---
 core/dbt/task/run.py                          |  4 +---
 core/dbt/task/run_operation.py                |  2 +-
 core/dbt/task/runnable.py                     | 22 ++++++-----------
 core/dbt/tracking.py                          | 20 +++++++---------
 third-party-stubs/agate/__init__.pyi          | 10 +++++---
 .../snowplow_tracker/__init__.pyi             | 12 ++++++++--
 19 files changed, 72 insertions(+), 99 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index c5c41538acd..247c97eb844 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -887,15 +887,11 @@ def convert_time_type(cls, agate_table: agate.Table, col_idx: int) -> str:
 
     @available
     @classmethod
-    def convert_type(
-        cls, agate_table: agate.Table, col_idx: int
-    ) -> Optional[str]:
+    def convert_type(cls, agate_table: agate.Table, col_idx: int) -> Optional[str]:
         return cls.convert_agate_type(agate_table, col_idx)
 
     @classmethod
-    def convert_agate_type(
-        cls, agate_table: agate.Table, col_idx: int
-    ) -> Optional[str]:
+    def convert_agate_type(cls, agate_table: agate.Table, col_idx: int) -> Optional[str]:
         agate_type: Type = agate_table.column_types[col_idx]
         conversions: List[Tuple[Type, Callable[..., str]]] = [
             (agate.Text, cls.convert_text_type),
diff --git a/core/dbt/adapters/base/meta.py b/core/dbt/adapters/base/meta.py
index 602bf84dfeb..c03d0d54245 100644
--- a/core/dbt/adapters/base/meta.py
+++ b/core/dbt/adapters/base/meta.py
@@ -99,9 +99,7 @@ def __new__(mcls, name, bases, namespace, **kwargs):
         # I'm not sure there is any benefit to it after poking around a bit,
         # but having it doesn't hurt on the python side (and omitting it could
         # hurt for obscure metaclass reasons, for all I know)
-        cls = abc.ABCMeta.__new__(  # type: ignore
-            mcls, name, bases, namespace, **kwargs
-        )
+        cls = abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs)  # type: ignore
 
         # this is very much inspired by ABCMeta's own implementation
 
diff --git a/core/dbt/adapters/base/query_headers.py b/core/dbt/adapters/base/query_headers.py
index 49c564ffbe4..2f7171936bc 100644
--- a/core/dbt/adapters/base/query_headers.py
+++ b/core/dbt/adapters/base/query_headers.py
@@ -44,7 +44,7 @@ def add(self, sql: str) -> str:
         return '/* {} */\n{}'.format(self.query_comment.strip(), sql)
 
     def set(self, comment: Optional[str], append: bool):
-        if isinstance(comment, str) and '*/' in comment:
+        if isinstance(comment, str) and "*/" in comment:
             # tell the user "no" so they don't hurt themselves by writing
             # garbage
             raise RuntimeException(
@@ -69,7 +69,7 @@ def __init__(self, config: AdapterRequiredConfig, manifest: Manifest):
             assert isinstance(comment_macro, str)
             macro = '\n'.join((
-                '{%- macro query_comment_macro(connection_name, node) -%}',
+                "{%- macro query_comment_macro(connection_name, node) -%}",
                 comment_macro,
                 '{% endmacro %}'
             ))
             ctx = self._get_context()
diff --git a/core/dbt/adapters/base/relation.py b/core/dbt/adapters/base/relation.py
index 65d0679616f..bf4d6871905 100644
--- a/core/dbt/adapters/base/relation.py
+++ b/core/dbt/adapters/base/relation.py
@@ -345,7 +345,7 @@ def get_relation_type(cls) -> Type[RelationType]:
         return RelationType
 
 
-Info = TypeVar('Info', bound='InformationSchema')
+Info = TypeVar("Info", bound="InformationSchema")
 
 
 @dataclass(frozen=True, eq=False, repr=False)
diff --git a/core/dbt/adapters/cache.py b/core/dbt/adapters/cache.py
index 1aa3805fb00..cd9d3b9369b 100644
--- a/core/dbt/adapters/cache.py
+++ b/core/dbt/adapters/cache.py
@@ -176,7 +176,9 @@ def __init__(self) -> None:
         self.schemas: Set[Tuple[Optional[str], Optional[str]]] = set()
 
     def add_schema(
-        self, database: Optional[str], schema: Optional[str],
+        self,
+        database: Optional[str],
+        schema: Optional[str],
     ) -> None:
         """Add a schema to the set of known schemas (case-insensitive)
 
@@ -186,7 +188,9 @@ def add_schema(
         self.schemas.add((lowercase(database), lowercase(schema)))
 
     def drop_schema(
-        self, database: Optional[str], schema: Optional[str],
+        self,
+        database: Optional[str],
+        schema: Optional[str],
     ) -> None:
         """Drop the given schema and remove it from the set of known schemas.
 
@@ -451,9 +455,7 @@ def rename(self, old, new):
 
         fire_event(DumpAfterRenameSchema(dump=Lazy.defer(lambda: self.dump_graph())))
 
-    def get_relations(
-        self, database: Optional[str], schema: Optional[str]
-    ) -> List[Any]:
+    def get_relations(self, database: Optional[str], schema: Optional[str]) -> List[Any]:
         """Case-insensitively yield all relations matching the given schema.
 
         :param str schema: The case-insensitive schema name to list from.
diff --git a/core/dbt/adapters/factory.py b/core/dbt/adapters/factory.py
index 64021f10536..39f51cb797b 100644
--- a/core/dbt/adapters/factory.py
+++ b/core/dbt/adapters/factory.py
@@ -61,7 +61,7 @@ def load_plugin(self, name: str) -> Type[Credentials]:
         # singletons
         try:
             # mypy doesn't think modules have any attributes.
-            mod: Any = import_module('.' + name, 'dbt.adapters')
+            mod: Any = import_module("." + name, "dbt.adapters")
         except ModuleNotFoundError as exc:
             # if we failed to import the target module in particular, inform
             # the user about it via a runtime error
diff --git a/core/dbt/clients/jinja.py b/core/dbt/clients/jinja.py
index 0ba25237336..5c80d100385 100644
--- a/core/dbt/clients/jinja.py
+++ b/core/dbt/clients/jinja.py
@@ -51,8 +51,8 @@ def _linecache_inject(source, write):
     else:
         # `codecs.encode` actually takes a `bytes` as the first argument if
         # the second argument is 'hex' - mypy does not know this.
-        rnd = codecs.encode(os.urandom(12), 'hex')  # type: ignore
-        filename = rnd.decode('ascii')
+        rnd = codecs.encode(os.urandom(12), "hex")  # type: ignore
+        filename = rnd.decode("ascii")
 
     # put ourselves in the cache
     cache_entry = (
diff --git a/core/dbt/clients/system.py b/core/dbt/clients/system.py
index da7793a0ec8..df77310b305 100644
--- a/core/dbt/clients/system.py
+++ b/core/dbt/clients/system.py
@@ -111,7 +111,7 @@ def make_directory(path: str) -> None:
                 raise e
 
 
-def make_file(path: str, contents: str = '', overwrite: bool = False) -> bool:
+def make_file(path: str, contents: str = "", overwrite: bool = False) -> bool:
     """
     Make a file at `path` assuming that the directory it resides in already
     exists. The file is saved with contents `contents`
@@ -139,7 +139,7 @@ def supports_symlinks() -> bool:
     return getattr(os, "symlink", None) is not None
 
 
-def write_file(path: str, contents: str = '') -> bool:
+def write_file(path: str, contents: str = "") -> bool:
     path = convert_path(path)
     try:
         make_directory(os.path.dirname(path))
@@ -177,9 +177,7 @@ def write_json(path: str, data: Dict[str, Any]) -> bool:
     return write_file(path, json.dumps(data, cls=dbt.utils.JSONEncoder))
 
 
-def _windows_rmdir_readonly(
-    func: Callable[[str], Any], path: str, exc: Tuple[Any, OSError, Any]
-):
+def _windows_rmdir_readonly(func: Callable[[str], Any], path: str, exc: Tuple[Any, OSError, Any]):
     exception_val = exc[1]
     if exception_val.errno == errno.EACCES:
         os.chmod(path, stat.S_IWUSR)
@@ -316,9 +314,7 @@ def open_dir_cmd() -> str:
         return 'xdg-open'
 
 
-def _handle_posix_cwd_error(
-    exc: OSError, cwd: str, cmd: List[str]
-) -> NoReturn:
+def _handle_posix_cwd_error(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn:
     if exc.errno == errno.ENOENT:
         message = 'Directory does not exist'
     elif exc.errno == errno.EACCES:
@@ -330,9 +326,7 @@ def _handle_posix_cwd_error(
     raise dbt.exceptions.WorkingDirectoryError(cwd, cmd, message)
 
 
-def _handle_posix_cmd_error(
-    exc: OSError, cwd: str, cmd: List[str]
-) -> NoReturn:
+def _handle_posix_cmd_error(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn:
     if exc.errno == errno.ENOENT:
         message = "Could not find command, ensure it is in the user's PATH"
     elif exc.errno == errno.EACCES:
@@ -408,9 +402,7 @@ def _interpret_oserror(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn:
     )
 
 
-def run_cmd(
-    cwd: str, cmd: List[str], env: Optional[Dict[str, Any]] = None
-) -> Tuple[bytes, bytes]:
+def run_cmd(cwd: str, cmd: List[str], env: Optional[Dict[str, Any]] = None) -> Tuple[bytes, bytes]:
     fire_event(SystemExecutingCmd(cmd=cmd))
     if len(cmd) == 0:
         raise dbt.exceptions.CommandError(cwd, cmd)
@@ -480,9 +472,7 @@ def rename(from_path: str, to_path: str, force: bool = False) -> None:
     shutil.move(from_path, to_path)
 
 
-def untar_package(
-    tar_path: str, dest_dir: str, rename_to: Optional[str] = None
-) -> None:
+def untar_package(tar_path: str, dest_dir: str, rename_to: Optional[str] = None) -> None:
     tar_path = convert_path(tar_path)
     tar_dir_name = None
     with tarfile.open(tar_path, 'r:gz') as tarball:
diff --git a/core/dbt/logger.py b/core/dbt/logger.py
index 604130b3100..e4fa57ddc1f 100644
--- a/core/dbt/logger.py
+++ b/core/dbt/logger.py
@@ -126,9 +126,7 @@ def format_text(self):
         self.format_string = self._text_format_string
 
     def reset(self):
-        raise NotImplementedError(
-            'reset() not implemented in FormatterMixin subclass'
-        )
+        raise NotImplementedError("reset() not implemented in FormatterMixin subclass")
 
 
 class OutputHandler(logbook.StreamHandler, FormatterMixin):
@@ -444,8 +442,7 @@ def _super_init(self, log_path):
         FormatterMixin.__init__(self, DEBUG_LOG_FORMAT)
 
     def _replay_buffered(self):
-        assert self._msg_buffer is not None, \
-            '_msg_buffer should never be None in _replay_buffered'
+        assert self._msg_buffer is not None, "_msg_buffer should never be None in _replay_buffered"
         for record in self._msg_buffer:
             super().emit(record)
         self._msg_buffer = None
diff --git a/core/dbt/main.py b/core/dbt/main.py
index 92d2e5a7e9e..9ac0037f78b 100644
--- a/core/dbt/main.py
+++ b/core/dbt/main.py
@@ -804,9 +804,10 @@ def _build_list_subparser(subparsers, base_subparser):
         aliases=['ls'],
     )
     sub.set_defaults(cls=list_task.ListTask, which='list', rpc_method=None)
-    resource_values: List[str] = [
-        str(s) for s in list_task.ListTask.ALL_RESOURCE_VALUES
-    ] + ['default', 'all']
+    resource_values: List[str] = [str(s) for s in list_task.ListTask.ALL_RESOURCE_VALUES] + [
+        "default",
+        "all",
+    ]
     sub.add_argument('--resource-type',
                      choices=resource_values,
                      action='append',
diff --git a/core/dbt/semver.py b/core/dbt/semver.py
index 39c9c133fae..c3c5acec37b 100644
--- a/core/dbt/semver.py
+++ b/core/dbt/semver.py
@@ -224,9 +224,7 @@ def _try_combine_exact(self, a, b):
     def _try_combine_lower_bound_with_exact(self, lower, exact):
         comparison = lower.compare(exact)
 
-        if (comparison < 0 or
-            (comparison == 0 and
-             lower.matcher == Matchers.GREATER_THAN_OR_EQUAL)):
+        if comparison < 0 or (comparison == 0 and lower.matcher == Matchers.GREATER_THAN_OR_EQUAL):
             return exact
 
         raise VersionsNotCompatibleException()
@@ -238,7 +236,7 @@ def _try_combine_lower_bound(self, a, b):
             return b
 
         if not (a.is_exact or b.is_exact):
-            comparison = (a.compare(b) < 0)
+            comparison = a.compare(b) < 0
 
             if comparison:
                 return b
@@ -254,9 +252,7 @@ def _try_combine_lower_bound(self, a, b):
     def _try_combine_upper_bound_with_exact(self, upper, exact):
         comparison = upper.compare(exact)
 
-        if (comparison > 0 or
-            (comparison == 0 and
-             upper.matcher == Matchers.LESS_THAN_OR_EQUAL)):
+        if comparison > 0 or (comparison == 0 and upper.matcher == Matchers.LESS_THAN_OR_EQUAL):
             return exact
 
         raise VersionsNotCompatibleException()
@@ -268,7 +264,7 @@ def _try_combine_upper_bound(self, a, b):
             return b
 
         if not (a.is_exact or b.is_exact):
-            comparison = (a.compare(b) > 0)
+            comparison = a.compare(b) > 0
 
             if comparison:
                 return b
@@ -284,7 +280,7 @@ def _try_combine_upper_bound(self, a, b):
     def reduce(self, other):
         start = None
 
-        if(self.start.is_exact and other.start.is_exact):
+        if self.start.is_exact and other.start.is_exact:
             start = end = self._try_combine_exact(self.start, other.start)
 
         else:
@@ -300,7 +296,7 @@ def __str__(self):
         result = []
 
         if self.start.is_unbounded and self.end.is_unbounded:
-            return 'ANY'
+            return "ANY"
 
         if not self.start.is_unbounded:
             result.append(self.start.to_version_string())
@@ -308,7 +304,7 @@ def __str__(self):
         if not self.end.is_unbounded:
             result.append(self.end.to_version_string())
 
-        return ', '.join(result)
+        return ", ".join(result)
 
     def to_version_string_pair(self):
         to_return = []
diff --git a/core/dbt/task/generate.py b/core/dbt/task/generate.py
index e405f54984a..43a854417f2 100644
--- a/core/dbt/task/generate.py
+++ b/core/dbt/task/generate.py
@@ -196,9 +196,7 @@ def get_unique_id_mapping(
 class GenerateTask(CompileTask):
     def _get_manifest(self) -> Manifest:
         if self.manifest is None:
-            raise InternalException(
-                'manifest should not be None in _get_manifest'
-            )
+            raise InternalException("manifest should not be None in _get_manifest")
         return self.manifest
 
     def run(self) -> CatalogArtifact:
@@ -233,9 +231,7 @@ def run(self) -> CatalogArtifact:
                     to_asset_path)
 
         if self.manifest is None:
-            raise InternalException(
-                'self.manifest was None in run!'
-            )
+            raise InternalException("self.manifest was None in run!")
 
         adapter = get_adapter(self.config)
         with adapter.connection_named('generate_catalog'):
diff --git a/core/dbt/task/list.py b/core/dbt/task/list.py
index 741022c8033..67d153f0ea3 100644
--- a/core/dbt/task/list.py
+++ b/core/dbt/task/list.py
@@ -77,9 +77,7 @@ def _iterate_selected_nodes(self):
             warn_or_error('No nodes selected!')
             return
         if self.manifest is None:
-            raise InternalException(
-                'manifest is None in _iterate_selected_nodes'
-            )
+            raise InternalException("manifest is None in _iterate_selected_nodes")
         for node in nodes:
             if node in self.manifest.nodes:
                 yield self.manifest.nodes[node]
diff --git a/core/dbt/task/run.py b/core/dbt/task/run.py
index 1c50e320b55..6151cc8608e 100644
--- a/core/dbt/task/run.py
+++ b/core/dbt/task/run.py
@@ -313,9 +313,7 @@ def get_hooks_by_type(
     ) -> List[ParsedHookNode]:
 
         if self.manifest is None:
-            raise InternalException(
-                'self.manifest was None in get_hooks_by_type'
-            )
+            raise InternalException("self.manifest was None in get_hooks_by_type")
 
         nodes = self.manifest.nodes.values()
         # find all hooks defined in the manifest (could be multiple projects)
diff --git a/core/dbt/task/run_operation.py b/core/dbt/task/run_operation.py
index 3c0dfe9c55f..b36a96a4b33 100644
--- a/core/dbt/task/run_operation.py
+++ b/core/dbt/task/run_operation.py
@@ -31,7 +31,7 @@ def _get_kwargs(self) -> Dict[str, Any]:
 
     def compile_manifest(self) -> None:
         if self.manifest is None:
-            raise InternalException('manifest was None in compile_manifest')
+            raise InternalException("manifest was None in compile_manifest")
 
     def _run_unsafe(self) -> agate.Table:
         adapter = get_adapter(self.config)
diff --git a/core/dbt/task/runnable.py b/core/dbt/task/runnable.py
index 95e831de479..68f7c67716e 100644
--- a/core/dbt/task/runnable.py
+++ b/core/dbt/task/runnable.py
@@ -82,9 +82,7 @@ def load_manifest(self):
 
     def compile_manifest(self):
         if self.manifest is None:
-            raise InternalException(
-                'compile_manifest called before manifest was loaded'
-            )
+            raise InternalException("compile_manifest called before manifest was loaded")
         adapter = get_adapter(self.config)
         compiler = adapter.get_compiler()
         self.graph = compiler.compile(self.manifest)
@@ -182,7 +180,7 @@ def raise_on_first_error(self):
         return False
 
     def get_runner_type(self, node):
-        raise NotImplementedException('Not Implemented')
+        raise NotImplementedException("Not Implemented")
 
     def result_path(self):
         return os.path.join(self.config.target_path, RESULT_FILE_NAME)
@@ -279,9 +277,7 @@ def run_queue(self, pool):
         """Given a pool, submit jobs from the queue to the pool.
         """
         if self.job_queue is None:
-            raise InternalException(
-                'Got to run_queue with no job queue set'
-            )
+            raise InternalException("Got to run_queue with no job queue set")
 
         def callback(result):
             """Note: mark_done, at a minimum, must happen here or dbt will
@@ -290,9 +286,7 @@ def callback(result):
             self._handle_result(result)
 
             if self.job_queue is None:
-                raise InternalException(
-                    'Got to run_queue callback with no job queue set'
-                )
+                raise InternalException("Got to run_queue callback with no job queue set")
             self.job_queue.mark_done(result.node.unique_id)
 
         while not self.job_queue.empty():
@@ -334,7 +328,7 @@ def _handle_result(self, result):
         node = result.node
 
         if self.manifest is None:
-            raise InternalException('manifest was None in _handle_result')
+            raise InternalException("manifest was None in _handle_result")
 
         if isinstance(node, ParsedSourceDefinition):
             self.manifest.update_source(node)
@@ -453,9 +447,7 @@ def run(self):
         self._runtime_initialize()
 
         if self._flattened_nodes is None:
-            raise InternalException(
-                'after _runtime_initialize, _flattened_nodes was still None'
-            )
+            raise InternalException("after _runtime_initialize, _flattened_nodes was still None")
 
         if len(self._flattened_nodes) == 0:
             with TextOnly():
@@ -499,7 +491,7 @@ def get_model_schemas(
         self, adapter, selected_uids: Iterable[str]
     ) -> Set[BaseRelation]:
         if self.manifest is None:
-            raise InternalException('manifest was None in get_model_schemas')
+            raise InternalException("manifest was None in get_model_schemas")
         result: Set[BaseRelation] = set()
 
         for node in self.manifest.nodes.values():
diff --git a/core/dbt/tracking.py b/core/dbt/tracking.py
index aa9a6f0dd46..705d4477de1 100644
--- a/core/dbt/tracking.py
+++ b/core/dbt/tracking.py
@@ -315,8 +315,7 @@ def track_resource_counts(resource_counts):
 
 def track_model_run(options):
     context = [SelfDescribingJson(RUN_MODEL_SPEC, options)]
-    assert active_user is not None, \
-        'Cannot track model runs when active user is None'
+    assert active_user is not None, "Cannot track model runs when active user is None"
 
     track(
         active_user,
@@ -329,8 +328,7 @@ def track_model_run(options):
 
 def track_rpc_request(options):
     context = [SelfDescribingJson(RPC_REQUEST_SPEC, options)]
-    assert active_user is not None, \
-        'Cannot track rpc requests when active user is None'
+    assert active_user is not None, "Cannot track rpc requests when active user is None"
 
     track(
         active_user,
@@ -342,8 +340,7 @@ def track_rpc_request(options):
 
 
 def track_package_install(config, args, options):
-    assert active_user is not None, \
-        'Cannot track package installs when active user is None'
+    assert active_user is not None, "Cannot track package installs when active user is None"
 
     invocation_data = get_invocation_context(active_user, config, args)
 
@@ -391,8 +388,7 @@ def track_invocation_end(
         get_dbt_env_context()
     ]
 
-    assert active_user is not None, \
-        'Cannot track invocation end when active user is None'
+    assert active_user is not None, "Cannot track invocation end when active user is None"
 
     track(
         active_user,
@@ -497,10 +493,12 @@ def __init__(self):
 
     def process(self, record):
         if active_user is not None:
-            record.extra.update({
-                "run_started_at": active_user.run_started_at.isoformat(),
-                "invocation_id": get_invocation_id(),
-            })
+            record.extra.update(
+                {
+                    "run_started_at": active_user.run_started_at.isoformat(),
+                    "invocation_id": get_invocation_id(),
+                }
+            )
 
 
 def initialize_from_flags():
diff --git a/third-party-stubs/agate/__init__.pyi b/third-party-stubs/agate/__init__.pyi
index 54b020754d5..92bd711003c 100644
--- a/third-party-stubs/agate/__init__.pyi
+++ b/third-party-stubs/agate/__init__.pyi
@@ -49,12 +49,16 @@ class Table:
     def print_csv(self, **kwargs: Any) -> None: ...
     def print_json(self, **kwargs: Any) -> None: ...
     def where(self, test: Callable[[Row], bool]) -> "Table": ...
-    def select(self, key: Union[Iterable[str], str]) -> 'Table': ...
+    def select(self, key: Union[Iterable[str], str]) -> "Table": ...
     # these definitions are much narrower than what's actually accepted
     @classmethod
-    def from_object(cls, obj: Iterable[Dict[str, Any]], *, column_types: Optional['TypeTester'] = None) -> 'Table': ...
+    def from_object(
+        cls, obj: Iterable[Dict[str, Any]], *, column_types: Optional["TypeTester"] = None
+    ) -> "Table": ...
     @classmethod
-    def from_csv(cls, path: Iterable[str], *, column_types: Optional['TypeTester'] = None) -> 'Table': ...
+    def from_csv(
+        cls, path: Iterable[str], *, column_types: Optional["TypeTester"] = None
+    ) -> "Table": ...
     @classmethod
     def merge(cls, tables: Iterable['Table']) -> 'Table': ...
     def rename(self, column_names: Optional[Iterable[str]] = None, row_names: Optional[Any] = None, slug_columns: bool = False, slug_rows: bool=False, **kwargs: Any) -> 'Table': ...
diff --git a/third-party-stubs/snowplow_tracker/__init__.pyi b/third-party-stubs/snowplow_tracker/__init__.pyi
index de00975641b..8f7a2dab079 100644
--- a/third-party-stubs/snowplow_tracker/__init__.pyi
+++ b/third-party-stubs/snowplow_tracker/__init__.pyi
@@ -1,7 +1,6 @@
 import logging
 from typing import Union, Optional, List, Any, Dict
 
-
 class Subject:
     def __init__(self) -> None: ...
     def set_platform(self, value: Any): ...
@@ -35,7 +34,16 @@ class Tracker:
 
     def __init__(self, emitters: Union[List[Any], Any], subject: Optional[Subject] = ..., namespace: Optional[str] = ..., app_id: Optional[str] = ..., encode_base64: bool = ...) -> None: ...
     def set_subject(self, subject: Optional[Subject]): ...
-    def track_struct_event(self, category: str, action: str, label: Optional[str] = None, property_: Optional[str] = None, value: Optional[float] = None, context: Optional[List[Any]] = None, tstamp: Optional[Any] = None): ...
+    def track_struct_event(
+        self,
+        category: str,
+        action: str,
+        label: Optional[str] = None,
+        property_: Optional[str] = None,
+        value: Optional[float] = None,
+        context: Optional[List[Any]] = None,
+        tstamp: Optional[Any] = None,
+    ): ...
     def flush(self, asynchronous: bool = False): ...
 
 

From 562b4b40c6504b4be976614a1ea4016536837eff Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 4 Mar 2019 21:08:38 -0700
Subject: [PATCH 018/933] per-thread connections

parsing now always opens a connection, instead of waiting to need it
remove model_name/available_raw/etc


automatic commit by git-black, original commits:
  e2af871a5adc1b6b0269ef900e04320550ac1bb8
---
 core/dbt/adapters/base/connections.py | 9 +++++----
 core/dbt/adapters/base/impl.py        | 2 +-
 core/dbt/adapters/base/meta.py        | 2 +-
 core/dbt/adapters/sql/connections.py  | 4 ++--
 core/dbt/adapters/sql/impl.py         | 3 +--
 core/dbt/clients/system.py            | 4 ++--
 core/dbt/task/generate.py             | 2 +-
 core/dbt/task/run.py                  | 2 +-
 8 files changed, 14 insertions(+), 14 deletions(-)

diff --git a/core/dbt/adapters/base/connections.py b/core/dbt/adapters/base/connections.py
index 73fca9643d0..a3639d98407 100644
--- a/core/dbt/adapters/base/connections.py
+++ b/core/dbt/adapters/base/connections.py
@@ -1,5 +1,6 @@
 import abc
 import os
+
 # multiprocessing.RLock is a function returning this type
 from multiprocessing.synchronize import RLock
 from threading import get_ident
@@ -139,7 +140,7 @@ def set_connection_name(self, name: Optional[str] = None) -> Connection:
                 state=ConnectionState.INIT,
                 transaction_open=False,
                 handle=None,
-                credentials=self.profile.credentials
+                credentials=self.profile.credentials,
             )
             self.set_thread_connection(conn)
 
@@ -148,7 +149,7 @@ def set_connection_name(self, name: Optional[str] = None) -> Connection:
 
         fire_event(NewConnection(conn_name=conn_name, conn_type=self.TYPE))
 
-        if conn.state == 'open':
+        if conn.state == "open":
             fire_event(ConnectionReused(conn_name=conn_name))
         else:
             conn.handle = LazyHandle(self.open)
@@ -195,7 +196,7 @@ def release(self) -> None:
     def cleanup_all(self) -> None:
         with self.lock:
             for connection in self.thread_connections.values():
-                if connection.state not in {'closed', 'init'}:
+                if connection.state not in {"closed", "init"}:
                     fire_event(ConnectionLeftOpen(conn_name=connection.name))
                 else:
                     fire_event(ConnectionClosed(conn_name=connection.name))
@@ -230,7 +231,7 @@ def _rollback_handle(cls, connection: Connection) -> None:
     def _close_handle(cls, connection: Connection) -> None:
         """Perform the actual close operation."""
         # On windows, sometimes connection handles don't have a close() attr.
-        if hasattr(connection.handle, 'close'):
+        if hasattr(connection.handle, "close"):
             fire_event(ConnectionClosed2(conn_name=connection.name))
             connection.handle.close()
         else:
diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 247c97eb844..a557c283f90 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -184,7 +184,7 @@ def debug_query(self) -> None:
     def nice_connection_name(self) -> str:
         conn = self.connections.get_if_exists()
         if conn is None or conn.name is None:
-            return ''
+            return ""
         return conn.name
 
     @contextmanager
diff --git a/core/dbt/adapters/base/meta.py b/core/dbt/adapters/base/meta.py
index c03d0d54245..12922768aca 100644
--- a/core/dbt/adapters/base/meta.py
+++ b/core/dbt/adapters/base/meta.py
@@ -111,7 +111,7 @@ def __new__(mcls, name, bases, namespace, **kwargs):
 
         # collect base class data first
         for base in bases:
-            available.update(getattr(base, '_available_', set()))
+            available.update(getattr(base, "_available_", set()))
             replacements.update(getattr(base, '_parse_replacements_', set()))
 
         # override with local data if it exists
diff --git a/core/dbt/adapters/sql/connections.py b/core/dbt/adapters/sql/connections.py
index 891e3b90d69..81ea5af68ff 100644
--- a/core/dbt/adapters/sql/connections.py
+++ b/core/dbt/adapters/sql/connections.py
@@ -131,10 +131,10 @@ def execute(
         return response, table
 
     def add_begin_query(self):
-        return self.add_query('BEGIN', auto_begin=False)
+        return self.add_query("BEGIN", auto_begin=False)
 
     def add_commit_query(self):
-        return self.add_query('COMMIT', auto_begin=False)
+        return self.add_query("COMMIT", auto_begin=False)
 
     def begin(self):
         connection = self.get_thread_connection()
diff --git a/core/dbt/adapters/sql/impl.py b/core/dbt/adapters/sql/impl.py
index 36de954dd09..c49399cc928 100644
--- a/core/dbt/adapters/sql/impl.py
+++ b/core/dbt/adapters/sql/impl.py
@@ -63,8 +63,7 @@ def add_query(
         :param abridge_sql_log: If set, limit the raw sql logged to 512
             characters
         """
-        return self.connections.add_query(sql, auto_begin, bindings,
-                                          abridge_sql_log)
+        return self.connections.add_query(sql, auto_begin, bindings, abridge_sql_log)
 
     @classmethod
     def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str:
diff --git a/core/dbt/clients/system.py b/core/dbt/clients/system.py
index df77310b305..5821d798532 100644
--- a/core/dbt/clients/system.py
+++ b/core/dbt/clients/system.py
@@ -378,7 +378,7 @@ def _handle_windows_error(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn:
         cls = dbt.exceptions.WorkingDirectoryError
     else:
         message = 'Unknown error: {} (errno={}: "{}")'.format(
-            str(exc), exc.errno, errno.errorcode.get(exc.errno, '')
+            str(exc), exc.errno, errno.errorcode.get(exc.errno, "")
         )
     raise cls(cwd, cmd, message)
 
@@ -529,7 +529,7 @@ def move(src, dst):
     except OSError:
         # probably different drives
         if os.path.isdir(src):
-            if _absnorm(dst + '\\').startswith(_absnorm(src + '\\')):
+            if _absnorm(dst + "\\").startswith(_absnorm(src + "\\")):
                 # dst is inside src
                 raise EnvironmentError(
                     "Cannot move a directory '{}' into itself '{}'"
diff --git a/core/dbt/task/generate.py b/core/dbt/task/generate.py
index 43a854417f2..d6bd952d454 100644
--- a/core/dbt/task/generate.py
+++ b/core/dbt/task/generate.py
@@ -234,7 +234,7 @@ def run(self) -> CatalogArtifact:
             raise InternalException("self.manifest was None in run!")
 
         adapter = get_adapter(self.config)
-        with adapter.connection_named('generate_catalog'):
+        with adapter.connection_named("generate_catalog"):
             fire_event(BuildingCatalog())
             catalog_table, exceptions = adapter.get_catalog(self.manifest)
 
diff --git a/core/dbt/task/run.py b/core/dbt/task/run.py
index 6151cc8608e..4a0cc12df4e 100644
--- a/core/dbt/task/run.py
+++ b/core/dbt/task/run.py
@@ -449,7 +449,7 @@ def defer_to_manifest(self, adapter, selected_uids: AbstractSet[str]):
         self.write_manifest()
 
     def before_run(self, adapter, selected_uids: AbstractSet[str]):
-        with adapter.connection_named('master'):
+        with adapter.connection_named("master"):
             self.create_schemas(adapter, selected_uids)
             self.populate_adapter_cache(adapter)
             self.defer_to_manifest(adapter, selected_uids)

From 9966d46bb09365d2f1b6355513247d5c3faaabc2 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 24 Apr 2019 10:44:44 -0600
Subject: [PATCH 019/933] create a decorator for stubbing out methods at parse
 time

Includes some unit tests
Update integration tests to handle the fact that sometimes we now fail at runtime


automatic commit by git-black, original commits:
  0f1c154a1a38352cd307cd21dccaf70b3043cb27
---
 core/dbt/adapters/base/impl.py | 4 ++--
 core/dbt/adapters/base/meta.py | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index a557c283f90..ac42c099cec 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -507,7 +507,7 @@ def get_columns_in_relation(self, relation: BaseRelation) -> List[BaseColumn]:
             '`get_columns_in_relation` is not implemented for this adapter!'
         )
 
-    @available.deprecated('get_columns_in_relation', lambda *a, **k: [])
+    @available.deprecated("get_columns_in_relation", lambda *a, **k: [])
     def get_columns_in_table(self, schema: str, identifier: str) -> List[BaseColumn]:
         """DEPRECATED: Get a list of the columns in the given table."""
         relation = self.Relation.create(
@@ -735,7 +735,7 @@ def get_relation(self, database: str, schema: str, identifier: str) -> Optional[
 
         return None
 
-    @available.deprecated('get_relation', lambda *a, **k: False)
+    @available.deprecated("get_relation", lambda *a, **k: False)
     def already_exists(self, schema: str, name: str) -> bool:
         """DEPRECATED: Return if a model already exists in the database"""
         database = self.config.credentials.database
diff --git a/core/dbt/adapters/base/meta.py b/core/dbt/adapters/base/meta.py
index 12922768aca..b3918c4d29c 100644
--- a/core/dbt/adapters/base/meta.py
+++ b/core/dbt/adapters/base/meta.py
@@ -112,13 +112,13 @@ def __new__(mcls, name, bases, namespace, **kwargs):
         # collect base class data first
         for base in bases:
             available.update(getattr(base, "_available_", set()))
-            replacements.update(getattr(base, '_parse_replacements_', set()))
+            replacements.update(getattr(base, "_parse_replacements_", set()))
 
         # override with local data if it exists
         for name, value in namespace.items():
             if getattr(value, '_is_available_', False):
                 available.add(name)
-            parse_replacement = getattr(value, '_parse_replacement_', None)
+            parse_replacement = getattr(value, "_parse_replacement_", None)
             if parse_replacement is not None:
                 replacements[name] = parse_replacement
 

From 0519cc7e4e154bc57e1ea04c3b3abc09b013ba55 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Tue, 9 Oct 2018 06:46:18 -0600
Subject: [PATCH 020/933] Split out connection managers

automatic commit by git-black, original commits:
  9ffbb3ad02ab322694bf6ff33b08517d31f7920e
---
 core/dbt/adapters/base/connections.py         | 16 +++++++-------
 core/dbt/adapters/base/impl.py                | 13 +++---------
 core/dbt/adapters/base/meta.py                |  2 +-
 core/dbt/adapters/sql/connections.py          |  6 ++++--
 .../dbt/adapters/postgres/connections.py      | 21 ++++++++++---------
 5 files changed, 27 insertions(+), 31 deletions(-)

diff --git a/core/dbt/adapters/base/connections.py b/core/dbt/adapters/base/connections.py
index a3639d98407..0e248937e8d 100644
--- a/core/dbt/adapters/base/connections.py
+++ b/core/dbt/adapters/base/connections.py
@@ -46,6 +46,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
     You must also set the 'TYPE' class attribute with a class-unique constant
     string.
     """
+
     TYPE: str = NotImplemented
 
     def __init__(self, profile: AdapterRequiredConfig):
@@ -116,7 +117,8 @@ def exception_handler(self, sql: str) -> ContextManager:
             underlying database.
         """
         raise dbt.exceptions.NotImplementedException(
-            '`exception_handler` is not implemented for this adapter!')
+            "`exception_handler` is not implemented for this adapter!"
+        )
 
     def set_connection_name(self, name: Optional[str] = None) -> Connection:
         conn_name: str
@@ -161,7 +163,7 @@ def set_connection_name(self, name: Optional[str] = None) -> Connection:
     def cancel_open(self) -> Optional[List[str]]:
         """Cancel all open connections on the adapter. (passable)"""
         raise dbt.exceptions.NotImplementedException(
-            '`cancel_open` is not implemented for this adapter!'
+            "`cancel_open` is not implemented for this adapter!"
         )
 
     @abc.abstractclassmethod
@@ -174,9 +176,7 @@ def open(cls, connection: Connection) -> Connection:
         This should be thread-safe, or hold the lock if necessary. The given
         connection should not be in either in_use or available.
         """
-        raise dbt.exceptions.NotImplementedException(
-            '`open` is not implemented for this adapter!'
-        )
+        raise dbt.exceptions.NotImplementedException("`open` is not implemented for this adapter!")
 
     def release(self) -> None:
         with self.lock:
@@ -209,14 +209,14 @@ def cleanup_all(self) -> None:
     def begin(self) -> None:
         """Begin a transaction. (passable)"""
         raise dbt.exceptions.NotImplementedException(
-            '`begin` is not implemented for this adapter!'
+            "`begin` is not implemented for this adapter!"
         )
 
     @abc.abstractmethod
     def commit(self) -> None:
         """Commit a transaction. (passable)"""
         raise dbt.exceptions.NotImplementedException(
-            '`commit` is not implemented for this adapter!'
+            "`commit` is not implemented for this adapter!"
         )
 
     @classmethod
@@ -292,5 +292,5 @@ def execute(
         :rtype: Tuple[Union[str, AdapterResponse], agate.Table]
         """
         raise dbt.exceptions.NotImplementedException(
-            '`execute` is not implemented for this adapter!'
+            "`execute` is not implemented for this adapter!"
         )
diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index ac42c099cec..308a5a57783 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -222,11 +222,7 @@ def execute(
         :return: A tuple of the status and the results (empty if fetch=False).
         :rtype: Tuple[Union[str, AdapterResponse], agate.Table]
         """
-        return self.connections.execute(
-            sql=sql,
-            auto_begin=auto_begin,
-            fetch=fetch
-        )
+        return self.connections.execute(sql=sql, auto_begin=auto_begin, fetch=fetch)
 
     @available.parse(lambda *a, **k: ('', empty_table()))
     def get_partitions_metadata(
@@ -389,7 +385,7 @@ def cache_added(self, relation: Optional[BaseRelation]) -> str:
             )
         self.cache.add(relation)
         # so jinja doesn't render things
-        return ''
+        return ""
 
     @available
     def cache_dropped(self, relation: Optional[BaseRelation]) -> str:
@@ -585,10 +581,7 @@ def get_missing_columns(
 
         missing_columns = set(from_columns.keys()) - set(to_columns.keys())
 
-        return [
-            col for (col_name, col) in from_columns.items()
-            if col_name in missing_columns
-        ]
+        return [col for (col_name, col) in from_columns.items() if col_name in missing_columns]
 
     @available.parse_none
     def valid_snapshot_target(self, relation: BaseRelation) -> None:
diff --git a/core/dbt/adapters/base/meta.py b/core/dbt/adapters/base/meta.py
index b3918c4d29c..de35a4f826a 100644
--- a/core/dbt/adapters/base/meta.py
+++ b/core/dbt/adapters/base/meta.py
@@ -116,7 +116,7 @@ def __new__(mcls, name, bases, namespace, **kwargs):
 
         # override with local data if it exists
         for name, value in namespace.items():
-            if getattr(value, '_is_available_', False):
+            if getattr(value, "_is_available_", False):
                 available.add(name)
             parse_replacement = getattr(value, "_parse_replacement_", None)
             if parse_replacement is not None:
diff --git a/core/dbt/adapters/sql/connections.py b/core/dbt/adapters/sql/connections.py
index 81ea5af68ff..ff664622fdb 100644
--- a/core/dbt/adapters/sql/connections.py
+++ b/core/dbt/adapters/sql/connections.py
@@ -23,11 +23,12 @@ class SQLConnectionManager(BaseConnectionManager):
         - get_response
         - open
     """
+
     @abc.abstractmethod
     def cancel(self, connection: Connection):
         """Cancel the given connection."""
         raise dbt.exceptions.NotImplementedException(
-            '`cancel` is not implemented for this adapter!'
+            "`cancel` is not implemented for this adapter!"
         )
 
     def cancel_open(self) -> List[str]:
@@ -153,7 +154,8 @@ def commit(self):
         if connection.transaction_open is False:
             raise dbt.exceptions.InternalException(
                 'Tried to commit transaction on connection "{}", but '
-                'it does not have one open!'.format(connection.name))
+                "it does not have one open!".format(connection.name)
+            )
 
         fire_event(SQLCommit(conn_name=connection.name))
         self.add_commit_query()
diff --git a/plugins/postgres/dbt/adapters/postgres/connections.py b/plugins/postgres/dbt/adapters/postgres/connections.py
index ac9bfd8e9a5..f4178fb614e 100644
--- a/plugins/postgres/dbt/adapters/postgres/connections.py
+++ b/plugins/postgres/dbt/adapters/postgres/connections.py
@@ -51,7 +51,7 @@ def _connection_keys(self):
 
 
 class PostgresConnectionManager(SQLConnectionManager):
-    TYPE = 'postgres'
+    TYPE = "postgres"
 
     @contextmanager
     def exception_handler(self, sql):
@@ -59,7 +59,7 @@ def exception_handler(self, sql):
             yield
 
         except psycopg2.DatabaseError as e:
-            logger.debug('Postgres error: {}'.format(str(e)))
+            logger.debug("Postgres error: {}".format(str(e)))
 
             try:
                 self.rollback_if_open()
@@ -83,8 +83,8 @@ def exception_handler(self, sql):
 
     @classmethod
     def open(cls, connection):
-        if connection.state == 'open':
-            logger.debug('Connection is already open, skipping open.')
+        if connection.state == "open":
+            logger.debug("Connection is already open, skipping open.")
             return connection
 
         credentials = cls.get_credentials(connection.credentials)
@@ -125,20 +125,21 @@ def open(cls, connection):
                 password=credentials.password,
                 port=credentials.port,
                 connect_timeout=credentials.connect_timeout,
-                **kwargs)
+                **kwargs,
+            )
 
             if credentials.role:
                 handle.cursor().execute('set role {}'.format(credentials.role))
 
             connection.handle = handle
-            connection.state = 'open'
+            connection.state = "open"
         except psycopg2.Error as e:
-            logger.debug("Got an error when attempting to open a postgres "
-                         "connection: '{}'"
-                         .format(e))
+            logger.debug(
+                "Got an error when attempting to open a postgres " "connection: '{}'".format(e)
+            )
 
             connection.handle = None
-            connection.state = 'fail'
+            connection.state = "fail"
 
             raise dbt.exceptions.FailedToConnectException(str(e))
 

From 68bfa4111438db8ccad7f6ed959d0bf1dc4121ad Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Thu, 25 Jun 2020 09:41:08 -0600
Subject: [PATCH 021/933] Allow plugin macros to override core

Moved some relation logic out of adapters and into contracts
Fixed a typo in a type name
Fixed an issue where tests could fail based on color settings
Fixed the type analysis for adapter plugins/factory
Swapped more concrete types out for protocols
Finally removed PACKAGES global
Added unit tests


automatic commit by git-black, original commits:
  abe345e925fb9b272699a7f106719ed76e1f4bbd
---
 core/dbt/adapters/base/plugin.py   |  5 ++---
 core/dbt/adapters/base/relation.py |  7 ++++++-
 core/dbt/adapters/protocol.py      |  2 +-
 core/dbt/config/runtime.py         |  4 +---
 core/dbt/contracts/relation.py     | 29 ++++++++++++-----------------
 5 files changed, 22 insertions(+), 25 deletions(-)

diff --git a/core/dbt/adapters/base/plugin.py b/core/dbt/adapters/base/plugin.py
index 14d4edda69d..21541317bae 100644
--- a/core/dbt/adapters/base/plugin.py
+++ b/core/dbt/adapters/base/plugin.py
@@ -8,11 +8,10 @@
 def project_name_from_path(include_path: str) -> str:
     # avoid an import cycle
     from dbt.config.project import Project
+
     partial = Project.partial_load(include_path)
     if partial.project_name is None:
-        raise CompilationException(
-            f'Invalid project at {include_path}: name not set!'
-        )
+        raise CompilationException(f"Invalid project at {include_path}: name not set!")
     return partial.project_name
 
 
diff --git a/core/dbt/adapters/base/relation.py b/core/dbt/adapters/base/relation.py
index bf4d6871905..ceb048e0efa 100644
--- a/core/dbt/adapters/base/relation.py
+++ b/core/dbt/adapters/base/relation.py
@@ -7,7 +7,12 @@
 from dbt.contracts.graph.compiled import CompiledNode
 from dbt.contracts.graph.parsed import ParsedSourceDefinition, ParsedNode
 from dbt.contracts.relation import (
-    RelationType, ComponentName, HasQuoting, FakeAPIObject, Policy, Path
+    RelationType,
+    ComponentName,
+    HasQuoting,
+    FakeAPIObject,
+    Policy,
+    Path,
 )
 from dbt.exceptions import InternalException
 from dbt.node_types import NodeType
diff --git a/core/dbt/adapters/protocol.py b/core/dbt/adapters/protocol.py
index e0731485ebc..f7fef69562f 100644
--- a/core/dbt/adapters/protocol.py
+++ b/core/dbt/adapters/protocol.py
@@ -34,7 +34,7 @@ class ColumnProtocol(Protocol):
     pass
 
 
-Self = TypeVar('Self', bound='RelationProtocol')
+Self = TypeVar("Self", bound="RelationProtocol")
 
 
 class RelationProtocol(Protocol):
diff --git a/core/dbt/config/runtime.py b/core/dbt/config/runtime.py
index 61a17605d7e..a7f7ae0f2ef 100644
--- a/core/dbt/config/runtime.py
+++ b/core/dbt/config/runtime.py
@@ -33,9 +33,7 @@
 from dbt.dataclass_schema import ValidationError
 
 
-def _project_quoting_dict(
-    proj: Project, profile: Profile
-) -> Dict[ComponentName, bool]:
+def _project_quoting_dict(proj: Project, profile: Profile) -> Dict[ComponentName, bool]:
     src: Dict[str, Any] = profile.credentials.translate_aliases(proj.quoting)
     result: Dict[ComponentName, bool] = {}
     for key in ComponentName:
diff --git a/core/dbt/contracts/relation.py b/core/dbt/contracts/relation.py
index 099c476e646..a383555a400 100644
--- a/core/dbt/contracts/relation.py
+++ b/core/dbt/contracts/relation.py
@@ -13,17 +13,17 @@
 
 
 class RelationType(StrEnum):
-    Table = 'table'
-    View = 'view'
-    CTE = 'cte'
-    MaterializedView = 'materializedview'
-    External = 'external'
+    Table = "table"
+    View = "view"
+    CTE = "cte"
+    MaterializedView = "materializedview"
+    External = "external"
 
 
 class ComponentName(StrEnum):
-    Database = 'database'
-    Schema = 'schema'
-    Identifier = 'identifier'
+    Database = "database"
+    Schema = "schema"
+    Identifier = "identifier"
 
 
 class HasQuoting(Protocol):
@@ -68,8 +68,7 @@ def get_part(self, key: ComponentName) -> bool:
             return self.identifier
         else:
             raise ValueError(
-                'Got a key of {}, expected one of {}'
-                .format(key, list(ComponentName))
+                "Got a key of {}, expected one of {}".format(key, list(ComponentName))
             )
 
     def replace_dict(self, dct: Dict[ComponentName, bool]):
@@ -88,16 +87,12 @@ class Path(FakeAPIObject):
     def __post_init__(self):
         # handle pesky jinja2.Undefined sneaking in here and messing up rende
         if not isinstance(self.database, (type(None), str)):
-            raise CompilationException(
-                'Got an invalid path database: {}'.format(self.database)
-            )
+            raise CompilationException("Got an invalid path database: {}".format(self.database))
         if not isinstance(self.schema, (type(None), str)):
-            raise CompilationException(
-                'Got an invalid path schema: {}'.format(self.schema)
-            )
+            raise CompilationException("Got an invalid path schema: {}".format(self.schema))
         if not isinstance(self.identifier, (type(None), str)):
             raise CompilationException(
-                'Got an invalid path identifier: {}'.format(self.identifier)
+                "Got an invalid path identifier: {}".format(self.identifier)
             )
 
     def get_lowered_part(self, key: ComponentName) -> Optional[str]:

From 6f16f6158c41f579535bcc8dbde0fde138613a96 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Fri, 14 Dec 2018 10:40:10 -0700
Subject: [PATCH 022/933] Split dbt into core and plugins

automatic commit by git-black, original commits:
  4780c4bb182a74d4506066aed2e3c280aa30cd1b
---
 core/dbt/adapters/base/plugin.py              |  1 +
 core/dbt/config/profile.py                    | 71 +++++++++----------
 core/dbt/config/project.py                    | 60 ++++++++--------
 core/dbt/config/runtime.py                    |  2 +-
 core/dbt/include/global_project/__init__.py   |  2 +-
 .../dbt/adapters/postgres/__init__.py         |  5 +-
 .../postgres/dbt/include/postgres/__init__.py |  1 +
 plugins/postgres/setup.py                     |  6 +-
 8 files changed, 72 insertions(+), 76 deletions(-)

diff --git a/core/dbt/adapters/base/plugin.py b/core/dbt/adapters/base/plugin.py
index 21541317bae..f0d348d8f57 100644
--- a/core/dbt/adapters/base/plugin.py
+++ b/core/dbt/adapters/base/plugin.py
@@ -22,6 +22,7 @@ class AdapterPlugin:
     :param dependencies: A list of adapter names that this adapter depends
         upon.
     """
+
     def __init__(
         self,
         adapter: Type[AdapterProtocol],
diff --git a/core/dbt/config/profile.py b/core/dbt/config/profile.py
index d531abab60b..3008b93cd59 100644
--- a/core/dbt/config/profile.py
+++ b/core/dbt/config/profile.py
@@ -23,7 +23,7 @@
 
 DEFAULT_THREADS = 1
 
-DEFAULT_PROFILES_DIR = os.path.join(os.path.expanduser('~'), '.dbt')
+DEFAULT_PROFILES_DIR = os.path.join(os.path.expanduser("~"), ".dbt")
 
 INVALID_PROFILE_MESSAGE = """
 dbt encountered an error while trying to read your profiles.yml file.
@@ -47,7 +47,7 @@
 
 
 def read_profile(profiles_dir: str) -> Dict[str, Any]:
-    path = os.path.join(profiles_dir, 'profiles.yml')
+    path = os.path.join(profiles_dir, "profiles.yml")
 
     contents = None
     if os.path.isfile(path):
@@ -124,10 +124,10 @@ def to_profile_info(
         :returns dict: The serialized profile.
         """
         result = {
-            'profile_name': self.profile_name,
-            'target_name': self.target_name,
+            "profile_name": self.profile_name,
+            "target_name": self.target_name,
             'user_config': self.user_config,
-            'threads': self.threads,
+            "threads": self.threads,
             'credentials': self.credentials,
         }
         if serialize_credentials:
@@ -150,8 +150,7 @@ def to_target_dict(self) -> Dict[str, Any]:
         return target
 
     def __eq__(self, other: object) -> bool:
-        if not (isinstance(other, self.__class__) and
-                isinstance(self, other.__class__)):
+        if not (isinstance(other, self.__class__) and isinstance(self, other.__class__)):
             return NotImplemented
         return self.to_profile_info() == other.to_profile_info()
 
@@ -173,12 +172,14 @@ def _credentials_from_profile(
         from dbt.adapters.factory import load_plugin
         # credentials carry their 'type' in their actual type, not their
         # attributes. We do want this in order to pick our Credentials class.
-        if 'type' not in profile:
+        if "type" not in profile:
             raise DbtProfileError(
-                'required field "type" not found in profile {} and target {}'
-                .format(profile_name, target_name))
+                'required field "type" not found in profile {} and target {}'.format(
+                    profile_name, target_name
+                )
+            )
 
-        typename = profile.pop('type')
+        typename = profile.pop("type")
         try:
             cls = load_plugin(typename)
             data = cls.translate_aliases(profile)
@@ -187,7 +188,7 @@ def _credentials_from_profile(
         except (RuntimeException, ValidationError) as e:
             msg = str(e) if isinstance(e, RuntimeException) else e.message
             raise DbtProfileError(
-                'Credentials in profile "{}", target "{}" invalid: {}'
+                'Credentials in profile "{}", target "{}" invalid: {}'.format(
                 .format(profile_name, target_name, msg)
             ) from e
 
@@ -209,19 +210,19 @@ def pick_profile_name(
     def _get_profile_data(
         profile: Dict[str, Any], profile_name: str, target_name: str
     ) -> Dict[str, Any]:
-        if 'outputs' not in profile:
-            raise DbtProfileError(
-                "outputs not specified in profile '{}'".format(profile_name)
-            )
-        outputs = profile['outputs']
+        if "outputs" not in profile:
+            raise DbtProfileError("outputs not specified in profile '{}'".format(profile_name))
+        outputs = profile["outputs"]
 
         if target_name not in outputs:
-            outputs = '\n'.join(' - {}'.format(output)
-                                for output in outputs)
-            msg = ("The profile '{}' does not have a target named '{}'. The "
-                   "valid target names for this profile are:\n{}"
-                   .format(profile_name, target_name, outputs))
-            raise DbtProfileError(msg, result_type='invalid_target')
+            outputs = "\n".join(" - {}".format(output) for output in outputs)
+            msg = (
+                "The profile '{}' does not have a target named '{}'. The "
+                "valid target names for this profile are:\n{}".format(
+                    profile_name, target_name, outputs
+                )
+            )
+            raise DbtProfileError(msg, result_type="invalid_target")
         profile_data = outputs[target_name]
 
         if not isinstance(profile_data, dict):
@@ -264,7 +265,7 @@ def from_credentials(
             target_name=target_name,
             user_config=user_config_obj,
             threads=threads,
-            credentials=credentials
+            credentials=credentials,
         )
         profile.validate()
         return profile
@@ -289,16 +290,14 @@ def render_profile(
         # name to extract a profile that we can render.
         if target_override is not None:
             target_name = target_override
-        elif 'target' in raw_profile:
+        elif "target" in raw_profile:
             # render the target if it was parsed from yaml
-            target_name = renderer.render_value(raw_profile['target'])
+            target_name = renderer.render_value(raw_profile["target"])
         else:
-            target_name = 'default'
+            target_name = "default"
             fire_event(MissingProfileTarget(profile_name=profile_name, target_name=target_name))
 
-        raw_profile_data = cls._get_profile_data(
-            raw_profile, profile_name, target_name
-        )
+        raw_profile_data = cls._get_profile_data(raw_profile, profile_name, target_name)
 
         try:
             profile_data = renderer.render_data(raw_profile_data)
@@ -344,7 +343,7 @@ def from_raw_profile_info(
 
         # valid connections never include the number of threads, but it's
         # stored on a per-connection level in the raw configs
-        threads = profile_data.pop('threads', DEFAULT_THREADS)
+        threads = profile_data.pop("threads", DEFAULT_THREADS)
         if threads_override is not None:
             threads = threads_override
 
@@ -384,9 +383,7 @@ def from_raw_profiles(
         :returns: The new Profile object.
         """
         if profile_name not in raw_profiles:
-            raise DbtProjectError(
-                "Could not find profile named '{}'".format(profile_name)
-            )
+            raise DbtProjectError("Could not find profile named '{}'".format(profile_name))
 
         # First, we've already got our final decision on profile name, and we
         # don't render keys, so we can pluck that out
@@ -432,8 +429,8 @@ def render_from_args(
             target could not be found.
         :returns Profile: The new Profile object.
         """
-        threads_override = getattr(args, 'threads', None)
-        target_override = getattr(args, 'target', None)
+        threads_override = getattr(args, "threads", None)
+        target_override = getattr(args, "target", None)
         raw_profiles = read_profile(flags.PROFILES_DIR)
         profile_name = cls.pick_profile_name(getattr(args, 'profile', None),
                                              project_profile_name)
@@ -442,5 +439,5 @@ def render_from_args(
             profile_name=profile_name,
             renderer=renderer,
             target_override=target_override,
-            threads_override=threads_override
+            threads_override=threads_override,
         )
diff --git a/core/dbt/config/project.py b/core/dbt/config/project.py
index 46a09d0338a..6ad02ed4be9 100644
--- a/core/dbt/config/project.py
+++ b/core/dbt/config/project.py
@@ -83,9 +83,7 @@ def _load_yaml(path):
 
 
 def package_data_from_root(project_root):
-    package_filepath = resolve_path_from_base(
-        'packages.yml', project_root
-    )
+    package_filepath = resolve_path_from_base("packages.yml", project_root)
 
     if path_exists(package_filepath):
         packages_dict = _load_yaml(package_filepath)
@@ -96,7 +94,7 @@ def package_data_from_root(project_root):
 
 def package_config_from_data(packages_data: Dict[str, Any]):
     if not packages_data:
-        packages_data = {'packages': []}
+        packages_data = {"packages": []}
 
     try:
         PackageConfig.validate(packages_data)
@@ -119,7 +117,7 @@ def _parse_versions(versions: Union[List[str], str]) -> List[VersionSpecifier]:
     Regardless, this will return a list of VersionSpecifiers
     """
     if isinstance(versions, str):
-        versions = versions.split(',')
+        versions = versions.split(",")
     return [VersionSpecifier.from_version_string(v) for v in versions]
 
 
@@ -581,11 +579,11 @@ def __str__(self):
         return str(cfg)
 
     def __eq__(self, other):
-        if not (isinstance(other, self.__class__) and
-                isinstance(self, other.__class__)):
+        if not (isinstance(other, self.__class__) and isinstance(self, other.__class__)):
             return False
-        return self.to_project_config(with_packages=True) == \
-            other.to_project_config(with_packages=True)
+        return self.to_project_config(with_packages=True) == other.to_project_config(
+            with_packages=True
+        )
 
     def to_project_config(self, with_packages=False):
         """Return a dict representation of the config that could be written to
@@ -595,37 +593,37 @@ def to_project_config(self, with_packages=False):
             file in the root.
         :returns dict: The serialized profile.
         """
-        result = deepcopy({
-            'name': self.project_name,
-            'version': self.version,
-            'project-root': self.project_root,
-            'profile': self.profile_name,
+        result = deepcopy(
+            {
+                "name": self.project_name,
+                "version": self.version,
+                "project-root": self.project_root,
             'model-paths': self.model_paths,
-            'macro-paths': self.macro_paths,
+                "model-paths": self.model_paths,
             'seed-paths': self.seed_paths,
-            'test-paths': self.test_paths,
-            'analysis-paths': self.analysis_paths,
-            'docs-paths': self.docs_paths,
+                "seed-paths": self.seed_paths,
+                "test-paths": self.test_paths,
+                "analysis-paths": self.analysis_paths,
             'asset-paths': self.asset_paths,
-            'target-path': self.target_path,
+                "asset-paths": self.asset_paths,
             'snapshot-paths': self.snapshot_paths,
-            'clean-targets': self.clean_targets,
-            'log-path': self.log_path,
-            'quoting': self.quoting,
-            'models': self.models,
-            'on-run-start': self.on_run_start,
-            'on-run-end': self.on_run_end,
+                "snapshot-paths": self.snapshot_paths,
+                "clean-targets": self.clean_targets,
+                "log-path": self.log_path,
+                "quoting": self.quoting,
+                "models": self.models,
+                "on-run-start": self.on_run_start,
             'dispatch': self.dispatch,
-            'seeds': self.seeds,
+                "dispatch": self.dispatch,
             'snapshots': self.snapshots,
             'sources': self.sources,
             'tests': self.tests,
             'vars': self.vars.to_dict(),
-            'require-dbt-version': [
-                v.to_version_string() for v in self.dbt_version
-            ],
+                "vars": self.vars.to_dict(),
+                "require-dbt-version": [v.to_version_string() for v in self.dbt_version],
+                "config-version": self.config_version,
             'config-version': self.config_version,
-        })
+        )
         if self.query_comment:
             result['query-comment'] = \
                 self.query_comment.to_dict(omit_none=True)
@@ -662,7 +660,7 @@ def from_project_root(
         return partial.render(renderer)
 
     def hashed_name(self):
-        return hashlib.md5(self.project_name.encode('utf-8')).hexdigest()
+        return hashlib.md5(self.project_name.encode("utf-8")).hexdigest()
 
     def get_selector(self, name: str) -> Union[SelectionSpec, bool]:
         if name not in self.selectors:
diff --git a/core/dbt/config/runtime.py b/core/dbt/config/runtime.py
index a7f7ae0f2ef..f87c7346d15 100644
--- a/core/dbt/config/runtime.py
+++ b/core/dbt/config/runtime.py
@@ -166,7 +166,7 @@ def serialize(self) -> Dict[str, Any]:
         """
         result = self.to_project_config(with_packages=True)
         result.update(self.to_profile_info(serialize_credentials=True))
-        result['cli_vars'] = deepcopy(self.cli_vars)
+        result["cli_vars"] = deepcopy(self.cli_vars)
         return result
 
     def validate(self):
diff --git a/core/dbt/include/global_project/__init__.py b/core/dbt/include/global_project/__init__.py
index fa2abf5eae1..29f314fae96 100644
--- a/core/dbt/include/global_project/__init__.py
+++ b/core/dbt/include/global_project/__init__.py
@@ -1,7 +1,7 @@
 import os
 
 PACKAGE_PATH = os.path.dirname(__file__)
-PROJECT_NAME = 'dbt'
+PROJECT_NAME = "dbt"
 
 DOCS_INDEX_FILE_PATH = os.path.normpath(
     os.path.join(PACKAGE_PATH, '..', "index.html"))
diff --git a/plugins/postgres/dbt/adapters/postgres/__init__.py b/plugins/postgres/dbt/adapters/postgres/__init__.py
index 04f0e8cda53..b5b3b7b7a09 100644
--- a/plugins/postgres/dbt/adapters/postgres/__init__.py
+++ b/plugins/postgres/dbt/adapters/postgres/__init__.py
@@ -9,6 +9,5 @@
 from dbt.include import postgres
 
 Plugin = AdapterPlugin(
-    adapter=PostgresAdapter,
-    credentials=PostgresCredentials,
-    include_path=postgres.PACKAGE_PATH)
+    adapter=PostgresAdapter, credentials=PostgresCredentials, include_path=postgres.PACKAGE_PATH
+)
diff --git a/plugins/postgres/dbt/include/postgres/__init__.py b/plugins/postgres/dbt/include/postgres/__init__.py
index 564a3d1e80a..b177e5d4932 100644
--- a/plugins/postgres/dbt/include/postgres/__init__.py
+++ b/plugins/postgres/dbt/include/postgres/__init__.py
@@ -1,2 +1,3 @@
 import os
+
 PACKAGE_PATH = os.path.dirname(__file__)
diff --git a/plugins/postgres/setup.py b/plugins/postgres/setup.py
index f62fe9a4128..fe25fecfe7f 100644
--- a/plugins/postgres/setup.py
+++ b/plugins/postgres/setup.py
@@ -61,15 +61,15 @@ def _dbt_psycopg2_name():
     url="https://github.com/dbt-labs/dbt-core",
     packages=find_namespace_packages(include=['dbt', 'dbt.*']),
     package_data={
-        'dbt': [
+        "dbt": [
             'include/postgres/dbt_project.yml',
             'include/postgres/sample_profiles.yml',
-            'include/postgres/macros/*.sql',
+            "include/postgres/macros/*.sql",
             'include/postgres/macros/**/*.sql',
         ]
     },
     install_requires=[
-        'dbt-core=={}'.format(package_version),
+        "dbt-core=={}".format(package_version),
         '{}~=2.8'.format(DBT_PSYCOPG2_NAME),
     ],
     zip_safe=False,

From a5fc598a05d28c59c3efe14979bcbca1b9d0b3fa Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Thu, 10 Oct 2019 04:53:30 -0600
Subject: [PATCH 023/933] inject query comments

Make a fake "macro" that we parse specially with a single global context
Macro takes an argument (the node, may be none)
Users supply the text of the macro in their 'user_config' under a new 'query_comment'
No macros available
query generator is an attribute on the connection manager
 - has a thread-local comment str
 - when acquiring a connection, set the comment str
new 'connection_for' context manager: like connection_named, except also use the node to set the query string
Updated unit tests to account for query comments
Added a hacky, brittle integration test
  - log to a custom stream and read that
Trim down the "target" context value to use the opt-in connection_info
 - Make sure it contains a superset of the documented stuff
 - Make sure it does not contain any blacklisted items
Change some asserts to raise InternalExceptions because assert error messages in threads are useless


automatic commit by git-black, original commits:
  ff158b8353306d75d3417bd230aea1ac15dd621d
---
 core/dbt/adapters/base/connections.py                 |  2 +-
 core/dbt/adapters/base/query_headers.py               |  5 +++--
 core/dbt/adapters/base/relation.py                    |  7 +++----
 core/dbt/clients/jinja.py                             | 10 ++++------
 core/dbt/contracts/connection.py                      |  6 ++----
 plugins/postgres/dbt/adapters/postgres/connections.py |  2 +-
 6 files changed, 14 insertions(+), 18 deletions(-)

diff --git a/core/dbt/adapters/base/connections.py b/core/dbt/adapters/base/connections.py
index 0e248937e8d..1277258310f 100644
--- a/core/dbt/adapters/base/connections.py
+++ b/core/dbt/adapters/base/connections.py
@@ -129,7 +129,7 @@ def set_connection_name(self, name: Optional[str] = None) -> Connection:
         else:
             if not isinstance(name, str):
                 raise dbt.exceptions.CompilerException(
-                    f'For connection name, got {name} - not a string!'
+                    f"For connection name, got {name} - not a string!"
                 )
             assert isinstance(name, str)
             conn_name = name
diff --git a/core/dbt/adapters/base/query_headers.py b/core/dbt/adapters/base/query_headers.py
index 2f7171936bc..49303299c10 100644
--- a/core/dbt/adapters/base/query_headers.py
+++ b/core/dbt/adapters/base/query_headers.py
@@ -15,7 +15,7 @@ def __init__(self, node):
         self._inner_node = node
 
     def __getattr__(self, name):
-        return getattr(self._inner_node, name, '')
+        return getattr(self._inner_node, name, "")
 
 
 class _QueryComment(local):
@@ -24,6 +24,7 @@ class _QueryComment(local):
         - the current thread's query comment.
         - a source_name indicating what set the current thread's query comment
     """
+
     def __init__(self, initial):
         self.query_comment: Optional[str] = initial
         self.append = False
@@ -87,7 +88,7 @@ def add(self, sql: str) -> str:
         return self.comment.add(sql)
 
     def reset(self):
-        self.set('master', None)
+        self.set("master", None)
 
     def set(self, name: str, node: Optional[CompileResultNode]):
         wrapped: Optional[NodeWrapper] = None
diff --git a/core/dbt/adapters/base/relation.py b/core/dbt/adapters/base/relation.py
index ceb048e0efa..445eecf8c30 100644
--- a/core/dbt/adapters/base/relation.py
+++ b/core/dbt/adapters/base/relation.py
@@ -254,15 +254,14 @@ def create_from(
         if node.resource_type == NodeType.Source:
             if not isinstance(node, ParsedSourceDefinition):
                 raise InternalException(
-                    'type mismatch, expected ParsedSourceDefinition but got {}'
-                    .format(type(node))
+                    "type mismatch, expected ParsedSourceDefinition but got {}".format(type(node))
                 )
             return cls.create_from_source(node, **kwargs)
         else:
             if not isinstance(node, (ParsedNode, CompiledNode)):
                 raise InternalException(
-                    'type mismatch, expected ParsedNode or CompiledNode but '
-                    'got {}'.format(type(node))
+                    "type mismatch, expected ParsedNode or CompiledNode but "
+                    "got {}".format(type(node))
                 )
             return cls.create_from_node(config, node, **kwargs)
 
diff --git a/core/dbt/clients/jinja.py b/core/dbt/clients/jinja.py
index 5c80d100385..a368cef75fa 100644
--- a/core/dbt/clients/jinja.py
+++ b/core/dbt/clients/jinja.py
@@ -221,10 +221,10 @@ def __init__(self, context: Optional[Dict[str, Any]] = None) -> None:
         self.context: Optional[Dict[str, Any]] = context
 
     def get_template(self):
-        raise NotImplementedError('get_template not implemented!')
+        raise NotImplementedError("get_template not implemented!")
 
     def get_name(self) -> str:
-        raise NotImplementedError('get_name not implemented!')
+        raise NotImplementedError("get_name not implemented!")
 
     def get_macro(self):
         name = self.get_name()
@@ -333,9 +333,7 @@ def __call__(self, *args, **kwargs):
 
 
 class QueryStringGenerator(BaseMacroGenerator):
-    def __init__(
-        self, template_str: str, context: Dict[str, Any]
-    ) -> None:
+    def __init__(self, template_str: str, context: Dict[str, Any]) -> None:
         super().__init__(context)
         self.template_str: str = template_str
         env = get_environment()
@@ -345,7 +343,7 @@ def __init__(
         )
 
     def get_name(self) -> str:
-        return 'query_comment_macro'
+        return "query_comment_macro"
 
     def get_template(self):
         """Don't use the template cache, we don't have a node"""
diff --git a/core/dbt/contracts/connection.py b/core/dbt/contracts/connection.py
index 8ea4a91028e..415dab16719 100644
--- a/core/dbt/contracts/connection.py
+++ b/core/dbt/contracts/connection.py
@@ -137,7 +137,7 @@ def unique_field(self) -> str:
     def hashed_unique_field(self) -> str:
         return hashlib.md5(self.unique_field.encode('utf-8')).hexdigest()
 
-    def connection_info(
+    def connection_info(self, *, with_aliases: bool = False) -> Iterable[Tuple[str, Any]]:
         self, *, with_aliases: bool = False
     ) -> Iterable[Tuple[str, Any]]:
         """Return an ordered iterator of key/value pairs for pretty-printing.
@@ -146,9 +146,7 @@ def connection_info(
         connection_keys = set(self._connection_keys())
         aliases: List[str] = []
         if with_aliases:
-            aliases = [
-                k for k, v in self._ALIASES.items() if v in connection_keys
-            ]
+            aliases = [k for k, v in self._ALIASES.items() if v in connection_keys]
         for key in itertools.chain(self._connection_keys(), aliases):
             if key in as_dict:
                 yield key, as_dict[key]
diff --git a/plugins/postgres/dbt/adapters/postgres/connections.py b/plugins/postgres/dbt/adapters/postgres/connections.py
index f4178fb614e..144c5b7c877 100644
--- a/plugins/postgres/dbt/adapters/postgres/connections.py
+++ b/plugins/postgres/dbt/adapters/postgres/connections.py
@@ -46,7 +46,7 @@ def unique_field(self):
         return self.host
 
     def _connection_keys(self):
-        return ('host', 'port', 'user', 'database', 'schema', 'search_path',
+        return (
                 'keepalives_idle', 'sslmode')
 
 

From 7a43924357a9c5b1938f266d92a8aac29802752e Mon Sep 17 00:00:00 2001
From: ilkin Balkanay 
Date: Thu, 12 Mar 2020 10:28:27 +0300
Subject: [PATCH 024/933] added query comment `append` feature (wip)

automatic commit by git-black, original commits:
  62b19b53f9c7fc22f233d8171e8a797379cf9536
---
 core/dbt/adapters/base/query_headers.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/core/dbt/adapters/base/query_headers.py b/core/dbt/adapters/base/query_headers.py
index 49303299c10..0213f739ed1 100644
--- a/core/dbt/adapters/base/query_headers.py
+++ b/core/dbt/adapters/base/query_headers.py
@@ -36,13 +36,13 @@ def add(self, sql: str) -> str:
         if self.append:
             # replace last ';' with ';'
             sql = sql.rstrip()
-            if sql[-1] == ';':
+            if sql[-1] == ";":
                 sql = sql[:-1]
-                return '{}\n/* {} */;'.format(sql, self.query_comment.strip())
+                return "{}\n/* {} */;".format(sql, self.query_comment.strip())
 
-            return '{}\n/* {} */'.format(sql, self.query_comment.strip())
+            return "{}\n/* {} */".format(sql, self.query_comment.strip())
 
-        return '/* {} */\n{}'.format(self.query_comment.strip(), sql)
+        return "/* {} */\n{}".format(self.query_comment.strip(), sql)
 
     def set(self, comment: Optional[str], append: bool):
         if isinstance(comment, str) and "*/" in comment:

From e32920a63c384bda64442444279bd78e3ca7dcc6 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 4 Nov 2019 08:34:20 -0700
Subject: [PATCH 025/933] macro support, tests, add yet another mypy env for
 development

automatic commit by git-black, original commits:
  84d585c14c549ff2d5f7c1e385250f77a4efcf7a
---
 core/dbt/adapters/base/query_headers.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/core/dbt/adapters/base/query_headers.py b/core/dbt/adapters/base/query_headers.py
index 0213f739ed1..e8879189e89 100644
--- a/core/dbt/adapters/base/query_headers.py
+++ b/core/dbt/adapters/base/query_headers.py
@@ -48,9 +48,7 @@ def set(self, comment: Optional[str], append: bool):
         if isinstance(comment, str) and "*/" in comment:
             # tell the user "no" so they don't hurt themselves by writing
             # garbage
-            raise RuntimeException(
-                f'query comment contains illegal value "*/": {comment}'
-            )
+            raise RuntimeException(f'query comment contains illegal value "*/": {comment}')
         self.query_comment = comment
         self.append = append
 

From f2b52572f47ad8d5c98832acb430d6a16b95ea03 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 4 Nov 2019 10:43:35 -0700
Subject: [PATCH 026/933] if the comment macro is null/empty, no comments

automatic commit by git-black, original commits:
  b56d93b9097d414f6b4210cdc9312afb0e8c3715
---
 core/dbt/adapters/base/query_headers.py | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/core/dbt/adapters/base/query_headers.py b/core/dbt/adapters/base/query_headers.py
index e8879189e89..26f34be9c93 100644
--- a/core/dbt/adapters/base/query_headers.py
+++ b/core/dbt/adapters/base/query_headers.py
@@ -62,15 +62,17 @@ def __init__(self, config: AdapterRequiredConfig, manifest: Manifest):
         self.config = config
 
         comment_macro = self._get_comment_macro()
-        self.generator: QueryStringFunc = lambda name, model: ''
+        self.generator: QueryStringFunc = lambda name, model: ""
         # if the comment value was None or the empty string, just skip it
         if comment_macro:
             assert isinstance(comment_macro, str)
-            macro = '\n'.join((
-                '{%- macro query_comment_macro(connection_name, node) -%}',
+            macro = "\n".join(
+                (
                     "{%- macro query_comment_macro(connection_name, node) -%}",
-                '{% endmacro %}'
-            ))
+                    comment_macro,
+                    "{% endmacro %}",
+                )
+            )
             ctx = self._get_context()
             self.generator = QueryStringGenerator(macro, ctx)
         self.comment = _QueryComment(None)

From 493023b8915289ddd47a87bce1472df2fff7c73a Mon Sep 17 00:00:00 2001
From: Kyle Wigley 
Date: Wed, 16 Dec 2020 11:24:31 -0500
Subject: [PATCH 027/933] first pass at adding query stats, naming tbd

automatic commit by git-black, original commits:
  dddf1bcb76190e5b6b790897e39921354afb0b6f
  eb9bfcda4ab314e4ee5bf548fab07ee4fadbee4d
---
 core/dbt/adapters/base/connections.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/core/dbt/adapters/base/connections.py b/core/dbt/adapters/base/connections.py
index 1277258310f..9ab22212c61 100644
--- a/core/dbt/adapters/base/connections.py
+++ b/core/dbt/adapters/base/connections.py
@@ -4,9 +4,7 @@
 # multiprocessing.RLock is a function returning this type
 from multiprocessing.synchronize import RLock
 from threading import get_ident
-from typing import (
-    Dict, Tuple, Hashable, Optional, ContextManager, List, Union
-)
+from typing import Dict, Tuple, Hashable, Optional, ContextManager, List, Union
 
 import agate
 

From 56c0d8f2ba8a46aa1977e98ff82ed5f3ccbb2de7 Mon Sep 17 00:00:00 2001
From: Kyle Wigley 
Date: Wed, 16 Dec 2020 11:24:31 -0500
Subject: [PATCH 028/933] first pass at adding query stats, naming tbd

automatic commit by git-black, original commits:
  dddf1bcb76190e5b6b790897e39921354afb0b6f
---
 core/dbt/adapters/base/connections.py | 2 +-
 core/dbt/adapters/protocol.py         | 2 +-
 core/dbt/context/providers.py         | 2 +-
 core/dbt/contracts/project.py         | 6 ++++--
 4 files changed, 7 insertions(+), 5 deletions(-)

diff --git a/core/dbt/adapters/base/connections.py b/core/dbt/adapters/base/connections.py
index 9ab22212c61..2bc3aa144f7 100644
--- a/core/dbt/adapters/base/connections.py
+++ b/core/dbt/adapters/base/connections.py
@@ -10,7 +10,7 @@
 
 import dbt.exceptions
 from dbt.contracts.connection import (
-    Connection, Identifier, ConnectionState,
+    Connection,
     AdapterRequiredConfig, LazyHandle, AdapterResponse
 )
 from dbt.contracts.graph.manifest import Manifest
diff --git a/core/dbt/adapters/protocol.py b/core/dbt/adapters/protocol.py
index f7fef69562f..81819ba247f 100644
--- a/core/dbt/adapters/protocol.py
+++ b/core/dbt/adapters/protocol.py
@@ -7,7 +7,7 @@
 
 import agate
 
-from dbt.contracts.connection import (
+from dbt.contracts.connection import Connection, AdapterRequiredConfig, AdapterResponse
     Connection, AdapterRequiredConfig, AdapterResponse
 )
 from dbt.contracts.graph.compiled import (
diff --git a/core/dbt/context/providers.py b/core/dbt/context/providers.py
index 3b0ff96d004..ecc42a049a9 100644
--- a/core/dbt/context/providers.py
+++ b/core/dbt/context/providers.py
@@ -711,7 +711,7 @@ def store_raw_result(
         message=Optional[str],
         code=Optional[str],
         rows_affected=Optional[str],
-        agate_table: Optional[agate.Table] = None
+        agate_table: Optional[agate.Table] = None,
     ) -> str:
         response = AdapterResponse(
             _message=message, code=code, rows_affected=rows_affected)
diff --git a/core/dbt/contracts/project.py b/core/dbt/contracts/project.py
index d919cdd08f3..756dc2af120 100644
--- a/core/dbt/contracts/project.py
+++ b/core/dbt/contracts/project.py
@@ -11,7 +11,9 @@
 from typing import Optional, List, Dict, Union, Any
 from mashumaro.types import SerializableType
 
-PIN_PACKAGE_URL = 'https://docs.getdbt.com/docs/package-management#section-specifying-package-versions'  # noqa
+PIN_PACKAGE_URL = (
+    "https://docs.getdbt.com/docs/package-management#section-specifying-package-versions"  # noqa
+)
 DEFAULT_SEND_ANONYMOUS_USAGE_STATS = True
 
 
@@ -166,7 +168,7 @@ class RegistryPackageMetadata(
     'sql',
     'sql_now',
     'store_result',
-    'store_raw_result',
+    "store_raw_result",
     'target',
     'this',
     'tojson',

From b8e8841518417271f80cc2714289c2b7e3eebdb6 Mon Sep 17 00:00:00 2001
From: Kyle Wigley 
Date: Mon, 21 Dec 2020 13:24:09 -0500
Subject: [PATCH 029/933] update naming

automatic commit by git-black, original commits:
  aa3bdfeb17c2e7f6974a336a40148376dd73234d
---
 core/dbt/adapters/base/connections.py                 | 6 +++++-
 core/dbt/adapters/sql/connections.py                  | 2 +-
 core/dbt/context/providers.py                         | 6 ++----
 plugins/postgres/dbt/adapters/postgres/connections.py | 4 ++--
 4 files changed, 10 insertions(+), 8 deletions(-)

diff --git a/core/dbt/adapters/base/connections.py b/core/dbt/adapters/base/connections.py
index 2bc3aa144f7..7887e870307 100644
--- a/core/dbt/adapters/base/connections.py
+++ b/core/dbt/adapters/base/connections.py
@@ -11,7 +11,11 @@
 import dbt.exceptions
 from dbt.contracts.connection import (
     Connection,
-    AdapterRequiredConfig, LazyHandle, AdapterResponse
+    Identifier,
+    ConnectionState,
+    AdapterRequiredConfig,
+    LazyHandle,
+    AdapterResponse,
 )
 from dbt.contracts.graph.manifest import Manifest
 from dbt.adapters.base.query_headers import (
diff --git a/core/dbt/adapters/sql/connections.py b/core/dbt/adapters/sql/connections.py
index ff664622fdb..7a948f8e301 100644
--- a/core/dbt/adapters/sql/connections.py
+++ b/core/dbt/adapters/sql/connections.py
@@ -87,7 +87,7 @@ def add_query(
     def get_response(cls, cursor: Any) -> Union[AdapterResponse, str]:
         """Get the status of the cursor."""
         raise dbt.exceptions.NotImplementedException(
-            '`get_response` is not implemented for this adapter!'
+            "`get_response` is not implemented for this adapter!"
         )
 
     @classmethod
diff --git a/core/dbt/context/providers.py b/core/dbt/context/providers.py
index ecc42a049a9..0760ecfc94e 100644
--- a/core/dbt/context/providers.py
+++ b/core/dbt/context/providers.py
@@ -690,15 +690,13 @@ def load_result(self, name: str) -> Optional[AttrDict]:
 
     @contextmember
     def store_result(
-        self, name: str,
-        response: Any,
-        agate_table: Optional[agate.Table] = None
+        self, name: str, response: Any, agate_table: Optional[agate.Table] = None
     ) -> str:
         if agate_table is None:
             agate_table = agate_helper.empty_table()
 
         self.sql_results[name] = AttrDict({
-            'response': response,
+            {
             'data': agate_helper.as_matrix(agate_table),
             'table': agate_table
         })
diff --git a/plugins/postgres/dbt/adapters/postgres/connections.py b/plugins/postgres/dbt/adapters/postgres/connections.py
index 144c5b7c877..8c9cce5f503 100644
--- a/plugins/postgres/dbt/adapters/postgres/connections.py
+++ b/plugins/postgres/dbt/adapters/postgres/connections.py
@@ -177,8 +177,8 @@ def get_response(cls, cursor) -> AdapterResponse:
         message = str(cursor.statusmessage)
         rows = cursor.rowcount
         status_message_parts = message.split() if message is not None else []
-        status_messsage_strings = [
-            part
+        status_messsage_strings = [part for part in status_message_parts if not part.isdigit()]
+        code = " ".join(status_messsage_strings)
             for part in status_message_parts
             if not part.isdigit()
         ]

From cb5f935278b74a4ebacc82d8d2752e97260ed678 Mon Sep 17 00:00:00 2001
From: Ian Knox <81931810+iknox-fa@users.noreply.github.com>
Date: Mon, 8 Nov 2021 17:31:24 -0600
Subject: [PATCH 030/933] Struct log for adapter call sites (#4189)

graph call sites for structured logging

Co-authored-by: Nathaniel May 
Co-authored-by: Emily Rockman 

automatic commit by git-black, original commits:
  b2aea11cdb0304c466d9968281dfa6a53ac97476
---
 core/dbt/adapters/base/connections.py | 2 +-
 core/dbt/adapters/cache.py            | 2 +-
 core/dbt/events/types.py              | 6 ++++--
 3 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/core/dbt/adapters/base/connections.py b/core/dbt/adapters/base/connections.py
index 7887e870307..095db5530a9 100644
--- a/core/dbt/adapters/base/connections.py
+++ b/core/dbt/adapters/base/connections.py
@@ -30,7 +30,7 @@
     ConnectionClosed,
     ConnectionClosed2,
     Rollback,
-    RollbackFailed
+    RollbackFailed,
 )
 from dbt import flags
 
diff --git a/core/dbt/adapters/cache.py b/core/dbt/adapters/cache.py
index cd9d3b9369b..ad595c62c80 100644
--- a/core/dbt/adapters/cache.py
+++ b/core/dbt/adapters/cache.py
@@ -18,7 +18,7 @@
     RenameSchema,
     TemporaryRelation,
     UncachedRelation,
-    UpdateReference
+    UpdateReference,
 )
 from dbt.utils import lowercase
 from dbt.helper_types import Lazy
diff --git a/core/dbt/events/types.py b/core/dbt/events/types.py
index de0310f9b0d..0237fdc7837 100644
--- a/core/dbt/events/types.py
+++ b/core/dbt/events/types.py
@@ -637,8 +637,10 @@ class UpdateReference(DebugLevel, Cache):
     code: str = "E028"
 
     def message(self) -> str:
-        return f"updated reference from {self.old_key} -> {self.cached_key} to "\
+        return (
+            f"updated reference from {self.old_key} -> {self.cached_key} to "
             "{self.new_key} -> {self.cached_key}"
+        )
 
 
 @dataclass
@@ -2427,7 +2429,7 @@ def message(self) -> str:
     TemporaryRelation(key=_ReferenceKey(database="", schema="", identifier=""))
     RenameSchema(
         old_key=_ReferenceKey(database="", schema="", identifier=""),
-        new_key=_ReferenceKey(database="", schema="", identifier="")
+        new_key=_ReferenceKey(database="", schema="", identifier=""),
     )
     DumpBeforeAddGraph(Lazy.defer(lambda: dict()))
     DumpAfterAddGraph(Lazy.defer(lambda: dict()))

From d31ace42eda8b07a688228a80057851a2c06da8d Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Thu, 17 Oct 2019 07:37:41 -0600
Subject: [PATCH 031/933] Handle a number of bad failure-path behaviors

- When a connection is missing, raise a special exception
   - instead of RuntimeError, so we catch it better
- Be graceful if a connection does not exist during nice_connection_name
- Handle the case where exceptions caught by the Snowflake exception handler do not have a 'msg' attr
- Re-raise exceptions in the adapter exception handlers "from" the originating error


automatic commit by git-black, original commits:
  ae796a84971dcf0ba5e11f9738ec134a85021320
  e2af871a5adc1b6b0269ef900e04320550ac1bb8
---
 core/dbt/adapters/base/connections.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/core/dbt/adapters/base/connections.py b/core/dbt/adapters/base/connections.py
index 095db5530a9..34287b8d86c 100644
--- a/core/dbt/adapters/base/connections.py
+++ b/core/dbt/adapters/base/connections.py
@@ -70,9 +70,7 @@ def get_thread_connection(self) -> Connection:
         key = self.get_thread_identifier()
         with self.lock:
             if key not in self.thread_connections:
-                raise dbt.exceptions.InvalidConnectionException(
-                    key, list(self.thread_connections)
-                )
+                raise dbt.exceptions.InvalidConnectionException(key, list(self.thread_connections))
             return self.thread_connections[key]
 
     def set_thread_connection(self, conn: Connection) -> None:

From af5614dcfb9f0b968b4c326ef068c9fb4c2c5713 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 20 Nov 2019 09:24:35 -0700
Subject: [PATCH 032/933] lazy-load connections

automatic commit by git-black, original commits:
  be36c5d974bd02faba5dd86aeb143899f897ff22
---
 core/dbt/adapters/base/connections.py | 2 +-
 core/dbt/contracts/connection.py      | 3 +--
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/core/dbt/adapters/base/connections.py b/core/dbt/adapters/base/connections.py
index 34287b8d86c..b1b51e0597b 100644
--- a/core/dbt/adapters/base/connections.py
+++ b/core/dbt/adapters/base/connections.py
@@ -77,7 +77,7 @@ def set_thread_connection(self, conn: Connection) -> None:
         key = self.get_thread_identifier()
         if key in self.thread_connections:
             raise dbt.exceptions.InternalException(
-                'In set_thread_connection, existing connection exists for {}'
+                "In set_thread_connection, existing connection exists for {}"
             )
         self.thread_connections[key] = conn
 
diff --git a/core/dbt/contracts/connection.py b/core/dbt/contracts/connection.py
index 415dab16719..32110a0a5fd 100644
--- a/core/dbt/contracts/connection.py
+++ b/core/dbt/contracts/connection.py
@@ -83,8 +83,7 @@ def handle(self):
                 self._handle.resolve(self)
             except RecursionError as exc:
                 raise InternalException(
-                    "A connection's open() method attempted to read the "
-                    "handle value"
+                    "A connection's open() method attempted to read the " "handle value"
                 ) from exc
         return self._handle
 

From b9d4f3dfda5558d460f9ef4f0859665922702a9b Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 27 Jul 2020 10:36:32 -0600
Subject: [PATCH 033/933] Handle the fallout of closing connections in release

- close() implies rollback, so do not call it
- make sure to not open new connections for executors in single-threaded mode
- logging cleanups
- fix a test case that never acquired connections
- to cancel other connections, one must first acquire a connection for the master thread
- change a number of release() calls to rollback

release vs rollback


automatic commit by git-black, original commits:
  3b917b9d79bce6f303249e8dbbd7690c48a75ad1
---
 core/dbt/adapters/base/connections.py |  2 +-
 core/dbt/adapters/base/impl.py        | 17 +++++------------
 core/dbt/task/runnable.py             |  9 ++++-----
 core/dbt/utils.py                     |  1 +
 4 files changed, 11 insertions(+), 18 deletions(-)

diff --git a/core/dbt/adapters/base/connections.py b/core/dbt/adapters/base/connections.py
index b1b51e0597b..b540c54f1f3 100644
--- a/core/dbt/adapters/base/connections.py
+++ b/core/dbt/adapters/base/connections.py
@@ -242,7 +242,7 @@ def _rollback(cls, connection: Connection) -> None:
         """Roll back the given connection."""
         if connection.transaction_open is False:
             raise dbt.exceptions.InternalException(
-                f'Tried to rollback transaction on connection '
+                f"Tried to rollback transaction on connection "
                 f'"{connection.name}", but it does not have one open!'
             )
 
diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 308a5a57783..016b476b8d3 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -344,9 +344,9 @@ def _relations_cache_for_schemas(self, manifest: Manifest) -> None:
             for cache_schema in cache_schemas:
                 fut = tpe.submit_connected(
                     self,
-                    f'list_{cache_schema.database}_{cache_schema.schema}',
+                    f"list_{cache_schema.database}_{cache_schema.schema}",
                     self.list_relations_without_caching,
-                    cache_schema
+                    cache_schema,
                 )
                 futures.append(fut)
 
@@ -983,10 +983,7 @@ def _get_one_catalog(
         manifest: Manifest,
     ) -> agate.Table:
 
-        kwargs = {
-            'information_schema': information_schema,
-            'schemas': schemas
-        }
+        kwargs = {"information_schema": information_schema, "schemas": schemas}
         table = self.execute_macro(
             GET_CATALOG_MACRO_NAME,
             kwargs=kwargs,
@@ -1008,14 +1005,10 @@ def get_catalog(
             for info, schemas in schema_map.items():
                 if len(schemas) == 0:
                     continue
-                name = '.'.join([
-                    str(info.database),
-                    'information_schema'
-                ])
+                name = ".".join([str(info.database), "information_schema"])
 
                 fut = tpe.submit_connected(
-                    self, name,
-                    self._get_one_catalog, info, schemas, manifest
+                    self, name, self._get_one_catalog, info, schemas, manifest
                 )
                 futures.append(fut)
 
diff --git a/core/dbt/task/runnable.py b/core/dbt/task/runnable.py
index 68f7c67716e..4dd47fc74a1 100644
--- a/core/dbt/task/runnable.py
+++ b/core/dbt/task/runnable.py
@@ -354,7 +354,7 @@ def _cancel_connections(self, pool):
         if not adapter.is_cancelable():
             fire_event(QueryCancelationUnsupported(type=adapter.type()))
         else:
-            with adapter.connection_named('master'):
+            with adapter.connection_named("master"):
                 for conn_name in adapter.cancel_open_connections():
                     if self.manifest is not None:
                         node = self.manifest.nodes.get(conn_name)
@@ -546,9 +546,9 @@ def create_schema(relation: BaseRelation) -> None:
         with dbt.utils.executor(self.config) as tpe:
             for req in required_databases:
                 if req.database is None:
-                    name = 'list_schemas'
+                    name = "list_schemas"
                 else:
-                    name = f'list_{req.database}'
+                    name = f"list_{req.database}"
                 fut = tpe.submit_connected(adapter, name, list_schemas, req)
                 list_futures.append(fut)
 
@@ -568,8 +568,7 @@ def create_schema(relation: BaseRelation) -> None:
                 if db_schema not in existing_schemas_lowered:
                     existing_schemas_lowered.add(db_schema)
                     fut = tpe.submit_connected(
-                        adapter, f'create_{info.database or ""}_{info.schema}',
-                        create_schema, info
+                        adapter, f'create_{info.database or ""}_{info.schema}', create_schema, info
                     )
                     create_futures.append(fut)
 
diff --git a/core/dbt/utils.py b/core/dbt/utils.py
index d56b0bdd062..2532bf54dc0 100644
--- a/core/dbt/utils.py
+++ b/core/dbt/utils.py
@@ -471,6 +471,7 @@ def submit_connected(self, adapter, conn_name, func, *args, **kwargs):
         def connected(conn_name, func, *args, **kwargs):
             with self.connection_named(adapter, conn_name):
                 return func(*args, **kwargs)
+
         return self.submit(connected, conn_name, func, *args, **kwargs)
 
 

From ae5af960fe4336094fb81235793e39341b0cacf2 Mon Sep 17 00:00:00 2001
From: Nathaniel May 
Date: Thu, 2 Dec 2021 18:35:51 -0500
Subject: [PATCH 034/933] use reference keys instead of relations (#4410)

automatic commit by git-black, original commits:
  9bdf5fe74aca9d3366be594c498332cd8ae2c1f1
---
 core/dbt/adapters/base/impl.py      | 5 -----
 core/dbt/adapters/reference_keys.py | 8 ++++----
 2 files changed, 4 insertions(+), 9 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 016b476b8d3..67ad729ec00 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -663,11 +663,6 @@ def list_relations(
         relations = self.list_relations_without_caching(
             schema_relation
         )
-        fire_event(ListRelations(
-            database=database,
-            schema=schema,
-            relations=[_make_key(x) for x in relations]
-        ))
 
         return relations
 
diff --git a/core/dbt/adapters/reference_keys.py b/core/dbt/adapters/reference_keys.py
index 5780e0d0beb..734b6845f5f 100644
--- a/core/dbt/adapters/reference_keys.py
+++ b/core/dbt/adapters/reference_keys.py
@@ -4,7 +4,7 @@
 from typing import Optional
 
 
-_ReferenceKey = namedtuple('_ReferenceKey', 'database schema identifier')
+_ReferenceKey = namedtuple("_ReferenceKey", "database schema identifier")
 
 
 def lowercase(value: Optional[str]) -> Optional[str]:
@@ -19,6 +19,6 @@ def _make_key(relation) -> _ReferenceKey:
     to keep track of quoting
     """
     # databases and schemas can both be None
-    return _ReferenceKey(lowercase(relation.database),
-                         lowercase(relation.schema),
-                         lowercase(relation.identifier))
+    return _ReferenceKey(
+        lowercase(relation.database), lowercase(relation.schema), lowercase(relation.identifier)
+    )

From 6db1699a9565a0affae3aec43d0d4d5ee7cfebe1 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Tue, 23 Jun 2020 14:28:13 -0600
Subject: [PATCH 035/933] protocols

automatic commit by git-black, original commits:
  4d33554465cf90f60646d58d2f9b3a269dbca13b
---
 core/dbt/adapters/protocol.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/core/dbt/adapters/protocol.py b/core/dbt/adapters/protocol.py
index 81819ba247f..65bc70f1ab1 100644
--- a/core/dbt/adapters/protocol.py
+++ b/core/dbt/adapters/protocol.py
@@ -1,6 +1,6 @@
 from dataclasses import dataclass
 from typing import (
-    Type, Hashable, Optional, ContextManager, List, Generic, TypeVar, ClassVar,
+    Type,
     Tuple, Union, Dict, Any
 )
 from typing_extensions import Protocol
@@ -64,10 +64,10 @@ def compile_node(
         ...
 
 
-AdapterConfig_T = TypeVar(
-    'AdapterConfig_T', bound=AdapterConfig
-)
-ConnectionManager_T = TypeVar(
+AdapterConfig_T = TypeVar("AdapterConfig_T", bound=AdapterConfig)
+ConnectionManager_T = TypeVar("ConnectionManager_T", bound=ConnectionManagerProtocol)
+Relation_T = TypeVar("Relation_T", bound=RelationProtocol)
+Column_T = TypeVar("Column_T", bound=ColumnProtocol)
     'ConnectionManager_T', bound=ConnectionManagerProtocol
 )
 Relation_T = TypeVar(

From 51c1b6f19442dec7a9e75bd188e751b0fda9898d Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 17 Aug 2020 09:10:56 -0600
Subject: [PATCH 036/933] Have the adapter be responsible for producing the
 compiler

The adapter's Relation is consulted for adding the ephemeral model prefix

Also hide some things from Jinja

Have the adapter be responsible for producing the compiler, move CTE generation into the Relation object


automatic commit by git-black, original commits:
  f80a759488cf69664fd60eadede753fa9026ee2a
---
 core/dbt/adapters/base/impl.py |  4 +---
 core/dbt/adapters/protocol.py  | 14 ++++++++++++--
 core/dbt/context/providers.py  | 14 ++++++--------
 3 files changed, 19 insertions(+), 13 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 67ad729ec00..98456d304c4 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -23,9 +23,7 @@
 )
 from dbt.clients.agate_helper import empty_table, merge_tables, table_from_rows
 from dbt.clients.jinja import MacroGenerator
-from dbt.contracts.graph.compiled import (
-    CompileResultNode, CompiledSeedNode
-)
+from dbt.contracts.graph.compiled import CompileResultNode, CompiledSeedNode
 from dbt.contracts.graph.manifest import Manifest, MacroManifest
 from dbt.contracts.graph.parsed import ParsedSeedNode
 from dbt.exceptions import warn_or_error
diff --git a/core/dbt/adapters/protocol.py b/core/dbt/adapters/protocol.py
index 65bc70f1ab1..ff5b3238c22 100644
--- a/core/dbt/adapters/protocol.py
+++ b/core/dbt/adapters/protocol.py
@@ -1,7 +1,17 @@
 from dataclasses import dataclass
 from typing import (
     Type,
-    Tuple, Union, Dict, Any
+    Hashable,
+    Optional,
+    ContextManager,
+    List,
+    Generic,
+    TypeVar,
+    ClassVar,
+    Tuple,
+    Union,
+    Dict,
+    Any,
 )
 from typing_extensions import Protocol
 
@@ -87,7 +97,7 @@ class AdapterProtocol(
         Relation_T,
         Column_T,
         Compiler_T,
-    ]
+    ],
 ):
     AdapterSpecificConfigs: ClassVar[Type[AdapterConfig_T]]
     Column: ClassVar[Type[Column_T]]
diff --git a/core/dbt/context/providers.py b/core/dbt/context/providers.py
index 0760ecfc94e..276175bc0dd 100644
--- a/core/dbt/context/providers.py
+++ b/core/dbt/context/providers.py
@@ -405,8 +405,7 @@ class ParseDatabaseWrapper(BaseDatabaseWrapper):
     """
 
     def __getattr__(self, name):
-        override = (name in self._adapter._available_ and
-                    name in self._adapter._parse_replacements_)
+        override = name in self._adapter._available_ and name in self._adapter._parse_replacements_
 
         if override:
             return self._adapter._parse_replacements_[name]
@@ -475,9 +474,7 @@ def create_relation(
     ) -> RelationProxy:
         if target_model.is_ephemeral_model:
             self.model.set_cte(target_model.unique_id, None)
-            return self.Relation.create_ephemeral_from_node(
-                self.config, target_model
-            )
+            return self.Relation.create_ephemeral_from_node(self.config, target_model)
         else:
             return self.Relation.create_from(self.config, target_model)
 
@@ -508,9 +505,10 @@ def create_relation(
             # In operations, we can't ref() ephemeral nodes, because
             # ParsedMacros do not support set_cte
             raise_compiler_error(
-                'Operations can not ref() ephemeral nodes, but {} is ephemeral'
-                .format(target_model.name),
-                self.model
+                "Operations can not ref() ephemeral nodes, but {} is ephemeral".format(
+                    target_model.name
+                ),
+                self.model,
             )
         else:
             return super().create_relation(target_model, name)

From 167be2bea4b97f766ed2ecec4676215b9fdeafc7 Mon Sep 17 00:00:00 2001
From: Kyle Wigley 
Date: Mon, 21 Dec 2020 13:24:09 -0500
Subject: [PATCH 037/933] update naming

automatic commit by git-black, original commits:
  8520ff35b30d48a820e521cba75ce192b34df86d
  aa3bdfeb17c2e7f6974a336a40148376dd73234d
  dddf1bcb76190e5b6b790897e39921354afb0b6f
  f80a759488cf69664fd60eadede753fa9026ee2a
---
 core/dbt/adapters/protocol.py | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/core/dbt/adapters/protocol.py b/core/dbt/adapters/protocol.py
index ff5b3238c22..9f529b0be3e 100644
--- a/core/dbt/adapters/protocol.py
+++ b/core/dbt/adapters/protocol.py
@@ -18,11 +18,7 @@
 import agate
 
 from dbt.contracts.connection import Connection, AdapterRequiredConfig, AdapterResponse
-    Connection, AdapterRequiredConfig, AdapterResponse
-)
-from dbt.contracts.graph.compiled import (
-    CompiledNode, ManifestNode, NonSourceCompiledNode
-)
+from dbt.contracts.graph.compiled import CompiledNode, ManifestNode, NonSourceCompiledNode
 from dbt.contracts.graph.parsed import ParsedNode, ParsedSourceDefinition
 from dbt.contracts.graph.model_config import BaseConfig
 from dbt.contracts.graph.manifest import Manifest

From d84d5886027e4340bdb7352e019af1ec48bcaf2e Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 17 Aug 2020 09:10:56 -0600
Subject: [PATCH 038/933] Have the adapter be responsible for producing the
 compiler

The adapter's Relation is consulted for adding the ephemeral model prefix

Also hide some things from Jinja

Have the adapter be responsible for producing the compiler, move CTE generation into the Relation object


automatic commit by git-black, original commits:
  4d33554465cf90f60646d58d2f9b3a269dbca13b
  f80a759488cf69664fd60eadede753fa9026ee2a
---
 core/dbt/adapters/protocol.py | 10 +---------
 1 file changed, 1 insertion(+), 9 deletions(-)

diff --git a/core/dbt/adapters/protocol.py b/core/dbt/adapters/protocol.py
index 9f529b0be3e..882b492317f 100644
--- a/core/dbt/adapters/protocol.py
+++ b/core/dbt/adapters/protocol.py
@@ -74,15 +74,7 @@ def compile_node(
 ConnectionManager_T = TypeVar("ConnectionManager_T", bound=ConnectionManagerProtocol)
 Relation_T = TypeVar("Relation_T", bound=RelationProtocol)
 Column_T = TypeVar("Column_T", bound=ColumnProtocol)
-    'ConnectionManager_T', bound=ConnectionManagerProtocol
-)
-Relation_T = TypeVar(
-    'Relation_T', bound=RelationProtocol
-)
-Column_T = TypeVar(
-    'Column_T', bound=ColumnProtocol
-)
-Compiler_T = TypeVar('Compiler_T', bound=CompilerProtocol)
+Compiler_T = TypeVar("Compiler_T", bound=CompilerProtocol)
 
 
 class AdapterProtocol(

From cdad1fdcd03e94959b87c50576d154cfdb438c3d Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Thu, 25 Jun 2020 09:41:08 -0600
Subject: [PATCH 039/933] Allow plugin macros to override core

Moved some relation logic out of adapters and into contracts
Fixed a typo in a type name
Fixed an issue where tests could fail based on color settings
Fixed the type analysis for adapter plugins/factory
Swapped more concrete types out for protocols
Finally removed PACKAGES global
Added unit tests


automatic commit by git-black, original commits:
  abe345e925fb9b272699a7f106719ed76e1f4bbd
  eb9bfcda4ab314e4ee5bf548fab07ee4fadbee4d
---
 core/dbt/adapters/base/relation.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/core/dbt/adapters/base/relation.py b/core/dbt/adapters/base/relation.py
index 445eecf8c30..c1246a95c79 100644
--- a/core/dbt/adapters/base/relation.py
+++ b/core/dbt/adapters/base/relation.py
@@ -1,8 +1,6 @@
 from collections.abc import Hashable
 from dataclasses import dataclass
-from typing import (
-    Optional, TypeVar, Any, Type, Dict, Union, Iterator, Tuple, Set
-)
+from typing import Optional, TypeVar, Any, Type, Dict, Union, Iterator, Tuple, Set
 
 from dbt.contracts.graph.compiled import CompiledNode
 from dbt.contracts.graph.parsed import ParsedSourceDefinition, ParsedNode

From 4b31b7295a115493bef3dc1bd9d447413aadb384 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Thu, 2 Jul 2020 07:58:39 -0600
Subject: [PATCH 040/933] move the sql for getting different rows into dbt
 proper, from the test suite. Bump pytest dependency.

automatic commit by git-black, original commits:
  4cf2b78fcaef7f4cd5e8c89e484e7ed03f889b24
---
 core/dbt/adapters/base/impl.py     | 8 ++++----
 core/dbt/adapters/base/relation.py | 2 +-
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 98456d304c4..e39001b2809 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -1124,7 +1124,7 @@ def get_rows_different_sql(
         relation_a: BaseRelation,
         relation_b: BaseRelation,
         column_names: Optional[List[str]] = None,
-        except_operator: str = 'EXCEPT',
+        except_operator: str = "EXCEPT",
     ) -> str:
         """Generate SQL for a query that returns a single row with a two
         columns: the number of rows that are different between the two
@@ -1137,7 +1137,7 @@ def get_rows_different_sql(
             names = sorted((self.quote(c.name) for c in columns))
         else:
             names = sorted((self.quote(n) for n in column_names))
-        columns_csv = ', '.join(names)
+        columns_csv = ", ".join(names)
 
         sql = COLUMNS_EQUAL_SQL.format(
             columns=columns_csv,
@@ -1149,7 +1149,7 @@ def get_rows_different_sql(
         return sql
 
 
-COLUMNS_EQUAL_SQL = '''
+COLUMNS_EQUAL_SQL = """
 with diff_count as (
     SELECT
         1 as id,
@@ -1175,7 +1175,7 @@ def get_rows_different_sql(
     diff_count.num_missing as num_mismatched
 from row_count_diff
 join diff_count using (id)
-'''.strip()
+""".strip()
 
 
 def catch_as_completed(
diff --git a/core/dbt/adapters/base/relation.py b/core/dbt/adapters/base/relation.py
index c1246a95c79..cc6c7897010 100644
--- a/core/dbt/adapters/base/relation.py
+++ b/core/dbt/adapters/base/relation.py
@@ -56,7 +56,7 @@ def get_default_quote_policy(cls) -> Policy:
 
     @classmethod
     def get_default_include_policy(cls) -> Policy:
-        return cls._get_field_named('include_policy').default
+        return cls._get_field_named("include_policy").default
 
     def get(self, key, default=None):
         """Override `.get` to return a metadata object so we don't break

From d0c159e6e61f85f3b3d37965f5ac5e81870553ec Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 23 Oct 2019 13:56:12 -0600
Subject: [PATCH 041/933] Add a special handler to support the
 dbt_utils._is_relation macro

automatic commit by git-black, original commits:
  f4ca94f6d80d9325730a9e7643fdec45546d6e0e
---
 core/dbt/adapters/base/relation.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/core/dbt/adapters/base/relation.py b/core/dbt/adapters/base/relation.py
index cc6c7897010..e920708a660 100644
--- a/core/dbt/adapters/base/relation.py
+++ b/core/dbt/adapters/base/relation.py
@@ -62,10 +62,8 @@ def get(self, key, default=None):
         """Override `.get` to return a metadata object so we don't break
         dbt_utils.
         """
-        if key == 'metadata':
-            return {
-                'type': self.__class__.__name__
-            }
+        if key == "metadata":
+            return {"type": self.__class__.__name__}
         return super().get(key, default)
 
     def matches(

From 76088f5951f4cdbf33f714aae5c2a15b0964e795 Mon Sep 17 00:00:00 2001
From: Connor McArthur 
Date: Thu, 26 Apr 2018 17:38:44 -0400
Subject: [PATCH 042/933] Implement relations api (#727)

automatic commit by git-black, original commits:
  5344f54c3c766b8b7716c0cbe26a78095b7ad9e7
---
 core/dbt/adapters/base/relation.py | 23 +++++++++++++--------
 core/dbt/exceptions.py             | 33 +++++++++++++++---------------
 core/dbt/links.py                  |  2 +-
 3 files changed, 32 insertions(+), 26 deletions(-)

diff --git a/core/dbt/adapters/base/relation.py b/core/dbt/adapters/base/relation.py
index e920708a660..3f76b62e4cf 100644
--- a/core/dbt/adapters/base/relation.py
+++ b/core/dbt/adapters/base/relation.py
@@ -72,16 +72,19 @@ def matches(
         schema: Optional[str] = None,
         identifier: Optional[str] = None,
     ) -> bool:
-        search = filter_null_values({
+        search = filter_null_values(
             {
                 ComponentName.Database: database,
                 ComponentName.Schema: schema,
-        })
+                ComponentName.Identifier: identifier,
+            }
+        )
 
         if not search:
             # nothing was passed in
             raise dbt.exceptions.RuntimeException(
-                "Tried to match relation, but no search path was passed!")
+                "Tried to match relation, but no search path was passed!"
+            )
 
         exact_match = True
         approximate_match = True
@@ -113,11 +116,13 @@ def quote(
         schema: Optional[bool] = None,
         identifier: Optional[bool] = None,
     ) -> Self:
-        policy = filter_null_values({
+        policy = filter_null_values(
             {
                 ComponentName.Database: database,
                 ComponentName.Schema: schema,
-        })
+                ComponentName.Identifier: identifier,
+            }
+        )
 
         new_quote_policy = self.quote_policy.replace_dict(policy)
         return self.replace(quote_policy=new_quote_policy)
@@ -128,11 +133,13 @@ def include(
         schema: Optional[bool] = None,
         identifier: Optional[bool] = None,
     ) -> Self:
-        policy = filter_null_values({
+        policy = filter_null_values(
             {
                 ComponentName.Database: database,
                 ComponentName.Schema: schema,
-        })
+                ComponentName.Identifier: identifier,
+            }
+        )
 
         new_include_policy = self.include_policy.replace_dict(policy)
         return self.replace(include_policy=new_include_policy)
@@ -180,7 +187,7 @@ def render(self) -> str:
         )
 
     def quoted(self, identifier):
-        return '{quote_char}{identifier}{quote_char}'.format(
+        return "{quote_char}{identifier}{quote_char}".format(
             quote_char=self.quote_character,
             identifier=identifier,
         )
diff --git a/core/dbt/exceptions.py b/core/dbt/exceptions.py
index 2fc16e5e5fe..213a579315b 100644
--- a/core/dbt/exceptions.py
+++ b/core/dbt/exceptions.py
@@ -735,13 +735,13 @@ def raise_dataclass_not_dict(obj):
 
 def relation_wrong_type(relation, expected_type, model=None):
     raise_compiler_error(
-        ('Trying to create {expected_type} {relation}, '
-         'but it currently exists as a {current_type}. Either '
-         'drop {relation} manually, or run dbt with '
-         '`--full-refresh` and dbt will drop it for you.')
-        .format(relation=relation,
-                current_type=relation.type,
-                expected_type=expected_type),
+        (
+            "Trying to create {expected_type} {relation}, "
+            "but it currently exists as a {current_type}. Either "
+            "drop {relation} manually, or run dbt with "
+            "`--full-refresh` and dbt will drop it for you."
+        ).format(relation=relation, current_type=relation.type, expected_type=expected_type),
+        model,
         model)
 
 
@@ -790,10 +790,10 @@ def raise_dep_not_found(node, node_description, required_pkg):
 
 def multiple_matching_relations(kwargs, matches):
     raise_compiler_error(
-        'get_relation returned more than one relation with the given args. '
-        'Please specify a database or schema to narrow down the result set.'
-        '\n{}\n\n{}'
-        .format(kwargs, matches))
+        "get_relation returned more than one relation with the given args. "
+        "Please specify a database or schema to narrow down the result set."
+        "\n{}\n\n{}".format(kwargs, matches)
+    )
 
 
 def get_relation_returned_multiple_results(kwargs, matches):
@@ -802,12 +802,11 @@ def get_relation_returned_multiple_results(kwargs, matches):
 
 def approximate_relation_match(target, relation):
     raise_compiler_error(
-        'When searching for a relation, dbt found an approximate match. '
-        'Instead of guessing \nwhich relation to use, dbt will move on. '
-        'Please delete {relation}, or rename it to be less ambiguous.'
-        '\nSearched for: {target}\nFound: {relation}'
-        .format(target=target,
-                relation=relation))
+        "When searching for a relation, dbt found an approximate match. "
+        "Instead of guessing \nwhich relation to use, dbt will move on. "
+        "Please delete {relation}, or rename it to be less ambiguous."
+        "\nSearched for: {target}\nFound: {relation}".format(target=target, relation=relation)
+    )
 
 
 def raise_duplicate_macro_name(node_1, node_2, namespace) -> NoReturn:
diff --git a/core/dbt/links.py b/core/dbt/links.py
index c934942e391..c38d5cd749c 100644
--- a/core/dbt/links.py
+++ b/core/dbt/links.py
@@ -1,4 +1,4 @@
 ProfileConfigDocs = 'https://docs.getdbt.com/docs/configure-your-profile'
-SnowflakeQuotingDocs = 'https://docs.getdbt.com/v0.10/docs/configuring-quoting'
+SnowflakeQuotingDocs = "https://docs.getdbt.com/v0.10/docs/configuring-quoting"
 IncrementalDocs = 'https://docs.getdbt.com/docs/configuring-incremental-models'
 BigQueryNewPartitionBy = 'https://docs.getdbt.com/docs/upgrading-to-0-16-0'

From bc9b0b6c8565d2adf1c548ce35b7b41fc0455385 Mon Sep 17 00:00:00 2001
From: Emilie Lima Schario <14057155+emilieschario@users.noreply.github.com>
Date: Mon, 29 Nov 2021 05:20:01 -0500
Subject: [PATCH 043/933] Adjust logic when finding approx matches for model or
 test matching (#4076)

* adjust logic when finding approx matches

* update changelog

* Update core/dbt/adapters/base/relation.py

Co-authored-by: Jeremy Cohen 

* Update changelog

Co-authored-by: Jeremy Cohen 
Co-authored-by: Jeremy Cohen 

automatic commit by git-black, original commits:
  ed1ff2caac5b87ae4fd6480fc1a11de6e03555be
---
 core/dbt/adapters/base/relation.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/core/dbt/adapters/base/relation.py b/core/dbt/adapters/base/relation.py
index 3f76b62e4cf..c06e25a1680 100644
--- a/core/dbt/adapters/base/relation.py
+++ b/core/dbt/adapters/base/relation.py
@@ -93,9 +93,8 @@ def matches(
             if not self._is_exactish_match(k, v):
                 exact_match = False
 
-            if (
-                self.path.get_lowered_part(k).strip(self.quote_character) !=
-                v.lower().strip(self.quote_character)
+            if self.path.get_lowered_part(k).strip(self.quote_character) != v.lower().strip(
+                self.quote_character
             ):
                 approximate_match = False
 

From 1b84ae15d8c591b8307d79223776a0acbe68e8ba Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 19 Jun 2019 09:44:37 -0600
Subject: [PATCH 044/933] Fix casing comparisons on dbt-created relations

When dbt creates a relation in the db, add a special flag
When checking a node name match:
 - if that flag is present and quoting is disabled, do a lowercase compare
 - otherwise remain case sensitive


automatic commit by git-black, original commits:
  1d94fb67daf34e72c80d82187ac0507352e5e0f6
  5344f54c3c766b8b7716c0cbe26a78095b7ad9e7
---
 core/dbt/adapters/base/relation.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/core/dbt/adapters/base/relation.py b/core/dbt/adapters/base/relation.py
index c06e25a1680..367c34759ad 100644
--- a/core/dbt/adapters/base/relation.py
+++ b/core/dbt/adapters/base/relation.py
@@ -99,9 +99,7 @@ def matches(
                 approximate_match = False
 
         if approximate_match and not exact_match:
-            target = self.create(
-                database=database, schema=schema, identifier=identifier
-            )
+            target = self.create(database=database, schema=schema, identifier=identifier)
             dbt.exceptions.approximate_relation_match(target, self)
 
         return exact_match

From eea876c8cc8a1d841122379a6063bf655d1600d9 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 30 Oct 2019 20:38:23 -0600
Subject: [PATCH 045/933] Make bigquery adapters SQLAdapters

Implement more things via macros
Refactor Relations vs InformationSchemas to handle BQ better
Fix a bug where bigquery cached uppercase schema names wrong
 - by using information_schema this just goes away :)


automatic commit by git-black, original commits:
  72d83f988e3c0f6c22e21fdd4b87681f7452f843
---
 core/dbt/adapters/base/relation.py | 20 +++++++-------------
 core/dbt/adapters/sql/impl.py      |  2 +-
 2 files changed, 8 insertions(+), 14 deletions(-)

diff --git a/core/dbt/adapters/base/relation.py b/core/dbt/adapters/base/relation.py
index 367c34759ad..a2743490f3b 100644
--- a/core/dbt/adapters/base/relation.py
+++ b/core/dbt/adapters/base/relation.py
@@ -141,7 +141,7 @@ def include(
         new_include_policy = self.include_policy.replace_dict(policy)
         return self.replace(include_policy=new_include_policy)
 
-    def information_schema(self, view_name=None) -> 'InformationSchema':
+    def information_schema(self, view_name=None) -> "InformationSchema":
         # some of our data comes from jinja, where things can be `Undefined`.
         if not isinstance(view_name, str):
             view_name = None
@@ -151,7 +151,7 @@ def information_schema(self, view_name=None) -> 'InformationSchema':
         info_schema = InformationSchema.from_relation(self, view_name)
         return info_schema.incorporate(path={"schema": None})
 
-    def information_schema_only(self) -> 'InformationSchema':
+    def information_schema_only(self) -> "InformationSchema":
         return self.information_schema()
 
     def without_identifier(self) -> 'BaseRelation':
@@ -164,9 +164,7 @@ def without_identifier(self) -> 'BaseRelation':
         """
         return self.include(identifier=False).replace_path(identifier=None)
 
-    def _render_iterator(
-        self
-    ) -> Iterator[Tuple[Optional[ComponentName], Optional[str]]]:
+    def _render_iterator(self) -> Iterator[Tuple[Optional[ComponentName], Optional[str]]]:
 
         for key in ComponentName:
             path_part: Optional[str] = None
@@ -359,17 +357,15 @@ class InformationSchema(BaseRelation):
     def __post_init__(self):
         if not isinstance(self.information_schema_view, (type(None), str)):
             raise dbt.exceptions.CompilationException(
-                'Got an invalid name: {}'.format(self.information_schema_view)
+                "Got an invalid name: {}".format(self.information_schema_view)
             )
 
     @classmethod
-    def get_path(
-        cls, relation: BaseRelation, information_schema_view: Optional[str]
-    ) -> Path:
+    def get_path(cls, relation: BaseRelation, information_schema_view: Optional[str]) -> Path:
         return Path(
             database=relation.database,
             schema=relation.schema,
-            identifier='INFORMATION_SCHEMA',
+            identifier="INFORMATION_SCHEMA",
         )
 
     @classmethod
@@ -400,9 +396,7 @@ def from_relation(
         relation: BaseRelation,
         information_schema_view: Optional[str],
     ) -> Info:
-        include_policy = cls.get_include_policy(
-            relation, information_schema_view
-        )
+        include_policy = cls.get_include_policy(relation, information_schema_view)
         quote_policy = cls.get_quote_policy(relation, information_schema_view)
         path = cls.get_path(relation, information_schema_view)
         return cls(
diff --git a/core/dbt/adapters/sql/impl.py b/core/dbt/adapters/sql/impl.py
index c49399cc928..e4051d5f2ad 100644
--- a/core/dbt/adapters/sql/impl.py
+++ b/core/dbt/adapters/sql/impl.py
@@ -245,7 +245,7 @@ def check_schema_exists(self, database: str, schema: str) -> bool:
         information_schema = self.Relation.create(
             database=database,
             schema=schema,
-            identifier='INFORMATION_SCHEMA',
+            identifier="INFORMATION_SCHEMA",
             quote_policy=self.config.quoting
         ).information_schema()
 

From 1d695279e790374ac3ab150e9467c13d36122995 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 6 May 2020 13:20:07 -0600
Subject: [PATCH 046/933] Change list_relations_without_caching macro to take a
 single argument

The argument is a Relation object with no identifier field, configured with the appropriate quoting information

Unique quoted/unquoted representations will be treated as distinct
The logic for generating what schemas to search for relations is now distinct from the catalog search logic.
Schema creation/dropping takes a similar relation argument
Add tests


automatic commit by git-black, original commits:
  e392212c0e34ee1fe8759d46b402d13342d874f7
---
 core/dbt/adapters/base/impl.py     | 5 ++++-
 core/dbt/adapters/base/relation.py | 6 ++----
 core/dbt/adapters/sql/impl.py      | 9 +++++----
 3 files changed, 11 insertions(+), 9 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index e39001b2809..5f80979f1a3 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -659,7 +659,10 @@ def list_relations(
         # we can't build the relations cache because we don't have a
         # manifest so we can't run any operations.
         relations = self.list_relations_without_caching(
-            schema_relation
+        fire_event(
+            ListRelations(
+                database=database, schema=schema, relations=[_make_key(x) for x in relations]
+            )
         )
 
         return relations
diff --git a/core/dbt/adapters/base/relation.py b/core/dbt/adapters/base/relation.py
index a2743490f3b..f8800608069 100644
--- a/core/dbt/adapters/base/relation.py
+++ b/core/dbt/adapters/base/relation.py
@@ -154,7 +154,7 @@ def information_schema(self, view_name=None) -> "InformationSchema":
     def information_schema_only(self) -> "InformationSchema":
         return self.information_schema()
 
-    def without_identifier(self) -> 'BaseRelation':
+    def without_identifier(self) -> "BaseRelation":
         """Return a form of this relation that only has the database and schema
         set to included. To get the appropriately-quoted form the schema out of
         the result (for use as part of a query), use `.render()`. To get the
@@ -427,9 +427,7 @@ def add(self, relation: BaseRelation):
             schema = relation.schema.lower()
         self[key].add(schema)
 
-    def search(
-        self
-    ) -> Iterator[Tuple[InformationSchema, Optional[str]]]:
+    def search(self) -> Iterator[Tuple[InformationSchema, Optional[str]]]:
         for information_schema_name, schemas in self.items():
             for schema in schemas:
                 yield information_schema_name, schema
diff --git a/core/dbt/adapters/sql/impl.py b/core/dbt/adapters/sql/impl.py
index e4051d5f2ad..a7a13c5ddbb 100644
--- a/core/dbt/adapters/sql/impl.py
+++ b/core/dbt/adapters/sql/impl.py
@@ -184,7 +184,7 @@ def create_schema(self, relation: BaseRelation) -> None:
         relation = relation.without_identifier()
         fire_event(SchemaCreation(relation=_make_key(relation)))
         kwargs = {
-            'relation': relation,
+            "relation": relation,
         }
         self.execute_macro(CREATE_SCHEMA_MACRO_NAME, kwargs=kwargs)
         self.commit_if_has_connection()
@@ -195,16 +195,17 @@ def drop_schema(self, relation: BaseRelation) -> None:
         relation = relation.without_identifier()
         fire_event(SchemaDrop(relation=_make_key(relation)))
         kwargs = {
-            'relation': relation,
+            "relation": relation,
         }
         self.execute_macro(DROP_SCHEMA_MACRO_NAME, kwargs=kwargs)
         # we can update the cache here
         self.cache.drop_schema(relation.database, relation.schema)
 
     def list_relations_without_caching(
-        self, schema_relation: BaseRelation,
+        self,
+        schema_relation: BaseRelation,
     ) -> List[BaseRelation]:
-        kwargs = {'schema_relation': schema_relation}
+        kwargs = {"schema_relation": schema_relation}
         results = self.execute_macro(
             LIST_RELATIONS_MACRO_NAME,
             kwargs=kwargs

From 4d84b2ffc9261aa2c2d8557f88c9a1e5f65f07b2 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Tue, 26 May 2020 11:56:17 -0600
Subject: [PATCH 047/933] Make databases optional

automatic commit by git-black, original commits:
  c21af17a3034645ba0dc801a6c46fe6c5d5d3815
---
 core/dbt/adapters/base/impl.py     | 4 +---
 core/dbt/adapters/base/relation.py | 5 +----
 core/dbt/adapters/cache.py         | 4 ++--
 core/dbt/task/generate.py          | 2 +-
 core/dbt/task/runnable.py          | 6 ++----
 5 files changed, 7 insertions(+), 14 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 5f80979f1a3..102625599c5 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -643,9 +643,7 @@ def expand_target_column_types(
 
         self.expand_column_types(from_relation, to_relation)
 
-    def list_relations(
-        self, database: Optional[str], schema: str
-    ) -> List[BaseRelation]:
+    def list_relations(self, database: Optional[str], schema: str) -> List[BaseRelation]:
         if self._schema_is_cached(database, schema):
             return self.cache.get_relations(database, schema)
 
diff --git a/core/dbt/adapters/base/relation.py b/core/dbt/adapters/base/relation.py
index f8800608069..3a049206c40 100644
--- a/core/dbt/adapters/base/relation.py
+++ b/core/dbt/adapters/base/relation.py
@@ -176,10 +176,7 @@ def _render_iterator(self) -> Iterator[Tuple[Optional[ComponentName], Optional[s
 
     def render(self) -> str:
         # if there is nothing set, this will return the empty string.
-        return '.'.join(
-            part for _, part in self._render_iterator()
-            if part is not None
-        )
+        return ".".join(part for _, part in self._render_iterator() if part is not None)
 
     def quoted(self, identifier):
         return "{quote_char}{identifier}{quote_char}".format(
diff --git a/core/dbt/adapters/cache.py b/core/dbt/adapters/cache.py
index ad595c62c80..ce3b0d6c03d 100644
--- a/core/dbt/adapters/cache.py
+++ b/core/dbt/adapters/cache.py
@@ -467,8 +467,8 @@ def get_relations(self, database: Optional[str], schema: Optional[str]) -> List[
         with self.lock:
             results = [
                 r.inner for r in self.relations.values()
-                if (lowercase(r.schema) == schema and
-                    lowercase(r.database) == database)
+                for r in self.relations.values()
+                if (lowercase(r.schema) == schema and lowercase(r.database) == database)
             ]
 
         if None in results:
diff --git a/core/dbt/task/generate.py b/core/dbt/task/generate.py
index d6bd952d454..1f63af1a13f 100644
--- a/core/dbt/task/generate.py
+++ b/core/dbt/task/generate.py
@@ -60,7 +60,7 @@ def __init__(self, columns: List[PrimitiveDict]):
             self.add_column(col)
 
     def get_table(self, data: PrimitiveDict) -> CatalogTable:
-        database = data.get('table_database')
+        database = data.get("table_database")
         if database is None:
             dkey: Optional[str] = None
         else:
diff --git a/core/dbt/task/runnable.py b/core/dbt/task/runnable.py
index 4dd47fc74a1..6dbef5a5133 100644
--- a/core/dbt/task/runnable.py
+++ b/core/dbt/task/runnable.py
@@ -516,9 +516,7 @@ def create_schemas(self, adapter, selected_uids: Iterable[str]):
         existing_schemas_lowered: Set[Tuple[Optional[str], Optional[str]]]
         existing_schemas_lowered = set()
 
-        def list_schemas(
-            db_only: BaseRelation
-        ) -> List[Tuple[Optional[str], str]]:
+        def list_schemas(db_only: BaseRelation) -> List[Tuple[Optional[str], str]]:
             # the database can be None on some warehouses that don't support it
             database_quoted: Optional[str]
             db_lowercase = dbt.utils.lowercase(db_only.database)
@@ -535,7 +533,7 @@ def list_schemas(
             ]
 
         def create_schema(relation: BaseRelation) -> None:
-            db = relation.database or ''
+            db = relation.database or ""
             schema = relation.schema
             with adapter.connection_named(f'create_{db}_{schema}'):
                 adapter.create_schema(relation)

From 1f5eea85007d7175986ffa5d8cc47bd5b37f7995 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Fri, 7 Feb 2020 14:03:28 -0700
Subject: [PATCH 048/933] add column-level quoting for tests

add support in columns for a quote field
add support in source quoting for a column field
add support in models for a quote_columns field
add tests


automatic commit by git-black, original commits:
  3ec9d358f7b04954c36e6af0931df8d6c25ee3ac
---
 core/dbt/adapters/base/relation.py | 2 +-
 core/dbt/parser/schemas.py         | 5 +----
 2 files changed, 2 insertions(+), 5 deletions(-)

diff --git a/core/dbt/adapters/base/relation.py b/core/dbt/adapters/base/relation.py
index 3a049206c40..0fc14083f9e 100644
--- a/core/dbt/adapters/base/relation.py
+++ b/core/dbt/adapters/base/relation.py
@@ -187,7 +187,7 @@ def quoted(self, identifier):
     @classmethod
     def create_from_source(cls: Type[Self], source: ParsedSourceDefinition, **kwargs: Any) -> Self:
         source_quoting = source.quoting.to_dict(omit_none=True)
-        source_quoting.pop('column', None)
+        source_quoting.pop("column", None)
         quote_policy = deep_merge(
             cls.get_default_quote_policy().to_dict(omit_none=True),
             source_quoting,
diff --git a/core/dbt/parser/schemas.py b/core/dbt/parser/schemas.py
index 523002e17a9..6dcc26d56a1 100644
--- a/core/dbt/parser/schemas.py
+++ b/core/dbt/parser/schemas.py
@@ -465,10 +465,7 @@ def parse_test(
             column_tags: List[str] = []
         else:
             column_name = column.name
-            should_quote = (
-                column.quote or
-                (column.quote is None and target_block.quote_columns)
-            )
+            should_quote = column.quote or (column.quote is None and target_block.quote_columns)
             if should_quote:
                 column_name = get_adapter(self.root_project).quote(column_name)
             column_tags = column.tags

From ffee02c3b166e9fbad96567cecc7b5b608c37945 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Fri, 1 Feb 2019 13:57:30 -0700
Subject: [PATCH 049/933] Remove RunManager

Move some of RunManager into tasks
Move compile_node work into compilation
Move manifest work into the GraphLoader
Move the rest into the runners

Implement freshness calculations for sources

command: 'dbt source snapshot-freshness'
support for 4 adapters (no presto)
Integration tests
break up main.py's argument parsing
Pass the manifest along to freshness calculation

Results support for freshness

New freshness result contracts
Fix source result printing
Result contract cleanup
safe_run supports alternate result types
Fix tests to support changes in results

PR feedback:

- snowflake macro changed to always return utc
- no cte in collect_freshness
- remove extra optional arg
- fix the has_freshness check to examine if there is anything in freshness
- support error_after without warn_after and vice-versa
- snowflake: convert_timestamp -> convert_timezone

Update sources to be Relations

 - update contracts
 - add create_from_source
 - add create_from_source calls
 - fix tests

PR feedback

create_from_source forces quotes
default source schema/table from source/table names
snowflake quoting nonsense
also fix output: pass -> PASS
make seeding test 017 take 1m instead of 3m by using csv instead of sql

- source tweaks for the docs site


automatic commit by git-black, original commits:
  5e8ab9ce4a8ac905d66bf85414a0be121fc58dbd
---
 core/dbt/adapters/base/impl.py                | 10 +++----
 core/dbt/adapters/base/relation.py            |  2 +-
 core/dbt/main.py                              | 30 +++++++++----------
 core/dbt/task/freshness.py                    |  6 ++--
 core/dbt/task/run.py                          |  3 +-
 .../postgres/dbt/adapters/postgres/impl.py    |  2 +-
 6 files changed, 25 insertions(+), 28 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 102625599c5..871bcf32360 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -44,7 +44,7 @@
 
 
 GET_CATALOG_MACRO_NAME = 'get_catalog'
-FRESHNESS_MACRO_NAME = 'collect_freshness'
+FRESHNESS_MACRO_NAME = "collect_freshness"
 
 
 def _expect_row_value(key: str, row: agate.Row):
@@ -1023,7 +1023,7 @@ def calculate_freshness(
     ) -> Dict[str, Any]:
         """Calculate the freshness of sources in dbt, and return it"""
         kwargs: Dict[str, Any] = {
-            'source': source,
+            "source": source,
             'loaded_at_field': loaded_at_field,
             'filter': filter,
         }
@@ -1052,9 +1052,9 @@ def calculate_freshness(
         snapshotted_at = _utc(table[0][1], source, loaded_at_field)
         age = (snapshotted_at - max_loaded_at).total_seconds()
         return {
-            'max_loaded_at': max_loaded_at,
-            'snapshotted_at': snapshotted_at,
-            'age': age,
+            "max_loaded_at": max_loaded_at,
+            "snapshotted_at": snapshotted_at,
+            "age": age,
         }
 
     def pre_model_hook(self, config: Mapping[str, Any]) -> Any:
diff --git a/core/dbt/adapters/base/relation.py b/core/dbt/adapters/base/relation.py
index 0fc14083f9e..c7c4625bc90 100644
--- a/core/dbt/adapters/base/relation.py
+++ b/core/dbt/adapters/base/relation.py
@@ -199,7 +199,7 @@ def create_from_source(cls: Type[Self], source: ParsedSourceDefinition, **kwargs
             schema=source.schema,
             identifier=source.identifier,
             quote_policy=quote_policy,
-            **kwargs
+            **kwargs,
         )
 
     @staticmethod
diff --git a/core/dbt/main.py b/core/dbt/main.py
index 9ac0037f78b..24f8c86a1c2 100644
--- a/core/dbt/main.py
+++ b/core/dbt/main.py
@@ -313,7 +313,7 @@ def _build_base_subparser():
 
 def _build_docs_subparser(subparsers, base_subparser):
     docs_sub = subparsers.add_parser(
-        'docs',
+        "docs",
         help='''
         Generate or serve the documentation website for your project.
         '''
@@ -323,7 +323,7 @@ def _build_docs_subparser(subparsers, base_subparser):
 
 def _build_source_subparser(subparsers, base_subparser):
     source_sub = subparsers.add_parser(
-        'source',
+        "source",
         help='''
         Manage your project's sources
         ''',
@@ -560,7 +560,7 @@ def _build_parse_subparser(subparsers, base_subparser):
 def _build_docs_generate_subparser(subparsers, base_subparser):
     # it might look like docs_sub is the correct parents entry, but that
     # will cause weird errors about 'conflicting option strings'.
-    generate_sub = subparsers.add_parser('generate', parents=[base_subparser])
+    generate_sub = subparsers.add_parser("generate", parents=[base_subparser])
     generate_sub.set_defaults(cls=generate_task.GenerateTask,
                               which='generate', rpc_method='docs.generate')
     generate_sub.add_argument(
@@ -692,7 +692,7 @@ def _build_seed_subparser(subparsers, base_subparser):
 
 
 def _build_docs_serve_subparser(subparsers, base_subparser):
-    serve_sub = subparsers.add_parser('serve', parents=[base_subparser])
+    serve_sub = subparsers.add_parser("serve", parents=[base_subparser])
     serve_sub.add_argument(
         '--port',
         default=8080,
@@ -760,8 +760,8 @@ def _build_source_freshness_subparser(subparsers, base_subparser):
         aliases=['snapshot-freshness'],
     )
     sub.add_argument(
-        '-o',
-        '--output',
+        "-o",
+        "--output",
         required=False,
         help='''
         Specify the output path for the json report. By default, outputs to
@@ -899,15 +899,15 @@ def parse_args(args, cls=DBTArgumentParser):
     )
 
     p.add_argument(
-        '--version',
-        action='dbtversion',
+        "--version",
+        action="dbtversion",
         help='''
         Show version information
         ''')
 
     p.add_argument(
-        '-r',
-        '--record-timing-info',
+        "-r",
+        "--record-timing-info",
         default=None,
         type=str,
         help='''
@@ -917,9 +917,9 @@ def parse_args(args, cls=DBTArgumentParser):
     )
 
     p.add_argument(
-        '-d',
-        '--debug',
-        action='store_true',
+        "-d",
+        "--debug",
+        action="store_true",
         default=None,
         help='''
         Display debug logging during dbt execution. Useful for debugging and
@@ -1017,8 +1017,8 @@ def parse_args(args, cls=DBTArgumentParser):
     # a thread, as the profiler ignores child threads. Users should really
     # never use this.
     p.add_argument(
-        '--single-threaded',
-        action='store_true',
+        "--single-threaded",
+        action="store_true",
         help=argparse.SUPPRESS,
     )
 
diff --git a/core/dbt/task/freshness.py b/core/dbt/task/freshness.py
index 8bae7f85d1b..715b94826d8 100644
--- a/core/dbt/task/freshness.py
+++ b/core/dbt/task/freshness.py
@@ -25,7 +25,7 @@
 from dbt.contracts.graph.parsed import ParsedSourceDefinition
 
 
-RESULT_FILE_NAME = 'sources.json'
+RESULT_FILE_NAME = "sources.json"
 
 
 class FreshnessRunner(BaseRunner):
@@ -215,9 +215,7 @@ def write_result(self, result):
 
     def get_result(self, results, elapsed_time, generated_at):
         return FreshnessResult.from_node_results(
-            elapsed_time=elapsed_time,
-            generated_at=generated_at,
-            results=results
+            elapsed_time=elapsed_time, generated_at=generated_at, results=results
         )
 
     def task_end_messages(self, results):
diff --git a/core/dbt/task/run.py b/core/dbt/task/run.py
index 4a0cc12df4e..1e6d9f3810a 100644
--- a/core/dbt/task/run.py
+++ b/core/dbt/task/run.py
@@ -407,8 +407,7 @@ def print_results_line(self, results, execution_time):
         execution = ""
 
         if execution_time is not None:
-            execution = " in {execution_time:0.2f}s".format(
-                execution_time=execution_time)
+            execution = " in {execution_time:0.2f}s".format(execution_time=execution_time)
 
         with TextOnly():
             fire_event(EmptyLine())
diff --git a/plugins/postgres/dbt/adapters/postgres/impl.py b/plugins/postgres/dbt/adapters/postgres/impl.py
index 7058e5d1a22..13f670cb4a8 100644
--- a/plugins/postgres/dbt/adapters/postgres/impl.py
+++ b/plugins/postgres/dbt/adapters/postgres/impl.py
@@ -68,7 +68,7 @@ class PostgresAdapter(SQLAdapter):
 
     @classmethod
     def date_function(cls):
-        return 'now()'
+        return "now()"
 
     @available
     def verify_database(self, database):

From 440d1dc69546f591aa9335b73eacaf8a3b89dd06 Mon Sep 17 00:00:00 2001
From: NiallRees 
Date: Sun, 24 Jan 2021 16:19:20 +0000
Subject: [PATCH 050/933] Make generated CTE test names lowercase to match
 style guide

automatic commit by git-black, original commits:
  cb3c007acd71bca6d87cf1ee64bc6b6d6a5577b4
---
 core/dbt/adapters/base/relation.py | 2 +-
 core/dbt/utils.py                  | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/core/dbt/adapters/base/relation.py b/core/dbt/adapters/base/relation.py
index c7c4625bc90..bfc696836b0 100644
--- a/core/dbt/adapters/base/relation.py
+++ b/core/dbt/adapters/base/relation.py
@@ -204,7 +204,7 @@ def create_from_source(cls: Type[Self], source: ParsedSourceDefinition, **kwargs
 
     @staticmethod
     def add_ephemeral_prefix(name: str):
-        return f'__dbt__cte__{name}'
+        return f"__dbt__cte__{name}"
 
     @classmethod
     def create_ephemeral_from_node(
diff --git a/core/dbt/utils.py b/core/dbt/utils.py
index 2532bf54dc0..2647bd494e4 100644
--- a/core/dbt/utils.py
+++ b/core/dbt/utils.py
@@ -307,7 +307,7 @@ def filter_null_values(input: Dict[K_T, Optional[V_T]]) -> Dict[K_T, V_T]:
 
 
 def add_ephemeral_model_prefix(s: str) -> str:
-    return '__dbt__cte__{}'.format(s)
+    return "__dbt__cte__{}".format(s)
 
 
 def timestring() -> str:

From 10ecf5f0abc7296cb618e6098f2c7557ef8963b5 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 2 Jan 2019 11:35:42 -0700
Subject: [PATCH 051/933] Make config() accept database, add adapter-specific
 aliasing

Add concept of aliasing for credentials/relations

All databases use database, schema, and identifier internally now:
 - Postgres/Redshift have 'dbname' aliased to database and 'pass' aliased to
    password
 - Bigquery has 'project' aliased to database and 'dataset' aliased to
    schema
 - Set default database include policy to True everywhere

config() calls accept aliases instead of canonical names

Remove unused functions and change others to accept Relations (see core/CHANGELOG.md)

Add catalog, etc support for multiple databases


automatic commit by git-black, original commits:
  874ead97514e66686244cef629f041d87da7e3b3
---
 core/dbt/adapters/base/impl.py                | 48 ++++++++++---------
 core/dbt/adapters/base/relation.py            |  3 +-
 core/dbt/adapters/cache.py                    |  4 +-
 core/dbt/exceptions.py                        | 23 ++++++---
 .../postgres/dbt/adapters/postgres/impl.py    |  4 +-
 plugins/postgres/setup.py                     |  2 +-
 6 files changed, 48 insertions(+), 36 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 871bcf32360..96b1ba3d849 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -60,17 +60,17 @@ def _catalog_filter_schemas(manifest: Manifest) -> Callable[[agate.Row], bool]:
     """Return a function that takes a row and decides if the row should be
     included in the catalog output.
     """
-    schemas = frozenset((d.lower(), s.lower())
-                        for d, s in manifest.get_used_schemas())
+    schemas = frozenset((d.lower(), s.lower()) for d, s in manifest.get_used_schemas())
 
     def test(row: agate.Row) -> bool:
-        table_database = _expect_row_value('table_database', row)
+        table_database = _expect_row_value("table_database", row)
         table_schema = _expect_row_value('table_schema', row)
         # the schema may be present but None, which is not an error and should
         # be filtered out
         if table_schema is None:
             return False
         return (table_database.lower(), table_schema.lower()) in schemas
+
     return test
 
 
@@ -555,17 +555,19 @@ def get_missing_columns(
         """
         if not isinstance(from_relation, self.Relation):
             invalid_type_error(
-                method_name='get_missing_columns',
-                arg_name='from_relation',
+                method_name="get_missing_columns",
+                arg_name="from_relation",
                 got_value=from_relation,
-                expected_type=self.Relation)
+                expected_type=self.Relation,
+            )
 
         if not isinstance(to_relation, self.Relation):
             invalid_type_error(
-                method_name='get_missing_columns',
-                arg_name='to_relation',
+                method_name="get_missing_columns",
+                arg_name="to_relation",
                 got_value=to_relation,
-                expected_type=self.Relation)
+                expected_type=self.Relation,
+            )
 
         from_columns = {
             col.name: col for col in
@@ -636,10 +638,11 @@ def expand_target_column_types(
 
         if not isinstance(to_relation, self.Relation):
             invalid_type_error(
-                method_name='expand_target_column_types',
-                arg_name='to_relation',
+                method_name="expand_target_column_types",
+                arg_name="to_relation",
                 got_value=to_relation,
-                expected_type=self.Relation)
+                expected_type=self.Relation,
+            )
 
         self.expand_column_types(from_relation, to_relation)
 
@@ -676,11 +679,13 @@ def _make_match_kwargs(self, database: str, schema: str, identifier: str) -> Dic
         if database is not None and quoting['database'] is False:
             database = database.lower()
 
-        return filter_null_values({
-            'database': database,
-            'identifier': identifier,
-            'schema': schema,
-        })
+        return filter_null_values(
+            {
+                "database": database,
+                "identifier": identifier,
+                "schema": schema,
+            }
+        )
 
     def _make_match(
         self,
@@ -704,14 +709,13 @@ def _make_match(
     def get_relation(self, database: str, schema: str, identifier: str) -> Optional[BaseRelation]:
         relations_list = self.list_relations(database, schema)
 
-        matches = self._make_match(relations_list, database, schema,
-                                   identifier)
+        matches = self._make_match(relations_list, database, schema, identifier)
 
         if len(matches) > 1:
             kwargs = {
-                'identifier': identifier,
-                'schema': schema,
-                'database': database,
+                "identifier": identifier,
+                "schema": schema,
+                "database": database,
             }
             get_relation_returned_multiple_results(
                 kwargs, matches
diff --git a/core/dbt/adapters/base/relation.py b/core/dbt/adapters/base/relation.py
index bfc696836b0..943ed31c7da 100644
--- a/core/dbt/adapters/base/relation.py
+++ b/core/dbt/adapters/base/relation.py
@@ -237,7 +237,8 @@ def create_from_node(
             schema=node.schema,
             identifier=node.alias,
             quote_policy=quote_policy,
-            **kwargs)
+            **kwargs,
+        )
 
     @classmethod
     def create_from(
diff --git a/core/dbt/adapters/cache.py b/core/dbt/adapters/cache.py
index ce3b0d6c03d..303e927d172 100644
--- a/core/dbt/adapters/cache.py
+++ b/core/dbt/adapters/cache.py
@@ -47,8 +47,8 @@ def __init__(self, inner):
 
     def __str__(self) -> str:
         return (
-            '_CachedRelation(database={}, schema={}, identifier={}, inner={})'
-        ).format(self.database, self.schema, self.identifier, self.inner)
+            self.database, self.schema, self.identifier, self.inner
+        )
 
     @property
     def database(self) -> Optional[str]:
diff --git a/core/dbt/exceptions.py b/core/dbt/exceptions.py
index 213a579315b..45ebfa4dc44 100644
--- a/core/dbt/exceptions.py
+++ b/core/dbt/exceptions.py
@@ -488,18 +488,27 @@ def disallow_secret_env_var(env_var_name) -> NoReturn:
     raise_parsing_error(msg.format(env_var_name=env_var_name))
 
 
-def invalid_type_error(method_name, arg_name, got_value, expected_type,
+def invalid_type_error(
                        version='0.13.0') -> NoReturn:
     """Raise a CompilationException when an adapter method available to macros
     has changed.
     """
     got_type = type(got_value)
-    msg = ("As of {version}, 'adapter.{method_name}' expects argument "
-           "'{arg_name}' to be of type '{expected_type}', instead got "
-           "{got_value} ({got_type})")
-    raise_compiler_error(msg.format(version=version, method_name=method_name,
-                         arg_name=arg_name, expected_type=expected_type,
-                         got_value=got_value, got_type=got_type))
+    msg = (
+        "As of {version}, 'adapter.{method_name}' expects argument "
+        "'{arg_name}' to be of type '{expected_type}', instead got "
+        "{got_value} ({got_type})"
+    )
+    raise_compiler_error(
+        msg.format(
+            version=version,
+            method_name=method_name,
+            arg_name=arg_name,
+            expected_type=expected_type,
+            got_value=got_value,
+            got_type=got_type,
+        )
+    )
 
 
 def invalid_bool_error(got_value, macro_name) -> NoReturn:
diff --git a/plugins/postgres/dbt/adapters/postgres/impl.py b/plugins/postgres/dbt/adapters/postgres/impl.py
index 13f670cb4a8..7a3806a4c8b 100644
--- a/plugins/postgres/dbt/adapters/postgres/impl.py
+++ b/plugins/postgres/dbt/adapters/postgres/impl.py
@@ -96,9 +96,7 @@ def _link_cached_database_relations(self, schemas: Set[str]):
 
         for (dep_schema, dep_name, refed_schema, refed_name) in table:
             dependent = self.Relation.create(
-                database=database,
-                schema=dep_schema,
-                identifier=dep_name
+                database=database, schema=dep_schema, identifier=dep_name
             )
             referenced = self.Relation.create(
                 database=database,
diff --git a/plugins/postgres/setup.py b/plugins/postgres/setup.py
index fe25fecfe7f..506c8edf0d4 100644
--- a/plugins/postgres/setup.py
+++ b/plugins/postgres/setup.py
@@ -62,7 +62,7 @@ def _dbt_psycopg2_name():
     packages=find_namespace_packages(include=['dbt', 'dbt.*']),
     package_data={
         "dbt": [
-            'include/postgres/dbt_project.yml',
+            "include/postgres/dbt_project.yml",
             'include/postgres/sample_profiles.yml',
             "include/postgres/macros/*.sql",
             'include/postgres/macros/**/*.sql',

From 0a06c7fe3330adfb73b7a302c69b2d881f757952 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Fri, 28 Feb 2020 11:26:15 -0700
Subject: [PATCH 052/933] refactor, fix flake8/mypy

automatic commit by git-black, original commits:
  cc3ba20ec9cf3bc2a5047f4d2e0fafe0e86e7f63
---
 core/dbt/adapters/base/impl.py     |  5 ++++-
 core/dbt/adapters/base/relation.py | 18 +++++++++---------
 2 files changed, 13 insertions(+), 10 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 96b1ba3d849..d82ddfadcd5 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -34,7 +34,10 @@
 from dbt.adapters.base.connections import Connection, AdapterResponse
 from dbt.adapters.base.meta import AdapterMeta, available
 from dbt.adapters.base.relation import (
-    ComponentName, BaseRelation, InformationSchema, SchemaSearchMap
+    ComponentName,
+    BaseRelation,
+    InformationSchema,
+    SchemaSearchMap,
 )
 from dbt.adapters.base import Column as BaseColumn
 from dbt.adapters.cache import RelationsCache, _make_key
diff --git a/core/dbt/adapters/base/relation.py b/core/dbt/adapters/base/relation.py
index 943ed31c7da..9717b14fce9 100644
--- a/core/dbt/adapters/base/relation.py
+++ b/core/dbt/adapters/base/relation.py
@@ -416,6 +416,7 @@ class SchemaSearchMap(Dict[InformationSchema, Set[Optional[str]]]):
     search for what schemas. The schema values are all lowercased to avoid
     duplication.
     """
+
     def add(self, relation: BaseRelation):
         key = relation.information_schema_only()
         if key not in self:
@@ -440,14 +441,13 @@ def flatten(self, allow_multiple_databases: bool = False):
                 dbt.exceptions.raise_compiler_error(str(seen))
 
         for information_schema_name, schema in self.search():
-            path = {
-                'database': information_schema_name.database,
-                'schema': schema
-            }
-            new.add(information_schema_name.incorporate(
-                path=path,
-                quote_policy={'database': False},
-                include_policy={'database': False},
-            ))
+            path = {"database": information_schema_name.database, "schema": schema}
+            new.add(
+                information_schema_name.incorporate(
+                    path=path,
+                    quote_policy={"database": False},
+                    include_policy={"database": False},
+                )
+            )
 
         return new

From b0076a06c370a7dd3c7eb87134a69a0ff12f9325 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Fri, 16 Nov 2018 09:54:53 -0700
Subject: [PATCH 053/933] stop dropping renames...

automatic commit by git-black, original commits:
  cfd2d605759550ddc3858027ebbdce3a1e1a72cf
---
 core/dbt/adapters/cache.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/core/dbt/adapters/cache.py b/core/dbt/adapters/cache.py
index 303e927d172..812ced12c25 100644
--- a/core/dbt/adapters/cache.py
+++ b/core/dbt/adapters/cache.py
@@ -29,7 +29,7 @@ def dot_separated(key: _ReferenceKey) -> str:
 
     :param _ReferenceKey key: The key to stringify.
     """
-    return '.'.join(map(str, key))
+    return ".".join(map(str, key))
 
 
 class _CachedRelation:
@@ -420,8 +420,9 @@ def _check_rename_constraints(self, old_key, new_key):
         """
         if new_key in self.relations:
             dbt.exceptions.raise_cache_inconsistent(
-                'in rename, new key {} already in cache: {}'
-                .format(new_key, list(self.relations.keys()))
+                "in rename, new key {} already in cache: {}".format(
+                    new_key, list(self.relations.keys())
+                )
             )
 
         if old_key not in self.relations:

From 99c171634dcb2e14304950355c3a119d9d6ded9f Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 26 Sep 2018 08:59:14 -0600
Subject: [PATCH 054/933] docstrings

automatic commit by git-black, original commits:
  3883ad351d6a7b4ff4fbc9a3d645f4cea4dec157
---
 core/dbt/adapters/cache.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/core/dbt/adapters/cache.py b/core/dbt/adapters/cache.py
index 812ced12c25..ee1b2d4819d 100644
--- a/core/dbt/adapters/cache.py
+++ b/core/dbt/adapters/cache.py
@@ -41,6 +41,7 @@ class _CachedRelation:
         that refer to this relation.
     :attr BaseRelation inner: The underlying dbt relation.
     """
+
     def __init__(self, inner):
         self.referenced_by = {}
         self.inner = inner
@@ -170,6 +171,7 @@ class RelationsCache:
         The adapters also hold this lock while filling the cache.
     :attr Set[str] schemas: The set of known/cached schemas, all lowercased.
     """
+
     def __init__(self) -> None:
         self.relations: Dict[_ReferenceKey, _CachedRelation] = {}
         self.lock = threading.RLock()

From 01a5f6269b7480dd0c9e92e44b9cddd1b4010ce9 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Thu, 20 Sep 2018 11:53:57 -0600
Subject: [PATCH 055/933] First pass on caching

automatic commit by git-black, original commits:
  ccee039c7693af016f1a78a2a34dcbb51d069adf
---
 core/dbt/adapters/base/impl.py | 5 ++---
 core/dbt/adapters/cache.py     | 4 ++--
 2 files changed, 4 insertions(+), 5 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index d82ddfadcd5..74388f5a78d 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -53,8 +53,7 @@
 def _expect_row_value(key: str, row: agate.Row):
     if key not in row.keys():
         raise InternalException(
-            'Got a row without "{}" column, columns: {}'
-            .format(key, row.keys())
+            'Got a row without "{}" column, columns: {}'.format(key, row.keys())
         )
     return row[key]
 
@@ -67,7 +66,7 @@ def _catalog_filter_schemas(manifest: Manifest) -> Callable[[agate.Row], bool]:
 
     def test(row: agate.Row) -> bool:
         table_database = _expect_row_value("table_database", row)
-        table_schema = _expect_row_value('table_schema', row)
+        table_schema = _expect_row_value("table_schema", row)
         # the schema may be present but None, which is not an error and should
         # be filtered out
         if table_schema is None:
diff --git a/core/dbt/adapters/cache.py b/core/dbt/adapters/cache.py
index ee1b2d4819d..6c5653bb9e9 100644
--- a/core/dbt/adapters/cache.py
+++ b/core/dbt/adapters/cache.py
@@ -47,7 +47,7 @@ def __init__(self, inner):
         self.inner = inner
 
     def __str__(self) -> str:
-        return (
+        return ("_CachedRelation(database={}, schema={}, identifier={}, inner={})").format(
             self.database, self.schema, self.identifier, self.inner
         )
 
@@ -469,7 +469,7 @@ def get_relations(self, database: Optional[str], schema: Optional[str]) -> List[
         schema = lowercase(schema)
         with self.lock:
             results = [
-                r.inner for r in self.relations.values()
+                r.inner
                 for r in self.relations.values()
                 if (lowercase(r.schema) == schema and lowercase(r.database) == database)
             ]

From c2d590acad5da9e72f7a93c81ecc5473bc60e8f9 Mon Sep 17 00:00:00 2001
From: Fokko Driesprong 
Date: Thu, 5 Dec 2019 19:59:09 +0100
Subject: [PATCH 056/933] Process comments

automatic commit by git-black, original commits:
  ca454360d867d2a46daf38dd818d2b252b79371e
---
 core/dbt/adapters/cache.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/dbt/adapters/cache.py b/core/dbt/adapters/cache.py
index 6c5653bb9e9..e3d2137f664 100644
--- a/core/dbt/adapters/cache.py
+++ b/core/dbt/adapters/cache.py
@@ -83,7 +83,7 @@ def key(self):
         """
         return _make_key(self)
 
-    def add_reference(self, referrer: '_CachedRelation'):
+    def add_reference(self, referrer: "_CachedRelation"):
         """Add a reference from referrer to self, indicating that if this node
         were drop...cascaded, the referrer would be dropped as well.
 

From 46e6e5cb6b4da7ea97c423f2bce0bcb02ea37b22 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Thu, 3 Jan 2019 10:58:04 -0700
Subject: [PATCH 057/933] PR feedback

bigquery: naming/parameter sanity cleanup
postgres: never allow databases that aren't the default
postgres: simplify cache building since we know we'll only ever have one database
everything: parameter name change for execute_macro
everything: cache related bugfixes to casing
internal only: cross db/cross schema rename support in the cache
  - none of the adapters support it, but unit tests expose the behavior
tests: much more comprehensive cache tests


automatic commit by git-black, original commits:
  dadab35aee68ee12fcb9442567d0c6b9d3e3b671
---
 core/dbt/adapters/cache.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/core/dbt/adapters/cache.py b/core/dbt/adapters/cache.py
index e3d2137f664..aefa55edb55 100644
--- a/core/dbt/adapters/cache.py
+++ b/core/dbt/adapters/cache.py
@@ -127,9 +127,9 @@ def rename(self, new_relation):
         # table_name is ever anything but the identifier (via .create())
         self.inner = self.inner.incorporate(
             path={
-                'database': new_relation.inner.database,
-                'schema': new_relation.inner.schema,
-                'identifier': new_relation.inner.identifier
+                "database": new_relation.inner.database,
+                "schema": new_relation.inner.schema,
+                "identifier": new_relation.inner.identifier,
             },
         )
 

From 8a60bc6ca2116e1b5201ad35d3c40b05c6f34f34 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Tue, 25 Sep 2018 15:10:33 -0600
Subject: [PATCH 058/933] comment/todos/errors cleanup

automatic commit by git-black, original commits:
  a1cc37c6d936497b4e3230b5080909af8d2f8994
---
 core/dbt/adapters/cache.py | 13 ++++++-------
 1 file changed, 6 insertions(+), 7 deletions(-)

diff --git a/core/dbt/adapters/cache.py b/core/dbt/adapters/cache.py
index aefa55edb55..0bebf044bae 100644
--- a/core/dbt/adapters/cache.py
+++ b/core/dbt/adapters/cache.py
@@ -145,8 +145,9 @@ def rename_key(self, old_key, new_key):
         """
         if new_key in self.referenced_by:
             dbt.exceptions.raise_cache_inconsistent(
-                'in rename of "{}" -> "{}", new name is in the cache already'
-                .format(old_key, new_key)
+                'in rename of "{}" -> "{}", new name is in the cache already'.format(
+                    old_key, new_key
+                )
             )
 
         if old_key not in self.referenced_by:
@@ -267,15 +268,13 @@ def _add_link(self, referenced_key, dependent_key):
             return
         if referenced is None:
             dbt.exceptions.raise_cache_inconsistent(
-                'in add_link, referenced link key {} not in cache!'
-                .format(referenced_key)
+                "in add_link, referenced link key {} not in cache!".format(referenced_key)
             )
 
         dependent = self.relations.get(dependent_key)
         if dependent is None:
             dbt.exceptions.raise_cache_inconsistent(
-                'in add_link, dependent link key {} not in cache!'
-                .format(dependent_key)
+                "in add_link, dependent link key {} not in cache!".format(dependent_key)
             )
 
         assert dependent is not None  # we just raised!
@@ -476,7 +475,7 @@ def get_relations(self, database: Optional[str], schema: Optional[str]) -> List[
 
         if None in results:
             dbt.exceptions.raise_cache_inconsistent(
-                'in get_relations, a None relation was found in the cache!'
+                "in get_relations, a None relation was found in the cache!"
             )
         return results
 

From 7ccf055c167a1abca82447855dc4da090c165652 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 3 Oct 2018 09:40:12 -0600
Subject: [PATCH 059/933] handle a subtle iteration issue in the cache with
 extra locking

automatic commit by git-black, original commits:
  a03ca11ab9305b030b65c0f0b9eb1817fcb8e2b4
---
 core/dbt/adapters/cache.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/core/dbt/adapters/cache.py b/core/dbt/adapters/cache.py
index 0bebf044bae..e74e6841ebe 100644
--- a/core/dbt/adapters/cache.py
+++ b/core/dbt/adapters/cache.py
@@ -237,10 +237,7 @@ def dump_graph(self):
         # self.relations or any cache entry's referenced_by during iteration
         # it's a runtime error!
         with self.lock:
-            return {
-                dot_separated(k): v.dump_graph_entry()
-                for k, v in self.relations.items()
-            }
+            return {dot_separated(k): v.dump_graph_entry() for k, v in self.relations.items()}
 
     def _setdefault(self, relation: _CachedRelation):
         """Add a relation to the cache, or return it if it already exists.

From 3996cb7cda889f104b5b1e7e48c74806c879e97b Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Tue, 17 Dec 2019 11:13:33 -0700
Subject: [PATCH 060/933] Fix mypy checking

Make mypy check our nested namespace packages by putting dbt in the mypy_path.
Fix a number of exposed mypy/type checker complaints. The checker mostly
passes now even if you add `--check-untyped-defs`, though there are a couple lingering issues so I'll leave that out of CI
Change the return type of RunOperation a bit - adds a couple fields to appease mypy

Also, bump the mypy version (it catches a few more issues).


automatic commit by git-black, original commits:
  9cc7a7a87fcfdc2c558b91c9316efb22bf3fdb27
  fee9382c7f12f3fe82f778cad1e29ce997822913
---
 core/dbt/adapters/cache.py | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)

diff --git a/core/dbt/adapters/cache.py b/core/dbt/adapters/cache.py
index e74e6841ebe..593bd16851b 100644
--- a/core/dbt/adapters/cache.py
+++ b/core/dbt/adapters/cache.py
@@ -303,15 +303,11 @@ def add_link(self, referenced, dependent):
             return
         if ref_key not in self.relations:
             # Insert a dummy "external" relation.
-            referenced = referenced.replace(
-                type=referenced.External
-            )
+            referenced = referenced.replace(type=referenced.External)
             self.add(referenced)
         if dep_key not in self.relations:
             # Insert a dummy "external" relation.
-            dependent = dependent.replace(
-                type=referenced.External
-            )
+            dependent = dependent.replace(type=referenced.External)
             self.add(dependent)
         fire_event(AddLink(dep_key=dep_key, ref_key=ref_key))
         with self.lock:

From 7c34d3c16802c25166629e056f045d3bfcf7530e Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Thu, 9 Apr 2020 10:15:43 -0600
Subject: [PATCH 061/933] Add get_config_class_by_name function for adapters

automatic commit by git-black, original commits:
  ff9e0cd4cfbdff4627ba0a597adf5c307d70a580
---
 core/dbt/adapters/base/impl.py           | 14 ++++++++++++--
 core/dbt/adapters/factory.py             |  4 +---
 core/dbt/contracts/graph/model_config.py |  1 +
 3 files changed, 14 insertions(+), 5 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 74388f5a78d..a0fd62d47d8 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -4,8 +4,18 @@
 from datetime import datetime
 from itertools import chain
 from typing import (
-    Optional, Tuple, Callable, Iterable, Type, Dict, Any, List, Mapping,
-    Iterator, Union, Set
+    Optional,
+    Tuple,
+    Callable,
+    Iterable,
+    Type,
+    Dict,
+    Any,
+    List,
+    Mapping,
+    Iterator,
+    Union,
+    Set,
 )
 
 import agate
diff --git a/core/dbt/adapters/factory.py b/core/dbt/adapters/factory.py
index 39f51cb797b..8e92df6ea7b 100644
--- a/core/dbt/adapters/factory.py
+++ b/core/dbt/adapters/factory.py
@@ -49,9 +49,7 @@ def get_relation_class_by_name(self, name: str) -> Type[RelationProtocol]:
         adapter = self.get_adapter_class_by_name(name)
         return adapter.Relation
 
-    def get_config_class_by_name(
-        self, name: str
-    ) -> Type[AdapterConfig]:
+    def get_config_class_by_name(self, name: str) -> Type[AdapterConfig]:
         adapter = self.get_adapter_class_by_name(name)
         return adapter.AdapterSpecificConfigs
 
diff --git a/core/dbt/contracts/graph/model_config.py b/core/dbt/contracts/graph/model_config.py
index f4c5a88cb93..0e86a0a83eb 100644
--- a/core/dbt/contracts/graph/model_config.py
+++ b/core/dbt/contracts/graph/model_config.py
@@ -317,6 +317,7 @@ def update_from(
         """
         # sadly, this is a circular import
         from dbt.adapters.factory import get_config_class_by_name
+
         dct = self.to_dict(omit_none=False)
 
         adapter_config_cls = get_config_class_by_name(adapter_type)

From 0c99ed1bd9036e073c7857dcb9d5b177c079cedb Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 23 Dec 2019 11:24:59 -0700
Subject: [PATCH 062/933] improve errors on import to be more specifically
 correct

When the error is about not being able to import the plugin, indicate that
Otherwise log the original error stack trace at debug and re-raise it


automatic commit by git-black, original commits:
  df05037841f55f7bc235c8cefbad4896326cee76
---
 core/dbt/adapters/factory.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/core/dbt/adapters/factory.py b/core/dbt/adapters/factory.py
index 8e92df6ea7b..52b4fee395d 100644
--- a/core/dbt/adapters/factory.py
+++ b/core/dbt/adapters/factory.py
@@ -63,9 +63,9 @@ def load_plugin(self, name: str) -> Type[Credentials]:
         except ModuleNotFoundError as exc:
             # if we failed to import the target module in particular, inform
             # the user about it via a runtime error
-            if exc.name == 'dbt.adapters.' + name:
+            if exc.name == "dbt.adapters." + name:
                 fire_event(AdapterImportError(exc=exc))
-                raise RuntimeException(f'Could not find adapter type {name}!')
+                raise RuntimeException(f"Could not find adapter type {name}!")
             # otherwise, the error had to have come from some underlying
             # library. Log the stack trace.
 

From f8b85e7385e5013a70de1a512c5c0b8ed4800964 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 14 Oct 2019 10:04:21 -0600
Subject: [PATCH 063/933] Refactors for mypy:

initial refactoring of adapter factory stuff
Move HasCredentials protocol into connection contract and use that in the base connection


automatic commit by git-black, original commits:
  ef16a99f88910fe8bf07347eede9666ef28f0d6e
---
 core/dbt/adapters/factory.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/core/dbt/adapters/factory.py b/core/dbt/adapters/factory.py
index 52b4fee395d..fe283547a20 100644
--- a/core/dbt/adapters/factory.py
+++ b/core/dbt/adapters/factory.py
@@ -76,8 +76,8 @@ def load_plugin(self, name: str) -> Type[Credentials]:
 
         if plugin_type != name:
             raise RuntimeException(
-                f'Expected to find adapter with type named {name}, got '
-                f'adapter with type {plugin_type}'
+                f"Expected to find adapter with type named {name}, got "
+                f"adapter with type {plugin_type}"
             )
 
         with self.lock:
@@ -107,8 +107,7 @@ def lookup_adapter(self, adapter_name: str) -> Adapter:
         return self.adapters[adapter_name]
 
     def reset_adapters(self):
-        """Clear the adapters. This is useful for tests, which change configs.
-        """
+        """Clear the adapters. This is useful for tests, which change configs."""
         with self.lock:
             for adapter in self.adapters.values():
                 adapter.cleanup_connections()

From 32adf7ce7a35778e8193697b150bb2680ede5e51 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Fri, 19 Jun 2020 15:10:59 -0600
Subject: [PATCH 064/933] store package info on the factory instead of globally

adapter_types -> plugins
Use factory.packages instead of global PACKAGES


automatic commit by git-black, original commits:
  62a0bf87327f24486386315b776534409259aab6
---
 core/dbt/adapters/factory.py  | 8 ++------
 core/dbt/context/providers.py | 4 +---
 2 files changed, 3 insertions(+), 9 deletions(-)

diff --git a/core/dbt/adapters/factory.py b/core/dbt/adapters/factory.py
index fe283547a20..ba9dbcaf399 100644
--- a/core/dbt/adapters/factory.py
+++ b/core/dbt/adapters/factory.py
@@ -137,9 +137,7 @@ def get_adapter_plugins(self, name: Optional[str]) -> List[AdapterPlugin]:
             try:
                 plugin = self.plugins[plugin_name]
             except KeyError:
-                raise InternalException(
-                    f'No plugin found for {plugin_name}'
-                ) from None
+                raise InternalException(f"No plugin found for {plugin_name}") from None
             plugins.append(plugin)
             seen.add(plugin_name)
             if plugin.dependencies is None:
@@ -162,9 +160,7 @@ def get_include_paths(self, name: Optional[str]) -> List[Path]:
             try:
                 path = self.packages[package_name]
             except KeyError:
-                raise InternalException(
-                    f'No internal package listing found for {package_name}'
-                )
+                raise InternalException(f"No internal package listing found for {package_name}")
             paths.append(path)
         return paths
 
diff --git a/core/dbt/context/providers.py b/core/dbt/context/providers.py
index 276175bc0dd..e083ac90365 100644
--- a/core/dbt/context/providers.py
+++ b/core/dbt/context/providers.py
@@ -667,9 +667,7 @@ def __init__(
     # This overrides the method in ManifestContext, and provides
     # a model, which the ManifestContext builder does not
     def _get_namespace_builder(self):
-        internal_packages = get_adapter_package_names(
-            self.config.credentials.type
-        )
+        internal_packages = get_adapter_package_names(self.config.credentials.type)
         return MacroNamespaceBuilder(
             self.config.project_name,
             self.search_package,

From 5214ea9062f288d504966b20ed393ab11d5e560d Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Fri, 31 Jul 2020 11:51:01 -0600
Subject: [PATCH 065/933] adapter macro -> python

Updated tests


automatic commit by git-black, original commits:
  dfe2ea4d0a5c338e2c7cb569a47e32b1ce533bf4
---
 core/dbt/adapters/factory.py |  4 +---
 core/dbt/context/macros.py   | 24 ++++++------------------
 core/dbt/context/manifest.py | 14 +++++---------
 3 files changed, 12 insertions(+), 30 deletions(-)

diff --git a/core/dbt/adapters/factory.py b/core/dbt/adapters/factory.py
index ba9dbcaf399..58a64468960 100644
--- a/core/dbt/adapters/factory.py
+++ b/core/dbt/adapters/factory.py
@@ -148,9 +148,7 @@ def get_adapter_plugins(self, name: Optional[str]) -> List[AdapterPlugin]:
         return plugins
 
     def get_adapter_package_names(self, name: Optional[str]) -> List[str]:
-        package_names: List[str] = [
-            p.project_name for p in self.get_adapter_plugins(name)
-        ]
+        package_names: List[str] = [p.project_name for p in self.get_adapter_plugins(name)]
         package_names.append(GLOBAL_PROJECT_NAME)
         return package_names
 
diff --git a/core/dbt/context/macros.py b/core/dbt/context/macros.py
index 7d7caa1c14f..d349b882550 100644
--- a/core/dbt/context/macros.py
+++ b/core/dbt/context/macros.py
@@ -1,13 +1,9 @@
-from typing import (
-    Any, Dict, Iterable, Union, Optional, List, Iterator, Mapping, Set
-)
+from typing import Any, Dict, Iterable, Union, Optional, List, Iterator, Mapping, Set
 
 from dbt.clients.jinja import MacroGenerator, MacroStack
 from dbt.contracts.graph.parsed import ParsedMacro
 from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME
-from dbt.exceptions import (
-    raise_duplicate_macro_name, raise_compiler_error
-)
+from dbt.exceptions import raise_duplicate_macro_name, raise_compiler_error
 
 
 FlatNamespace = Dict[str, MacroGenerator]
@@ -68,9 +64,7 @@ def __getitem__(self, key: str) -> NamespaceMember:
                 return dct[key]
         raise KeyError(key)
 
-    def get_from_package(
-        self, package_name: Optional[str], name: str
-    ) -> Optional[MacroGenerator]:
+    def get_from_package(self, package_name: Optional[str], name: str) -> Optional[MacroGenerator]:
         pkg: FlatNamespace
         if package_name is None:
             return self.get(name)
@@ -79,9 +73,7 @@ def get_from_package(
         elif package_name in self.packages:
             return self.packages[package_name].get(name)
         else:
-            raise_compiler_error(
-                f"Could not find package '{package_name}'"
-            )
+            raise_compiler_error(f"Could not find package '{package_name}'")
 
 
 # This class builds the MacroNamespace by adding macros to
@@ -128,9 +120,7 @@ def _add_macro_to(
             hierarchy[macro.package_name] = namespace
 
         if macro.name in namespace:
-            raise_duplicate_macro_name(
-                macro_func.macro, macro, macro.package_name
-            )
+            raise_duplicate_macro_name(macro_func.macro, macro, macro.package_name)
         hierarchy[macro.package_name][macro.name] = macro_func
 
     def add_macro(self, macro: ParsedMacro, ctx: Dict[str, Any]):
@@ -139,9 +129,7 @@ def add_macro(self, macro: ParsedMacro, ctx: Dict[str, Any]):
         # MacroGenerator is in clients/jinja.py
         # a MacroGenerator object is a callable object that will
         # execute the MacroGenerator.__call__ function
-        macro_func: MacroGenerator = MacroGenerator(
-            macro, ctx, self.node, self.thread_ctx
-        )
+        macro_func: MacroGenerator = MacroGenerator(macro, ctx, self.node, self.thread_ctx)
 
         # internal macros (from plugins) will be processed separately from
         # project macros, so store them in a different place
diff --git a/core/dbt/context/manifest.py b/core/dbt/context/manifest.py
index c07a9cb9ed2..86896829194 100644
--- a/core/dbt/context/manifest.py
+++ b/core/dbt/context/manifest.py
@@ -17,6 +17,7 @@ class ManifestContext(ConfiguredContext):
     The given macros can override any previous context values, which will be
     available as if they were accessed relative to the package name.
     """
+
     # subclasses are QueryHeaderContext and ProviderContext
     def __init__(
         self,
@@ -45,9 +46,8 @@ def _build_namespace(self):
     def _get_namespace_builder(self) -> MacroNamespaceBuilder:
         # avoid an import loop
         from dbt.adapters.factory import get_adapter_package_names
-        internal_packages: List[str] = get_adapter_package_names(
-            self.config.credentials.type
-        )
+
+        internal_packages: List[str] = get_adapter_package_names(self.config.credentials.type)
         return MacroNamespaceBuilder(
             self.config.project_name,
             self.search_package,
@@ -70,14 +70,10 @@ def to_dict(self):
 
 
 class QueryHeaderContext(ManifestContext):
-    def __init__(
-        self, config: AdapterRequiredConfig, manifest: Manifest
-    ) -> None:
+    def __init__(self, config: AdapterRequiredConfig, manifest: Manifest) -> None:
         super().__init__(config, manifest, config.project_name)
 
 
-def generate_query_header_context(
-    config: AdapterRequiredConfig, manifest: Manifest
-):
+def generate_query_header_context(config: AdapterRequiredConfig, manifest: Manifest):
     ctx = QueryHeaderContext(config, manifest)
     return ctx.to_dict()

From 0f12b41556582341f3a1b877b4ca6cb2f210858b Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 17 Sep 2018 11:27:05 -0600
Subject: [PATCH 066/933] add a way for tests to reset the adapters known to
 dbt between runs

automatic commit by git-black, original commits:
  0b0e9e02e785adb6d6a75c758b2f967e3c2068ea
---
 core/dbt/adapters/factory.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/core/dbt/adapters/factory.py b/core/dbt/adapters/factory.py
index 58a64468960..c89204e0bdb 100644
--- a/core/dbt/adapters/factory.py
+++ b/core/dbt/adapters/factory.py
@@ -178,8 +178,7 @@ def get_adapter(config: AdapterRequiredConfig):
 
 
 def reset_adapters():
-    """Clear the adapters. This is useful for tests, which change configs.
-    """
+    """Clear the adapters. This is useful for tests, which change configs."""
     FACTORY.reset_adapters()
 
 

From e02764db0ef216a012fcb8070db51d0a19716b8c Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 4 Dec 2019 10:31:07 -0700
Subject: [PATCH 067/933] add a formal search order for materializations, tests

automatic commit by git-black, original commits:
  1b0d63515292e3b98cc96c8582b3aaf404185586
---
 core/dbt/adapters/base/impl.py       |  8 ++++++--
 core/dbt/contracts/graph/manifest.py | 13 +++++--------
 2 files changed, 11 insertions(+), 10 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index a0fd62d47d8..ff29b9c77e7 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -22,9 +22,13 @@
 import pytz
 
 from dbt.exceptions import (
-    raise_database_error, raise_compiler_error, invalid_type_error,
+    raise_database_error,
+    raise_compiler_error,
+    invalid_type_error,
     get_relation_returned_multiple_results,
-    InternalException, NotImplementedException, RuntimeException,
+    InternalException,
+    NotImplementedException,
+    RuntimeException,
 )
 
 from dbt.adapters.protocol import (
diff --git a/core/dbt/contracts/graph/manifest.py b/core/dbt/contracts/graph/manifest.py
index fe5288f1a94..44d9a088b45 100644
--- a/core/dbt/contracts/graph/manifest.py
+++ b/core/dbt/contracts/graph/manifest.py
@@ -356,16 +356,13 @@ def from_macro(
     def __eq__(self, other: object) -> bool:
         if not isinstance(other, MaterializationCandidate):
             return NotImplemented
-        equal = (
-            self.specificity == other.specificity and
-            self.locality == other.locality
-        )
+        equal = self.specificity == other.specificity and self.locality == other.locality
         if equal:
             raise_compiler_error(
-                'Found two materializations with the name {} (packages {} and '
-                '{}). dbt cannot resolve this ambiguity'
-                .format(self.macro.name, self.macro.package_name,
-                        other.macro.package_name)
+                "Found two materializations with the name {} (packages {} and "
+                "{}). dbt cannot resolve this ambiguity".format(
+                    self.macro.name, self.macro.package_name, other.macro.package_name
+                )
             )
 
         return equal

From 991cee9583bf2398bcfc57991df627ec6b5c8b14 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 5 Dec 2018 14:30:42 -0700
Subject: [PATCH 068/933] run_operation -> execute_macro

automatic commit by git-black, original commits:
  33ffafc7d6c8aacbf71954ebbbc2394b3f5f5e34
---
 core/dbt/adapters/base/impl.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index ff29b9c77e7..f535396f7a2 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -60,7 +60,7 @@
 SeedModel = Union[ParsedSeedNode, CompiledSeedNode]
 
 
-GET_CATALOG_MACRO_NAME = 'get_catalog'
+GET_CATALOG_MACRO_NAME = "get_catalog"
 FRESHNESS_MACRO_NAME = "collect_freshness"
 
 

From 162e5d168807895bcd129f6eaf9ea6c4548a52c1 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 23 Sep 2019 09:52:13 -0600
Subject: [PATCH 069/933] add filter field

automatic commit by git-black, original commits:
  d22f3653b78bebe9426c37ed4c46f3b555d4e434
---
 core/dbt/adapters/base/impl.py | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index f535396f7a2..58b5ac72054 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -90,9 +90,7 @@ def test(row: agate.Row) -> bool:
     return test
 
 
-def _utc(
-    dt: Optional[datetime], source: BaseRelation, field_name: str
-) -> datetime:
+def _utc(dt: Optional[datetime], source: BaseRelation, field_name: str) -> datetime:
     """If dt has a timezone, return a new datetime that's in UTC. Otherwise,
     assume the datetime is already for UTC and add the timezone.
     """
@@ -1039,13 +1037,13 @@ def calculate_freshness(
         source: BaseRelation,
         loaded_at_field: str,
         filter: Optional[str],
-        manifest: Optional[Manifest] = None
+        manifest: Optional[Manifest] = None,
     ) -> Dict[str, Any]:
         """Calculate the freshness of sources in dbt, and return it"""
         kwargs: Dict[str, Any] = {
             "source": source,
-            'loaded_at_field': loaded_at_field,
-            'filter': filter,
+            "loaded_at_field": loaded_at_field,
+            "filter": filter,
         }
 
         # run the macro

From 81712f1e20dab95785381c67473e84af8bdbe3f0 Mon Sep 17 00:00:00 2001
From: Drew Banin 
Date: Mon, 4 Mar 2019 20:39:31 -0500
Subject: [PATCH 070/933] pr feedback

automatic commit by git-black, original commits:
  d39a048e6e48ed666cfe3a2ddb1f35d72279bc24
---
 core/dbt/adapters/base/impl.py | 13 ++++++-------
 1 file changed, 6 insertions(+), 7 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 58b5ac72054..87bfdec1c3c 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -97,17 +97,16 @@ def _utc(dt: Optional[datetime], source: BaseRelation, field_name: str) -> datet
     if dt is None:
         raise raise_database_error(
             "Expected a non-null value when querying field '{}' of table "
-            " {} but received value 'null' instead".format(
-                field_name,
-                source))
+            " {} but received value 'null' instead".format(field_name, source)
+        )
 
-    elif not hasattr(dt, 'tzinfo'):
+    elif not hasattr(dt, "tzinfo"):
         raise raise_database_error(
             "Expected a timestamp value when querying field '{}' of table "
             "{} but received value of type '{}' instead".format(
-                field_name,
-                source,
-                type(dt).__name__))
+                field_name, source, type(dt).__name__
+            )
+        )
 
     elif dt.tzinfo:
         return dt.astimezone(pytz.UTC)

From e4fc34a3d5d1b77468a7dc61b92097925de67594 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Tue, 17 Sep 2019 10:12:12 -0600
Subject: [PATCH 071/933] expose the cache to macros

make new cache manipulation methods
mark the methods available to the sql context
move cache manipulation into appropriate macros and methods
update the changelog
fix some type checking


automatic commit by git-black, original commits:
  ca1c84c9d40a14ad19805a49eea3b5a7583fe46f
---
 core/dbt/adapters/base/impl.py | 15 ++++++---------
 1 file changed, 6 insertions(+), 9 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 87bfdec1c3c..1488aff5b81 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -116,7 +116,7 @@ def _utc(dt: Optional[datetime], source: BaseRelation, field_name: str) -> datet
 
 def _relation_name(rel: Optional[BaseRelation]) -> str:
     if rel is None:
-        return 'null relation'
+        return "null relation"
     else:
         return str(rel)
 
@@ -219,7 +219,7 @@ def connection_for(
         with self.connection_named(node.unique_id, node):
             yield
 
-    @available.parse(lambda *a, **k: ('', empty_table()))
+    @available.parse(lambda *a, **k: ("", empty_table()))
     def execute(
         self, sql: str, auto_begin: bool = False, fetch: bool = False
     ) -> Tuple[Union[str, AdapterResponse], agate.Table]:
@@ -375,9 +375,7 @@ def _relations_cache_for_schemas(self, manifest: Manifest) -> None:
             cache_update.add((relation.database, relation.schema))
         self.cache.update_schemas(cache_update)
 
-    def set_relations_cache(
-        self, manifest: Manifest, clear: bool = False
-    ) -> None:
+    def set_relations_cache(self, manifest: Manifest, clear: bool = False) -> None:
         """Run a query that gets a populated cache of the relations in the
         database and set the cache on this adapter.
         """
@@ -409,7 +407,7 @@ def cache_dropped(self, relation: Optional[BaseRelation]) -> str:
                 'Attempted to drop a null relation for {}'.format(name)
             )
         self.cache.drop(relation)
-        return ''
+        return ""
 
     @available
     def cache_renamed(
@@ -425,12 +423,11 @@ def cache_renamed(
             src_name = _relation_name(from_relation)
             dst_name = _relation_name(to_relation)
             raise_compiler_error(
-                'Attempted to rename {} to {} for {}'
-                .format(src_name, dst_name, name)
+                "Attempted to rename {} to {} for {}".format(src_name, dst_name, name)
             )
 
         self.cache.rename(from_relation, to_relation)
-        return ''
+        return ""
 
     ###
     # Abstract methods for database-specific values, attributes, and types

From 13079541b4625123150332499388980fc0f837a3 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Thu, 4 Oct 2018 12:05:20 -0600
Subject: [PATCH 072/933] Class hierarchy, deprecate and remove deprecated
 methods, abstract method definitions

automatic commit by git-black, original commits:
  350b81db99e8e926352b80053e763d6dfab7a940
---
 core/dbt/adapters/base/impl.py | 10 +++++-----
 core/dbt/adapters/sql/impl.py  |  3 +--
 2 files changed, 6 insertions(+), 7 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 1488aff5b81..464cef776be 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -157,6 +157,7 @@ class BaseAdapter(metaclass=AdapterMeta):
     Macros:
         - get_catalog
     """
+
     Relation: Type[BaseRelation] = BaseRelation
     Column: Type[BaseColumn] = BaseColumn
     ConnectionManager: Type[ConnectionManagerProtocol]
@@ -508,7 +509,7 @@ def get_columns_in_relation(self, relation: BaseRelation) -> List[BaseColumn]:
     ) -> List[BaseColumn]:
         """Get a list of the columns in the given Relation. """
         raise NotImplementedException(
-            '`get_columns_in_relation` is not implemented for this adapter!'
+            "`get_columns_in_relation` is not implemented for this adapter!"
         )
 
     @available.deprecated("get_columns_in_relation", lambda *a, **k: [])
@@ -532,7 +533,7 @@ def expand_column_types(self, goal: BaseRelation, current: BaseRelation) -> None
             database with columns of unspecified types.
         """
         raise NotImplementedException(
-            '`expand_target_column_types` is not implemented for this adapter!'
+            "`expand_target_column_types` is not implemented for this adapter!"
         )
 
     @abc.abstractmethod
@@ -549,8 +550,7 @@ def list_relations_without_caching(
         :rtype: List[self.Relation]
         """
         raise NotImplementedException(
-            '`list_relations_without_caching` is not implemented for this '
-            'adapter!'
+            "`list_relations_without_caching` is not implemented for this " "adapter!"
         )
 
     ###
@@ -669,7 +669,7 @@ def list_relations(self, database: Optional[str], schema: str) -> List[BaseRelat
 
         # we can't build the relations cache because we don't have a
         # manifest so we can't run any operations.
-        relations = self.list_relations_without_caching(
+        relations = self.list_relations_without_caching(schema_relation)
         fire_event(
             ListRelations(
                 database=database, schema=schema, relations=[_make_key(x) for x in relations]
diff --git a/core/dbt/adapters/sql/impl.py b/core/dbt/adapters/sql/impl.py
index a7a13c5ddbb..1e9f19c2789 100644
--- a/core/dbt/adapters/sql/impl.py
+++ b/core/dbt/adapters/sql/impl.py
@@ -114,8 +114,7 @@ def expand_column_types(self, goal, current):
         for column_name, reference_column in reference_columns.items():
             target_column = target_columns.get(column_name)
 
-            if target_column is not None and \
-               target_column.can_expand_to(reference_column):
+            if target_column is not None and target_column.can_expand_to(reference_column):
                 col_string_size = reference_column.string_size()
                 new_type = self.Column.string_type(col_string_size)
                 fire_event(

From 84976c1343a9c7d59ac240100ddb334bacf4ca74 Mon Sep 17 00:00:00 2001
From: Zach McQuiston 
Date: Wed, 7 Oct 2020 19:20:16 -0600
Subject: [PATCH 073/933] adding debug_query to base/impl.py enabling plugin
 authors to write their own debug_query

automatic commit by git-black, original commits:
  dcb6854683b610d5411ff5d6d7695236dd6f2e81
---
 core/dbt/adapters/base/impl.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 464cef776be..f9088d05942 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -191,7 +191,7 @@ def commit_if_has_connection(self) -> None:
         self.connections.commit_if_has_connection()
 
     def debug_query(self) -> None:
-        self.execute('select 1 as id')
+        self.execute("select 1 as id")
 
     def nice_connection_name(self) -> str:
         conn = self.connections.get_if_exists()

From b49a0b1a290a6013ccbceee28f0dda7bf42b44b7 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 20 Nov 2019 09:24:35 -0700
Subject: [PATCH 074/933] lazy-load connections

automatic commit by git-black, original commits:
  be36c5d974bd02faba5dd86aeb143899f897ff22
  ff158b8353306d75d3417bd230aea1ac15dd621d
---
 core/dbt/adapters/base/impl.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index f9088d05942..baf170f75fc 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -214,9 +214,7 @@ def connection_named(
                 self.connections.query_header.reset()
 
     @contextmanager
-    def connection_for(
-        self, node: CompileResultNode
-    ) -> Iterator[None]:
+    def connection_for(self, node: CompileResultNode) -> Iterator[None]:
         with self.connection_named(node.unique_id, node):
             yield
 

From 914dc09b97c26dc0382014fc4862f51199b2f905 Mon Sep 17 00:00:00 2001
From: Ran Ever-Hadani 
Date: Sat, 10 Oct 2020 17:44:07 -0700
Subject: [PATCH 075/933] Make partition metadata available to BigQuery users
 (rebased to dev/kiyoshi-kuromiya)

automatic commit by git-black, original commits:
  cce5945fd21bb5cc97c5cfece0b550c0154d4457
---
 core/dbt/adapters/base/impl.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index baf170f75fc..ce22512f04b 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -234,7 +234,7 @@ def execute(
         """
         return self.connections.execute(sql=sql, auto_begin=auto_begin, fetch=fetch)
 
-    @available.parse(lambda *a, **k: ('', empty_table()))
+    @available.parse(lambda *a, **k: ("", empty_table()))
     def get_partitions_metadata(
         self, table: str
     ) -> Tuple[agate.Table]:

From c272cd922a3fe8008b183180d9b8b78bdfb7b81f Mon Sep 17 00:00:00 2001
From: Ran Ever-Hadani 
Date: Sun, 11 Oct 2020 11:03:53 -0700
Subject: [PATCH 076/933] Accommodate first round of comments

automatic commit by git-black, original commits:
  cce5945fd21bb5cc97c5cfece0b550c0154d4457
  eda86412cc7ccdce881be721cc14bf5acdaed4ad
---
 core/dbt/adapters/base/impl.py | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index ce22512f04b..082bf7fb7ec 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -235,9 +235,7 @@ def execute(
         return self.connections.execute(sql=sql, auto_begin=auto_begin, fetch=fetch)
 
     @available.parse(lambda *a, **k: ("", empty_table()))
-    def get_partitions_metadata(
-        self, table: str
-    ) -> Tuple[agate.Table]:
+    def get_partitions_metadata(self, table: str) -> Tuple[agate.Table]:
         """Obtain partitions metadata for a BigQuery partitioned table.
 
         :param str table_id: a partitioned table id, in standard SQL format.
@@ -245,9 +243,7 @@ def get_partitions_metadata(
             https://cloud.google.com/bigquery/docs/creating-partitioned-tables#getting_partition_metadata_using_meta_tables.
         :rtype: agate.Table
         """
-        return self.connections.get_partitions_metadata(
-            table=table
-        )
+        return self.connections.get_partitions_metadata(table=table)
 
     ###
     # Methods that should never be overridden

From 7ef5aa970d1e1eeda9f42d12c0a7ae982b749421 Mon Sep 17 00:00:00 2001
From: Gerda Shank 
Date: Tue, 30 Mar 2021 12:16:08 -0400
Subject: [PATCH 077/933] Use Manifest instead of ParseResults [#3163]

automatic commit by git-black, original commits:
  307d47ebafd5ecf0d55b60dd5d1d44f643e7d3e6
---
 core/dbt/adapters/base/impl.py       |  2 +-
 core/dbt/contracts/graph/manifest.py | 21 +++++++--------------
 core/dbt/parser/manifest.py          | 19 +++++++++++--------
 core/dbt/parser/schemas.py           | 10 ++++++++--
 4 files changed, 27 insertions(+), 25 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 082bf7fb7ec..2661236c9a9 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -274,7 +274,7 @@ def load_macro_manifest(self) -> MacroManifest:
         if self._macro_manifest_lazy is None:
             # avoid a circular import
             from dbt.parser.manifest import ManifestLoader
-            manifest = ManifestLoader.load_macros(
+
                 self.config, self.connections.set_query_header
             )
             self._macro_manifest_lazy = manifest
diff --git a/core/dbt/contracts/graph/manifest.py b/core/dbt/contracts/graph/manifest.py
index 44d9a088b45..6b6d30a3db1 100644
--- a/core/dbt/contracts/graph/manifest.py
+++ b/core/dbt/contracts/graph/manifest.py
@@ -116,7 +116,7 @@ def add_source(self, source: ParsedSourceDefinition):
 
     def populate(self, manifest):
         for source in manifest.sources.values():
-            if hasattr(source, 'source_name'):
+            if hasattr(source, "source_name"):
                 self.add_source(source)
 
     def perform_lookup(
@@ -968,7 +968,7 @@ def add_macro(self, source_file: SourceFile, macro: ParsedMacro):
             # note that the line wrap eats newlines, so if you want newlines,
             # this is the result :(
             msg = line_wrap_message(
-                f'''\
+                f"""\
                 dbt found two macros named "{macro.name}" in the project
                 "{macro.package_name}".
 
@@ -979,8 +979,8 @@ def add_macro(self, source_file: SourceFile, macro: ParsedMacro):
                     - {macro.original_file_path}
 
                     - {other_path}
-                ''',
-                subtract=2
+                """,
+                subtract=2,
             )
             raise_compiler_error(msg)
 
@@ -1143,11 +1143,7 @@ class WritableManifest(ArtifactMixin):
     metadata: ManifestMetadata = field(metadata=dict(
         description='Metadata about the manifest',
     ))
-
-
-def _check_duplicates(
-    value: HasUniqueID, src: Mapping[str, HasUniqueID]
-):
+def _check_duplicates(value: HasUniqueID, src: Mapping[str, HasUniqueID]):
     if value.unique_id in src:
         raise_duplicate_resource_name(value, src[value.unique_id])
 
@@ -1156,13 +1152,10 @@ def _check_duplicates(
 V_T = TypeVar('V_T')
 
 
-def _expect_value(
-    key: K_T, src: Mapping[K_T, V_T], old_file: SourceFile, name: str
-) -> V_T:
+def _expect_value(key: K_T, src: Mapping[K_T, V_T], old_file: SourceFile, name: str) -> V_T:
     if key not in src:
         raise CompilationException(
             'Expected to find "{}" in cached "result.{}" based '
-            'on cached file information: {}!'
-            .format(key, name, old_file)
+            "on cached file information: {}!".format(key, name, old_file)
         )
     return src[key]
diff --git a/core/dbt/parser/manifest.py b/core/dbt/parser/manifest.py
index 1cbd7a1205e..cc41f99c3cc 100644
--- a/core/dbt/parser/manifest.py
+++ b/core/dbt/parser/manifest.py
@@ -578,7 +578,8 @@ def is_partial_parsable(self, manifest: Manifest) -> Tuple[bool, Optional[str]]:
             reparse_reason = ReparseReason.prof_env_vars_changed
 
         missing_keys = {
-            k for k in self.manifest.state_check.project_hashes
+            k
+            for k in self.manifest.state_check.project_hashes
             if k not in manifest.state_check.project_hashes
         }
         if missing_keys:
@@ -677,12 +678,14 @@ def build_manifest_state_check(self):
         # to not pass, it doesn't matter.  If we move to more granular checking
         # of env_vars, that would need to change.
         vars_hash = FileHash.from_contents(
-            '\x00'.join([
-                getattr(config.args, 'vars', '{}') or '{}',
-                getattr(config.args, 'profile', '') or '',
-                getattr(config.args, 'target', '') or '',
-                __version__
-            ])
+            "\x00".join(
+                [
+                    getattr(config.args, "vars", "{}") or "{}",
+                    getattr(config.args, "profile", "") or "",
+                    getattr(config.args, "target", "") or "",
+                    __version__,
+                ]
+            )
         )
 
         # Create a FileHash of the env_vars in the project
@@ -709,7 +712,7 @@ def build_manifest_state_check(self):
         # Create a FileHashes for dbt_project for all dependencies
         project_hashes = {}
         for name, project in all_projects.items():
-            path = os.path.join(project.project_root, 'dbt_project.yml')
+            path = os.path.join(project.project_root, "dbt_project.yml")
             with open(path) as fp:
                 project_hashes[name] = FileHash.from_contents(fp.read())
 
diff --git a/core/dbt/parser/schemas.py b/core/dbt/parser/schemas.py
index 6dcc26d56a1..387190df1fe 100644
--- a/core/dbt/parser/schemas.py
+++ b/core/dbt/parser/schemas.py
@@ -167,7 +167,10 @@ def _trimmed(inp: str) -> str:
 
 class SchemaParser(SimpleParser[GenericTestBlock, ParsedGenericTestNode]):
     def __init__(
-        self, project, manifest, root_project,
+        self,
+        project,
+        manifest,
+        root_project,
     ) -> None:
         super().__init__(project, manifest, root_project)
 
@@ -394,7 +397,10 @@ def render_test_update(self, node, config, builder, schema_file_id):
             try:
                 # make a base context that doesn't have the magic kwargs field
                 context = generate_test_context(
-                    node, self.root_project, self.manifest, config,
+                    node,
+                    self.root_project,
+                    self.manifest,
+                    config,
                     self.macro_resolver,
                 )
                 # update with rendered test kwargs (which collects any refs)

From d8ebdc8482095dcc7f4071a8dabc2b34f21d4cdc Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 5 Aug 2020 09:04:40 -0600
Subject: [PATCH 078/933] include project macros in the manifest the adapter
 stores locally

automatic commit by git-black, original commits:
  285479c0bc73901a33e8d155e170373cf4f5d04a
---
 core/dbt/adapters/base/impl.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 2661236c9a9..0471195d1b2 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -275,8 +275,7 @@ def load_macro_manifest(self) -> MacroManifest:
             # avoid a circular import
             from dbt.parser.manifest import ManifestLoader
 
-                self.config, self.connections.set_query_header
-            )
+            manifest = ManifestLoader.load_macros(self.config, self.connections.set_query_header)
             self._macro_manifest_lazy = manifest
         return self._macro_manifest_lazy
 

From 6fd8349d808827a7acee9600936953324c8f6605 Mon Sep 17 00:00:00 2001
From: Nathaniel May 
Date: Thu, 2 Dec 2021 15:04:52 -0500
Subject: [PATCH 079/933] change json override strategy (#4396)

automatic commit by git-black, original commits:
  74fbaa18cd7522161b8e0db97393c73d869a0498
  b2aea11cdb0304c466d9968281dfa6a53ac97476
---
 core/dbt/adapters/base/impl.py | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 0471195d1b2..2f7ce215a34 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -291,11 +291,7 @@ def _schema_is_cached(self, database: Optional[str], schema: str) -> bool:
 
         if (database, schema) not in self.cache:
             fire_event(
-                CacheMiss(
-                    conn_name=self.nice_connection_name(),
-                    database=database,
-                    schema=schema
-                )
+                CacheMiss(conn_name=self.nice_connection_name(), database=database, schema=schema)
             )
             return False
         else:

From b04b7d55b609a5606689482599c642900e76c983 Mon Sep 17 00:00:00 2001
From: Jeremy Cohen 
Date: Thu, 12 Aug 2021 18:18:28 -0400
Subject: [PATCH 080/933] Use is_relational check for schema caching (#3716)

* Use is_relational check for schema caching

* Fix flake8

* Update changelog

automatic commit by git-black, original commits:
  b633adb8813ccfdf52aa421864ef172a2b422a22
  f80a759488cf69664fd60eadede753fa9026ee2a
---
 core/dbt/adapters/base/impl.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 2f7ce215a34..0d2e085c739 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -305,9 +305,7 @@ def _get_cache_schemas(self, manifest: Manifest) -> Set[BaseRelation]:
         return {
             self.Relation.create_from(self.config, node).without_identifier()
             for node in manifest.nodes.values()
-            if (
-                node.is_relational and not node.is_ephemeral_model
-            )
+            if (node.is_relational and not node.is_ephemeral_model)
         }
 
     def _get_catalog_schemas(self, manifest: Manifest) -> SchemaSearchMap:

From 696eb47a3489156ac4ca2a2cede482808dc188dc Mon Sep 17 00:00:00 2001
From: dave-connors-3 <73915542+dave-connors-3@users.noreply.github.com>
Date: Thu, 23 Sep 2021 10:54:05 -0500
Subject: [PATCH 081/933] Feature/catalog relational objects (#3922)

* filter to relational nodes

* cleanup

* flake formatting

* changelog

automatic commit by git-black, original commits:
  f4f5d319592f71924d3616acea247f1eee59fd9a
---
 core/dbt/adapters/base/impl.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 0d2e085c739..e0844b70939 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -319,9 +319,11 @@ def _get_catalog_schemas(self, manifest: Manifest) -> SchemaSearchMap:
         """
         info_schema_name_map = SchemaSearchMap()
         nodes: Iterator[CompileResultNode] = chain(
-            [node for node in manifest.nodes.values() if (
-                node.is_relational and not node.is_ephemeral_model
-            )],
+            [
+                node
+                for node in manifest.nodes.values()
+                if (node.is_relational and not node.is_ephemeral_model)
+            ],
             manifest.sources.values(),
         )
         for node in nodes:

From 9bc9076ac54a1391dab33f997cf00db2daf629c6 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 4 Dec 2019 10:31:07 -0700
Subject: [PATCH 082/933] add a formal search order for materializations, tests

automatic commit by git-black, original commits:
  1b0d63515292e3b98cc96c8582b3aaf404185586
  9ffbb3ad02ab322694bf6ff33b08517d31f7920e
  e2af871a5adc1b6b0269ef900e04320550ac1bb8
---
 core/dbt/adapters/base/impl.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index e0844b70939..db4d8aff83a 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -379,9 +379,7 @@ def cache_added(self, relation: Optional[BaseRelation]) -> str:
         """Cache a new relation in dbt. It will show up in `list relations`."""
         if relation is None:
             name = self.nice_connection_name()
-            raise_compiler_error(
-                'Attempted to cache a null relation for {}'.format(name)
-            )
+            raise_compiler_error("Attempted to cache a null relation for {}".format(name))
         self.cache.add(relation)
         # so jinja doesn't render things
         return ""

From 508a889439e88a934a620167756cfb1f47654a72 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 4 Dec 2019 10:31:07 -0700
Subject: [PATCH 083/933] add a formal search order for materializations, tests

automatic commit by git-black, original commits:
  1b0d63515292e3b98cc96c8582b3aaf404185586
  ca1c84c9d40a14ad19805a49eea3b5a7583fe46f
---
 core/dbt/adapters/base/impl.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index db4d8aff83a..c02ebf6bbe3 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -391,9 +391,7 @@ def cache_dropped(self, relation: Optional[BaseRelation]) -> str:
         """
         if relation is None:
             name = self.nice_connection_name()
-            raise_compiler_error(
-                'Attempted to drop a null relation for {}'.format(name)
-            )
+            raise_compiler_error("Attempted to drop a null relation for {}".format(name))
         self.cache.drop(relation)
         return ""
 

From e4c3703a261f9d3ef52c6e4c5150106f4aca5c0c Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 4 Dec 2019 10:31:07 -0700
Subject: [PATCH 084/933] add a formal search order for materializations, tests

automatic commit by git-black, original commits:
  1b0d63515292e3b98cc96c8582b3aaf404185586
  350b81db99e8e926352b80053e763d6dfab7a940
---
 core/dbt/adapters/base/impl.py | 23 ++++++-----------------
 1 file changed, 6 insertions(+), 17 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index c02ebf6bbe3..d8caafcf96a 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -421,8 +421,7 @@ def cache_renamed(
     @abc.abstractclassmethod
     def date_function(cls) -> str:
         """Get the date function used by this adapter's database."""
-        raise NotImplementedException(
-            '`date_function` is not implemented for this adapter!')
+        raise NotImplementedException("`date_function` is not implemented for this adapter!")
 
     @abc.abstractclassmethod
     def is_cancelable(cls) -> bool:
@@ -464,9 +463,7 @@ def drop_relation(self, relation: BaseRelation) -> None:
 
         *Implementors must call self.cache.drop() to preserve cache state!*
         """
-        raise NotImplementedException(
-            '`drop_relation` is not implemented for this adapter!'
-        )
+        raise NotImplementedException("`drop_relation` is not implemented for this adapter!")
 
     @abc.abstractmethod
     @available.parse_none
@@ -483,9 +480,7 @@ def rename_relation(self, from_relation: BaseRelation, to_relation: BaseRelation
 
         Implementors must call self.cache.rename() to preserve cache state.
         """
-        raise NotImplementedException(
-            '`rename_relation` is not implemented for this adapter!'
-        )
+        raise NotImplementedException("`rename_relation` is not implemented for this adapter!")
 
     @abc.abstractmethod
     @available.parse_list
@@ -736,25 +731,19 @@ def already_exists(self, schema: str, name: str) -> bool:
     @available.parse_none
     def create_schema(self, relation: BaseRelation):
         """Create the given schema if it does not exist."""
-        raise NotImplementedException(
-            '`create_schema` is not implemented for this adapter!'
-        )
+        raise NotImplementedException("`create_schema` is not implemented for this adapter!")
 
     @abc.abstractmethod
     @available.parse_none
     def drop_schema(self, relation: BaseRelation):
         """Drop the given schema (and everything in it) if it exists."""
-        raise NotImplementedException(
-            '`drop_schema` is not implemented for this adapter!'
-        )
+        raise NotImplementedException("`drop_schema` is not implemented for this adapter!")
 
     @available
     @abc.abstractclassmethod
     def quote(cls, identifier: str) -> str:
         """Quote the given identifier, as appropriate for the database."""
-        raise NotImplementedException(
-            '`quote` is not implemented for this adapter!'
-        )
+        raise NotImplementedException("`quote` is not implemented for this adapter!")
 
     @available
     def quote_as_configured(self, identifier: str, quote_key: str) -> str:

From 0539ab452919c609510b2d4761fe5de048b10147 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 4 Dec 2019 10:31:07 -0700
Subject: [PATCH 085/933] add a formal search order for materializations, tests

automatic commit by git-black, original commits:
  1b0d63515292e3b98cc96c8582b3aaf404185586
  9ffbb3ad02ab322694bf6ff33b08517d31f7920e
---
 core/dbt/adapters/base/impl.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index d8caafcf96a..ceff36ca124 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -425,9 +425,7 @@ def date_function(cls) -> str:
 
     @abc.abstractclassmethod
     def is_cancelable(cls) -> bool:
-        raise NotImplementedException(
-            '`is_cancelable` is not implemented for this adapter!'
-        )
+        raise NotImplementedException("`is_cancelable` is not implemented for this adapter!")
 
     ###
     # Abstract methods about schemas

From 68b7bf59f95fc49e4c3c8db171bd6c0a639dec61 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 4 Dec 2019 10:31:07 -0700
Subject: [PATCH 086/933] add a formal search order for materializations, tests

automatic commit by git-black, original commits:
  1b0d63515292e3b98cc96c8582b3aaf404185586
  350b81db99e8e926352b80053e763d6dfab7a940
  8927aa8e026feacaa55344836a570dd431ac42c6
---
 core/dbt/adapters/base/impl.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index ceff36ca124..f7e852841c5 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -433,9 +433,7 @@ def is_cancelable(cls) -> bool:
     @abc.abstractmethod
     def list_schemas(self, database: str) -> List[str]:
         """Get a list of existing schemas in database"""
-        raise NotImplementedException(
-            '`list_schemas` is not implemented for this adapter!'
-        )
+        raise NotImplementedException("`list_schemas` is not implemented for this adapter!")
 
     @available.parse(lambda *a, **k: False)
     def check_schema_exists(self, database: str, schema: str) -> bool:

From b4a0e7613251b1013ff1aa80ac866e94fdbef288 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 4 Mar 2019 21:08:38 -0700
Subject: [PATCH 087/933] per-thread connections

parsing now always opens a connection, instead of waiting to need it
remove model_name/available_raw/etc


automatic commit by git-black, original commits:
  9ffbb3ad02ab322694bf6ff33b08517d31f7920e
  e2af871a5adc1b6b0269ef900e04320550ac1bb8
---
 core/dbt/adapters/base/impl.py | 15 +++------------
 1 file changed, 3 insertions(+), 12 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index f7e852841c5..cc5120346e7 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -443,10 +443,7 @@ def check_schema_exists(self, database: str, schema: str) -> bool:
         and adapters should implement it if there is an optimized path (and
         there probably is)
         """
-        search = (
-            s.lower() for s in
-            self.list_schemas(database=database)
-        )
+        search = (s.lower() for s in self.list_schemas(database=database))
         return schema.lower() in search
 
     ###
@@ -555,15 +552,9 @@ def get_missing_columns(
                 expected_type=self.Relation,
             )
 
-        from_columns = {
-            col.name: col for col in
-            self.get_columns_in_relation(from_relation)
-        }
+        from_columns = {col.name: col for col in self.get_columns_in_relation(from_relation)}
 
-        to_columns = {
-            col.name: col for col in
-            self.get_columns_in_relation(to_relation)
-        }
+        to_columns = {col.name: col for col in self.get_columns_in_relation(to_relation)}
 
         missing_columns = set(from_columns.keys()) - set(to_columns.keys())
 

From 7f3298c4d8c91a3e032c075ad5509a97102e715d Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 4 Dec 2019 10:31:07 -0700
Subject: [PATCH 088/933] add a formal search order for materializations, tests

automatic commit by git-black, original commits:
  0cf38bcd192d13048515f45f0f166a6222cb0435
  1b0d63515292e3b98cc96c8582b3aaf404185586
  350b81db99e8e926352b80053e763d6dfab7a940
---
 core/dbt/adapters/base/impl.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index cc5120346e7..0f0c4e2f9dd 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -462,9 +462,7 @@ def drop_relation(self, relation: BaseRelation) -> None:
     @available.parse_none
     def truncate_relation(self, relation: BaseRelation) -> None:
         """Truncate the given relation."""
-        raise NotImplementedException(
-            '`truncate_relation` is not implemented for this adapter!'
-        )
+        raise NotImplementedException("`truncate_relation` is not implemented for this adapter!")
 
     @abc.abstractmethod
     @available.parse_none

From bbdbcec672797c27e5a5dc17d8c32ebf03f5a1fb Mon Sep 17 00:00:00 2001
From: matt-winkler <75497565+matt-winkler@users.noreply.github.com>
Date: Wed, 21 Jul 2021 13:49:19 -0600
Subject: [PATCH 089/933] Feature: on_schema_change for incremental models
 (#3387)

* detect and act on schema changes

* update incremental helpers code

* update changelog

* fix error in diff_columns from testing

* abstract code a bit further

* address matching names vs. data types

* Update CHANGELOG.md

Co-authored-by: Jeremy Cohen 

* updates from Jeremy's feedback

* multi-column add / remove with full_refresh

* simple changes from JC's feedback

* updated for snowflake

* reorganize postgres code

* reorganize approach

* updated full refresh trigger logic

* fixed unintentional wipe behavior

* catch final else condition

* remove WHERE string replace

* touch ups

* port core to snowflake

* added bigquery code

* updated impacted unit tests

* updates from linting tests

* updates from linting again

* snowflake updates from further testing

* fix logging

* clean up incremental logic

* updated for bigquery

* update postgres with new strategy

* update nodeconfig

* starting integration tests

* integration test for ignore case

* add test for append_new_columns

* add integration test for sync

* remove extra tests

* add unique key and snowflake test

* move incremental integration test dir

* update integration tests

* update integration tests

* Suggestions for #3387 (#3558)

* PR feedback: rationalize macros + logging, fix + expand tests

* Rm alter_column_types, always true for sync_all_columns

* update logging and integration test on sync

* update integration tests

* test fix SF integration tests

Co-authored-by: Matt Winkler 

* rename integration test folder

* Update core/dbt/include/global_project/macros/materializations/incremental/incremental.sql

Accept Jeremy's suggested change

Co-authored-by: Jeremy Cohen 

* Update changelog [skip ci]

Co-authored-by: Jeremy Cohen 

automatic commit by git-black, original commits:
  2799a8c34d02c14f2384d875fb57abb690fed8f6
  bd7010678a7f11fe18dd360132de1f304bce0229
---
 core/dbt/adapters/base/impl.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 0f0c4e2f9dd..9e5ccdbd60e 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -476,9 +476,7 @@ def rename_relation(self, from_relation: BaseRelation, to_relation: BaseRelation
     @abc.abstractmethod
     @available.parse_list
     def get_columns_in_relation(self, relation: BaseRelation) -> List[BaseColumn]:
-        self, relation: BaseRelation
-    ) -> List[BaseColumn]:
-        """Get a list of the columns in the given Relation. """
+        """Get a list of the columns in the given Relation."""
         raise NotImplementedException(
             "`get_columns_in_relation` is not implemented for this adapter!"
         )

From 286a647bb47c10c5d91086960ff23a674a9ea5fc Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Tue, 5 Feb 2019 09:34:33 -0700
Subject: [PATCH 090/933] backwards compatibility work

create deprecation warning decorator for deprecated available methods
make already_exists just take schema/name again and direct users to get_relation
remove test that cannot fail (the deprecation does not exist!)
add a little deprecation warning check to test_simple_copy


automatic commit by git-black, original commits:
  faadb34affd68482b1b71602991216584a9a0e06
---
 core/dbt/adapters/base/impl.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 9e5ccdbd60e..c5f07981496 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -488,7 +488,7 @@ def get_columns_in_table(self, schema: str, identifier: str) -> List[BaseColumn]
             database=self.config.credentials.database,
             schema=schema,
             identifier=identifier,
-            quote_policy=self.config.quoting
+            quote_policy=self.config.quoting,
         )
         return self.get_columns_in_relation(relation)
 

From 4ee61a456c4617bb9bb5987f26bad4e310fa3bc0 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 6 May 2020 13:20:07 -0600
Subject: [PATCH 091/933] Change list_relations_without_caching macro to take a
 single argument

The argument is a Relation object with no identifier field, configured with the appropriate quoting information

Unique quoted/unquoted representations will be treated as distinct
The logic for generating what schemas to search for relations is now distinct from the catalog search logic.
Schema creation/dropping takes a similar relation argument
Add tests


automatic commit by git-black, original commits:
  2799a8c34d02c14f2384d875fb57abb690fed8f6
  e392212c0e34ee1fe8759d46b402d13342d874f7
---
 core/dbt/adapters/base/impl.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index c5f07981496..8191f2b7173 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -506,9 +506,7 @@ def expand_column_types(self, goal: BaseRelation, current: BaseRelation) -> None
         )
 
     @abc.abstractmethod
-    def list_relations_without_caching(
-        self, schema_relation: BaseRelation
-    ) -> List[BaseRelation]:
+    def list_relations_without_caching(self, schema_relation: BaseRelation) -> List[BaseRelation]:
         """List relations in the given schema, bypassing the cache.
 
         This is used as the underlying behavior to fill the cache.

From 0370badbacd4a3e57493c9035c3223f60de27ddb Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 5 Jun 2019 08:40:37 -0600
Subject: [PATCH 092/933] archives -> snapshots, except legacy stuff

automatic commit by git-black, original commits:
  f3701ab837add35e4adfc40eb656e19b32e1d731
---
 core/dbt/adapters/base/impl.py | 2 +-
 core/dbt/config/project.py     | 2 +-
 core/dbt/main.py               | 6 +++---
 core/dbt/task/list.py          | 2 +-
 4 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 8191f2b7173..d950b249a08 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -565,7 +565,7 @@ def valid_snapshot_target(self, relation: BaseRelation) -> None:
         """
         if not isinstance(relation, self.Relation):
             invalid_type_error(
-                method_name='valid_snapshot_target',
+                method_name="valid_snapshot_target",
                 arg_name='relation',
                 got_value=relation,
                 expected_type=self.Relation)
diff --git a/core/dbt/config/project.py b/core/dbt/config/project.py
index 6ad02ed4be9..59b6585cd23 100644
--- a/core/dbt/config/project.py
+++ b/core/dbt/config/project.py
@@ -606,7 +606,7 @@ def to_project_config(self, with_packages=False):
                 "analysis-paths": self.analysis_paths,
             'asset-paths': self.asset_paths,
                 "asset-paths": self.asset_paths,
-            'snapshot-paths': self.snapshot_paths,
+                "target-path": self.target_path,
                 "snapshot-paths": self.snapshot_paths,
                 "clean-targets": self.clean_targets,
                 "log-path": self.log_path,
diff --git a/core/dbt/main.py b/core/dbt/main.py
index 24f8c86a1c2..4541965d982 100644
--- a/core/dbt/main.py
+++ b/core/dbt/main.py
@@ -857,7 +857,7 @@ def _build_list_subparser(subparsers, base_subparser):
 
 def _build_run_operation_subparser(subparsers, base_subparser):
     sub = subparsers.add_parser(
-        'run-operation',
+        "run-operation",
         parents=[base_subparser],
         help='''
         Run the named macro with any supplied arguments.
@@ -871,9 +871,9 @@ def _build_run_operation_subparser(subparsers, base_subparser):
         ''',
     )
     sub.add_argument(
-        '--args',
+        "--args",
         type=str,
-        default='{}',
+        default="{}",
         help='''
         Supply arguments to the macro. This dictionary will be mapped to the
         keyword arguments defined in the selected macro. This argument should
diff --git a/core/dbt/task/list.py b/core/dbt/task/list.py
index 67d153f0ea3..57c1cdbd542 100644
--- a/core/dbt/task/list.py
+++ b/core/dbt/task/list.py
@@ -18,7 +18,7 @@
 class ListTask(GraphRunnableTask):
     DEFAULT_RESOURCE_VALUES = frozenset((
         NodeType.Model,
-        NodeType.Snapshot,
+            NodeType.Model,
         NodeType.Seed,
         NodeType.Test,
         NodeType.Source,

From 0b7b53f903f75fc304cdbd4bfab618cb6c1d6ca4 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 25 Mar 2019 08:00:11 -0600
Subject: [PATCH 093/933] get archive blocks working

tests
fix event tracking test
Fix print statements
make archives not inherit configs from models
archive now uses the name/alias properly for everything instead of target_table
skip non-archive blocks in archive parsing instead of raising
make archives ref-able
 - test for archive ref, test for archive selects
raise a more useful message on incorrect archive targets
add "--models" and "--exclude" arguments to archives
 - pass them through to selection
 - change get_fqn to take a full node object, have archives use that so selection behaves well
 - added tests

Improve error handling on invalid archive configs

Added a special archive-only node that has extra config restrictions
add tests for invalid archive config


automatic commit by git-black, original commits:
  be3445b78a9035b43946ff51ed861fa443def91a
---
 core/dbt/adapters/base/impl.py | 16 +++++++++-------
 1 file changed, 9 insertions(+), 7 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index d950b249a08..875d7d335dd 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -566,17 +566,18 @@ def valid_snapshot_target(self, relation: BaseRelation) -> None:
         if not isinstance(relation, self.Relation):
             invalid_type_error(
                 method_name="valid_snapshot_target",
-                arg_name='relation',
+                arg_name="relation",
                 got_value=relation,
-                expected_type=self.Relation)
+                expected_type=self.Relation,
+            )
 
         columns = self.get_columns_in_relation(relation)
         names = set(c.name.lower() for c in columns)
-        expanded_keys = ('scd_id', 'valid_from', 'valid_to')
+        expanded_keys = ("scd_id", "valid_from", "valid_to")
         extra = []
         missing = []
         for legacy in expanded_keys:
-            desired = 'dbt_' + legacy
+            desired = "dbt_" + legacy
             if desired not in names:
                 missing.append(desired)
                 if legacy in names:
@@ -586,11 +587,12 @@ def valid_snapshot_target(self, relation: BaseRelation) -> None:
             if extra:
                 msg = (
                     'Snapshot target has ("{}") but not ("{}") - is it an '
-                    'unmigrated previous version archive?'
-                    .format('", "'.join(extra), '", "'.join(missing))
+                    "unmigrated previous version archive?".format(
+                        '", "'.join(extra), '", "'.join(missing)
+                    )
                 )
             else:
-                msg = (
+                msg = 'Snapshot target is not a snapshot table (missing "{}")'.format(
                     'Snapshot target is not a snapshot table (missing "{}")'
                     .format('", "'.join(missing))
                 )

From 3431f9e174d74ced5e6a7df41f56f858f6ebc0a3 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 5 Jun 2019 08:40:37 -0600
Subject: [PATCH 094/933] archives -> snapshots, except legacy stuff

automatic commit by git-black, original commits:
  be3445b78a9035b43946ff51ed861fa443def91a
  f3701ab837add35e4adfc40eb656e19b32e1d731
---
 core/dbt/adapters/base/impl.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 875d7d335dd..ecac7142603 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -593,8 +593,7 @@ def valid_snapshot_target(self, relation: BaseRelation) -> None:
                 )
             else:
                 msg = 'Snapshot target is not a snapshot table (missing "{}")'.format(
-                    'Snapshot target is not a snapshot table (missing "{}")'
-                    .format('", "'.join(missing))
+                    '", "'.join(missing)
                 )
             raise_compiler_error(msg)
 

From 58b30e74935a82ca0c6ef2c7e25635739c0e37e9 Mon Sep 17 00:00:00 2001
From: Drew Banin 
Date: Fri, 26 Apr 2019 14:00:42 -0400
Subject: [PATCH 095/933] Implement archival using a merge abstraction

automatic commit by git-black, original commits:
  7d490d4886daab2c16c2a1d517f86a27a8f5dac9
---
 core/dbt/adapters/base/impl.py                             | 7 ++++---
 .../macros/materializations/snapshots/snapshot_merge.sql   | 1 -
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index ecac7142603..ca1346f7578 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -603,10 +603,11 @@ def expand_target_column_types(
     ) -> None:
         if not isinstance(from_relation, self.Relation):
             invalid_type_error(
-                method_name='expand_target_column_types',
-                arg_name='from_relation',
+                method_name="expand_target_column_types",
+                arg_name="from_relation",
                 got_value=from_relation,
-                expected_type=self.Relation)
+                expected_type=self.Relation,
+            )
 
         if not isinstance(to_relation, self.Relation):
             invalid_type_error(
diff --git a/core/dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql b/core/dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql
index b2b21c59de8..6bc50fd3bf4 100644
--- a/core/dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql
+++ b/core/dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql
@@ -23,4 +23,3 @@
         values ({{ insert_cols_csv }})
 
 {% endmacro %}
-

From d04a9f33cb6bb906db4b0ba0167ebbc24414dfc1 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Tue, 24 Sep 2019 09:40:55 -0600
Subject: [PATCH 096/933] Convert Relation types to hologram.JsonSchemaMixin

Fix a lot of mypy things, add a number of adapter-ish modules to it
Split relations and columns into separate files
split context.common into base + common
 - base is all that's required for the config renderer
Move Credentials into connection contracts since that's what they really are
Removed model_name/table_name -> consolidated to identifier
 - I hope I did not break seeds, which claimed to care about render(False)
Unify shared 'external' relation type with bigquery's own
hack workarounds for some import cycles with plugin registration and config p
arsing
Assorted backwards compatibility fixes around types, deep_merge vs shallow merge
Remove APIObject


automatic commit by git-black, original commits:
  088442e9c13ee3aa86c71e37278c4aeb0ac9120c
  8b58b208cacd685e21312f7a225a542705712842
  eb9bfcda4ab314e4ee5bf548fab07ee4fadbee4d
---
 core/dbt/adapters/base/impl.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index ca1346f7578..f982276b848 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -624,10 +624,7 @@ def list_relations(self, database: Optional[str], schema: str) -> List[BaseRelat
             return self.cache.get_relations(database, schema)
 
         schema_relation = self.Relation.create(
-            database=database,
-            schema=schema,
-            identifier='',
-            quote_policy=self.config.quoting
+            database=database, schema=schema, identifier="", quote_policy=self.config.quoting
         ).without_identifier()
 
         # we can't build the relations cache because we don't have a

From bbda7e5024145521e765f51162bad2decfb6cc8a Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Tue, 18 Sep 2018 14:24:43 -0600
Subject: [PATCH 097/933] make adapters into objects, fix unit tests

automatic commit by git-black, original commits:
  8e84f53c65bba0eb5197e1580d81b48aa2b8ac74
---
 core/dbt/adapters/base/impl.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index f982276b848..b199db31b7c 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -640,10 +640,10 @@ def list_relations(self, database: Optional[str], schema: str) -> List[BaseRelat
 
     def _make_match_kwargs(self, database: str, schema: str, identifier: str) -> Dict[str, str]:
         quoting = self.config.quoting
-        if identifier is not None and quoting['identifier'] is False:
+        if identifier is not None and quoting["identifier"] is False:
             identifier = identifier.lower()
 
-        if schema is not None and quoting['schema'] is False:
+        if schema is not None and quoting["schema"] is False:
             schema = schema.lower()
 
         if database is not None and quoting['database'] is False:

From 6a6b9cfd700740fb60a8968682aca764fcbfe36d Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Tue, 16 Apr 2019 10:20:13 -0600
Subject: [PATCH 098/933] Quote databases when we list them Fix a copy+paste
 error that broke database quoting configuration

automatic commit by git-black, original commits:
  97a6a51bece3a0ec033f274a84dd0a10164ee8e5
---
 core/dbt/adapters/base/impl.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index b199db31b7c..464c5601959 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -646,7 +646,7 @@ def _make_match_kwargs(self, database: str, schema: str, identifier: str) -> Dic
         if schema is not None and quoting["schema"] is False:
             schema = schema.lower()
 
-        if database is not None and quoting['database'] is False:
+        if database is not None and quoting["database"] is False:
             database = database.lower()
 
         return filter_null_values(

From 8871cfd06809d513c3b68a5a690fae65c4eb16d3 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 4 Dec 2019 10:31:07 -0700
Subject: [PATCH 099/933] add a formal search order for materializations, tests

automatic commit by git-black, original commits:
  1b0d63515292e3b98cc96c8582b3aaf404185586
  874ead97514e66686244cef629f041d87da7e3b3
---
 core/dbt/adapters/base/impl.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 464c5601959..6c99aeb2e59 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -687,9 +687,7 @@ def get_relation(self, database: str, schema: str, identifier: str) -> Optional[
                 "schema": schema,
                 "database": database,
             }
-            get_relation_returned_multiple_results(
-                kwargs, matches
-            )
+            get_relation_returned_multiple_results(kwargs, matches)
 
         elif matches:
             return matches[0]

From 79f073b6b3f510cf7ce0d6e04ba8192c9cea675c Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 18 Nov 2019 15:10:35 -0700
Subject: [PATCH 100/933] make column quoting optional

automatic commit by git-black, original commits:
  c2b2f70a69866b72040c22749007e1481a289290
---
 core/dbt/adapters/base/impl.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 6c99aeb2e59..a92fcbf9309 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -743,9 +743,7 @@ def quote_as_configured(self, identifier: str, quote_key: str) -> str:
             return identifier
 
     @available
-    def quote_seed_column(
-        self, column: str, quote_config: Optional[bool]
-    ) -> str:
+    def quote_seed_column(self, column: str, quote_config: Optional[bool]) -> str:
         quote_columns: bool = True
         if isinstance(quote_config, bool):
             quote_columns = quote_config
@@ -754,7 +752,7 @@ def quote_seed_column(
         else:
             raise_compiler_error(
                 f'The seed configuration value of "quote_columns" has an '
-                f'invalid type {type(quote_config)}'
+                f"invalid type {type(quote_config)}"
             )
 
         if quote_columns:

From ca9d47a1f6b53d527a9fda86dd4490e804e7e4fd Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 4 Dec 2019 10:31:07 -0700
Subject: [PATCH 101/933] add a formal search order for materializations, tests

automatic commit by git-black, original commits:
  0372fefae0e194dfe357d952e1e7cb2f336fe231
  1b0d63515292e3b98cc96c8582b3aaf404185586
---
 core/dbt/adapters/base/impl.py | 12 ++++--------
 1 file changed, 4 insertions(+), 8 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index a92fcbf9309..44f05915e0e 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -773,8 +773,7 @@ def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str:
         :param col_idx: The index into the agate table for the column.
         :return: The name of the type in the database
         """
-        raise NotImplementedException(
-            '`convert_text_type` is not implemented for this adapter!')
+        raise NotImplementedException("`convert_text_type` is not implemented for this adapter!")
 
     @abc.abstractclassmethod
     def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str:
@@ -785,8 +784,7 @@ def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str:
         :param col_idx: The index into the agate table for the column.
         :return: The name of the type in the database
         """
-        raise NotImplementedException(
-            '`convert_number_type` is not implemented for this adapter!')
+        raise NotImplementedException("`convert_number_type` is not implemented for this adapter!")
 
     @abc.abstractclassmethod
     def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str:
@@ -821,8 +819,7 @@ def convert_date_type(cls, agate_table: agate.Table, col_idx: int) -> str:
         :param col_idx: The index into the agate table for the column.
         :return: The name of the type in the database
         """
-        raise NotImplementedException(
-            '`convert_date_type` is not implemented for this adapter!')
+        raise NotImplementedException("`convert_date_type` is not implemented for this adapter!")
 
     @abc.abstractclassmethod
     def convert_time_type(cls, agate_table: agate.Table, col_idx: int) -> str:
@@ -833,8 +830,7 @@ def convert_time_type(cls, agate_table: agate.Table, col_idx: int) -> str:
         :param col_idx: The index into the agate table for the column.
         :return: The name of the type in the database
         """
-        raise NotImplementedException(
-            '`convert_time_type` is not implemented for this adapter!')
+        raise NotImplementedException("`convert_time_type` is not implemented for this adapter!")
 
     @available
     @classmethod

From 28063f7c7db8e438f216325d5b964c77697ba693 Mon Sep 17 00:00:00 2001
From: Buck Ryan 
Date: Sat, 10 Feb 2018 11:28:24 -0500
Subject: [PATCH 102/933] Seed rewrite (#618)

* loader for seed data files

* Functioning rework of seed task

* Make CompilerRunner fns private and impl. SeedRunner.compile

Trying to distinguish between the public/private interface for this
class. And the SeedRunner doesn't need the functionality in the compile
function, it just needs a compile function to exist for use in the
compilation process.

* Test changes and fixes

* make the DB setup script usable locally

* convert simple copy test to use seed

* Fixes to get Snowflake working

* New seed flag and make it non-destructive by default

* Convert update SQL script to another seed

* cleanup

* implement bigquery csv load

* context handling of StringIO

* Better typing

* strip seeder and csvkit dependency

* update bigquery to use new data typing and to fix unicode issue

* update seed test

* fix abstract functions in base adapter

* support time type

* try pinning crypto, pyopenssl versions

* remove unnecessary version pins

* insert all at once, rather than one query per row

* do not quote field names on creation

* bad

* quiet down parsedatetime logger

* pep8

* UI updates + node conformity for seed nodes

* add seed to list of resource types, cleanup

* show option for CSVs

* typo

* pep8

* move agate import to avoid strange warnings

* deprecation warning for --drop-existing

* quote column names in seed files

* revert quoting change (breaks Snowflake). Hush warnings


automatic commit by git-black, original commits:
  0372fefae0e194dfe357d952e1e7cb2f336fe231
---
 core/dbt/adapters/base/impl.py | 6 ++++--
 core/dbt/main.py               | 8 ++++----
 core/dbt/node_types.py         | 2 +-
 3 files changed, 9 insertions(+), 7 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 44f05915e0e..a00c849f8b6 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -796,7 +796,8 @@ def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str:
         :return: The name of the type in the database
         """
         raise NotImplementedException(
-            '`convert_boolean_type` is not implemented for this adapter!')
+            "`convert_boolean_type` is not implemented for this adapter!"
+        )
 
     @abc.abstractclassmethod
     def convert_datetime_type(cls, agate_table: agate.Table, col_idx: int) -> str:
@@ -808,7 +809,8 @@ def convert_datetime_type(cls, agate_table: agate.Table, col_idx: int) -> str:
         :return: The name of the type in the database
         """
         raise NotImplementedException(
-            '`convert_datetime_type` is not implemented for this adapter!')
+            "`convert_datetime_type` is not implemented for this adapter!"
+        )
 
     @abc.abstractclassmethod
     def convert_date_type(cls, agate_table: agate.Table, col_idx: int) -> str:
diff --git a/core/dbt/main.py b/core/dbt/main.py
index 4541965d982..8fb8bfa72c3 100644
--- a/core/dbt/main.py
+++ b/core/dbt/main.py
@@ -673,15 +673,15 @@ def _build_seed_subparser(subparsers, base_subparser):
         ''',
     )
     seed_sub.add_argument(
-        '--full-refresh',
-        action='store_true',
+        "--full-refresh",
+        action="store_true",
         help='''
         Drop existing seed tables and recreate them
         ''',
     )
     seed_sub.add_argument(
-        '--show',
-        action='store_true',
+        "--show",
+        action="store_true",
         help='''
         Show a sample of the loaded data in the terminal
         '''
diff --git a/core/dbt/node_types.py b/core/dbt/node_types.py
index 5f4e1c5e578..d6a0a2ab70d 100644
--- a/core/dbt/node_types.py
+++ b/core/dbt/node_types.py
@@ -9,7 +9,7 @@ class NodeType(StrEnum):
     Test = 'test'
     Snapshot = 'snapshot'
     Operation = 'operation'
-    Seed = 'seed'
+    Seed = "seed"
     # TODO: rm?
     RPCCall = 'rpc'
     SqlOperation = 'sql'

From c968bd8a3608a6efe81277f0b9830d7b513971b1 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 29 Jan 2020 13:29:43 -0700
Subject: [PATCH 103/933] More macro search changes

Unify the lookup for the various macros, all of them are done in the manifest now:
 - dbt's built-in package + all plugins are part of 'core'
 - finding macros has the concept of an optional package name
 - materializations are a special case of finding macros
 - the `generate_*_name` macros are another special case (non-core dependency packages are ignored)
 - generating the macro execution context now uses the concept of an optional search_package_name
 - macro execution now binds these two together

Unify the lookup for models/docs/sources
 - added a search_name property to those three types
 - create a 'Searchable' protocol, have the node search code accept that
 - simplify matching logic/push it into the correct types accordingly

Rename get_materialization_macro to find_materialization_macro_by_name (consistency)

context namespacing behavior:
 - dbt run-operation now passes the named package along to macro execution if a package name is specified
 - so `dbt run-operation dependency.ad` runs with the dependency namespace as local, overriding globals
 - but `dbt run-operation my_macro` runs in the current package namespace as local, even if it ultimately runs the dependency's my_macro()
 - the current package namespace is always treated as "global", overriding core macros

Tons of tests


automatic commit by git-black, original commits:
  62755fe5b1360620b0e3edbc52b01325c1ae809f
---
 core/dbt/adapters/base/impl.py       |  4 +---
 core/dbt/contracts/graph/manifest.py | 28 ++++++++++++++++------------
 core/dbt/contracts/graph/parsed.py   |  2 +-
 core/dbt/node_types.py               |  4 ++--
 4 files changed, 20 insertions(+), 18 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index a00c849f8b6..10a74af2ea1 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -889,9 +889,7 @@ def execute_macro(
         if manifest is None:
             manifest = self._macro_manifest
 
-        macro = manifest.find_macro_by_name(
-            macro_name, self.config.project_name, project
-        )
+        macro = manifest.find_macro_by_name(macro_name, self.config.project_name, project)
         if macro is None:
             if project is None:
                 package_name = 'any package'
diff --git a/core/dbt/contracts/graph/manifest.py b/core/dbt/contracts/graph/manifest.py
index 6b6d30a3db1..423a3972fb8 100644
--- a/core/dbt/contracts/graph/manifest.py
+++ b/core/dbt/contracts/graph/manifest.py
@@ -4,7 +4,7 @@
 from mashumaro import DataClassMessagePackMixin
 from multiprocessing.synchronize import Lock
 from typing import (
-    Dict, List, Optional, Union, Mapping, MutableMapping, Any, Set, Tuple,
+    Dict,
     TypeVar, Callable, Generic, cast, AbstractSet, ClassVar
 )
 from typing_extensions import Protocol
@@ -346,7 +346,7 @@ class MaterializationCandidate(MacroCandidate):
     @classmethod
     def from_macro(
         cls, candidate: MacroCandidate, specificity: Specificity
-    ) -> 'MaterializationCandidate':
+    ) -> "MaterializationCandidate":
         return cls(
             locality=candidate.locality,
             macro=candidate.macro,
@@ -381,7 +381,7 @@ def __lt__(self, other: object) -> bool:
         return False
 
 
-M = TypeVar('M', bound=MacroCandidate)
+M = TypeVar("M", bound=MacroCandidate)
 
 
 class CandidateList(List[M]):
@@ -409,7 +409,7 @@ class Searchable(Protocol):
 
     @property
     def search_name(self) -> str:
-        raise NotImplementedError('search_name not implemented')
+        raise NotImplementedError("search_name not implemented")
 
 
 D = TypeVar('D')
@@ -672,7 +672,8 @@ def build_disabled_by_file_id(self):
         return disabled_by_file_id
 
     def _materialization_candidates_for(
-        self, project_name: str,
+        self,
+        project_name: str,
         materialization_name: str,
         adapter_type: Optional[str],
     ) -> CandidateList:
@@ -695,13 +696,16 @@ def _materialization_candidates_for(
     def find_materialization_macro_by_name(
         self, project_name: str, materialization_name: str, adapter_type: str
     ) -> Optional[ParsedMacro]:
-        candidates: CandidateList = CandidateList(chain.from_iterable(
-            self._materialization_candidates_for(
-                project_name=project_name,
-                materialization_name=materialization_name,
-                adapter_type=atype,
-            ) for atype in (adapter_type, None)
-        ))
+        candidates: CandidateList = CandidateList(
+            chain.from_iterable(
+                self._materialization_candidates_for(
+                    project_name=project_name,
+                    materialization_name=materialization_name,
+                    adapter_type=atype,
+                )
+                for atype in (adapter_type, None)
+            )
+        )
         return candidates.last()
 
     def get_resource_fqns(self) -> Mapping[str, PathSet]:
diff --git a/core/dbt/contracts/graph/parsed.py b/core/dbt/contracts/graph/parsed.py
index f51ff7dedd5..14734ae2ea4 100644
--- a/core/dbt/contracts/graph/parsed.py
+++ b/core/dbt/contracts/graph/parsed.py
@@ -759,7 +759,7 @@ def has_freshness(self):
 
     @property
     def search_name(self):
-        return f'{self.source_name}.{self.name}'
+        return f"{self.source_name}.{self.name}"
 
 
 @dataclass
diff --git a/core/dbt/node_types.py b/core/dbt/node_types.py
index d6a0a2ab70d..5376b0b9142 100644
--- a/core/dbt/node_types.py
+++ b/core/dbt/node_types.py
@@ -20,7 +20,7 @@ class NodeType(StrEnum):
     Metric = 'metric'
 
     @classmethod
-    def executable(cls) -> List['NodeType']:
+    def executable(cls) -> List["NodeType"]:
         return [
             cls.Model,
             cls.Test,
@@ -34,7 +34,7 @@ def executable(cls) -> List['NodeType']:
         ]
 
     @classmethod
-    def refable(cls) -> List['NodeType']:
+    def refable(cls) -> List["NodeType"]:
         return [
             cls.Model,
             cls.Seed,

From 11393183f157418cea37dc880743f64fe600562d Mon Sep 17 00:00:00 2001
From: Drew Banin 
Date: Tue, 22 Jan 2019 17:57:53 -0500
Subject: [PATCH 104/933] add run-operation subtask

automatic commit by git-black, original commits:
  1090a1612ac8c0931304cff3cb063f17a638b720
---
 core/dbt/adapters/base/impl.py | 7 ++++---
 core/dbt/task/run_operation.py | 2 +-
 2 files changed, 5 insertions(+), 4 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 10a74af2ea1..57146e6b8a1 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -892,13 +892,14 @@ def execute_macro(
         macro = manifest.find_macro_by_name(macro_name, self.config.project_name, project)
         if macro is None:
             if project is None:
-                package_name = 'any package'
+                package_name = "any package"
             else:
                 package_name = 'the "{}" package'.format(project)
 
             raise RuntimeException(
-                'dbt could not find a macro with the name "{}" in {}'
-                .format(macro_name, package_name)
+                'dbt could not find a macro with the name "{}" in {}'.format(
+                    macro_name, package_name
+                )
             )
         # This causes a reference cycle, as generate_runtime_macro_context()
         # ends up calling get_adapter, so the import has to be here.
diff --git a/core/dbt/task/run_operation.py b/core/dbt/task/run_operation.py
index b36a96a4b33..158cdc527c1 100644
--- a/core/dbt/task/run_operation.py
+++ b/core/dbt/task/run_operation.py
@@ -19,7 +19,7 @@
 class RunOperationTask(ManifestTask):
     def _get_macro_parts(self):
         macro_name = self.args.macro
-        if '.' in macro_name:
+        if "." in macro_name:
             package_name, macro_name = macro_name.split(".", 1)
         else:
             package_name = None

From c134a08be09ff104102b6d56ee2e48dc3cab404e Mon Sep 17 00:00:00 2001
From: Gerda Shank 
Date: Tue, 26 Oct 2021 11:16:36 -0400
Subject: [PATCH 105/933] [#3885] Handle env_vars in partial parsing of SQL
 files (#4101)

* [#3885] Handle env_vars in partial parsing

* Comment method to build env_vars_to_source_files

automatic commit by git-black, original commits:
  73af9a56e50c18ff116aabfbd3fca448db113d5e
---
 core/dbt/adapters/base/impl.py | 1 +
 core/dbt/contracts/files.py    | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 57146e6b8a1..4de576b6fa4 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -904,6 +904,7 @@ def execute_macro(
         # This causes a reference cycle, as generate_runtime_macro_context()
         # ends up calling get_adapter, so the import has to be here.
         from dbt.context.providers import generate_runtime_macro_context
+
         macro_context = generate_runtime_macro_context(
             macro=macro,
             config=self.config,
diff --git a/core/dbt/contracts/files.py b/core/dbt/contracts/files.py
index 3bf64340b27..396438bf558 100644
--- a/core/dbt/contracts/files.py
+++ b/core/dbt/contracts/files.py
@@ -255,7 +255,7 @@ def source_patches(self):
     def __post_serialize__(self, dct):
         dct = super().__post_serialize__(dct)
         # Remove partial parsing specific data
-        for key in ('pp_test_index', 'pp_dict'):
+        for key in ("pp_test_index", "pp_dict"):
             if key in dct:
                 del dct[key]
         return dct

From d7d4c43b1a375c1040059afa36fc76126a3b4a00 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 29 Jan 2020 10:05:45 -0700
Subject: [PATCH 106/933] dbt Contexts

Rewrite/reorganize contexts
 - now they are objects that have a to_dict()
 - special decorators on properties/methods to indicate membership
 - reorganize contexts/ to remove many many import cycles

Context behavior changes:
 - 'dbt_version' is always available
 - 'target' is available as long as the profile has been parsed
 - 'project_name' is available in: query headers, hooks, models, and macro execution

Profiles/projects now render at load time
 - reading a profile or project file requires a ConfigRenderer
 - projects get an extra-fiddly special load for use during initial parsing
    - it returns the profile name and a function that, given a renderer, can return the rest of the Project
    - idea is: use the profile name to load the Profile, use that to build a ConfigRenderer that has a TargetContext, render the project with that
 - profiles.yml is rendered with the 'base' context
 - dbt_project.yml/schema.yml/packages.yml are rendered with the 'target' context: 'base' context + 'target'
 - docs are rendered with the docs context: 'target' context + the 'doc' function
 - query headers are rendered with the query header context: 'target' context + macros + 'project_name'
 - executed macros/models should have the same context as previously (query headers + adapter/model/etc related functions)

Moved actual ref/source searching into the manifest
Moved the rest of the parse utils into parser/manifest.py
Made the ref/source resolvers defined in the provider context a bit more sane/well-defined
Picked consistent-but-not-great names for all the various context generation functions
Moved write_node into ParsedNode


automatic commit by git-black, original commits:
  62755fe5b1360620b0e3edbc52b01325c1ae809f
  b8febddad5f16e1cd02be8d8a00b3e8effb0e105
---
 core/dbt/adapters/base/impl.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 4de576b6fa4..1c12916fabf 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -906,10 +906,7 @@ def execute_macro(
         from dbt.context.providers import generate_runtime_macro_context
 
         macro_context = generate_runtime_macro_context(
-            macro=macro,
-            config=self.config,
-            manifest=manifest,
-            package_name=project
+            macro=macro, config=self.config, manifest=manifest, package_name=project
         )
         macro_context.update(context_override)
 

From bd59157bdfb868d6fde5cecc9ddce1c43fbac39a Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Sun, 1 Mar 2020 20:32:23 -0700
Subject: [PATCH 107/933] PR feedback: Add exception handler around macro
 execution, add a message about what is going on when list_schemas fails

automatic commit by git-black, original commits:
  47cef1d9078cf2383290d0f914cdeb07596510da
---
 core/dbt/adapters/base/impl.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 1c12916fabf..245b12b6bf2 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -912,7 +912,7 @@ def execute_macro(
 
         macro_function = MacroGenerator(macro, macro_context)
 
-        with self.connections.exception_handler(f'macro {macro_name}'):
+        with self.connections.exception_handler(f"macro {macro_name}"):
             result = macro_function(**kwargs)
         return result
 

From 90b2849b5bb0b922b31e1776df031c732276761c Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Tue, 17 Mar 2020 12:34:09 -0600
Subject: [PATCH 108/933] Fix issue with table databases/schemas/names not
 being interpreted as strings if they look like numbers

automatic commit by git-black, original commits:
  3a77626cd728f047a0c3a096a750fe90af467d6e
---
 core/dbt/adapters/base/impl.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index 245b12b6bf2..d8316dee9c7 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -925,7 +925,7 @@ def _catalog_filter_table(cls, table: agate.Table, manifest: Manifest) -> agate.
         table = table_from_rows(
             table.rows,
             table.column_names,
-            text_only_columns=['table_database', 'table_schema', 'table_name']
+            text_only_columns=["table_database", "table_schema", "table_name"],
         )
         return table.where(_catalog_filter_schemas(manifest))
 

From ab2d582f97bff967d2bca0097b33a3016e63e548 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Fri, 31 Jan 2020 11:10:45 -0700
Subject: [PATCH 109/933] PR feedback w/ improved catalog results behavior

automatic commit by git-black, original commits:
  c1af3abbdcd05f3ceac86e5b162e2c594302d817
---
 core/dbt/adapters/base/impl.py       | 15 ++++-----------
 core/dbt/task/generate.py            |  2 +-
 third-party-stubs/agate/__init__.pyi |  2 +-
 3 files changed, 6 insertions(+), 13 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index d8316dee9c7..f2e3eedd01b 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -948,9 +948,7 @@ def _get_one_catalog(
         results = self._catalog_filter_table(table, manifest)
         return results
 
-    def get_catalog(
-        self, manifest: Manifest
-    ) -> Tuple[agate.Table, List[Exception]]:
+    def get_catalog(self, manifest: Manifest) -> Tuple[agate.Table, List[Exception]]:
         schema_map = self._get_catalog_schemas(manifest)
 
         with executor(self.config) as tpe:
@@ -1139,7 +1137,7 @@ def get_rows_different_sql(
 
 
 def catch_as_completed(
-    futures  # typing: List[Future[agate.Table]]
+    futures,  # typing: List[Future[agate.Table]]
 ) -> Tuple[agate.Table, List[Exception]]:
 
     # catalogs: agate.Table = agate.Table(rows=[])
@@ -1152,15 +1150,10 @@ def catch_as_completed(
         if exc is None:
             catalog = future.result()
             tables.append(catalog)
-        elif (
-            isinstance(exc, KeyboardInterrupt) or
-            not isinstance(exc, Exception)
-        ):
+        elif isinstance(exc, KeyboardInterrupt) or not isinstance(exc, Exception):
             raise exc
         else:
-            warn_or_error(
-                f'Encountered an error while generating catalog: {str(exc)}'
-            )
+            warn_or_error(f"Encountered an error while generating catalog: {str(exc)}")
             # exc is not None, derives from Exception, and isn't ctrl+c
             exceptions.append(exc)
     return merge_tables(tables), exceptions
diff --git a/core/dbt/task/generate.py b/core/dbt/task/generate.py
index 1f63af1a13f..39fb37cf28a 100644
--- a/core/dbt/task/generate.py
+++ b/core/dbt/task/generate.py
@@ -274,7 +274,7 @@ def get_catalog_results(
         sources: Dict[str, CatalogTable],
         generated_at: datetime,
         compile_results: Optional[Any],
-        errors: Optional[List[str]]
+        errors: Optional[List[str]],
     ) -> CatalogArtifact:
         return CatalogArtifact.from_results(
             generated_at=generated_at,
diff --git a/third-party-stubs/agate/__init__.pyi b/third-party-stubs/agate/__init__.pyi
index 92bd711003c..9b2e611c068 100644
--- a/third-party-stubs/agate/__init__.pyi
+++ b/third-party-stubs/agate/__init__.pyi
@@ -60,7 +60,7 @@ class Table:
         cls, path: Iterable[str], *, column_types: Optional["TypeTester"] = None
     ) -> "Table": ...
     @classmethod
-    def merge(cls, tables: Iterable['Table']) -> 'Table': ...
+    def merge(cls, tables: Iterable["Table"]) -> "Table": ...
     def rename(self, column_names: Optional[Iterable[str]] = None, row_names: Optional[Any] = None, slug_columns: bool = False, slug_rows: bool=False, **kwargs: Any) -> 'Table': ...
 
 

From 0b7808819fab423d900431f5b3bca50d2be8ca17 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 4 Mar 2019 21:08:38 -0700
Subject: [PATCH 110/933] per-thread connections

parsing now always opens a connection, instead of waiting to need it
remove model_name/available_raw/etc


automatic commit by git-black, original commits:
  5e8ab9ce4a8ac905d66bf85414a0be121fc58dbd
  e2af871a5adc1b6b0269ef900e04320550ac1bb8
---
 core/dbt/adapters/base/impl.py | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index f2e3eedd01b..d5721fdc753 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -986,11 +986,7 @@ def calculate_freshness(
         }
 
         # run the macro
-        table = self.execute_macro(
-            FRESHNESS_MACRO_NAME,
-            kwargs=kwargs,
-            manifest=manifest
-        )
+        table = self.execute_macro(FRESHNESS_MACRO_NAME, kwargs=kwargs, manifest=manifest)
         # now we have a 1-row table of the maximum `loaded_at_field` value and
         # the current time according to the db.
         if len(table) != 1 or len(table[0]) != 2:

From 414101772a622c1d7261d55b2eb5c65715470bd8 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Fri, 21 Aug 2020 07:54:57 -0600
Subject: [PATCH 111/933] add more test helper methods

automatic commit by git-black, original commits:
  b6a951903e6ac78dfb03994b10ec72bd88008ce7
---
 core/dbt/adapters/base/impl.py                | 22 +++++++++----------
 .../postgres/dbt/adapters/postgres/impl.py    |  4 +---
 2 files changed, 12 insertions(+), 14 deletions(-)

diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py
index d5721fdc753..2c40482d5e8 100644
--- a/core/dbt/adapters/base/impl.py
+++ b/core/dbt/adapters/base/impl.py
@@ -1037,6 +1037,7 @@ def post_model_hook(self, config: Mapping[str, Any], context: Any) -> None:
 
     def get_compiler(self):
         from dbt.compilation import Compiler
+
         return Compiler(self.config)
 
     # Methods used in adapter tests
@@ -1047,14 +1048,12 @@ def update_column_sql(
         clause: str,
         where_clause: Optional[str] = None,
     ) -> str:
-        clause = f'update {dst_name} set {dst_column} = {clause}'
+        clause = f"update {dst_name} set {dst_column} = {clause}"
         if where_clause is not None:
-            clause += f' where {where_clause}'
+            clause += f" where {where_clause}"
         return clause
 
-    def timestamp_add_sql(
-        self, add_to: str, number: int = 1, interval: str = 'hour'
-    ) -> str:
+    def timestamp_add_sql(self, add_to: str, number: int = 1, interval: str = "hour") -> str:
         # for backwards compatibility, we're compelled to set some sort of
         # default. A lot of searching has lead me to believe that the
         # '+ interval' syntax used in postgres/redshift is relatively common
@@ -1062,16 +1061,17 @@ def timestamp_add_sql(
         return f"{add_to} + interval '{number} {interval}'"
 
     def string_add_sql(
-        self, add_to: str, value: str, location='append',
+        self,
+        add_to: str,
+        value: str,
+        location="append",
     ) -> str:
-        if location == 'append':
+        if location == "append":
             return f"{add_to} || '{value}'"
-        elif location == 'prepend':
+        elif location == "prepend":
             return f"'{value}' || {add_to}"
         else:
-            raise RuntimeException(
-                f'Got an unexpected location value of "{location}"'
-            )
+            raise RuntimeException(f'Got an unexpected location value of "{location}"')
 
     def get_rows_different_sql(
         self,
diff --git a/plugins/postgres/dbt/adapters/postgres/impl.py b/plugins/postgres/dbt/adapters/postgres/impl.py
index 7a3806a4c8b..0a88e676e73 100644
--- a/plugins/postgres/dbt/adapters/postgres/impl.py
+++ b/plugins/postgres/dbt/adapters/postgres/impl.py
@@ -134,7 +134,5 @@ def _relations_cache_for_schemas(self, manifest):
         super()._relations_cache_for_schemas(manifest)
         self._link_cached_relations(manifest)
 
-    def timestamp_add_sql(
-        self, add_to: str, number: int = 1, interval: str = 'hour'
-    ) -> str:
+    def timestamp_add_sql(self, add_to: str, number: int = 1, interval: str = "hour") -> str:
         return f"{add_to} + interval '{number} {interval}'"

From bac7955f6d7e7000074764494e73c581f397e9c4 Mon Sep 17 00:00:00 2001
From: Kyle Wigley 
Date: Mon, 21 Dec 2020 13:24:09 -0500
Subject: [PATCH 112/933] update naming

automatic commit by git-black, original commits:
  aa3bdfeb17c2e7f6974a336a40148376dd73234d
  dddf1bcb76190e5b6b790897e39921354afb0b6f
---
 core/dbt/adapters/sql/connections.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/core/dbt/adapters/sql/connections.py b/core/dbt/adapters/sql/connections.py
index 7a948f8e301..19875dae3c5 100644
--- a/core/dbt/adapters/sql/connections.py
+++ b/core/dbt/adapters/sql/connections.py
@@ -7,9 +7,7 @@
 import dbt.clients.agate_helper
 import dbt.exceptions
 from dbt.adapters.base import BaseConnectionManager
-from dbt.contracts.connection import (
-    Connection, ConnectionState, AdapterResponse
-)
+from dbt.contracts.connection import Connection, ConnectionState, AdapterResponse
 from dbt.events.functions import fire_event
 from dbt.events.types import ConnectionUsed, SQLQuery, SQLCommit, SQLQueryStatus
 

From 16fba5e2706dae4eae47d6afc5fedbc7bfe15f68 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 27 Jul 2020 15:49:34 -0600
Subject: [PATCH 113/933] only try to cancel open connections

automatic commit by git-black, original commits:
  44e3c7eb6dfef37e0f218ec08c8d9c18ca596637
---
 core/dbt/adapters/sql/connections.py                  | 5 +----
 plugins/postgres/dbt/adapters/postgres/connections.py | 6 ++----
 2 files changed, 3 insertions(+), 8 deletions(-)

diff --git a/core/dbt/adapters/sql/connections.py b/core/dbt/adapters/sql/connections.py
index 19875dae3c5..33c1d780ce3 100644
--- a/core/dbt/adapters/sql/connections.py
+++ b/core/dbt/adapters/sql/connections.py
@@ -39,10 +39,7 @@ def cancel_open(self) -> List[str]:
 
                 # if the connection failed, the handle will be None so we have
                 # nothing to cancel.
-                if (
-                    connection.handle is not None and
-                    connection.state == ConnectionState.OPEN
-                ):
+                if connection.handle is not None and connection.state == ConnectionState.OPEN:
                     self.cancel(connection)
                 if connection.name is not None:
                     names.append(connection.name)
diff --git a/plugins/postgres/dbt/adapters/postgres/connections.py b/plugins/postgres/dbt/adapters/postgres/connections.py
index 8c9cce5f503..71345e33be2 100644
--- a/plugins/postgres/dbt/adapters/postgres/connections.py
+++ b/plugins/postgres/dbt/adapters/postgres/connections.py
@@ -151,10 +151,8 @@ def cancel(self, connection):
             pid = connection.handle.get_backend_pid()
         except psycopg2.InterfaceError as exc:
             # if the connection is already closed, not much to cancel!
-            if 'already closed' in str(exc):
-                logger.debug(
-                    f'Connection {connection_name} was already closed'
-                )
+            if "already closed" in str(exc):
+                logger.debug(f"Connection {connection_name} was already closed")
                 return
             # probably bad, re-raise it
             raise

From 352a0067e77f94442d28699e5a76c20f5051dcf4 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Fri, 30 Aug 2019 12:09:19 -0600
Subject: [PATCH 114/933] PR feedback

automatic commit by git-black, original commits:
  43c5c011d8c4f6e2093bf9d2a76d1f6d46b9c7eb
---
 core/dbt/adapters/sql/connections.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/dbt/adapters/sql/connections.py b/core/dbt/adapters/sql/connections.py
index 33c1d780ce3..17ce54e9158 100644
--- a/core/dbt/adapters/sql/connections.py
+++ b/core/dbt/adapters/sql/connections.py
@@ -59,7 +59,7 @@ def add_query(
 
         with self.exception_handler(sql):
             if abridge_sql_log:
-                log_sql = '{}...'.format(sql[:512])
+                log_sql = "{}...".format(sql[:512])
             else:
                 log_sql = sql
 

From fb0b2e9d8bd026f40a02a4876707e1ab18ee0d39 Mon Sep 17 00:00:00 2001
From: Matthew McKnight <91097623+McKnight-42@users.noreply.github.com>
Date: Mon, 13 Dec 2021 11:55:19 -0600
Subject: [PATCH 115/933] made change to test of str (#4463)

* made change to test of str

* changelog update

automatic commit by git-black, original commits:
  76f7bf990069e06cfb2e63b7f5d480dc72eaae8a
  b3039fdc7629b5027096e92ed9b7612419cf3fea
---
 core/dbt/adapters/sql/connections.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/core/dbt/adapters/sql/connections.py b/core/dbt/adapters/sql/connections.py
index 17ce54e9158..d938c97a9ff 100644
--- a/core/dbt/adapters/sql/connections.py
+++ b/core/dbt/adapters/sql/connections.py
@@ -71,8 +71,7 @@ def add_query(
 
             fire_event(
                 SQLQueryStatus(
-                    status=str(self.get_response(cursor)),
-                    elapsed=round((time.time() - pre), 2)
+                    status=str(self.get_response(cursor)), elapsed=round((time.time() - pre), 2)
                 )
             )
 

From ec0690ec507d0ef574f00141047a315b2a797a16 Mon Sep 17 00:00:00 2001
From: Tushar Mittal 
Date: Tue, 16 Mar 2021 21:42:36 +0530
Subject: [PATCH 116/933] Add unit tests for SQL process_results

automatic commit by git-black, original commits:
  564fe624001fdad573f884e69e15aa9aea2b63e9
---
 core/dbt/adapters/sql/connections.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/dbt/adapters/sql/connections.py b/core/dbt/adapters/sql/connections.py
index d938c97a9ff..3a0c2b84d9d 100644
--- a/core/dbt/adapters/sql/connections.py
+++ b/core/dbt/adapters/sql/connections.py
@@ -93,7 +93,7 @@ def process_results(
             col_name = column_names[idx]
             if col_name in unique_col_names:
                 unique_col_names[col_name] += 1
-                column_names[idx] = f'{col_name}_{unique_col_names[col_name]}'
+                column_names[idx] = f"{col_name}_{unique_col_names[col_name]}"
             else:
                 unique_col_names[column_names[idx]] = 1
         return [dict(zip(column_names, row)) for row in rows]

From c09d9a31515146b53948888e06440fb844a93da4 Mon Sep 17 00:00:00 2001
From: Drew Banin 
Date: Thu, 16 Apr 2020 21:25:28 -0400
Subject: [PATCH 117/933] (#2337) Handle array values in agate dataframe
 building

automatic commit by git-black, original commits:
  0781cef8b1bb2d641081e1d315b834a710fa2997
---
 core/dbt/adapters/sql/connections.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/core/dbt/adapters/sql/connections.py b/core/dbt/adapters/sql/connections.py
index 3a0c2b84d9d..33262d426fd 100644
--- a/core/dbt/adapters/sql/connections.py
+++ b/core/dbt/adapters/sql/connections.py
@@ -108,10 +108,7 @@ def get_result_from_cursor(cls, cursor: Any) -> agate.Table:
             rows = cursor.fetchall()
             data = cls.process_results(column_names, rows)
 
-        return dbt.clients.agate_helper.table_from_data_flat(
-            data,
-            column_names
-        )
+        return dbt.clients.agate_helper.table_from_data_flat(data, column_names)
 
     def execute(
         self, sql: str, auto_begin: bool = False, fetch: bool = False

From d75759a527342343be4fb040cace50be8409601b Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 24 Jun 2019 09:39:06 -0400
Subject: [PATCH 118/933] Convert dbt to use dataclasses and hologram for
 representing things

Most of the things that previously used manually created jsonschemas
Split tests into their own node type
Change tests to reflect that tables require a freshness block
add a lot more debug-logging on exceptions
Make things that get passed to Var() tell it about their vars
finally make .empty a property
documentation resource type is now a property, not serialized
added a Mergeable helper mixin to perform simple merges
Convert some oneOf checks into if-else chains to get better errors
Add more tests
Use "Any" as value in type defs
 - accept the warning from hologram for now, PR out to suppress it
set default values for enabled/materialized
Clean up the Parsed/Compiled type hierarchy
Allow generic snapshot definitions
remove the "graph" entry in the context
 - This improves performance on large projects significantly
Update changelog to reflect removing graph


automatic commit by git-black, original commits:
  49f7cf8ecab4bd74b9b67958df4ace56353b1bed
---
 core/dbt/adapters/sql/connections.py          |  3 +-
 core/dbt/config/profile.py                    |  5 ++-
 core/dbt/contracts/connection.py              |  8 ++--
 core/dbt/contracts/graph/parsed.py            |  8 ++--
 core/dbt/contracts/graph/unparsed.py          | 14 +++----
 core/dbt/contracts/project.py                 |  9 ++--
 core/dbt/contracts/results.py                 |  2 +-
 core/dbt/exceptions.py                        | 41 +++++++++++--------
 core/dbt/hooks.py                             |  2 +-
 core/dbt/node_types.py                        |  4 +-
 core/dbt/semver.py                            | 10 ++---
 core/dbt/task/run.py                          |  2 +-
 core/dbt/utils.py                             | 12 +++---
 .../dbt/adapters/postgres/connections.py      |  2 +-
 14 files changed, 65 insertions(+), 57 deletions(-)

diff --git a/core/dbt/adapters/sql/connections.py b/core/dbt/adapters/sql/connections.py
index 33262d426fd..fa3dccd6028 100644
--- a/core/dbt/adapters/sql/connections.py
+++ b/core/dbt/adapters/sql/connections.py
@@ -133,7 +133,8 @@ def begin(self):
         if connection.transaction_open is True:
             raise dbt.exceptions.InternalException(
                 'Tried to begin a new transaction on connection "{}", but '
-                'it already had one open!'.format(connection.name))
+                "it already had one open!".format(connection.name)
+            )
 
         self.add_begin_query()
 
diff --git a/core/dbt/config/profile.py b/core/dbt/config/profile.py
index 3008b93cd59..65704db6b40 100644
--- a/core/dbt/config/profile.py
+++ b/core/dbt/config/profile.py
@@ -128,7 +128,7 @@ def to_profile_info(
             "target_name": self.target_name,
             'user_config': self.user_config,
             "threads": self.threads,
-            'credentials': self.credentials,
+            "credentials": self.credentials,
         }
         if serialize_credentials:
             result['user_config'] = self.user_config.to_dict(omit_none=True)
@@ -189,7 +189,8 @@ def _credentials_from_profile(
             msg = str(e) if isinstance(e, RuntimeException) else e.message
             raise DbtProfileError(
                 'Credentials in profile "{}", target "{}" invalid: {}'.format(
-                .format(profile_name, target_name, msg)
+                    profile_name, target_name, msg
+                )
             ) from e
 
         return credentials
diff --git a/core/dbt/contracts/connection.py b/core/dbt/contracts/connection.py
index 32110a0a5fd..95ba1943350 100644
--- a/core/dbt/contracts/connection.py
+++ b/core/dbt/contracts/connection.py
@@ -36,10 +36,10 @@ def __str__(self):
 
 
 class ConnectionState(StrEnum):
-    INIT = 'init'
-    OPEN = 'open'
-    CLOSED = 'closed'
-    FAIL = 'fail'
+    INIT = "init"
+    OPEN = "open"
+    CLOSED = "closed"
+    FAIL = "fail"
 
 
 @dataclass(init=False)
diff --git a/core/dbt/contracts/graph/parsed.py b/core/dbt/contracts/graph/parsed.py
index 14734ae2ea4..b91185173b3 100644
--- a/core/dbt/contracts/graph/parsed.py
+++ b/core/dbt/contracts/graph/parsed.py
@@ -51,7 +51,7 @@ class ColumnInfo(
     Replaceable
 ):
     name: str
-    description: str = ''
+    description: str = ""
     meta: Dict[str, Any] = field(default_factory=dict)
     data_type: Optional[str] = None
     quote: Optional[bool] = None
@@ -133,7 +133,7 @@ def is_relational(self):
 
     @property
     def is_ephemeral(self):
-        return self.config.materialized == 'ephemeral'
+        return self.config.materialized == "ephemeral"
 
     @property
     def is_ephemeral_model(self):
@@ -202,7 +202,7 @@ class ParsedNodeDefaults(NodeInfoMixin, ParsedNodeMandatory):
     refs: List[List[str]] = field(default_factory=list)
     sources: List[List[str]] = field(default_factory=list)
     depends_on: DependsOn = field(default_factory=DependsOn)
-    description: str = field(default='')
+    description: str = field(default="")
     columns: Dict[str, ColumnInfo] = field(default_factory=dict)
     meta: Dict[str, Any] = field(default_factory=dict)
     docs: Docs = field(default_factory=Docs)
@@ -651,7 +651,7 @@ class ParsedSourceDefinition(
     loaded_at_field: Optional[str] = None
     freshness: Optional[FreshnessThreshold] = None
     external: Optional[ExternalTable] = None
-    description: str = ''
+    description: str = ""
     columns: Dict[str, ColumnInfo] = field(default_factory=dict)
     meta: Dict[str, Any] = field(default_factory=dict)
     source_meta: Dict[str, Any] = field(default_factory=dict)
diff --git a/core/dbt/contracts/graph/unparsed.py b/core/dbt/contracts/graph/unparsed.py
index 4b794a86896..d4908bb1b00 100644
--- a/core/dbt/contracts/graph/unparsed.py
+++ b/core/dbt/contracts/graph/unparsed.py
@@ -85,7 +85,7 @@ class Docs(dbtClassMixin, Replaceable):
 class HasDocs(AdditionalPropertiesMixin, ExtensibleDbtClassMixin,
               Replaceable):
     name: str
-    description: str = ''
+    description: str = ""
     meta: Dict[str, Any] = field(default_factory=dict)
     data_type: Optional[str] = None
     docs: Docs = field(default_factory=Docs)
@@ -159,12 +159,12 @@ class UnparsedMacroUpdate(HasConfig, HasDocs, HasYamlMetadata):
 
 
 class TimePeriod(StrEnum):
-    minute = 'minute'
-    hour = 'hour'
-    day = 'day'
+    minute = "minute"
+    hour = "hour"
+    day = "day"
 
     def plural(self) -> str:
-        return str(self) + 's'
+        return str(self) + "s"
 
 
 @dataclass
@@ -265,11 +265,11 @@ def __post_serialize__(self, dct):
 @dataclass
 class UnparsedSourceDefinition(dbtClassMixin, Replaceable):
     name: str
-    description: str = ''
+    description: str = ""
     meta: Dict[str, Any] = field(default_factory=dict)
     database: Optional[str] = None
     schema: Optional[str] = None
-    loader: str = ''
+    loader: str = ""
     quoting: Quoting = field(default_factory=Quoting)
     freshness: Optional[FreshnessThreshold] = field(
         default_factory=FreshnessThreshold
diff --git a/core/dbt/contracts/project.py b/core/dbt/contracts/project.py
index 756dc2af120..4399b614eec 100644
--- a/core/dbt/contracts/project.py
+++ b/core/dbt/contracts/project.py
@@ -117,8 +117,7 @@ class ProjectPackageMetadata:
 
     @classmethod
     def from_project(cls, project):
-        return cls(name=project.project_name,
-                   packages=project.packages.packages)
+        return cls(name=project.project_name, packages=project.packages.packages)
 
 
 @dataclass
@@ -255,8 +254,8 @@ class UserConfig(ExtensibleDbtClassMixin, Replaceable, UserConfigContract):
 
 @dataclass
 class ProfileConfig(HyphenatedDbtClassMixin, Replaceable):
-    profile_name: str = field(metadata={'preserve_underscore': True})
-    target_name: str = field(metadata={'preserve_underscore': True})
+    profile_name: str = field(metadata={"preserve_underscore": True})
+    target_name: str = field(metadata={"preserve_underscore": True})
     user_config: UserConfig = field(metadata={'preserve_underscore': True})
     threads: int
     # TODO: make this a dynamic union of some kind?
@@ -275,7 +274,7 @@ class ConfiguredQuoting(Quoting, Replaceable):
 class Configuration(Project, ProfileConfig):
     cli_vars: Dict[str, Any] = field(
         default_factory=dict,
-        metadata={'preserve_underscore': True},
+        metadata={"preserve_underscore": True},
     )
     quoting: Optional[ConfiguredQuoting] = None
 
diff --git a/core/dbt/contracts/results.py b/core/dbt/contracts/results.py
index 1eca13e7a8a..6eb2c37237f 100644
--- a/core/dbt/contracts/results.py
+++ b/core/dbt/contracts/results.py
@@ -273,7 +273,7 @@ def skipped(self):
 
 
 class FreshnessErrorEnum(StrEnum):
-    runtime_error = 'runtime error'
+    runtime_error = "runtime error"
 
 
 @dataclass
diff --git a/core/dbt/exceptions.py b/core/dbt/exceptions.py
index 45ebfa4dc44..84457bee466 100644
--- a/core/dbt/exceptions.py
+++ b/core/dbt/exceptions.py
@@ -18,7 +18,7 @@ def validator_error_message(exc):
     if not isinstance(exc, dbt.dataclass_schema.ValidationError):
         return str(exc)
     path = "[%s]" % "][".join(map(repr, exc.relative_path))
-    return 'at path {}: {}'.format(path, exc.message)
+    return "at path {}: {}".format(path, exc.message)
 
 
 class Exception(builtins.Exception):
@@ -68,12 +68,9 @@ def type(self):
     def node_to_string(self, node):
         if node is None:
             return ""
-        if not hasattr(node, 'name'):
+        if not hasattr(node, "name"):
             # we probably failed to parse a block, so we can't know the name
-            return '{} ({})'.format(
-                node.resource_type,
-                node.original_file_path
-            )
+            return "{} ({})".format(node.resource_type, node.original_file_path)
 
         if hasattr(node, 'contents'):
             # handle FileBlocks. They aren't really nodes but we want to render
@@ -131,8 +128,8 @@ def data(self):
             return result
 
         result.update({
-            'raw_sql': self.node.raw_sql,
-            # the node isn't always compiled, but if it is, include that!
+            {
+                "raw_sql": self.node.raw_sql,
             'compiled_sql': getattr(self.node, 'compiled_sql', None),
         })
         return result
@@ -215,7 +212,7 @@ class DatabaseException(RuntimeException):
     def process_stack(self):
         lines = []
 
-        if hasattr(self.node, 'build_path') and self.node.build_path:
+        if hasattr(self.node, "build_path") and self.node.build_path:
             lines.append("compiled SQL at {}".format(self.node.build_path))
 
         return lines + RuntimeException.process_stack(self)
@@ -588,7 +585,7 @@ def _get_target_failure_msg(
 
     source_path_string = ''
     if include_path:
-        source_path_string = ' ({})'.format(model.original_file_path)
+        source_path_string = " ({})".format(model.original_file_path)
 
     return "{} '{}'{} depends on a {} named '{}' {}which {}".format(
         model.resource_type.title(),
@@ -663,7 +660,7 @@ def source_target_not_found(
 def dependency_not_found(model, target_model_name):
     raise_compiler_error(
         "'{}' depends on '{}' which is not in the graph!"
-        .format(model.unique_id, target_model_name),
+            model.unique_id, target_model_name
         model)
 
 
@@ -671,7 +668,9 @@ def macro_not_found(model, target_macro_id):
     raise_compiler_error(
         model,
         "'{}' references macro '{}' which is not defined!"
-        .format(model.unique_id, target_macro_id))
+            model.unique_id, target_macro_id
+        ),
+    )
 
 
 def macro_invalid_dispatch_arg(macro_name) -> NoReturn:
@@ -724,7 +723,7 @@ def raise_cache_inconsistent(message):
 def missing_config(model, name):
     raise_compiler_error(
         "Model '{}' does not define a required config parameter '{}'."
-        .format(model.unique_id, name),
+            model.unique_id, name
         model)
 
 
@@ -863,8 +862,12 @@ def raise_duplicate_resource_name(node_1, node_2):
         'these resources:\n- {} ({})\n- {} ({})'.format(
             duped_name,
             get_func,
-            node_1.unique_id, node_1.original_file_path,
-            node_2.unique_id, node_2.original_file_path))
+            node_1.unique_id,
+            node_1.original_file_path,
+            node_2.unique_id,
+            node_2.original_file_path,
+        )
+    )
 
 
 def raise_ambiguous_alias(node_1, node_2, duped_name=None):
@@ -877,8 +880,12 @@ def raise_ambiguous_alias(node_1, node_2, duped_name=None):
         'To fix this,\nchange the configuration of one of these resources:'
         '\n- {} ({})\n- {} ({})'.format(
             duped_name,
-            node_1.unique_id, node_1.original_file_path,
-            node_2.unique_id, node_2.original_file_path))
+            node_1.unique_id,
+            node_1.original_file_path,
+            node_2.unique_id,
+            node_2.original_file_path,
+        )
+    )
 
 
 def raise_ambiguous_catalog_match(unique_id, match_1, match_2):
diff --git a/core/dbt/hooks.py b/core/dbt/hooks.py
index 0603f2adb7a..7767a1aa57f 100644
--- a/core/dbt/hooks.py
+++ b/core/dbt/hooks.py
@@ -18,4 +18,4 @@ def get_hook_dict(source: Union[str, Dict[str, Any]]) -> Dict[str, Any]:
     try:
         return json.loads(source)
     except ValueError:
-        return {'sql': source}
+        return {"sql": source}
diff --git a/core/dbt/node_types.py b/core/dbt/node_types.py
index 5376b0b9142..e1356f1edac 100644
--- a/core/dbt/node_types.py
+++ b/core/dbt/node_types.py
@@ -11,11 +11,11 @@ class NodeType(StrEnum):
     Operation = 'operation'
     Seed = "seed"
     # TODO: rm?
-    RPCCall = 'rpc'
+    RPCCall = "rpc"
     SqlOperation = 'sql'
     Documentation = 'docs'
     Source = 'source'
-    Macro = 'macro'
+    Macro = "macro"
     Exposure = 'exposure'
     Metric = 'metric'
 
diff --git a/core/dbt/semver.py b/core/dbt/semver.py
index c3c5acec37b..f366e7419c3 100644
--- a/core/dbt/semver.py
+++ b/core/dbt/semver.py
@@ -13,11 +13,11 @@
 
 
 class Matchers(StrEnum):
-    GREATER_THAN = '>'
-    GREATER_THAN_OR_EQUAL = '>='
-    LESS_THAN = '<'
-    LESS_THAN_OR_EQUAL = '<='
-    EXACT = '='
+    GREATER_THAN = ">"
+    GREATER_THAN_OR_EQUAL = ">="
+    LESS_THAN = "<"
+    LESS_THAN_OR_EQUAL = "<="
+    EXACT = "="
 
 
 @dataclass
diff --git a/core/dbt/task/run.py b/core/dbt/task/run.py
index 1e6d9f3810a..6c7c96952ce 100644
--- a/core/dbt/task/run.py
+++ b/core/dbt/task/run.py
@@ -300,7 +300,7 @@ def get_hook_sql(self, adapter, hook, idx, num_hooks, extra_context):
         statement = compiled.compiled_sql
         hook_index = hook.index or num_hooks
         hook_obj = get_hook(statement, index=hook_index)
-        return hook_obj.sql or ''
+        return hook_obj.sql or ""
 
     def _hook_keyfunc(self, hook: ParsedHookNode) -> Tuple[str, Optional[int]]:
         package_name = hook.package_name
diff --git a/core/dbt/utils.py b/core/dbt/utils.py
index 2647bd494e4..e19cb0502ab 100644
--- a/core/dbt/utils.py
+++ b/core/dbt/utils.py
@@ -64,9 +64,9 @@ def get_model_name_or_none(model):
         name = model
     elif isinstance(model, dict):
         name = model.get('alias', model.get('name'))
-    elif hasattr(model, 'alias'):
+    elif hasattr(model, "alias"):
         name = model.alias
-    elif hasattr(model, 'name'):
+    elif hasattr(model, "name"):
         name = model.name
     else:
         name = str(model)
@@ -79,13 +79,13 @@ def get_model_name_or_none(model):
 
 def get_dbt_macro_name(name):
     if name is None:
-        raise dbt.exceptions.InternalException('Got None for a macro name!')
+        raise dbt.exceptions.InternalException("Got None for a macro name!")
     return f'{MACRO_PREFIX}{name}'
 
 
 def get_dbt_docs_name(name):
     if name is None:
-        raise dbt.exceptions.InternalException('Got None for a doc name!')
+        raise dbt.exceptions.InternalException("Got None for a doc name!")
     return f'{DOCS_PREFIX}{name}'
 
 
@@ -257,11 +257,11 @@ def md5(string):
 
 
 def get_hash(model):
-    return hashlib.md5(model.unique_id.encode('utf-8')).hexdigest()
+    return hashlib.md5(model.unique_id.encode("utf-8")).hexdigest()
 
 
 def get_hashed_contents(model):
-    return hashlib.md5(model.raw_sql.encode('utf-8')).hexdigest()
+    return hashlib.md5(model.raw_sql.encode("utf-8")).hexdigest()
 
 
 def flatten_nodes(dep_list):
diff --git a/plugins/postgres/dbt/adapters/postgres/connections.py b/plugins/postgres/dbt/adapters/postgres/connections.py
index 71345e33be2..cf5ce519680 100644
--- a/plugins/postgres/dbt/adapters/postgres/connections.py
+++ b/plugins/postgres/dbt/adapters/postgres/connections.py
@@ -92,7 +92,7 @@ def open(cls, connection):
         # we don't want to pass 0 along to connect() as postgres will try to
         # call an invalid setsockopt() call (contrary to the docs).
         if credentials.keepalives_idle:
-            kwargs['keepalives_idle'] = credentials.keepalives_idle
+            kwargs["keepalives_idle"] = credentials.keepalives_idle
 
         # psycopg2 doesn't support search_path officially,
         # see https://github.com/psycopg/psycopg2/issues/465

From 6bd38808c71b7ff3be9819735f1df3771c721ed7 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Thu, 3 Jan 2019 13:43:08 -0700
Subject: [PATCH 119/933] Move SQL previously embedded into adapters into
 macros

Adapters now store an internal manifest that only has the dbt internal projects
Adapters use that manifest if none is provided to execute_manifest
The internal manifest is lazy-loaded to avoid recursion issues
Moved declared plugin paths down one level
Connection management changes to accommodate calling macro -> adapter -> macro
Split up precision and scale when describing number columns so agate doesn't eat commas
Manifest building now happens in the RunManager instead of the compiler

Now macros:
  create/drop schema
  get_columns_in_relation
  alter column type
  rename/drop/truncate
  list_schemas/check_schema_exists
  list_relations_without_caching


automatic commit by git-black, original commits:
  70069f53b13d7c53ae0dc77ed171d6e77542b588
---
 core/dbt/adapters/sql/impl.py                 | 48 ++++++++++---------
 core/dbt/task/runnable.py                     |  2 +-
 .../postgres/dbt/adapters/postgres/impl.py    |  9 ++--
 3 files changed, 31 insertions(+), 28 deletions(-)

diff --git a/core/dbt/adapters/sql/impl.py b/core/dbt/adapters/sql/impl.py
index 1e9f19c2789..0a2589e7cfd 100644
--- a/core/dbt/adapters/sql/impl.py
+++ b/core/dbt/adapters/sql/impl.py
@@ -13,16 +13,16 @@
 
 from dbt.adapters.base.relation import BaseRelation
 
-LIST_RELATIONS_MACRO_NAME = 'list_relations_without_caching'
-GET_COLUMNS_IN_RELATION_MACRO_NAME = 'get_columns_in_relation'
-LIST_SCHEMAS_MACRO_NAME = 'list_schemas'
-CHECK_SCHEMA_EXISTS_MACRO_NAME = 'check_schema_exists'
-CREATE_SCHEMA_MACRO_NAME = 'create_schema'
-DROP_SCHEMA_MACRO_NAME = 'drop_schema'
-RENAME_RELATION_MACRO_NAME = 'rename_relation'
-TRUNCATE_RELATION_MACRO_NAME = 'truncate_relation'
-DROP_RELATION_MACRO_NAME = 'drop_relation'
-ALTER_COLUMN_TYPE_MACRO_NAME = 'alter_column_type'
+LIST_RELATIONS_MACRO_NAME = "list_relations_without_caching"
+GET_COLUMNS_IN_RELATION_MACRO_NAME = "get_columns_in_relation"
+LIST_SCHEMAS_MACRO_NAME = "list_schemas"
+CHECK_SCHEMA_EXISTS_MACRO_NAME = "check_schema_exists"
+CREATE_SCHEMA_MACRO_NAME = "create_schema"
+DROP_SCHEMA_MACRO_NAME = "drop_schema"
+RENAME_RELATION_MACRO_NAME = "rename_relation"
+TRUNCATE_RELATION_MACRO_NAME = "truncate_relation"
+DROP_RELATION_MACRO_NAME = "drop_relation"
+ALTER_COLUMN_TYPE_MACRO_NAME = "alter_column_type"
 
 
 class SQLAdapter(BaseAdapter):
@@ -137,9 +137,9 @@ def alter_column_type(
         4. Rename the new column to existing column
         """
         kwargs = {
-            'relation': relation,
-            'column_name': column_name,
-            'new_column_type': new_column_type,
+            "relation": relation,
+            "column_name": column_name,
+            "new_column_type": new_column_type,
         }
         self.execute_macro(
             ALTER_COLUMN_TYPE_MACRO_NAME,
@@ -149,8 +149,8 @@ def alter_column_type(
     def drop_relation(self, relation):
         if relation.type is None:
             dbt.exceptions.raise_compiler_error(
-                'Tried to drop relation {}, but its type is null.'
-                .format(relation))
+                "Tried to drop relation {}, but its type is null.".format(relation)
+            )
 
         self.cache_dropped(relation)
         self.execute_macro(
@@ -167,7 +167,7 @@ def truncate_relation(self, relation):
     def rename_relation(self, from_relation, to_relation):
         self.cache_renamed(from_relation, to_relation)
 
-        kwargs = {'from_relation': from_relation, 'to_relation': to_relation}
+        kwargs = {"from_relation": from_relation, "to_relation": to_relation}
         self.execute_macro(
             RENAME_RELATION_MACRO_NAME,
             kwargs=kwargs
@@ -221,13 +221,15 @@ def list_relations_without_caching(
                 _type = self.Relation.get_relation_type(_type)
             except ValueError:
                 _type = self.Relation.External
-            relations.append(self.Relation.create(
-                database=_database,
-                schema=_schema,
-                identifier=name,
-                quote_policy=quote_policy,
-                type=_type
-            ))
+            relations.append(
+                self.Relation.create(
+                    database=_database,
+                    schema=_schema,
+                    identifier=name,
+                    quote_policy=quote_policy,
+                    type=_type,
+                )
+            )
         return relations
 
     def quote(self, identifier):
diff --git a/core/dbt/task/runnable.py b/core/dbt/task/runnable.py
index 6dbef5a5133..371fe3240d5 100644
--- a/core/dbt/task/runnable.py
+++ b/core/dbt/task/runnable.py
@@ -58,7 +58,7 @@
 from dbt.ui import warning_tag
 
 RESULT_FILE_NAME = 'run_results.json'
-MANIFEST_FILE_NAME = 'manifest.json'
+MANIFEST_FILE_NAME = "manifest.json"
 RUNNING_STATE = DbtProcessState('running')
 
 
diff --git a/plugins/postgres/dbt/adapters/postgres/impl.py b/plugins/postgres/dbt/adapters/postgres/impl.py
index 0a88e676e73..c71d4aa0b34 100644
--- a/plugins/postgres/dbt/adapters/postgres/impl.py
+++ b/plugins/postgres/dbt/adapters/postgres/impl.py
@@ -13,7 +13,7 @@
 
 
 # note that this isn't an adapter macro, so just a single underscore
-GET_RELATIONS_MACRO_NAME = 'postgres_get_relations'
+GET_RELATIONS_MACRO_NAME = "postgres_get_relations"
 
 
 @dataclass
@@ -77,11 +77,12 @@ def verify_database(self, database):
         expected = self.config.credentials.database
         if database.lower() != expected.lower():
             raise dbt.exceptions.NotImplementedException(
-                'Cross-db references not allowed in {} ({} vs {})'
-                .format(self.type(), database, expected)
+                "Cross-db references not allowed in {} ({} vs {})".format(
+                    self.type(), database, expected
+                )
             )
         # return an empty string on success so macros can call this
-        return ''
+        return ""
 
     @available
     def parse_index(self, raw_index: Any) -> Optional[PostgresIndexConfig]:

From 60f8254c74f5205b15b7e724dc1cb62dfca33db9 Mon Sep 17 00:00:00 2001
From: Fokko Driesprong 
Date: Fri, 6 Dec 2019 16:00:53 +0100
Subject: [PATCH 120/933] Limit the line length

automatic commit by git-black, original commits:
  5a929b095cb95bee81dd6252dc8cfd6b587e137a
---
 core/dbt/adapters/sql/impl.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/core/dbt/adapters/sql/impl.py b/core/dbt/adapters/sql/impl.py
index 0a2589e7cfd..4711a46fcf3 100644
--- a/core/dbt/adapters/sql/impl.py
+++ b/core/dbt/adapters/sql/impl.py
@@ -70,9 +70,7 @@ def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str:
         return "text"
 
     @classmethod
-    def convert_number_type(
-        cls, agate_table: agate.Table, col_idx: int
-    ) -> str:
+    def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str:
         decimals = agate_table.aggregate(agate.MaxPrecision(col_idx))
         return "float8" if decimals else "integer"
 

From d24fc503c41e560398fee27584350aa64b733e5e Mon Sep 17 00:00:00 2001
From: Fokko Driesprong 
Date: Fri, 6 Dec 2019 08:36:32 +0100
Subject: [PATCH 121/933] Fix code style violations

automatic commit by git-black, original commits:
  9975cd5f2773884055e6f442fbb00f32cbae1916
---
 core/dbt/adapters/sql/impl.py | 12 +++---------
 1 file changed, 3 insertions(+), 9 deletions(-)

diff --git a/core/dbt/adapters/sql/impl.py b/core/dbt/adapters/sql/impl.py
index 4711a46fcf3..2d0642883ec 100644
--- a/core/dbt/adapters/sql/impl.py
+++ b/core/dbt/adapters/sql/impl.py
@@ -75,15 +75,11 @@ def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str:
         return "float8" if decimals else "integer"
 
     @classmethod
-    def convert_boolean_type(
-            cls, agate_table: agate.Table, col_idx: int
-    ) -> str:
+    def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str:
         return "boolean"
 
     @classmethod
-    def convert_datetime_type(
-            cls, agate_table: agate.Table, col_idx: int
-    ) -> str:
+    def convert_datetime_type(cls, agate_table: agate.Table, col_idx: int) -> str:
         return "timestamp without time zone"
 
     @classmethod
@@ -125,9 +121,7 @@ def expand_column_types(self, goal, current):
 
                 self.alter_column_type(current, column_name, new_type)
 
-    def alter_column_type(
-            self, relation, column_name, new_column_type
-    ) -> None:
+    def alter_column_type(self, relation, column_name, new_column_type) -> None:
         """
         1. Create a new column (w/ temp name and correct type)
         2. Copy data over to it

From 10318608cbc5048f5c08a5e3ce51014efe854570 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 4 Mar 2019 21:08:38 -0700
Subject: [PATCH 122/933] per-thread connections

parsing now always opens a connection, instead of waiting to need it
remove model_name/available_raw/etc


automatic commit by git-black, original commits:
  350b81db99e8e926352b80053e763d6dfab7a940
  e2af871a5adc1b6b0269ef900e04320550ac1bb8
---
 core/dbt/adapters/sql/impl.py | 10 ++--------
 1 file changed, 2 insertions(+), 8 deletions(-)

diff --git a/core/dbt/adapters/sql/impl.py b/core/dbt/adapters/sql/impl.py
index 2d0642883ec..ba4a760132b 100644
--- a/core/dbt/adapters/sql/impl.py
+++ b/core/dbt/adapters/sql/impl.py
@@ -95,15 +95,9 @@ def is_cancelable(cls) -> bool:
         return True
 
     def expand_column_types(self, goal, current):
-        reference_columns = {
-            c.name: c for c in
-            self.get_columns_in_relation(goal)
-        }
+        reference_columns = {c.name: c for c in self.get_columns_in_relation(goal)}
 
-        target_columns = {
-            c.name: c for c
-            in self.get_columns_in_relation(current)
-        }
+        target_columns = {c.name: c for c in self.get_columns_in_relation(current)}
 
         for column_name, reference_column in reference_columns.items():
             target_column = target_columns.get(column_name)

From c26c8e97dc3fe938fd4534af4ad36934d5464c01 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 4 Mar 2019 21:08:38 -0700
Subject: [PATCH 123/933] per-thread connections

parsing now always opens a connection, instead of waiting to need it
remove model_name/available_raw/etc


automatic commit by git-black, original commits:
  70069f53b13d7c53ae0dc77ed171d6e77542b588
  e2af871a5adc1b6b0269ef900e04320550ac1bb8
---
 core/dbt/adapters/sql/impl.py | 33 +++++++--------------------------
 1 file changed, 7 insertions(+), 26 deletions(-)

diff --git a/core/dbt/adapters/sql/impl.py b/core/dbt/adapters/sql/impl.py
index ba4a760132b..43a42060fef 100644
--- a/core/dbt/adapters/sql/impl.py
+++ b/core/dbt/adapters/sql/impl.py
@@ -127,10 +127,7 @@ def alter_column_type(self, relation, column_name, new_column_type) -> None:
             "column_name": column_name,
             "new_column_type": new_column_type,
         }
-        self.execute_macro(
-            ALTER_COLUMN_TYPE_MACRO_NAME,
-            kwargs=kwargs
-        )
+        self.execute_macro(ALTER_COLUMN_TYPE_MACRO_NAME, kwargs=kwargs)
 
     def drop_relation(self, relation):
         if relation.type is None:
@@ -139,16 +136,10 @@ def drop_relation(self, relation):
             )
 
         self.cache_dropped(relation)
-        self.execute_macro(
-            DROP_RELATION_MACRO_NAME,
-            kwargs={'relation': relation}
-        )
+        self.execute_macro(DROP_RELATION_MACRO_NAME, kwargs={"relation": relation})
 
     def truncate_relation(self, relation):
-        self.execute_macro(
-            TRUNCATE_RELATION_MACRO_NAME,
-            kwargs={'relation': relation}
-        )
+        self.execute_macro(TRUNCATE_RELATION_MACRO_NAME, kwargs={"relation": relation})
 
     def rename_relation(self, from_relation, to_relation):
         self.cache_renamed(from_relation, to_relation)
@@ -161,8 +152,7 @@ def rename_relation(self, from_relation, to_relation):
 
     def get_columns_in_relation(self, relation):
         return self.execute_macro(
-            GET_COLUMNS_IN_RELATION_MACRO_NAME,
-            kwargs={'relation': relation}
+            GET_COLUMNS_IN_RELATION_MACRO_NAME, kwargs={"relation": relation}
         )
 
     def create_schema(self, relation: BaseRelation) -> None:
@@ -191,10 +181,7 @@ def list_relations_without_caching(
         schema_relation: BaseRelation,
     ) -> List[BaseRelation]:
         kwargs = {"schema_relation": schema_relation}
-        results = self.execute_macro(
-            LIST_RELATIONS_MACRO_NAME,
-            kwargs=kwargs
-        )
+        results = self.execute_macro(LIST_RELATIONS_MACRO_NAME, kwargs=kwargs)
 
         relations = []
         quote_policy = {
@@ -222,10 +209,7 @@ def quote(self, identifier):
         return '"{}"'.format(identifier)
 
     def list_schemas(self, database: str) -> List[str]:
-        results = self.execute_macro(
-            LIST_SCHEMAS_MACRO_NAME,
-            kwargs={'database': database}
-        )
+        results = self.execute_macro(LIST_SCHEMAS_MACRO_NAME, kwargs={"database": database})
 
         return [row[0] for row in results]
 
@@ -238,8 +222,5 @@ def check_schema_exists(self, database: str, schema: str) -> bool:
         ).information_schema()
 
         kwargs = {'information_schema': information_schema, 'schema': schema}
-        results = self.execute_macro(
-            CHECK_SCHEMA_EXISTS_MACRO_NAME,
-            kwargs=kwargs
-        )
+        results = self.execute_macro(CHECK_SCHEMA_EXISTS_MACRO_NAME, kwargs=kwargs)
         return results[0][0] > 0

From 684a57be49518c8cddcbdf2907a5593ef428b880 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 4 Mar 2019 21:08:38 -0700
Subject: [PATCH 124/933] per-thread connections

parsing now always opens a connection, instead of waiting to need it
remove model_name/available_raw/etc


automatic commit by git-black, original commits:
  350b81db99e8e926352b80053e763d6dfab7a940
  70069f53b13d7c53ae0dc77ed171d6e77542b588
  e2af871a5adc1b6b0269ef900e04320550ac1bb8
---
 core/dbt/adapters/sql/impl.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/core/dbt/adapters/sql/impl.py b/core/dbt/adapters/sql/impl.py
index 43a42060fef..bc23d19fb8f 100644
--- a/core/dbt/adapters/sql/impl.py
+++ b/core/dbt/adapters/sql/impl.py
@@ -145,10 +145,7 @@ def rename_relation(self, from_relation, to_relation):
         self.cache_renamed(from_relation, to_relation)
 
         kwargs = {"from_relation": from_relation, "to_relation": to_relation}
-        self.execute_macro(
-            RENAME_RELATION_MACRO_NAME,
-            kwargs=kwargs
-        )
+        self.execute_macro(RENAME_RELATION_MACRO_NAME, kwargs=kwargs)
 
     def get_columns_in_relation(self, relation):
         return self.execute_macro(

From 78a7da664d344a712f8a226694e2322ebb963601 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Tue, 16 Apr 2019 10:20:13 -0600
Subject: [PATCH 125/933] Quote databases when we list them Fix a copy+paste
 error that broke database quoting configuration

automatic commit by git-black, original commits:
  70069f53b13d7c53ae0dc77ed171d6e77542b588
  97a6a51bece3a0ec033f274a84dd0a10164ee8e5
---
 core/dbt/adapters/sql/impl.py | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/core/dbt/adapters/sql/impl.py b/core/dbt/adapters/sql/impl.py
index bc23d19fb8f..a18aa01f374 100644
--- a/core/dbt/adapters/sql/impl.py
+++ b/core/dbt/adapters/sql/impl.py
@@ -181,11 +181,7 @@ def list_relations_without_caching(
         results = self.execute_macro(LIST_RELATIONS_MACRO_NAME, kwargs=kwargs)
 
         relations = []
-        quote_policy = {
-            'database': True,
-            'schema': True,
-            'identifier': True
-        }
+        quote_policy = {"database": True, "schema": True, "identifier": True}
         for _database, name, _schema, _type in results:
             try:
                 _type = self.Relation.get_relation_type(_type)

From 87742844947edb4a800c21ee46cf1e099629c811 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 22 Apr 2019 10:17:50 -0600
Subject: [PATCH 126/933] add quote policy to Relation.create calls

automatic commit by git-black, original commits:
  8b58b208cacd685e21312f7a225a542705712842
---
 core/dbt/adapters/sql/impl.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/dbt/adapters/sql/impl.py b/core/dbt/adapters/sql/impl.py
index a18aa01f374..b1b63905a25 100644
--- a/core/dbt/adapters/sql/impl.py
+++ b/core/dbt/adapters/sql/impl.py
@@ -211,7 +211,7 @@ def check_schema_exists(self, database: str, schema: str) -> bool:
             database=database,
             schema=schema,
             identifier="INFORMATION_SCHEMA",
-            quote_policy=self.config.quoting
+            quote_policy=self.config.quoting,
         ).information_schema()
 
         kwargs = {'information_schema': information_schema, 'schema': schema}

From fc75aca8aca8fb3c7e8ce6c741c95b9aed0bec85 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Tue, 5 Mar 2019 18:02:13 -0700
Subject: [PATCH 127/933] fix macro kwargs

automatic commit by git-black, original commits:
  2501783d62c3b1efebcc28d6ed991986751baaed
---
 core/dbt/adapters/sql/impl.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/dbt/adapters/sql/impl.py b/core/dbt/adapters/sql/impl.py
index b1b63905a25..ca5aacfc515 100644
--- a/core/dbt/adapters/sql/impl.py
+++ b/core/dbt/adapters/sql/impl.py
@@ -214,6 +214,6 @@ def check_schema_exists(self, database: str, schema: str) -> bool:
             quote_policy=self.config.quoting,
         ).information_schema()
 
-        kwargs = {'information_schema': information_schema, 'schema': schema}
+        kwargs = {"information_schema": information_schema, "schema": schema}
         results = self.execute_macro(CHECK_SCHEMA_EXISTS_MACRO_NAME, kwargs=kwargs)
         return results[0][0] > 0

From e4c50b463b3ea436ec9b155721961c486d352f45 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 25 Mar 2019 14:28:41 -0600
Subject: [PATCH 128/933] make the block lexer include raw toplevel data

automatic commit by git-black, original commits:
  73418b5c167abd009b985a90028c0e6d6e9340a4
---
 core/dbt/clients/_jinja_blocks.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/core/dbt/clients/_jinja_blocks.py b/core/dbt/clients/_jinja_blocks.py
index 8a5a1dae948..bd8e3c3158f 100644
--- a/core/dbt/clients/_jinja_blocks.py
+++ b/core/dbt/clients/_jinja_blocks.py
@@ -10,8 +10,9 @@ def regex(pat):
 
 class BlockData:
     """raw plaintext data from the top level of the file."""
+
     def __init__(self, contents):
-        self.block_type_name = '__dbt__data'
+        self.block_type_name = "__dbt__data"
         self.contents = contents
         self.full_block = contents
 

From 0d71c9ce86d060168956b5d6b62424c68420821e Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 15 Apr 2019 11:59:53 -0600
Subject: [PATCH 129/933] Fix a number of bugs

After we find the start of a comment block, advance immediately
 - this is so we do not mistake "{#}" as both open and close of comment
support do/set statements (no {% enddo %}/{% endset %})
fix some edge-case bugs around quoting
fix a bug around materialization parsing


automatic commit by git-black, original commits:
  9b1aede911304db8bdf2056a11e7da493795f5ce
---
 core/dbt/clients/_jinja_blocks.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/core/dbt/clients/_jinja_blocks.py b/core/dbt/clients/_jinja_blocks.py
index bd8e3c3158f..90a122675ee 100644
--- a/core/dbt/clients/_jinja_blocks.py
+++ b/core/dbt/clients/_jinja_blocks.py
@@ -18,8 +18,7 @@ def __init__(self, contents):
 
 
 class BlockTag:
-    def __init__(self, block_type_name, block_name, contents=None,
-                 full_block=None, **kw):
+    def __init__(self, block_type_name, block_name, contents=None, full_block=None, **kw):
         self.block_type_name = block_type_name
         self.block_name = block_name
         self.contents = contents

From 77f294409307845f9c4b5db2e39a8324f8354210 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 13 Mar 2019 09:12:14 -0600
Subject: [PATCH 130/933] Jinja block parsing/lexing implemented

automatic commit by git-black, original commits:
  98d530f0b11903de0494167c5f2b85afdb432f63
---
 core/dbt/clients/_jinja_blocks.py | 56 +++++++++++++++++--------------
 1 file changed, 31 insertions(+), 25 deletions(-)

diff --git a/core/dbt/clients/_jinja_blocks.py b/core/dbt/clients/_jinja_blocks.py
index 90a122675ee..1696211cfbe 100644
--- a/core/dbt/clients/_jinja_blocks.py
+++ b/core/dbt/clients/_jinja_blocks.py
@@ -25,62 +25,68 @@ def __init__(self, block_type_name, block_name, contents=None, full_block=None,
         self.full_block = full_block
 
     def __str__(self):
-        return 'BlockTag({!r}, {!r})'.format(self.block_type_name,
-                                             self.block_name)
+        return "BlockTag({!r}, {!r})".format(self.block_type_name, self.block_name)
 
     def __repr__(self):
         return str(self)
 
     @property
     def end_block_type_name(self):
-        return 'end{}'.format(self.block_type_name)
+        return "end{}".format(self.block_type_name)
 
     def end_pat(self):
         # we don't want to use string formatting here because jinja uses most
         # of the string formatting operators in its syntax...
-        pattern = ''.join((
-            r'(?P((?:\s*\{\%\-|\{\%)\s*',
-            self.end_block_type_name,
-            r'\s*(?:\-\%\}\s*|\%\})))',
-        ))
+        pattern = "".join(
+            (
+                r"(?P((?:\s*\{\%\-|\{\%)\s*",
+                self.end_block_type_name,
+                r"\s*(?:\-\%\}\s*|\%\})))",
+            )
+        )
         return regex(pattern)
 
 
 Tag = namedtuple('Tag', 'block_type_name block_name start end')
 
 
-_NAME_PATTERN = r'[A-Za-z_][A-Za-z_0-9]*'
+_NAME_PATTERN = r"[A-Za-z_][A-Za-z_0-9]*"
 
-COMMENT_START_PATTERN = regex(r'(?:(?P(\s*\{\#)))')
-COMMENT_END_PATTERN = regex(r'(.*?)(\s*\#\})')
-RAW_START_PATTERN = regex(
-    r'(?:\s*\{\%\-|\{\%)\s*(?P(raw))\s*(?:\-\%\}\s*|\%\})'
+COMMENT_START_PATTERN = regex(r"(?:(?P(\s*\{\#)))")
+COMMENT_END_PATTERN = regex(r"(.*?)(\s*\#\})")
+RAW_START_PATTERN = regex(r"(?:\s*\{\%\-|\{\%)\s*(?P(raw))\s*(?:\-\%\}\s*|\%\})")
+EXPR_START_PATTERN = regex(r"(?P(\{\{\s*))")
 )
 EXPR_START_PATTERN = regex(r'(?P(\{\{\s*))')
 EXPR_END_PATTERN = regex(r'(?P(\s*\}\}))')
 
-BLOCK_START_PATTERN = regex(''.join((
-    r'(?:\s*\{\%\-|\{\%)\s*',
-    r'(?P({}))'.format(_NAME_PATTERN),
-    # some blocks have a 'block name'.
-    r'(?:\s+(?P({})))?'.format(_NAME_PATTERN),
-)))
+BLOCK_START_PATTERN = regex(
+    "".join(
+        (
+            r"(?:\s*\{\%\-|\{\%)\s*",
+            r"(?P({}))".format(_NAME_PATTERN),
+            # some blocks have a 'block name'.
+            r"(?:\s+(?P({})))?".format(_NAME_PATTERN),
+        )
+    )
+)
 
 
 RAW_BLOCK_PATTERN = regex(''.join((
     r'(?:\s*\{\%\-|\{\%)\s*raw\s*(?:\-\%\}\s*|\%\})',
     r'(?:.*?)',
     r'(?:\s*\{\%\-|\{\%)\s*endraw\s*(?:\-\%\}\s*|\%\})',
-)))
+            r"(?:.*?)",
+            r"(?:\s*\{\%\-|\{\%)\s*endraw\s*(?:\-\%\}\s*|\%\})",
+        )
+    )
+)
 
 TAG_CLOSE_PATTERN = regex(r'(?:(?P(\-\%\}\s*|\%\})))')
 
 # stolen from jinja's lexer. Note that we've consumed all prefix whitespace by
 # the time we want to use this.
-STRING_PATTERN = regex(
-    r"(?P('([^'\\]*(?:\\.[^'\\]*)*)'|"
-    r'"([^"\\]*(?:\\.[^"\\]*)*)"))'
-)
+STRING_PATTERN = regex(r"(?P('([^'\\]*(?:\\.[^'\\]*)*)'|" r'"([^"\\]*(?:\\.[^"\\]*)*)"))')
 
 QUOTE_START_PATTERN = regex(r'''(?P(['"]))''')
 
@@ -120,7 +126,7 @@ def _first_match(self, *patterns, **kwargs):
         matches = []
         for pattern in patterns:
             # default to 'search', but sometimes we want to 'match'.
-            if kwargs.get('method', 'search') == 'search':
+            if kwargs.get("method", "search") == "search":
                 match = self._search(pattern)
             else:
                 match = self._match(pattern)

From 138c734380206694c20d1ebdc46abed75bbd8357 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Tue, 18 Jun 2019 10:41:04 -0600
Subject: [PATCH 131/933] clean up block parsing to make it dumber and more
 effective

automatic commit by git-black, original commits:
  92ef7839481f091f1d91c93131cd48134c52b036
---
 core/dbt/clients/_jinja_blocks.py | 79 ++++++++++++++-----------------
 1 file changed, 35 insertions(+), 44 deletions(-)

diff --git a/core/dbt/clients/_jinja_blocks.py b/core/dbt/clients/_jinja_blocks.py
index 1696211cfbe..b8d09b7211f 100644
--- a/core/dbt/clients/_jinja_blocks.py
+++ b/core/dbt/clients/_jinja_blocks.py
@@ -47,7 +47,7 @@ def end_pat(self):
         return regex(pattern)
 
 
-Tag = namedtuple('Tag', 'block_type_name block_name start end')
+Tag = namedtuple("Tag", "block_type_name block_name start end")
 
 
 _NAME_PATTERN = r"[A-Za-z_][A-Za-z_0-9]*"
@@ -72,17 +72,17 @@ def end_pat(self):
 )
 
 
-RAW_BLOCK_PATTERN = regex(''.join((
-    r'(?:\s*\{\%\-|\{\%)\s*raw\s*(?:\-\%\}\s*|\%\})',
+RAW_BLOCK_PATTERN = regex(
+    "".join(
     r'(?:.*?)',
-    r'(?:\s*\{\%\-|\{\%)\s*endraw\s*(?:\-\%\}\s*|\%\})',
+            r"(?:\s*\{\%\-|\{\%)\s*raw\s*(?:\-\%\}\s*|\%\})",
             r"(?:.*?)",
             r"(?:\s*\{\%\-|\{\%)\s*endraw\s*(?:\-\%\}\s*|\%\})",
         )
     )
 )
 
-TAG_CLOSE_PATTERN = regex(r'(?:(?P(\-\%\}\s*|\%\})))')
+TAG_CLOSE_PATTERN = regex(r"(?:(?P(\-\%\}\s*|\%\})))")
 
 # stolen from jinja's lexer. Note that we've consumed all prefix whitespace by
 # the time we want to use this.
@@ -177,7 +177,7 @@ def handle_expr(self, match):
 
     def handle_comment(self, match):
         self.advance(match.end())
-        match = self._expect_match('#}', COMMENT_END_PATTERN)
+        match = self._expect_match("#}", COMMENT_END_PATTERN)
         self.advance(match.end())
 
     def _expect_block_close(self):
@@ -194,22 +194,19 @@ def _expect_block_close(self):
         """
         while True:
             end_match = self._expect_match(
-                'tag close ("%}")',
-                QUOTE_START_PATTERN,
-                TAG_CLOSE_PATTERN
+                'tag close ("%}")', QUOTE_START_PATTERN, TAG_CLOSE_PATTERN
             )
             self.advance(end_match.end())
-            if end_match.groupdict().get('tag_close') is not None:
+            if end_match.groupdict().get("tag_close") is not None:
                 return
             # must be a string. Rewind to its start and advance past it.
             self.rewind()
-            string_match = self._expect_match('string', STRING_PATTERN)
+            string_match = self._expect_match("string", STRING_PATTERN)
             self.advance(string_match.end())
 
     def handle_raw(self):
         # raw blocks are super special, they are a single complete regex
-        match = self._expect_match('{% raw %}...{% endraw %}',
-                                   RAW_BLOCK_PATTERN)
+        match = self._expect_match("{% raw %}...{% endraw %}", RAW_BLOCK_PATTERN)
         self.advance(match.end())
         return match.end()
 
@@ -226,22 +223,18 @@ def handle_tag(self, match):
         """
         groups = match.groupdict()
         # always a value
-        block_type_name = groups['block_type_name']
+        block_type_name = groups["block_type_name"]
         # might be None
-        block_name = groups.get('block_name')
+        block_name = groups.get("block_name")
         start_pos = self.pos
         if block_type_name == 'raw':
-            match = self._expect_match('{% raw %}...{% endraw %}',
-                                       RAW_BLOCK_PATTERN)
+            match = self._expect_match("{% raw %}...{% endraw %}", RAW_BLOCK_PATTERN)
             self.advance(match.end())
         else:
             self.advance(match.end())
             self._expect_block_close()
         return Tag(
-            block_type_name=block_type_name,
-            block_name=block_name,
-            start=start_pos,
-            end=self.pos
+            block_type_name=block_type_name, block_name=block_name, start=start_pos, end=self.pos
         )
 
     def find_tags(self):
@@ -279,21 +272,18 @@ def __iter__(self):
 
 
 duplicate_tags = (
-    'Got nested tags: {outer.block_type_name} (started at {outer.start}) did '
-    'not have a matching {{% end{outer.block_type_name} %}} before a '
-    'subsequent {inner.block_type_name} was found (started at {inner.start})'
+    "Got nested tags: {outer.block_type_name} (started at {outer.start}) did "
+    "not have a matching {{% end{outer.block_type_name} %}} before a "
+    "subsequent {inner.block_type_name} was found (started at {inner.start})"
 )
 
 
 _CONTROL_FLOW_TAGS = {
-    'if': 'endif',
-    'for': 'endfor',
+    "if": "endif",
+    "for": "endfor",
 }
 
-_CONTROL_FLOW_END_TAGS = {
-    v: k
-    for k, v in _CONTROL_FLOW_TAGS.items()
-}
+_CONTROL_FLOW_END_TAGS = {v: k for k, v in _CONTROL_FLOW_TAGS.items()}
 
 
 class BlockIterator:
@@ -316,9 +306,9 @@ def data(self):
 
     def is_current_end(self, tag):
         return (
-            tag.block_type_name.startswith('end') and
-            self.current is not None and
-            tag.block_type_name[3:] == self.current.block_type_name
+            tag.block_type_name.startswith("end")
+            and self.current is not None
+            and tag.block_type_name[3:] == self.current.block_type_name
         )
 
     def find_blocks(self, allowed_blocks=None, collect_raw_data=True):
@@ -345,9 +335,9 @@ def find_blocks(self, allowed_blocks=None, collect_raw_data=True):
                     ))
                 expected = _CONTROL_FLOW_TAGS[found]
                 if expected != tag.block_type_name:
-                    dbt.exceptions.raise_compiler_error((
+                    dbt.exceptions.raise_compiler_error(
                         'Got an unexpected control flow end tag, got {} but '
-                        'expected {} next (@ {})'
+                            "Got an unexpected control flow end tag, got {} but "
                     ).format(
                         tag.block_type_name,
                         expected,
@@ -356,16 +346,16 @@ def find_blocks(self, allowed_blocks=None, collect_raw_data=True):
 
             if tag.block_type_name in allowed_blocks:
                 if self.stack:
-                    dbt.exceptions.raise_compiler_error((
-                        'Got a block definition inside control flow at {}. '
-                        'All dbt block definitions must be at the top level'
+                    dbt.exceptions.raise_compiler_error(
+                        (
+                            "Got a block definition inside control flow at {}. "
                     ).format(self.tag_parser.linepos(tag.start)))
                 if self.current is not None:
                     dbt.exceptions.raise_compiler_error(
                         duplicate_tags.format(outer=self.current, inner=tag)
                     )
                 if collect_raw_data:
-                    raw_data = self.data[self.last_position:tag.start]
+                    raw_data = self.data[self.last_position : tag.start]
                     self.last_position = tag.start
                     if raw_data:
                         yield BlockData(raw_data)
@@ -377,23 +367,24 @@ def find_blocks(self, allowed_blocks=None, collect_raw_data=True):
                 yield BlockTag(
                     block_type_name=self.current.block_type_name,
                     block_name=self.current.block_name,
-                    contents=self.data[self.current.end:tag.start],
-                    full_block=self.data[self.current.start:tag.end]
+                    contents=self.data[self.current.end : tag.start],
+                    full_block=self.data[self.current.start : tag.end],
                 )
                 self.current = None
 
         if self.current:
             linecount = self.data[:self.current.end].count('\n') + 1
-            dbt.exceptions.raise_compiler_error((
+            dbt.exceptions.raise_compiler_error(
                 'Reached EOF without finding a close tag for '
                 '{} (searched from line {})'
             ).format(self.current.block_type_name, linecount))
 
         if collect_raw_data:
-            raw_data = self.data[self.last_position:]
+            raw_data = self.data[self.last_position :]
             if raw_data:
                 yield BlockData(raw_data)
 
     def lex_for_blocks(self, allowed_blocks=None, collect_raw_data=True):
         return list(self.find_blocks(allowed_blocks=allowed_blocks,
-                                     collect_raw_data=collect_raw_data))
+            self.find_blocks(allowed_blocks=allowed_blocks, collect_raw_data=collect_raw_data)
+        )

From b133925a24a7abb6ce500538e0cfea35b4fb7ea7 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 5 Jun 2019 13:39:45 -0600
Subject: [PATCH 132/933] Fix docs blocks parsing issues

Rename documentation node type to docs so we can filter on it (is this breaking?)
Fix block extractor bug with macros/docs that contain quotes
Fix block extractor bug with expressions


automatic commit by git-black, original commits:
  98d530f0b11903de0494167c5f2b85afdb432f63
  ab59ebe4f2ab702bc96cc893cb75f0dea1f37ab6
---
 core/dbt/clients/_jinja_blocks.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/core/dbt/clients/_jinja_blocks.py b/core/dbt/clients/_jinja_blocks.py
index b8d09b7211f..59a41ab7153 100644
--- a/core/dbt/clients/_jinja_blocks.py
+++ b/core/dbt/clients/_jinja_blocks.py
@@ -56,9 +56,7 @@ def end_pat(self):
 COMMENT_END_PATTERN = regex(r"(.*?)(\s*\#\})")
 RAW_START_PATTERN = regex(r"(?:\s*\{\%\-|\{\%)\s*(?P(raw))\s*(?:\-\%\}\s*|\%\})")
 EXPR_START_PATTERN = regex(r"(?P(\{\{\s*))")
-)
-EXPR_START_PATTERN = regex(r'(?P(\{\{\s*))')
-EXPR_END_PATTERN = regex(r'(?P(\s*\}\}))')
+EXPR_END_PATTERN = regex(r"(?P(\s*\}\}))")
 
 BLOCK_START_PATTERN = regex(
     "".join(

From 64aba7840790f70f280d44c03cf733b43c587246 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Thu, 26 Mar 2020 13:21:53 -0600
Subject: [PATCH 133/933] Make the contents of the raw blocks regex pattern
 non-greedy

This fixes an issue where a file with multiple raw blocks got treated as one _big_ one
 - it extended from the first {% raw %} to the last {% endraw %}


automatic commit by git-black, original commits:
  8e3c95b48cc8faa3bef21314aa878b1e1b7fc5ab
---
 core/dbt/clients/_jinja_blocks.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/dbt/clients/_jinja_blocks.py b/core/dbt/clients/_jinja_blocks.py
index 59a41ab7153..bebdab7455d 100644
--- a/core/dbt/clients/_jinja_blocks.py
+++ b/core/dbt/clients/_jinja_blocks.py
@@ -72,7 +72,7 @@ def end_pat(self):
 
 RAW_BLOCK_PATTERN = regex(
     "".join(
-    r'(?:.*?)',
+        (
             r"(?:\s*\{\%\-|\{\%)\s*raw\s*(?:\-\%\}\s*|\%\})",
             r"(?:.*?)",
             r"(?:\s*\{\%\-|\{\%)\s*endraw\s*(?:\-\%\}\s*|\%\})",

From a72da866fa48f6e67e92a17ebe57a3524bb0d3c1 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 5 Jun 2019 13:39:45 -0600
Subject: [PATCH 134/933] Fix docs blocks parsing issues

Rename documentation node type to docs so we can filter on it (is this breaking?)
Fix block extractor bug with macros/docs that contain quotes
Fix block extractor bug with expressions


automatic commit by git-black, original commits:
  ab59ebe4f2ab702bc96cc893cb75f0dea1f37ab6
---
 core/dbt/clients/_jinja_blocks.py | 10 ++++------
 core/dbt/node_types.py            |  2 +-
 2 files changed, 5 insertions(+), 7 deletions(-)

diff --git a/core/dbt/clients/_jinja_blocks.py b/core/dbt/clients/_jinja_blocks.py
index bebdab7455d..0229daba67d 100644
--- a/core/dbt/clients/_jinja_blocks.py
+++ b/core/dbt/clients/_jinja_blocks.py
@@ -86,7 +86,7 @@ def end_pat(self):
 # the time we want to use this.
 STRING_PATTERN = regex(r"(?P('([^'\\]*(?:\\.[^'\\]*)*)'|" r'"([^"\\]*(?:\\.[^"\\]*)*)"))')
 
-QUOTE_START_PATTERN = regex(r'''(?P(['"]))''')
+QUOTE_START_PATTERN = regex(r"""(?P(['"]))""")
 
 
 class TagIterator:
@@ -160,15 +160,13 @@ def handle_expr(self, match):
         """
         self.advance(match.end())
         while True:
-            match = self._expect_match('}}',
-                                       EXPR_END_PATTERN,
-                                       QUOTE_START_PATTERN)
-            if match.groupdict().get('expr_end') is not None:
+            match = self._expect_match("}}", EXPR_END_PATTERN, QUOTE_START_PATTERN)
+            if match.groupdict().get("expr_end") is not None:
                 break
             else:
                 # it's a quote. we haven't advanced for this match yet, so
                 # just slurp up the whole string, no need to rewind.
-                match = self._expect_match('string', STRING_PATTERN)
+                match = self._expect_match("string", STRING_PATTERN)
                 self.advance(match.end())
 
         self.advance(match.end())
diff --git a/core/dbt/node_types.py b/core/dbt/node_types.py
index e1356f1edac..52f981ff1c8 100644
--- a/core/dbt/node_types.py
+++ b/core/dbt/node_types.py
@@ -13,7 +13,7 @@ class NodeType(StrEnum):
     # TODO: rm?
     RPCCall = "rpc"
     SqlOperation = 'sql'
-    Documentation = 'docs'
+    Documentation = "docs"
     Source = 'source'
     Macro = "macro"
     Exposure = 'exposure'

From 7133f82f053aa1d581dedc25906e96d827976af1 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Thu, 30 Apr 2020 08:39:51 -0600
Subject: [PATCH 135/933] improve parsing errors, include line:char instead of
 absolute character index

automatic commit by git-black, original commits:
  d17e706351e8d0e7944d4e39104baa23a0c9b02a
---
 core/dbt/clients/_jinja_blocks.py | 26 ++++++++++++--------------
 core/dbt/parser/macros.py         |  4 ++--
 2 files changed, 14 insertions(+), 16 deletions(-)

diff --git a/core/dbt/clients/_jinja_blocks.py b/core/dbt/clients/_jinja_blocks.py
index 0229daba67d..d972d5cc321 100644
--- a/core/dbt/clients/_jinja_blocks.py
+++ b/core/dbt/clients/_jinja_blocks.py
@@ -103,10 +103,10 @@ def linepos(self, end=None) -> str:
         end_val: int = self.pos if end is None else end
         data = self.data[:end_val]
         # if not found, rfind returns -1, and -1+1=0, which is perfect!
-        last_line_start = data.rfind('\n') + 1
+        last_line_start = data.rfind("\n") + 1
         # it's easy to forget this, but line numbers are 1-indexed
-        line_number = data.count('\n') + 1
-        return f'{line_number}:{end_val - last_line_start}'
+        line_number = data.count("\n") + 1
+        return f"{line_number}:{end_val - last_line_start}"
 
     def advance(self, new_position):
         self.pos = new_position
@@ -324,28 +324,26 @@ def find_blocks(self, allowed_blocks=None, collect_raw_data=True):
                     dbt.exceptions.raise_compiler_error((
                         'Got an unexpected control flow end tag, got {} but '
                         'never saw a preceeding {} (@ {})'
-                    ).format(
-                        tag.block_type_name,
-                        expected,
-                        self.tag_parser.linepos(tag.start)
-                    ))
+                            "never saw a preceeding {} (@ {})"
+                        ).format(tag.block_type_name, expected, self.tag_parser.linepos(tag.start))
+                    )
                 expected = _CONTROL_FLOW_TAGS[found]
                 if expected != tag.block_type_name:
                     dbt.exceptions.raise_compiler_error(
                         'Got an unexpected control flow end tag, got {} but '
                             "Got an unexpected control flow end tag, got {} but "
-                    ).format(
-                        tag.block_type_name,
-                        expected,
-                        self.tag_parser.linepos(tag.start)
-                    ))
+                            "expected {} next (@ {})"
+                        ).format(tag.block_type_name, expected, self.tag_parser.linepos(tag.start))
+                    )
 
             if tag.block_type_name in allowed_blocks:
                 if self.stack:
                     dbt.exceptions.raise_compiler_error(
                         (
                             "Got a block definition inside control flow at {}. "
-                    ).format(self.tag_parser.linepos(tag.start)))
+                            "All dbt block definitions must be at the top level"
+                        ).format(self.tag_parser.linepos(tag.start))
+                    )
                 if self.current is not None:
                     dbt.exceptions.raise_compiler_error(
                         duplicate_tags.format(outer=self.current, inner=tag)
diff --git a/core/dbt/parser/macros.py b/core/dbt/parser/macros.py
index 6d4313dac1c..67b36a4f7c0 100644
--- a/core/dbt/parser/macros.py
+++ b/core/dbt/parser/macros.py
@@ -54,8 +54,8 @@ def parse_unparsed_macros(
     ) -> Iterable[ParsedMacro]:
         try:
             blocks: List[jinja.BlockTag] = [
-                t for t in
-                jinja.extract_toplevel_blocks(
+                t
+                for t in jinja.extract_toplevel_blocks(
                     base_node.raw_sql,
                     allowed_blocks={'macro', 'materialization', 'test'},
                     collect_raw_data=False,

From 3a1a32e663610d1d8694749836420b5fe5cd3882 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Thu, 21 Mar 2019 08:42:40 -0600
Subject: [PATCH 136/933] flake8, pep8, unit tests

automatic commit by git-black, original commits:
  af8622e8fffbbf6d2022fb1e40f9b07ce2bb12b7
---
 core/dbt/clients/_jinja_blocks.py | 2 +-
 core/dbt/exceptions.py            | 4 ++--
 core/dbt/main.py                  | 2 +-
 core/dbt/semver.py                | 3 ++-
 4 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/core/dbt/clients/_jinja_blocks.py b/core/dbt/clients/_jinja_blocks.py
index d972d5cc321..e50c859c898 100644
--- a/core/dbt/clients/_jinja_blocks.py
+++ b/core/dbt/clients/_jinja_blocks.py
@@ -140,7 +140,7 @@ def _expect_match(self, expected_name, *patterns, **kwargs):
         match = self._first_match(*patterns, **kwargs)
         if match is None:
             msg = 'unexpected EOF, expected {}, got "{}"'.format(
-                expected_name, self.data[self.pos:]
+                expected_name, self.data[self.pos :]
             )
             dbt.exceptions.raise_compiler_error(msg)
         return match
diff --git a/core/dbt/exceptions.py b/core/dbt/exceptions.py
index 84457bee466..842092894fd 100644
--- a/core/dbt/exceptions.py
+++ b/core/dbt/exceptions.py
@@ -892,8 +892,8 @@ def raise_ambiguous_catalog_match(unique_id, match_1, match_2):
 
     def get_match_string(match):
         return "{}.{}".format(
-            match.get('metadata', {}).get('schema'),
-            match.get('metadata', {}).get('name'))
+            match.get("metadata", {}).get("schema"), match.get("metadata", {}).get("name")
+        )
 
     raise_compiler_error(
         'dbt found two relations in your warehouse with similar database '
diff --git a/core/dbt/main.py b/core/dbt/main.py
index 8fb8bfa72c3..5ecadc31289 100644
--- a/core/dbt/main.py
+++ b/core/dbt/main.py
@@ -333,7 +333,7 @@ def _build_source_subparser(subparsers, base_subparser):
 
 def _build_init_subparser(subparsers, base_subparser):
     sub = subparsers.add_parser(
-        'init',
+        "init",
         parents=[base_subparser],
         help='''
         Initialize a new DBT project.
diff --git a/core/dbt/semver.py b/core/dbt/semver.py
index f366e7419c3..71be4925af3 100644
--- a/core/dbt/semver.py
+++ b/core/dbt/semver.py
@@ -62,7 +62,8 @@ class VersionSpecification(dbtClassMixin):
 """.format(
     matchers=_MATCHERS,
     base_version_regex=_BASE_VERSION_REGEX,
-    version_extra_regex=_VERSION_EXTRA_REGEX)
+    version_extra_regex=_VERSION_EXTRA_REGEX,
+)
 
 _VERSION_REGEX = re.compile(_VERSION_REGEX_PAT_STR, re.VERBOSE)
 

From 9d4d3236dd6689b1b4e1190db50fec60c0021665 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Thu, 13 Jun 2019 11:56:41 -0600
Subject: [PATCH 137/933] Block parsing now uses recursive descent...

automatic commit by git-black, original commits:
  a2cae7df2989ff6efb124ce970929b1d4c8cc8ce
---
 core/dbt/clients/_jinja_blocks.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/core/dbt/clients/_jinja_blocks.py b/core/dbt/clients/_jinja_blocks.py
index e50c859c898..1df77c183b7 100644
--- a/core/dbt/clients/_jinja_blocks.py
+++ b/core/dbt/clients/_jinja_blocks.py
@@ -223,7 +223,7 @@ def handle_tag(self, match):
         # might be None
         block_name = groups.get("block_name")
         start_pos = self.pos
-        if block_type_name == 'raw':
+        if block_type_name == "raw":
             match = self._expect_match("{% raw %}...{% endraw %}", RAW_BLOCK_PATTERN)
             self.advance(match.end())
         else:
@@ -247,9 +247,9 @@ def find_tags(self):
             # start = self.pos
 
             groups = match.groupdict()
-            comment_start = groups.get('comment_start')
-            expr_start = groups.get('expr_start')
-            block_type_name = groups.get('block_type_name')
+            comment_start = groups.get("comment_start")
+            expr_start = groups.get("expr_start")
+            block_type_name = groups.get("block_type_name")
 
             if comment_start is not None:
                 self.handle_comment(match)
@@ -259,8 +259,8 @@ def find_tags(self):
                 yield self.handle_tag(match)
             else:
                 raise dbt.exceptions.InternalException(
-                    'Invalid regex match in next_block, expected block start, '
-                    'expr start, or comment start'
+                    "Invalid regex match in next_block, expected block start, "
+                    "expr start, or comment start"
                 )
 
     def __iter__(self):

From 970f18c5c2d0716265f520d296bfaa2ea22efb09 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Tue, 18 Jun 2019 10:41:04 -0600
Subject: [PATCH 138/933] clean up block parsing to make it dumber and more
 effective

automatic commit by git-black, original commits:
  92ef7839481f091f1d91c93131cd48134c52b036
  a2cae7df2989ff6efb124ce970929b1d4c8cc8ce
---
 core/dbt/clients/_jinja_blocks.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/core/dbt/clients/_jinja_blocks.py b/core/dbt/clients/_jinja_blocks.py
index 1df77c183b7..c3e445c226c 100644
--- a/core/dbt/clients/_jinja_blocks.py
+++ b/core/dbt/clients/_jinja_blocks.py
@@ -236,9 +236,7 @@ def handle_tag(self, match):
     def find_tags(self):
         while True:
             match = self._first_match(
-                BLOCK_START_PATTERN,
-                COMMENT_START_PATTERN,
-                EXPR_START_PATTERN
+                BLOCK_START_PATTERN, COMMENT_START_PATTERN, EXPR_START_PATTERN
             )
             if match is None:
                 break

From 3ad0734858fa817a43fe71b5c8dc4972457cda86 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 19 Jun 2019 14:31:11 -0600
Subject: [PATCH 139/933] PR feedback

automatic commit by git-black, original commits:
  18953536f1db8bdd1846f43fcc47e967bf49ae44
---
 core/dbt/clients/_jinja_blocks.py | 12 ++++++------
 core/dbt/clients/jinja.py         |  3 +--
 2 files changed, 7 insertions(+), 8 deletions(-)

diff --git a/core/dbt/clients/_jinja_blocks.py b/core/dbt/clients/_jinja_blocks.py
index c3e445c226c..32da4b4d084 100644
--- a/core/dbt/clients/_jinja_blocks.py
+++ b/core/dbt/clients/_jinja_blocks.py
@@ -308,7 +308,7 @@ def is_current_end(self, tag):
     def find_blocks(self, allowed_blocks=None, collect_raw_data=True):
         """Find all top-level blocks in the data."""
         if allowed_blocks is None:
-            allowed_blocks = {'snapshot', 'macro', 'materialization', 'docs'}
+            allowed_blocks = {"snapshot", "macro", "materialization", "docs"}
 
         for tag in self.tag_parser.find_tags():
             if tag.block_type_name in _CONTROL_FLOW_TAGS:
@@ -319,16 +319,16 @@ def find_blocks(self, allowed_blocks=None, collect_raw_data=True):
                     found = self.stack.pop()
                 else:
                     expected = _CONTROL_FLOW_END_TAGS[tag.block_type_name]
-                    dbt.exceptions.raise_compiler_error((
-                        'Got an unexpected control flow end tag, got {} but '
-                        'never saw a preceeding {} (@ {})'
+                    dbt.exceptions.raise_compiler_error(
+                        (
+                            "Got an unexpected control flow end tag, got {} but "
                             "never saw a preceeding {} (@ {})"
                         ).format(tag.block_type_name, expected, self.tag_parser.linepos(tag.start))
                     )
                 expected = _CONTROL_FLOW_TAGS[found]
                 if expected != tag.block_type_name:
                     dbt.exceptions.raise_compiler_error(
-                        'Got an unexpected control flow end tag, got {} but '
+                        (
                             "Got an unexpected control flow end tag, got {} but "
                             "expected {} next (@ {})"
                         ).format(tag.block_type_name, expected, self.tag_parser.linepos(tag.start))
@@ -377,6 +377,6 @@ def find_blocks(self, allowed_blocks=None, collect_raw_data=True):
                 yield BlockData(raw_data)
 
     def lex_for_blocks(self, allowed_blocks=None, collect_raw_data=True):
-        return list(self.find_blocks(allowed_blocks=allowed_blocks,
+        return list(
             self.find_blocks(allowed_blocks=allowed_blocks, collect_raw_data=collect_raw_data)
         )
diff --git a/core/dbt/clients/jinja.py b/core/dbt/clients/jinja.py
index a368cef75fa..f095592f33d 100644
--- a/core/dbt/clients/jinja.py
+++ b/core/dbt/clients/jinja.py
@@ -619,8 +619,7 @@ def extract_toplevel_blocks(
         `collect_raw_data` is `True`) `BlockData` objects.
     """
     return BlockIterator(data).lex_for_blocks(
-        allowed_blocks=allowed_blocks,
-        collect_raw_data=collect_raw_data
+        allowed_blocks=allowed_blocks, collect_raw_data=collect_raw_data
     )
 
 

From 3c066d7370e3bfd2d5ab2ab56f6b8ce2f09e55ea Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Fri, 24 Jan 2020 13:08:53 -0700
Subject: [PATCH 140/933] PR feedback: lines not chars

automatic commit by git-black, original commits:
  de5ff68943c10ce3078d38cc7af106696ca25ea2
---
 core/dbt/clients/_jinja_blocks.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/core/dbt/clients/_jinja_blocks.py b/core/dbt/clients/_jinja_blocks.py
index 32da4b4d084..a6dbdaa0403 100644
--- a/core/dbt/clients/_jinja_blocks.py
+++ b/core/dbt/clients/_jinja_blocks.py
@@ -365,11 +365,12 @@ def find_blocks(self, allowed_blocks=None, collect_raw_data=True):
                 self.current = None
 
         if self.current:
-            linecount = self.data[:self.current.end].count('\n') + 1
+            linecount = self.data[: self.current.end].count("\n") + 1
             dbt.exceptions.raise_compiler_error(
                 'Reached EOF without finding a close tag for '
-                '{} (searched from line {})'
-            ).format(self.current.block_type_name, linecount))
+                    "Reached EOF without finding a close tag for " "{} (searched from line {})"
+                ).format(self.current.block_type_name, linecount)
+            )
 
         if collect_raw_data:
             raw_data = self.data[self.last_position :]

From 7215d2358f85a8f2e0aecf02cbea3e2e47bfdf5f Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Fri, 24 Jan 2020 08:21:58 -0700
Subject: [PATCH 141/933] fix an RPC hang when enddocs tags were missing, add
 tests

automatic commit by git-black, original commits:
  ba3e14cefddd5ffbc0dc52d2d18d84a0a004abcd
---
 core/dbt/clients/_jinja_blocks.py | 2 +-
 core/dbt/exceptions.py            | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/core/dbt/clients/_jinja_blocks.py b/core/dbt/clients/_jinja_blocks.py
index a6dbdaa0403..761c6dfcb4d 100644
--- a/core/dbt/clients/_jinja_blocks.py
+++ b/core/dbt/clients/_jinja_blocks.py
@@ -367,7 +367,7 @@ def find_blocks(self, allowed_blocks=None, collect_raw_data=True):
         if self.current:
             linecount = self.data[: self.current.end].count("\n") + 1
             dbt.exceptions.raise_compiler_error(
-                'Reached EOF without finding a close tag for '
+                (
                     "Reached EOF without finding a close tag for " "{} (searched from line {})"
                 ).format(self.current.block_type_name, linecount)
             )
diff --git a/core/dbt/exceptions.py b/core/dbt/exceptions.py
index 842092894fd..5ca431c153e 100644
--- a/core/dbt/exceptions.py
+++ b/core/dbt/exceptions.py
@@ -72,11 +72,11 @@ def node_to_string(self, node):
             # we probably failed to parse a block, so we can't know the name
             return "{} ({})".format(node.resource_type, node.original_file_path)
 
-        if hasattr(node, 'contents'):
+        if hasattr(node, "contents"):
             # handle FileBlocks. They aren't really nodes but we want to render
             # out the path we know at least. This indicates an error during
             # block parsing.
-            return '{}'.format(node.path.original_file_path)
+            return "{}".format(node.path.original_file_path)
         return "{} {} ({})".format(
             node.resource_type,
             node.name,

From 8806773b6d95bb123fc760fd82633aafb1ddf7c0 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Fri, 10 May 2019 07:50:46 -0600
Subject: [PATCH 142/933] PR feedback

automatic commit by git-black, original commits:
  26427d2af025f51cab52086a85666fb24ba86744
---
 core/dbt/clients/agate_helper.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/dbt/clients/agate_helper.py b/core/dbt/clients/agate_helper.py
index 06ccf5168f6..df6e23873eb 100644
--- a/core/dbt/clients/agate_helper.py
+++ b/core/dbt/clients/agate_helper.py
@@ -10,7 +10,7 @@
 from dbt.exceptions import RuntimeException
 
 
-BOM = BOM_UTF8.decode('utf-8')  # '\ufeff'
+BOM = BOM_UTF8.decode("utf-8")  # '\ufeff'
 
 
 class Number(agate.data_types.Number):

From 9c1369e4bfd7627048e276bcd93f52b540d7912c Mon Sep 17 00:00:00 2001
From: Jeremy Cohen 
Date: Sun, 19 Dec 2021 16:30:25 +0100
Subject: [PATCH 143/933] Fix bool coercion to 0/1 (#4512)

* Fix bool coercion

* Fix unit test

automatic commit by git-black, original commits:
  417ccdc3b44e2f1b30fedd2eff3ad47708d524f9
---
 core/dbt/clients/agate_helper.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/core/dbt/clients/agate_helper.py b/core/dbt/clients/agate_helper.py
index df6e23873eb..aed1b62f92c 100644
--- a/core/dbt/clients/agate_helper.py
+++ b/core/dbt/clients/agate_helper.py
@@ -18,9 +18,7 @@ class Number(agate.data_types.Number):
     # i.e. do not cast True and False to numeric 1 and 0
     def cast(self, d):
         if type(d) == bool:
-            raise agate.exceptions.CastError(
-                'Do not cast True to 1 or False to 0.'
-            )
+            raise agate.exceptions.CastError("Do not cast True to 1 or False to 0.")
         else:
             return super().cast(d)
 
@@ -53,7 +51,7 @@ def build_type_tester(
 ) -> agate.TypeTester:
 
     types = [
-        Number(null_values=('null', '')),
+        Number(null_values=("null", "")),
         agate.data_types.Date(null_values=('null', ''),
                               date_format='%Y-%m-%d'),
         agate.data_types.DateTime(null_values=('null', ''),

From 79f9dcafab492b7983dfbf499016caec9966b134 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 13 Nov 2019 07:48:00 -0700
Subject: [PATCH 144/933] Agate fixes

Restrict agate date+datetime formats
remove timedelta
When column types are specified for a seed, parse those columns as Text unconditionally


automatic commit by git-black, original commits:
  0b18212e69479abc6c3210fd93c0159f0129b78b
---
 core/dbt/clients/agate_helper.py       | 16 +++++++---------
 third-party-stubs/isodate/__init__.pyi |  3 ++-
 2 files changed, 9 insertions(+), 10 deletions(-)

diff --git a/core/dbt/clients/agate_helper.py b/core/dbt/clients/agate_helper.py
index aed1b62f92c..cb549bf22ab 100644
--- a/core/dbt/clients/agate_helper.py
+++ b/core/dbt/clients/agate_helper.py
@@ -40,9 +40,7 @@ def cast(self, d):
         except:  # noqa
             pass
 
-        raise agate.exceptions.CastError(
-            'Can not parse value "%s" as datetime.' % d
-        )
+        raise agate.exceptions.CastError('Can not parse value "%s" as datetime.' % d)
 
 
 def build_type_tester(
@@ -52,12 +50,12 @@ def build_type_tester(
 
     types = [
         Number(null_values=("null", "")),
-        agate.data_types.Date(null_values=('null', ''),
-                              date_format='%Y-%m-%d'),
-        agate.data_types.DateTime(null_values=('null', ''),
-                                  datetime_format='%Y-%m-%d %H:%M:%S'),
-        ISODateTime(null_values=('null', '')),
-        agate.data_types.Boolean(true_values=('true',),
+        agate.data_types.Date(null_values=("null", ""), date_format="%Y-%m-%d"),
+        agate.data_types.DateTime(null_values=("null", ""), datetime_format="%Y-%m-%d %H:%M:%S"),
+        ISODateTime(null_values=("null", "")),
+        agate.data_types.Boolean(
+            true_values=("true",), false_values=("false",), null_values=("null", "")
+        ),
                                  false_values=('false',),
                                  null_values=('null', '')),
         agate.data_types.Text(null_values=string_null_values)
diff --git a/third-party-stubs/isodate/__init__.pyi b/third-party-stubs/isodate/__init__.pyi
index be3c3d6d9e4..96b67c34a0d 100644
--- a/third-party-stubs/isodate/__init__.pyi
+++ b/third-party-stubs/isodate/__init__.pyi
@@ -1,3 +1,4 @@
 import datetime
 
-def parse_datetime(datetimestring: str): datetime.datetime
+def parse_datetime(datetimestring: str):
+    datetime.datetime

From ad4d041e909d6f3670213e49c8299f87c7813cb4 Mon Sep 17 00:00:00 2001
From: Drew Banin 
Date: Sat, 26 Jun 2021 12:39:16 -0400
Subject: [PATCH 145/933] (#2984) Prevent Agate from coercing values in query
 result sets

automatic commit by git-black, original commits:
  1ad1c834f3dca6523ca68347b90f70dd34d1daa1
---
 core/dbt/clients/agate_helper.py | 12 +++---------
 1 file changed, 3 insertions(+), 9 deletions(-)

diff --git a/core/dbt/clients/agate_helper.py b/core/dbt/clients/agate_helper.py
index cb549bf22ab..18a2895d1d4 100644
--- a/core/dbt/clients/agate_helper.py
+++ b/core/dbt/clients/agate_helper.py
@@ -44,8 +44,7 @@ def cast(self, d):
 
 
 def build_type_tester(
-    text_columns: Iterable[str],
-    string_null_values: Optional[Iterable[str]] = ('null', '')
+    text_columns: Iterable[str], string_null_values: Optional[Iterable[str]] = ("null", "")
 ) -> agate.TypeTester:
 
     types = [
@@ -80,10 +79,7 @@ def table_from_rows(
     else:
         # If text_only_columns are present, prevent coercing empty string or
         # literal 'null' strings to a None representation.
-        column_types = build_type_tester(
-            text_only_columns,
-            string_null_values=()
-        )
+        column_types = build_type_tester(text_only_columns, string_null_values=())
 
     return agate.Table(rows, column_names, column_types=column_types)
 
@@ -128,9 +124,7 @@ def table_from_data_flat(data, column_names: Iterable[str]) -> agate.Table:
         rows.append(row)
 
     return table_from_rows(
-        rows=rows,
-        column_names=column_names,
-        text_only_columns=text_only_columns
+        rows=rows, column_names=column_names, text_only_columns=text_only_columns
     )
 
 

From 226767f91a24eac7a4aa134a41e81c8567412f83 Mon Sep 17 00:00:00 2001
From: Drew Banin 
Date: Sat, 26 Jun 2021 12:39:16 -0400
Subject: [PATCH 146/933] (#2984) Prevent Agate from coercing values in query
 result sets

automatic commit by git-black, original commits:
  0b18212e69479abc6c3210fd93c0159f0129b78b
  1ad1c834f3dca6523ca68347b90f70dd34d1daa1
---
 core/dbt/clients/agate_helper.py | 9 ++-------
 1 file changed, 2 insertions(+), 7 deletions(-)

diff --git a/core/dbt/clients/agate_helper.py b/core/dbt/clients/agate_helper.py
index 18a2895d1d4..fde36190e7d 100644
--- a/core/dbt/clients/agate_helper.py
+++ b/core/dbt/clients/agate_helper.py
@@ -55,14 +55,9 @@ def build_type_tester(
         agate.data_types.Boolean(
             true_values=("true",), false_values=("false",), null_values=("null", "")
         ),
-                                 false_values=('false',),
-                                 null_values=('null', '')),
-        agate.data_types.Text(null_values=string_null_values)
+        agate.data_types.Text(null_values=string_null_values),
     ]
-    force = {
-        k: agate.data_types.Text(null_values=string_null_values)
-        for k in text_columns
-    }
+    force = {k: agate.data_types.Text(null_values=string_null_values) for k in text_columns}
     return agate.TypeTester(force=force, types=types)
 
 

From c49e58aa7b0053ebb566d3790a697fc69e8c16d1 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Fri, 21 Jun 2019 11:16:11 -0400
Subject: [PATCH 147/933] Remove python 2.7

remove tests for 2.7
remove all dbt.compat
remove six
assorted 2-removal related cleanups
do things that we could not do before due to py2
make super super()
classes always derive from object in 3.x
Enum-ify enum-y things
azure pipelines -> python 3.7
mock is part of unittest now
update freezegun


automatic commit by git-black, original commits:
  c7385ec51256fa229ec24994a43048df9cd58066
---
 core/dbt/clients/agate_helper.py | 2 +-
 core/dbt/exceptions.py           | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/core/dbt/clients/agate_helper.py b/core/dbt/clients/agate_helper.py
index fde36190e7d..f37639f1228 100644
--- a/core/dbt/clients/agate_helper.py
+++ b/core/dbt/clients/agate_helper.py
@@ -137,7 +137,7 @@ def as_matrix(table):
 
 def from_csv(abspath, text_columns):
     type_tester = build_type_tester(text_columns=text_columns)
-    with open(abspath, encoding='utf-8') as fp:
+    with open(abspath, encoding="utf-8") as fp:
         if fp.read(1) != BOM:
             fp.seek(0)
         return agate.Table.from_csv(fp, column_types=type_tester)
diff --git a/core/dbt/exceptions.py b/core/dbt/exceptions.py
index 5ca431c153e..9763f269a40 100644
--- a/core/dbt/exceptions.py
+++ b/core/dbt/exceptions.py
@@ -1084,6 +1084,7 @@ def inner(*args, **kwargs):
             except RuntimeException as exc:
                 exc.add_node(model)
                 raise exc
+
         return inner
     return wrap
 

From d26996655aaeb53a31b7e1d7e51ed73395aac792 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Tue, 4 Feb 2020 07:44:21 -0700
Subject: [PATCH 148/933] PR feedback

Added a custom table merge implementation that tracks if a row is all null and merges those as "any type".
 - added unit tests for that!
Removed some schema casing things
fixed pluralization (it was reversed)


automatic commit by git-black, original commits:
  04bc2a800ad5aea519d7965308cc1510d0c73770
---
 core/dbt/clients/agate_helper.py | 13 ++++---------
 1 file changed, 4 insertions(+), 9 deletions(-)

diff --git a/core/dbt/clients/agate_helper.py b/core/dbt/clients/agate_helper.py
index f37639f1228..ceadf0bf9a7 100644
--- a/core/dbt/clients/agate_helper.py
+++ b/core/dbt/clients/agate_helper.py
@@ -169,8 +169,8 @@ def __setitem__(self, key, value):
         elif not isinstance(value, type(existing_type)):
             # actual type mismatch!
             raise RuntimeException(
-                f'Tables contain columns with the same names ({key}), '
-                f'but different types ({value} vs {existing_type})'
+                f"Tables contain columns with the same names ({key}), "
+                f"but different types ({value} vs {existing_type})"
             )
 
     def finalize(self) -> Dict[str, agate.data_types.DataType]:
@@ -184,9 +184,7 @@ def finalize(self) -> Dict[str, agate.data_types.DataType]:
         return result
 
 
-def _merged_column_types(
-    tables: List[agate.Table]
-) -> Dict[str, agate.data_types.DataType]:
+def _merged_column_types(tables: List[agate.Table]) -> Dict[str, agate.data_types.DataType]:
     # this is a lot like agate.Table.merge, but with handling for all-null
     # rows being "any type".
     new_columns: ColumnTypeBuilder = ColumnTypeBuilder()
@@ -212,10 +210,7 @@ def merge_tables(tables: List[agate.Table]) -> agate.Table:
 
     rows: List[agate.Row] = []
     for table in tables:
-        if (
-            table.column_names == column_names and
-            table.column_types == column_types
-        ):
+        if table.column_names == column_names and table.column_types == column_types:
             rows.extend(table.rows)
         else:
             for row in table.rows:

From d6c6f2326c01a274ad24bff26947828d1802931a Mon Sep 17 00:00:00 2001
From: Jeremy Cohen 
Date: Thu, 27 May 2021 17:13:47 -0400
Subject: [PATCH 149/933] Fix statically extracting macro calls for
 macro.depends_on.macros to be (#3363)

used in parsing schema tests by looking at the arguments to
adapter.dispatch. Includes providing an alternative way of specifying
macro search order in project config.
Collaboratively developed with Jeremy Cohen.

Co-authored-by: Gerda Shank 

automatic commit by git-black, original commits:
  98c015b7754779793e44e056905614296c6e4527
---
 core/dbt/clients/jinja_static.py | 47 ++++++++++++++++++--------------
 core/dbt/config/project.py       |  6 ++--
 core/dbt/context/configured.py   |  4 +--
 core/dbt/contracts/project.py    | 11 +++++---
 core/dbt/parser/manifest.py      |  7 ++---
 5 files changed, 41 insertions(+), 34 deletions(-)

diff --git a/core/dbt/clients/jinja_static.py b/core/dbt/clients/jinja_static.py
index e082241484d..348cfe4d3d6 100644
--- a/core/dbt/clients/jinja_static.py
+++ b/core/dbt/clients/jinja_static.py
@@ -8,11 +8,11 @@ def statically_extract_macro_calls(string, ctx, db_wrapper=None):
     env = get_environment(None, capture_macros=True)
     parsed = env.parse(string)
 
-    standard_calls = ['source', 'ref', 'config']
+    standard_calls = ["source", "ref", "config"]
     possible_macro_calls = []
     for func_call in parsed.find_all(jinja2.nodes.Call):
         func_name = None
-        if hasattr(func_call, 'node') and hasattr(func_call.node, 'name'):
+        if hasattr(func_call, "node") and hasattr(func_call.node, "name"):
             func_name = func_call.node.name
         else:
             # func_call for dbt_utils.current_timestamp macro
@@ -30,22 +30,25 @@ def statically_extract_macro_calls(string, ctx, db_wrapper=None):
             #   dyn_args=None,
             #   dyn_kwargs=None
             # )
-            if (hasattr(func_call, 'node') and
-                    hasattr(func_call.node, 'node') and
-                    type(func_call.node.node).__name__ == 'Name' and
-                    hasattr(func_call.node, 'attr')):
+            if (
+                hasattr(func_call, "node")
+                and hasattr(func_call.node, "node")
+                and type(func_call.node.node).__name__ == "Name"
+                and hasattr(func_call.node, "attr")
+            ):
                 package_name = func_call.node.node.name
                 macro_name = func_call.node.attr
-                if package_name == 'adapter':
-                    if macro_name == 'dispatch':
+                if package_name == "adapter":
+                    if macro_name == "dispatch":
                         ad_macro_calls = statically_parse_adapter_dispatch(
-                            func_call, ctx, db_wrapper)
+                            func_call, ctx, db_wrapper
+                        )
                         possible_macro_calls.extend(ad_macro_calls)
                     else:
                         # This skips calls such as adapter.parse_index
                         continue
                 else:
-                    func_name = f'{package_name}.{macro_name}'
+                    func_name = f"{package_name}.{macro_name}"
             else:
                 continue
         if not func_name:
@@ -110,29 +113,33 @@ def statically_parse_adapter_dispatch(func_call, ctx, db_wrapper):
         for kwarg in func_call.kwargs:
             if kwarg.key == 'macro_name':
                 # This will remain to enable static resolution
-                if type(kwarg.value).__name__ == 'Const':
+                if type(kwarg.value).__name__ == "Const":
                     func_name = kwarg.value.value
                     possible_macro_calls.append(func_name)
                 else:
-                    raise_compiler_error(f"The macro_name parameter ({kwarg.value.value}) "
-                                         "to adapter.dispatch was not a string")
-            elif kwarg.key == 'macro_namespace':
+                    raise_compiler_error(
+                        f"The macro_name parameter ({kwarg.value.value}) "
+                        "to adapter.dispatch was not a string"
+                    )
+            elif kwarg.key == "macro_namespace":
                 # This will remain to enable static resolution
                 kwarg_type = type(kwarg.value).__name__
-                if kwarg_type == 'Const':
+                if kwarg_type == "Const":
                     macro_namespace = kwarg.value.value
                 else:
-                    raise_compiler_error("The macro_namespace parameter to adapter.dispatch "
-                                         f"is a {kwarg_type}, not a string")
+                    raise_compiler_error(
+                        "The macro_namespace parameter to adapter.dispatch "
+                        f"is a {kwarg_type}, not a string"
+                    )
 
     # positional arguments
     if packages_arg:
-        if packages_arg_type == 'List':
+        if packages_arg_type == "List":
             # This will remain to enable static resolution
             packages = []
             for item in packages_arg.items:
                 packages.append(item.value)
-        elif packages_arg_type == 'Const':
+        elif packages_arg_type == "Const":
             # This will remain to enable static resolution
             macro_namespace = packages_arg.value
 
@@ -149,6 +156,6 @@ def statically_parse_adapter_dispatch(func_call, ctx, db_wrapper):
         else:
             packages = []
         for package_name in packages:
-            possible_macro_calls.append(f'{package_name}.{func_name}')
+            possible_macro_calls.append(f"{package_name}.{func_name}")
 
     return possible_macro_calls
diff --git a/core/dbt/config/project.py b/core/dbt/config/project.py
index 59b6585cd23..5274d483f4f 100644
--- a/core/dbt/config/project.py
+++ b/core/dbt/config/project.py
@@ -613,7 +613,7 @@ def to_project_config(self, with_packages=False):
                 "quoting": self.quoting,
                 "models": self.models,
                 "on-run-start": self.on_run_start,
-            'dispatch': self.dispatch,
+                "on-run-end": self.on_run_end,
                 "dispatch": self.dispatch,
             'snapshots': self.snapshots,
             'sources': self.sources,
@@ -683,6 +683,6 @@ def get_default_selector_name(self) -> Union[str, None]:
 
     def get_macro_search_order(self, macro_namespace: str):
         for dispatch_entry in self.dispatch:
-            if dispatch_entry['macro_namespace'] == macro_namespace:
-                return dispatch_entry['search_order']
+            if dispatch_entry["macro_namespace"] == macro_namespace:
+                return dispatch_entry["search_order"]
         return None
diff --git a/core/dbt/context/configured.py b/core/dbt/context/configured.py
index d82b12f3384..7d87464d5f1 100644
--- a/core/dbt/context/configured.py
+++ b/core/dbt/context/configured.py
@@ -111,9 +111,7 @@ def __init__(self, config):
 
     @contextproperty
     def var(self) -> ConfiguredVar:
-        return ConfiguredVar(
-            self._ctx, self.config, self.config.project_name
-        )
+        return ConfiguredVar(self._ctx, self.config, self.config.project_name)
 
 
 def generate_schema_yml_context(
diff --git a/core/dbt/contracts/project.py b/core/dbt/contracts/project.py
index 4399b614eec..9c7d0e7180c 100644
--- a/core/dbt/contracts/project.py
+++ b/core/dbt/contracts/project.py
@@ -227,11 +227,14 @@ def validate(cls, data):
                 f"Invalid project name: {data['name']} is a reserved word"
             )
         # validate dispatch config
-        if 'dispatch' in data and data['dispatch']:
-            entries = data['dispatch']
+        if "dispatch" in data and data["dispatch"]:
+            entries = data["dispatch"]
             for entry in entries:
-                if ('macro_namespace' not in entry or 'search_order' not in entry or
-                        not isinstance(entry['search_order'], list)):
+                if (
+                    "macro_namespace" not in entry
+                    or "search_order" not in entry
+                    or not isinstance(entry["search_order"], list)
+                ):
                     raise ValidationError(f"Invalid project dispatch config: {entry}")
 
 
diff --git a/core/dbt/parser/manifest.py b/core/dbt/parser/manifest.py
index cc41f99c3cc..3379605db68 100644
--- a/core/dbt/parser/manifest.py
+++ b/core/dbt/parser/manifest.py
@@ -505,14 +505,13 @@ def macro_depends_on(self):
             self.macro_resolver, {}, None, MacroStack(), []
         )
         adapter = get_adapter(self.root_project)
-        db_wrapper = ParseProvider().DatabaseWrapper(
-            adapter, macro_namespace
-        )
+        db_wrapper = ParseProvider().DatabaseWrapper(adapter, macro_namespace)
         for macro in self.manifest.macros.values():
             if macro.created_at < self.started_at:
                 continue
             possible_macro_calls = statically_extract_macro_calls(
-                macro.macro_sql, macro_ctx, db_wrapper)
+                macro.macro_sql, macro_ctx, db_wrapper
+            )
             for macro_name in possible_macro_calls:
                 # adapter.dispatch calls can generate a call with the same name as the macro
                 # it ought to be an adapter prefix (postgres_) or default_

From 031ee4046da9a4278606a5122ea2647293b1c179 Mon Sep 17 00:00:00 2001
From: Emily Rockman 
Date: Tue, 5 Oct 2021 09:05:23 -0500
Subject: [PATCH 150/933] =?UTF-8?q?enacted=20deprecation=20for=20dispatch-?=
 =?UTF-8?q?packages,=20cleaned=20up=20deprecations=20te=E2=80=A6=20(#3975)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Enacted deprecation for dispatch-packages; cleaned up deprecation tests for unused macros/models. Still need to clean up unused code.

* More work to catch usage of the `packages` argument

* fixed tests for removing packages on adapter.dispatch.

* cleaned out folder for 012_deprecation_tests to remove unused models/data/macros

* removed obsolete code due to patching for packages arg in adapter.dispatch

* updated exception name

* added deprecation change to changelog.

automatic commit by git-black, original commits:
  3caec08ccb8607484ce96a3522be002c4cdd6e58
---
 core/dbt/clients/jinja_static.py | 2 +-
 core/dbt/context/providers.py    | 4 ++--
 core/dbt/exceptions.py           | 4 ++--
 3 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/core/dbt/clients/jinja_static.py b/core/dbt/clients/jinja_static.py
index 348cfe4d3d6..3d37cbd413b 100644
--- a/core/dbt/clients/jinja_static.py
+++ b/core/dbt/clients/jinja_static.py
@@ -111,7 +111,7 @@ def statically_parse_adapter_dispatch(func_call, ctx, db_wrapper):
     # keyword arguments
     if func_call.kwargs:
         for kwarg in func_call.kwargs:
-            if kwarg.key == 'macro_name':
+            if kwarg.key == "macro_name":
                 # This will remain to enable static resolution
                 if type(kwarg.value).__name__ == "Const":
                     func_name = kwarg.value.value
diff --git a/core/dbt/context/providers.py b/core/dbt/context/providers.py
index e083ac90365..4748e5d2ece 100644
--- a/core/dbt/context/providers.py
+++ b/core/dbt/context/providers.py
@@ -129,7 +129,7 @@ def dispatch(
         search_packages: List[Optional[str]]
 
         if '.' in macro_name:
-            suggest_macro_namespace, suggest_macro_name = macro_name.split('.', 1)
+            suggest_macro_namespace, suggest_macro_name = macro_name.split(".", 1)
             msg = (
                 f'In adapter.dispatch, got a macro name of "{macro_name}", '
                 f'but "." is not a valid macro name component. Did you mean '
@@ -152,7 +152,7 @@ def dispatch(
         else:
             # Not a string and not None so must be a list
             raise CompilationException(
-                f'In adapter.dispatch, got a list macro_namespace argument '
+                f"In adapter.dispatch, got a list macro_namespace argument "
                 f'("{macro_namespace}"), but macro_namespace should be None or a string.'
             )
 
diff --git a/core/dbt/exceptions.py b/core/dbt/exceptions.py
index 9763f269a40..9158939be1c 100644
--- a/core/dbt/exceptions.py
+++ b/core/dbt/exceptions.py
@@ -674,7 +674,7 @@ def macro_not_found(model, target_macro_id):
 
 
 def macro_invalid_dispatch_arg(macro_name) -> NoReturn:
-    msg = '''\
+    msg = """\
     The "packages" argument of adapter.dispatch() has been deprecated.
     Use the "macro_namespace" argument instead.
 
@@ -683,7 +683,7 @@ def macro_invalid_dispatch_arg(macro_name) -> NoReturn:
     For more information, see:
 
     https://docs.getdbt.com/reference/dbt-jinja-functions/dispatch
-    '''
+    """
     raise_compiler_error(msg.format(macro_name))
 
 

From b6146fd2942d98dac83b5d7ae5c4ea3fe20ed3ac Mon Sep 17 00:00:00 2001
From: Kyle Wigley 
Date: Fri, 4 Jun 2021 10:11:31 -0400
Subject: [PATCH 151/933] Add deprecation warning for providing `packages` to
 `adapter.dispatch` (#3420)

* add deprecation warning

* update changelog

* update deprecation message

* fix flake8

automatic commit by git-black, original commits:
  4fac086556223737f0713f0bb964fede5d19297d
---
 core/dbt/clients/jinja_static.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/dbt/clients/jinja_static.py b/core/dbt/clients/jinja_static.py
index 3d37cbd413b..98b33d3a88c 100644
--- a/core/dbt/clients/jinja_static.py
+++ b/core/dbt/clients/jinja_static.py
@@ -144,7 +144,7 @@ def statically_parse_adapter_dispatch(func_call, ctx, db_wrapper):
             macro_namespace = packages_arg.value
 
     if db_wrapper:
-        macro = db_wrapper.dispatch(
+        macro = db_wrapper.dispatch(func_name, macro_namespace=macro_namespace).macro
             func_name,
             macro_namespace=macro_namespace
         ).macro

From 50609590eec064153411cf60dcf5a6c5bc3a3984 Mon Sep 17 00:00:00 2001
From: Kyle Wigley 
Date: Fri, 4 Jun 2021 10:11:31 -0400
Subject: [PATCH 152/933] Add deprecation warning for providing `packages` to
 `adapter.dispatch` (#3420)

* add deprecation warning

* update changelog

* update deprecation message

* fix flake8

automatic commit by git-black, original commits:
  4fac086556223737f0713f0bb964fede5d19297d
  98c015b7754779793e44e056905614296c6e4527
---
 core/dbt/clients/jinja_static.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/core/dbt/clients/jinja_static.py b/core/dbt/clients/jinja_static.py
index 98b33d3a88c..ce3658f08db 100644
--- a/core/dbt/clients/jinja_static.py
+++ b/core/dbt/clients/jinja_static.py
@@ -145,10 +145,7 @@ def statically_parse_adapter_dispatch(func_call, ctx, db_wrapper):
 
     if db_wrapper:
         macro = db_wrapper.dispatch(func_name, macro_namespace=macro_namespace).macro
-            func_name,
-            macro_namespace=macro_namespace
-        ).macro
-        func_name = f'{macro.package_name}.{macro.name}'
+        func_name = f"{macro.package_name}.{macro.name}"
         possible_macro_calls.append(func_name)
     else:  # this is only for test/unit/test_macro_calls.py
         if macro_namespace:

From 922819afd0051d4b07ec1217ae867e4ff67339f6 Mon Sep 17 00:00:00 2001
From: Nathaniel May 
Date: Fri, 29 Oct 2021 16:35:48 -0400
Subject: [PATCH 153/933] Client call sites (#4163)

update log call sites with new event system

automatic commit by git-black, original commits:
  5b2562a919b066f86db3c0c3a9643ae1be7dcdd1
---
 core/dbt/clients/git.py      | 16 ++++++++++------
 core/dbt/clients/registry.py |  5 +----
 core/dbt/clients/system.py   |  8 ++++++--
 3 files changed, 17 insertions(+), 12 deletions(-)

diff --git a/core/dbt/clients/git.py b/core/dbt/clients/git.py
index cde9ccc77a4..ffaafa1a1a6 100644
--- a/core/dbt/clients/git.py
+++ b/core/dbt/clients/git.py
@@ -4,9 +4,13 @@
 from dbt.clients.system import run_cmd, rmdir
 from dbt.events.functions import fire_event
 from dbt.events.types import (
-    GitSparseCheckoutSubdirectory, GitProgressCheckoutRevision,
-    GitProgressUpdatingExistingDependency, GitProgressPullingNewDependency,
-    GitNothingToDo, GitProgressUpdatedCheckoutRange, GitProgressCheckedOutAt
+    GitSparseCheckoutSubdirectory,
+    GitProgressCheckoutRevision,
+    GitProgressUpdatingExistingDependency,
+    GitProgressPullingNewDependency,
+    GitNothingToDo,
+    GitProgressUpdatedCheckoutRange,
+    GitProgressCheckedOutAt,
 )
 from dbt.exceptions import (
     CommandResultError, RuntimeException, bad_package_spec, raise_git_cloning_error,
@@ -161,9 +165,9 @@ def clone_and_checkout(repo, cwd, dirname=None, remove_git_dir=False,
         if start_sha == end_sha:
             fire_event(GitNothingToDo(sha=start_sha[:7]))
         else:
-            fire_event(GitProgressUpdatedCheckoutRange(
-                start_sha=start_sha[:7], end_sha=end_sha[:7]
-            ))
+            fire_event(
+                GitProgressUpdatedCheckoutRange(start_sha=start_sha[:7], end_sha=end_sha[:7])
+            )
     else:
         fire_event(GitProgressCheckedOutAt(end_sha=end_sha[:7]))
     return os.path.join(directory, subdirectory or '')
diff --git a/core/dbt/clients/registry.py b/core/dbt/clients/registry.py
index cf9003e4f2e..afa6ece7683 100644
--- a/core/dbt/clients/registry.py
+++ b/core/dbt/clients/registry.py
@@ -1,10 +1,7 @@
 import functools
 import requests
 from dbt.events.functions import fire_event
-from dbt.events.types import (
-    RegistryProgressMakingGETRequest,
-    RegistryProgressGETResponse
-)
+from dbt.events.types import RegistryProgressMakingGETRequest, RegistryProgressGETResponse
 from dbt.utils import memoized, _connection_exception_retry as connection_exception_retry
 from dbt import deprecations
 import os
diff --git a/core/dbt/clients/system.py b/core/dbt/clients/system.py
index 5821d798532..c2e994bf397 100644
--- a/core/dbt/clients/system.py
+++ b/core/dbt/clients/system.py
@@ -17,8 +17,12 @@
 
 from dbt.events.functions import fire_event
 from dbt.events.types import (
-    SystemErrorRetrievingModTime, SystemCouldNotWrite, SystemExecutingCmd, SystemStdOutMsg,
-    SystemStdErrMsg, SystemReportReturnCode
+    SystemErrorRetrievingModTime,
+    SystemCouldNotWrite,
+    SystemExecutingCmd,
+    SystemStdOutMsg,
+    SystemStdErrMsg,
+    SystemReportReturnCode,
 )
 import dbt.exceptions
 from dbt.utils import _connection_exception_retry as connection_exception_retry

From 1390e27dec5852efdcf1b2fbd1bc9c7177aeb1a3 Mon Sep 17 00:00:00 2001
From: Emily Rockman 
Date: Fri, 17 Dec 2021 16:05:57 -0600
Subject: [PATCH 154/933] scrub message of secrets (#4507)

* scrub message of secrets

* update changelog

* use new scrubbing and scrub more places using git

* Fixed a small missed string conversion and a missing raise

* fix bug with cloning error

* resolving message issues

* better, more specific scrubbing

automatic commit by git-black, original commits:
  7c46b784efb0d6cbe033df6a50f29904dcef994d
---
 core/dbt/clients/git.py | 7 +++++--
 core/dbt/exceptions.py  | 4 ++--
 2 files changed, 7 insertions(+), 4 deletions(-)

diff --git a/core/dbt/clients/git.py b/core/dbt/clients/git.py
index ffaafa1a1a6..40e25c9881d 100644
--- a/core/dbt/clients/git.py
+++ b/core/dbt/clients/git.py
@@ -13,8 +13,11 @@
     GitProgressCheckedOutAt,
 )
 from dbt.exceptions import (
-    CommandResultError, RuntimeException, bad_package_spec, raise_git_cloning_error,
-    raise_git_cloning_problem
+    CommandResultError,
+    RuntimeException,
+    bad_package_spec,
+    raise_git_cloning_error,
+    raise_git_cloning_problem,
 )
 from packaging import version
 
diff --git a/core/dbt/exceptions.py b/core/dbt/exceptions.py
index 9158939be1c..acab1d41059 100644
--- a/core/dbt/exceptions.py
+++ b/core/dbt/exceptions.py
@@ -470,10 +470,10 @@ def raise_git_cloning_error(error: CommandResultError) -> NoReturn:
 
 def raise_git_cloning_problem(repo) -> NoReturn:
     repo = scrub_secrets(repo, env_secrets())
-    msg = '''\
+    msg = """\
     Something went wrong while cloning {}
     Check the debug logs for more information
-    '''
+    """
     raise RuntimeException(msg.format(repo))
 
 

From d1f93d9c94be20428650a23c66aa6930addcc457 Mon Sep 17 00:00:00 2001
From: Robert <49005401+Gitznik@users.noreply.github.com>
Date: Fri, 5 Nov 2021 10:12:07 +0100
Subject: [PATCH 155/933] Add error surfacing for git cloning errors (#4124)

* Add error surfacing for git cloning errors

* Update CHANGELOG.md

* Fix formatting and remove redundant except: raise

* Turn error handling for duplicate packages back on

automatic commit by git-black, original commits:
  c7bc6eb812e22007a26590217e362fead2483f80
---
 core/dbt/clients/git.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/core/dbt/clients/git.py b/core/dbt/clients/git.py
index 40e25c9881d..3ba4b4b7ef8 100644
--- a/core/dbt/clients/git.py
+++ b/core/dbt/clients/git.py
@@ -28,9 +28,9 @@ def _is_commit(revision: str) -> bool:
 
 
 def _raise_git_cloning_error(repo, revision, error):
-    stderr = error.stderr.decode('utf-8').strip()
-    if 'usage: git' in stderr:
-        stderr = stderr.split('\nusage: git')[0]
+    stderr = error.stderr.decode("utf-8").strip()
+    if "usage: git" in stderr:
+        stderr = stderr.split("\nusage: git")[0]
     if re.match("fatal: destination path '(.+)' already exists", stderr):
         raise_git_cloning_error(error)
 
@@ -62,13 +62,13 @@ def clone(repo, cwd, dirname=None, remove_git_dir=False, revision=None, subdirec
     if dirname is not None:
         clone_cmd.append(dirname)
     try:
-        result = run_cmd(cwd, clone_cmd, env={'LC_ALL': 'C'})
+        result = run_cmd(cwd, clone_cmd, env={"LC_ALL": "C"})
     except CommandResultError as exc:
         _raise_git_cloning_error(repo, revision, exc)
 
     if subdirectory:
-        cwd_subdir = os.path.join(cwd, dirname or '')
-        clone_cmd_subdir = ['git', 'sparse-checkout', 'set', subdirectory]
+        cwd_subdir = os.path.join(cwd, dirname or "")
+        clone_cmd_subdir = ["git", "sparse-checkout", "set", subdirectory]
         try:
             run_cmd(cwd_subdir, clone_cmd_subdir)
         except CommandResultError as exc:

From d88a62486f18ed72623d8419cc9b3821b9137ebf Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Fri, 8 May 2020 08:24:26 -0600
Subject: [PATCH 156/933] Fix dbt init to clone a v2 project

automatic commit by git-black, original commits:
  2cd98c2c60b9ad95c67b888d8d44c36ed60dfba1
---
 core/dbt/clients/git.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/dbt/clients/git.py b/core/dbt/clients/git.py
index 3ba4b4b7ef8..d8f4882df3e 100644
--- a/core/dbt/clients/git.py
+++ b/core/dbt/clients/git.py
@@ -41,7 +41,7 @@ def clone(repo, cwd, dirname=None, remove_git_dir=False, revision=None, subdirec
     has_revision = revision is not None
     is_commit = _is_commit(revision or "")
 
-    clone_cmd = ['git', 'clone', '--depth', '1']
+    clone_cmd = ["git", "clone", "--depth", "1"]
     if subdirectory:
         fire_event(GitSparseCheckoutSubdirectory(subdir=subdirectory))
         out, _ = run_cmd(cwd, ['git', '--version'], env={'LC_ALL': 'C'})

From 9085423655375476c1cc87482881fb13d625fb7f Mon Sep 17 00:00:00 2001
From: Daniel Mateus Pires 
Date: Wed, 28 Apr 2021 14:40:05 +0100
Subject: [PATCH 157/933] Issue 275: Support dbt package dependencies in Git
 subdirectories (#3267)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* 🔨 Extend git package contract and signatures to pass `subdirectory`

* Add sparse checkout logic

* ✅ Add test

* 🧹 Lint

* ✏️ Update CHANGELOG

* 🐛 Make os.path.join safe

* Use a test-container with an updated `git` version

* 🔨 Fix integration tests

* 📖 Update CHANGELOG contributors to include this PR

* 🧪 Parameterize the test

* Use new test-container published by @kwigley (contains more recent version of git)

* Use repositories managed by fishtown

* 🧘‍♂️ Merge the CHANGELOG

* 🤦‍♂️ Remove repetition of my contribution on the CHANGELOG

Co-authored-by: Jeremy Cohen 

automatic commit by git-black, original commits:
  5fb36e3e2a6a5849b77fc90e2801d10d74d670d5
---
 core/dbt/clients/git.py |  9 +++++----
 core/dbt/deps/git.py    | 15 ++++++++++++---
 2 files changed, 17 insertions(+), 7 deletions(-)

diff --git a/core/dbt/clients/git.py b/core/dbt/clients/git.py
index d8f4882df3e..cd3e4db5242 100644
--- a/core/dbt/clients/git.py
+++ b/core/dbt/clients/git.py
@@ -44,7 +44,7 @@ def clone(repo, cwd, dirname=None, remove_git_dir=False, revision=None, subdirec
     clone_cmd = ["git", "clone", "--depth", "1"]
     if subdirectory:
         fire_event(GitSparseCheckoutSubdirectory(subdir=subdirectory))
-        out, _ = run_cmd(cwd, ['git', '--version'], env={'LC_ALL': 'C'})
+        out, _ = run_cmd(cwd, ["git", "--version"], env={"LC_ALL": "C"})
         git_version = version.parse(re.search(r"\d+\.\d+\.\d+", out.decode("utf-8")).group(0))
         if not git_version >= version.parse("2.25.0"):
             # 2.25.0 introduces --sparse
@@ -52,7 +52,7 @@ def clone(repo, cwd, dirname=None, remove_git_dir=False, revision=None, subdirec
                 "Please update your git version to pull a dbt package "
                 "from a subdirectory: your version is {}, >= 2.25.0 needed".format(git_version)
             )
-        clone_cmd.extend(['--filter=blob:none', '--sparse'])
+        clone_cmd.extend(["--filter=blob:none", "--sparse"])
 
     if has_revision and not is_commit:
         clone_cmd.extend(['--branch', revision])
@@ -131,7 +131,8 @@ def remove_remote(cwd):
 
 
 def clone_and_checkout(repo, cwd, dirname=None, remove_git_dir=False,
-                       revision=None, subdirectory=None):
+    repo, cwd, dirname=None, remove_git_dir=False, revision=None, subdirectory=None
+):
     exists = None
     try:
         _, err = clone(
@@ -173,4 +174,4 @@ def clone_and_checkout(repo, cwd, dirname=None, remove_git_dir=False,
             )
     else:
         fire_event(GitProgressCheckedOutAt(end_sha=end_sha[:7]))
-    return os.path.join(directory, subdirectory or '')
+    return os.path.join(directory, subdirectory or "")
diff --git a/core/dbt/deps/git.py b/core/dbt/deps/git.py
index cec6206a4a0..56f58511a91 100644
--- a/core/dbt/deps/git.py
+++ b/core/dbt/deps/git.py
@@ -78,7 +78,10 @@ def _checkout(self):
         try:
             dir_ = git.clone_and_checkout(
                 self.git, get_downloads_path(), revision=self.revision,
-                dirname=self._checkout_name, subdirectory=self.subdirectory
+                get_downloads_path(),
+                revision=self.revision,
+                dirname=self._checkout_name,
+                subdirectory=self.subdirectory,
             )
         except ExecutableError as exc:
             if exc.cmd and exc.cmd[0] == 'git':
@@ -132,7 +135,11 @@ def from_contract(
         # we want to map None -> True
         warn_unpinned = contract.warn_unpinned is not False
         return cls(git=contract.git, revisions=revisions,
-                   warn_unpinned=warn_unpinned, subdirectory=contract.subdirectory)
+            git=contract.git,
+            revisions=revisions,
+            warn_unpinned=warn_unpinned,
+            subdirectory=contract.subdirectory,
+        )
 
     def all_names(self) -> List[str]:
         if self.git.endswith('.git'):
@@ -164,5 +171,7 @@ def resolved(self) -> GitPinnedPackage:
 
         return GitPinnedPackage(
             git=self.git, revision=requested.pop(),
-            warn_unpinned=self.warn_unpinned, subdirectory=self.subdirectory
+            revision=requested.pop(),
+            warn_unpinned=self.warn_unpinned,
+            subdirectory=self.subdirectory,
         )

From 87ef00422f8f7256810ead0bd7a130f7d372ec7a Mon Sep 17 00:00:00 2001
From: Daniel Mateus Pires 
Date: Fri, 16 Apr 2021 10:21:49 +0100
Subject: [PATCH 158/933] =?UTF-8?q?=E2=9C=A8=20Support=20git=20commit=20as?=
 =?UTF-8?q?=20revision?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

automatic commit by git-black, original commits:
  5934d263b809f08b6068561a12ab179b724b8cb5
---
 core/dbt/clients/git.py | 10 +++++-----
 core/dbt/deps/git.py    |  4 ++--
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/core/dbt/clients/git.py b/core/dbt/clients/git.py
index cd3e4db5242..5054e8c9e57 100644
--- a/core/dbt/clients/git.py
+++ b/core/dbt/clients/git.py
@@ -55,7 +55,7 @@ def clone(repo, cwd, dirname=None, remove_git_dir=False, revision=None, subdirec
         clone_cmd.extend(["--filter=blob:none", "--sparse"])
 
     if has_revision and not is_commit:
-        clone_cmd.extend(['--branch', revision])
+        clone_cmd.extend(["--branch", revision])
 
     clone_cmd.append(repo)
 
@@ -94,16 +94,16 @@ def _checkout(cwd, repo, revision):
     if _is_commit(revision):
         run_cmd(cwd, fetch_cmd + [revision])
     else:
-        run_cmd(cwd, ['git', 'remote', 'set-branches', 'origin', revision])
+        run_cmd(cwd, ["git", "remote", "set-branches", "origin", revision])
         run_cmd(cwd, fetch_cmd + ["--tags", revision])
 
     if _is_commit(revision):
         spec = revision
     # Prefer tags to branches if one exists
     elif revision in list_tags(cwd):
-        spec = 'tags/{}'.format(revision)
+        spec = "tags/{}".format(revision)
     else:
-        spec = 'origin/{}'.format(revision)
+        spec = "origin/{}".format(revision)
 
     out, err = run_cmd(cwd, ['git', 'reset', '--hard', spec],
                        env={'LC_ALL': 'C'})
@@ -112,7 +112,7 @@ def _checkout(cwd, repo, revision):
 
 def checkout(cwd, repo, revision=None):
     if revision is None:
-        revision = 'HEAD'
+        revision = "HEAD"
     try:
         return _checkout(cwd, repo, revision)
     except CommandResultError as exc:
diff --git a/core/dbt/deps/git.py b/core/dbt/deps/git.py
index 56f58511a91..e2726111c00 100644
--- a/core/dbt/deps/git.py
+++ b/core/dbt/deps/git.py
@@ -58,7 +58,7 @@ def get_subdirectory(self):
 
     def nice_version_name(self):
         if self.revision == 'HEAD':
-            return 'HEAD (default revision)'
+            return "HEAD (default revision)"
         else:
             return 'revision {}'.format(self.revision)
 
@@ -77,7 +77,7 @@ def _checkout(self):
         the path to the checked out directory."""
         try:
             dir_ = git.clone_and_checkout(
-                self.git, get_downloads_path(), revision=self.revision,
+                self.git,
                 get_downloads_path(),
                 revision=self.revision,
                 dirname=self._checkout_name,

From c3876f0702c7af401fd60174be4653cf52c3c644 Mon Sep 17 00:00:00 2001
From: Connor McArthur 
Date: Fri, 14 Jul 2017 15:54:38 -0400
Subject: [PATCH 159/933] init: remove .git directory after clone (#487)

* init: remove .git directory after clone

* fix bad merge w development


automatic commit by git-black, original commits:
  66d8673aeda52e6e56e1b50d221997d346e0bbb6
---
 core/dbt/clients/git.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/dbt/clients/git.py b/core/dbt/clients/git.py
index 5054e8c9e57..168c766d158 100644
--- a/core/dbt/clients/git.py
+++ b/core/dbt/clients/git.py
@@ -75,7 +75,7 @@ def clone(repo, cwd, dirname=None, remove_git_dir=False, revision=None, subdirec
             _raise_git_cloning_error(repo, revision, exc)
 
     if remove_git_dir:
-        rmdir(os.path.join(dirname, '.git'))
+        rmdir(os.path.join(dirname, ".git"))
 
     return result
 

From 5be28726fd45500febdaa6038eb94fc357f78ed4 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Thu, 24 Jan 2019 08:09:44 -0700
Subject: [PATCH 160/933] add support for "env" parameter to system.run_cmd,
 make every git command that parses output use it

automatic commit by git-black, original commits:
  96578c3d2f361546e87251f9dd5015cfdded0ecc
---
 core/dbt/clients/git.py | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/core/dbt/clients/git.py b/core/dbt/clients/git.py
index 168c766d158..b973ca2eb63 100644
--- a/core/dbt/clients/git.py
+++ b/core/dbt/clients/git.py
@@ -81,7 +81,7 @@ def clone(repo, cwd, dirname=None, remove_git_dir=False, revision=None, subdirec
 
 
 def list_tags(cwd):
-    out, err = run_cmd(cwd, ['git', 'tag', '--list'], env={'LC_ALL': 'C'})
+    out, err = run_cmd(cwd, ["git", "tag", "--list"], env={"LC_ALL": "C"})
     tags = out.decode('utf-8').strip().split("\n")
     return tags
 
@@ -105,8 +105,7 @@ def _checkout(cwd, repo, revision):
     else:
         spec = "origin/{}".format(revision)
 
-    out, err = run_cmd(cwd, ['git', 'reset', '--hard', spec],
-                       env={'LC_ALL': 'C'})
+    out, err = run_cmd(cwd, ["git", "reset", "--hard", spec], env={"LC_ALL": "C"})
     return out, err
 
 
@@ -121,13 +120,13 @@ def checkout(cwd, repo, revision=None):
 
 
 def get_current_sha(cwd):
-    out, err = run_cmd(cwd, ['git', 'rev-parse', 'HEAD'], env={'LC_ALL': 'C'})
+    out, err = run_cmd(cwd, ["git", "rev-parse", "HEAD"], env={"LC_ALL": "C"})
 
     return out.decode('utf-8')
 
 
 def remove_remote(cwd):
-    return run_cmd(cwd, ['git', 'remote', 'rm', 'origin'], env={'LC_ALL': 'C'})
+    return run_cmd(cwd, ["git", "remote", "rm", "origin"], env={"LC_ALL": "C"})
 
 
 def clone_and_checkout(repo, cwd, dirname=None, remove_git_dir=False,

From b9ed993098be4a021ce111ea8835b7c7512e521c Mon Sep 17 00:00:00 2001
From: Drew Banin 
Date: Mon, 18 Jun 2018 14:43:52 -0400
Subject: [PATCH 161/933] Merge betsy-ross release code into development (#798)

Merge dev/betsy-ross branch into development

automatic commit by git-black, original commits:
  457db9d09e8d8025c8a6e8e648ad0a13cb6c1b34
---
 core/dbt/clients/git.py   | 2 +-
 core/dbt/clients/jinja.py | 2 +-
 core/dbt/task/generate.py | 7 ++-----
 3 files changed, 4 insertions(+), 7 deletions(-)

diff --git a/core/dbt/clients/git.py b/core/dbt/clients/git.py
index b973ca2eb63..db116be5492 100644
--- a/core/dbt/clients/git.py
+++ b/core/dbt/clients/git.py
@@ -82,7 +82,7 @@ def clone(repo, cwd, dirname=None, remove_git_dir=False, revision=None, subdirec
 
 def list_tags(cwd):
     out, err = run_cmd(cwd, ["git", "tag", "--list"], env={"LC_ALL": "C"})
-    tags = out.decode('utf-8').strip().split("\n")
+    tags = out.decode("utf-8").strip().split("\n")
     return tags
 
 
diff --git a/core/dbt/clients/jinja.py b/core/dbt/clients/jinja.py
index f095592f33d..18bd3c83e16 100644
--- a/core/dbt/clients/jinja.py
+++ b/core/dbt/clients/jinja.py
@@ -354,7 +354,7 @@ def __call__(self, connection_name: str, node) -> str:
 
 
 class MaterializationExtension(jinja2.ext.Extension):
-    tags = ['materialization']
+    tags = ["materialization"]
 
     def parse(self, parser):
         node = jinja2.nodes.Macro(lineno=next(parser.stream).lineno)
diff --git a/core/dbt/task/generate.py b/core/dbt/task/generate.py
index 39fb37cf28a..f6fbf849e95 100644
--- a/core/dbt/task/generate.py
+++ b/core/dbt/task/generate.py
@@ -26,7 +26,7 @@
 import dbt.exceptions
 
 
-CATALOG_FILENAME = 'catalog.json'
+CATALOG_FILENAME = "catalog.json"
 
 
 def get_stripped_prefix(source: Dict[str, Any], prefix: str) -> Dict[str, Any]:
@@ -34,10 +34,7 @@ def get_stripped_prefix(source: Dict[str, Any], prefix: str) -> Dict[str, Any]:
     with the given prefix.
     """
     cut = len(prefix)
-    return {
-        k[cut:]: v for k, v in source.items()
-        if k.startswith(prefix)
-    }
+    return {k[cut:]: v for k, v in source.items() if k.startswith(prefix)}
 
 
 def build_catalog_table(data) -> CatalogTable:

From c96e242bfffab43da1816ef3a6de1af8f07964ba Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Mon, 5 Nov 2018 11:12:55 -0700
Subject: [PATCH 162/933] Raise an exception on rc!=0 in run_cmd, raise more
 specific exceptions about what went wrong on error

automatic commit by git-black, original commits:
  59b6f78c7111637a6fadcc8bf703408594ba31ae
---
 core/dbt/clients/git.py    |  4 ++--
 core/dbt/clients/system.py | 17 ++++++++---------
 2 files changed, 10 insertions(+), 11 deletions(-)

diff --git a/core/dbt/clients/git.py b/core/dbt/clients/git.py
index db116be5492..6bb5f12c973 100644
--- a/core/dbt/clients/git.py
+++ b/core/dbt/clients/git.py
@@ -115,7 +115,7 @@ def checkout(cwd, repo, revision=None):
     try:
         return _checkout(cwd, repo, revision)
     except CommandResultError as exc:
-        stderr = exc.stderr.decode('utf-8').strip()
+        stderr = exc.stderr.decode("utf-8").strip()
     bad_package_spec(repo, revision, stderr)
 
 
@@ -142,7 +142,7 @@ def clone_and_checkout(repo, cwd, dirname=None, remove_git_dir=False,
             subdirectory=subdirectory,
         )
     except CommandResultError as exc:
-        err = exc.stderr.decode('utf-8')
+        err = exc.stderr.decode("utf-8")
         exists = re.match("fatal: destination path '(.+)' already exists", err)
         if not exists:
             raise_git_cloning_problem(repo)
diff --git a/core/dbt/clients/system.py b/core/dbt/clients/system.py
index c2e994bf397..b8a92285549 100644
--- a/core/dbt/clients/system.py
+++ b/core/dbt/clients/system.py
@@ -320,13 +320,13 @@ def open_dir_cmd() -> str:
 
 def _handle_posix_cwd_error(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn:
     if exc.errno == errno.ENOENT:
-        message = 'Directory does not exist'
+        message = "Directory does not exist"
     elif exc.errno == errno.EACCES:
-        message = 'Current user cannot access directory, check permissions'
+        message = "Current user cannot access directory, check permissions"
     elif exc.errno == errno.ENOTDIR:
-        message = 'Not a directory'
+        message = "Not a directory"
     else:
-        message = 'Unknown OSError: {} - cwd'.format(str(exc))
+        message = "Unknown OSError: {} - cwd".format(str(exc))
     raise dbt.exceptions.WorkingDirectoryError(cwd, cmd, message)
 
 
@@ -334,9 +334,9 @@ def _handle_posix_cmd_error(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn:
     if exc.errno == errno.ENOENT:
         message = "Could not find command, ensure it is in the user's PATH"
     elif exc.errno == errno.EACCES:
-        message = 'User does not have permissions for this command'
+        message = "User does not have permissions for this command"
     else:
-        message = 'Unknown OSError: {} - cmd'.format(str(exc))
+        message = "Unknown OSError: {} - cmd".format(str(exc))
     raise dbt.exceptions.ExecutableError(cwd, cmd, message)
 
 
@@ -423,7 +423,7 @@ def run_cmd(cwd: str, cmd: List[str], env: Optional[Dict[str, Any]] = None) -> T
         if exe_pth:
             cmd = [os.path.abspath(exe_pth)] + list(cmd[1:])
         proc = subprocess.Popen(
-            cmd,
+            cmd, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=full_env
             cwd=cwd,
             stdout=subprocess.PIPE,
             stderr=subprocess.PIPE,
@@ -438,8 +438,7 @@ def run_cmd(cwd: str, cmd: List[str], env: Optional[Dict[str, Any]] = None) -> T
 
     if proc.returncode != 0:
         fire_event(SystemReportReturnCode(returncode=proc.returncode))
-        raise dbt.exceptions.CommandResultError(cwd, cmd, proc.returncode,
-                                                out, err)
+        raise dbt.exceptions.CommandResultError(cwd, cmd, proc.returncode, out, err)
 
     return out, err
 

From 9c548c45298ca37dad0e136d70b869ab0acc9eac Mon Sep 17 00:00:00 2001
From: Connor McArthur 
Date: Mon, 20 Mar 2017 13:51:10 -0400
Subject: [PATCH 163/933] make dependencies more compact (#342)

automatic commit by git-black, original commits:
  4f79c28bce4243097d239a215b15bdb578dc701e
---
 core/dbt/clients/git.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/dbt/clients/git.py b/core/dbt/clients/git.py
index 6bb5f12c973..3c20dd9855b 100644
--- a/core/dbt/clients/git.py
+++ b/core/dbt/clients/git.py
@@ -122,7 +122,7 @@ def checkout(cwd, repo, revision=None):
 def get_current_sha(cwd):
     out, err = run_cmd(cwd, ["git", "rev-parse", "HEAD"], env={"LC_ALL": "C"})
 
-    return out.decode('utf-8')
+    return out.decode("utf-8")
 
 
 def remove_remote(cwd):

From 3d79fba5b82d61d852f0531911237dcb938ddb22 Mon Sep 17 00:00:00 2001
From: Connor McArthur 
Date: Tue, 27 Feb 2018 18:19:18 -0500
Subject: [PATCH 164/933] package management (#542)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* semver resolution

* cleanup

* remove unnecessary comment

* add test for multiples on both sides

* add resolve_to_specific_version

* local registry

* hacking out deps

* Buck pkg mgmt (#645)

* only load hooks and archives once (#540)

* sets schema for node before parsing raw sql (#541)

* Fix/env vars (#543)

* fix for bad env_var exception

* overwrite target with compiled values

* fixes env vars, adds test. Auto-compile profile/target args

* improvements for code that runs in hooks (#544)

* improvements for code that runs in hooks

* fix error message note

* typo

* Update CHANGELOG.md

* bump version (#546)

* add scope to service account json creds initializer (#547)

* bump 0.9.0a3 --> 0.9.0a4 (#548)

* Fix README links (#554)

* Update README.md

* handle empty profiles.yml file (#555)

* return empty string (instead of None) to avoid polluting rendered sql (#566)

* tojson was added in jinja 2.9 (#563)

* tojson was added in jinja 2.9

* requirements

* fix package-defined schema test macros (#562)

* fix package-defined schema test macros

* create a dummy Relation in parsing

* fix for bq quoting (#565)

* bump snowflake, remove pyasn1 (#570)

* bump snowflake, remove pyasn1

* change requirements.txt

* allow macros to return non-text values (#571)

* revert jinja version, implement tojson hack (#572)

* bump to 090a5

* update changelog

* bump (#574)

* 090 docs (#575)

* 090 docs

* Update CHANGELOG.md

* Update CHANGELOG.md

* Raise CompilationException on duplicate model (#568)

* Raise CompilationException on duplicate model

Extend tests

* Ignore disabled models in parse_sql_nodes

Extend tests for duplicate model

* Fix preexisting models

* Use double quotes consistently

Rename model-1 to model-disabled

* Fix unit tests

* Raise exception on duplicate model across packages

Extend tests

* Make run_started_at timezone aware (#553) (#556)

* Make run_started_at timezone aware

Set run_started_at timezone to UTC
Enable timezone change in models
Extend requirements
Extend tests

* Address comments from code review

Create modules namespace to context
Move pytz to modules
Add new dependencies to setup.py

* Add warning for missing constraints. Fixes #592 (#600)

* Add warning for missing constraints. Fixes #592

* fix unit tests

* fix schema tests used in, or defined in packages (#599)

* fix schema tests used in, or defined in packages

* don't hardcode dbt test namespace

* fix/actually run tests

* rm junk

* run hooks in correct order, fixes #590 (#601)

* run hooks in correct order, fixes #590

* add tests

* fix tests

* pep8

* change req for snowflake to fix crypto install issue (#612)

From cffi callback :
Traceback (most recent call last):
  File "c:\projects\dbt\.tox\pywin\lib\site-packages\OpenSSL\SSL.py", line 313, in wrapper
    _lib.X509_up_ref(x509)
AttributeError: module 'lib' has no attribute 'X509_up_ref'
From cffi callback :

* Update python version in Makefile from 3.5 to 3.6 (#613)

* Fix/snowflake custom schema (#626)

* Fixes already opened transaction issue

For https://github.com/fishtown-analytics/dbt/issues/602

* Fixes https://github.com/fishtown-analytics/dbt/issues/621

* Create schema in archival flow (#625)

* Fix for pre-hooks outside of transactions (#623)

* Fix for pre-hooks outside of transactions https://github.com/fishtown-analytics/dbt/issues/576

* improve tests

* Fixes already opened transaction issue (#622)

For https://github.com/fishtown-analytics/dbt/issues/602

* Accept string for postgres port number (#583) (#624)

* Accept string for postgres port number (#583)

* s/str/basestring/g

* print correct run time (include hooks) (#607)

* add support for late binding views (Redshift) (#614)

* add support for late binding views (Redshift)

* fix bind logic

* wip for get_columns_in_table

* fix get_columns_in_table

* fix for default value in bind config

* pep8

* skip tests that depend on nonexistent or disabled models (#617)

* skip tests that depend on nonexistent or disabled models

* pep8, Fixes https://github.com/fishtown-analytics/dbt/issues/616

* refactor

* fix for adapter macro called within packages (#630)

* fix for adapter macro called within packages

* better error message

* Update CHANGELOG.md (#632)

* Update CHANGELOG.md

* Update CHANGELOG.md

* Bump version: 0.9.0 → 0.9.1

* more helpful exception for registry funcs

* Rework deps to support local & git

* pylint and cleanup

* make modules directory first

* Refactor registry client for cleanliness and better error handling

* init converter script

* create modules directory only if non-existent

* Only check the hub registry for registry packages

* Incorporate changes from Drew's branch

Diff of original changes:
https://github.com/fishtown-analytics/dbt/pull/591/files

* lint

* include a portion of the actual name in destination directory

* Install dependencies using actual name; better exceptions

* Error if two dependencies have same name

* Process dependencies one level at a time

Included in this change is a refactor of the deps run function for
clarity.

Also I changed the resolve_version function to update the object in
place. I prefer the immutability of this function as it was, but the
rest of the code doesn't really operate that way. And I ran into some
bugs due to this discrepancy.

* update var name

* Provide support for repositories in project yml

* Download files in a temp directory

The downloads directory causes problems with the run command because
this directory is not a dbt project. Need to download it elsewhere.

* pin some versions

* pep8-ify

* some PR feedback changes around logging

* PR feedback round 2

* Fix for redshift varchar bug (#647)

* Fix for redshift varchar bug

* pep8 on a sql string, smh

* Set global variable overrides on the command line with --vars (#640)

* Set global variable overrides on the command line with --vars

* pep8

* integration tests for cli vars

* Seed rewrite (#618)

* loader for seed data files

* Functioning rework of seed task

* Make CompilerRunner fns private and impl. SeedRunner.compile

Trying to distinguish between the public/private interface for this
class. And the SeedRunner doesn't need the functionality in the compile
function, it just needs a compile function to exist for use in the
compilation process.

* Test changes and fixes

* make the DB setup script usable locally

* convert simple copy test to use seeed

* Fixes to get Snowflake working

* New seed flag and make it non-destructive by default

* Convert update SQL script to another seed

* cleanup

* implement bigquery csv load

* context handling of StringIO

* Better typing

* strip seeder and csvkit dependency

* update bigquery to use new data typing and to fix unicode issue

* update seed test

* fix abstract functions in base adapter

* support time type

* try pinning crypto, pyopenssl versions

* remove unnecessary version pins

* insert all at once, rather than one query per row

* do not quote field names on creation

* bad

* quiet down parsedatetime logger

* pep8

* UI updates + node conformity for seed nodes

* add seed to list of resource types, cleanup

* show option for CSVs

* typo

* pep8

* move agate import to avoid strange warnings

* deprecation warning for --drop-existing

* quote column names in seed files

* revert quoting change (breaks Snowflake). Hush warnings

* use hub url

* Show installed version, silence semver regex warnings

* sort versions to make tests deterministic. Prefer higher versions

* pep8, fix comparison functions for py3

* make compare function return value in {-1, 0, 1}

* fix for deleting git dirs on windows?

* use system client rmdir instead of shutil directly

* debug logging to identify appveyor issue

* less restrictive error retry

* rm debug logging

* s/version/revision for git packages

* more s/version/revision, deprecation cleanup

* remove unused semver codepath

* plus symlinks!!!

* get rid of reference to removed function


automatic commit by git-black, original commits:
  5fbcd122180cf226d00dc686dab690f0e579cc30
---
 core/dbt/clients/git.py      |   4 +-
 core/dbt/clients/registry.py |  10 ++--
 core/dbt/clients/system.py   |   6 +--
 core/dbt/exceptions.py       |  17 +++---
 core/dbt/semver.py           | 102 +++++++++++++++++------------------
 core/dbt/utils.py            |   9 ++--
 6 files changed, 73 insertions(+), 75 deletions(-)

diff --git a/core/dbt/clients/git.py b/core/dbt/clients/git.py
index 3c20dd9855b..d597578b6c7 100644
--- a/core/dbt/clients/git.py
+++ b/core/dbt/clients/git.py
@@ -129,7 +129,7 @@ def remove_remote(cwd):
     return run_cmd(cwd, ["git", "remote", "rm", "origin"], env={"LC_ALL": "C"})
 
 
-def clone_and_checkout(repo, cwd, dirname=None, remove_git_dir=False,
+def clone_and_checkout(
     repo, cwd, dirname=None, remove_git_dir=False, revision=None, subdirectory=None
 ):
     exists = None
@@ -153,7 +153,7 @@ def clone_and_checkout(repo, cwd, dirname=None, remove_git_dir=False,
         directory = exists.group(1)
         fire_event(GitProgressUpdatingExistingDependency(dir=directory))
     else:
-        matches = re.match("Cloning into '(.+)'", err.decode('utf-8'))
+        matches = re.match("Cloning into '(.+)'", err.decode("utf-8"))
         if matches is None:
             raise RuntimeException(
                 f'Error cloning {repo} - never saw "Cloning into ..." from git'
diff --git a/core/dbt/clients/registry.py b/core/dbt/clients/registry.py
index afa6ece7683..9347d0d3d20 100644
--- a/core/dbt/clients/registry.py
+++ b/core/dbt/clients/registry.py
@@ -6,17 +6,17 @@
 from dbt import deprecations
 import os
 
-if os.getenv('DBT_PACKAGE_HUB_URL'):
-    DEFAULT_REGISTRY_BASE_URL = os.getenv('DBT_PACKAGE_HUB_URL')
+if os.getenv("DBT_PACKAGE_HUB_URL"):
+    DEFAULT_REGISTRY_BASE_URL = os.getenv("DBT_PACKAGE_HUB_URL")
 else:
-    DEFAULT_REGISTRY_BASE_URL = 'https://hub.getdbt.com/'
+    DEFAULT_REGISTRY_BASE_URL = "https://hub.getdbt.com/"
 
 
 def _get_url(url, registry_base_url=None):
     if registry_base_url is None:
         registry_base_url = DEFAULT_REGISTRY_BASE_URL
 
-    return '{}{}'.format(registry_base_url, url)
+    return "{}{}".format(registry_base_url, url)
 
 
 def _get_with_retries(path, registry_base_url=None):
@@ -85,4 +85,4 @@ def package_version(name, version, registry_base_url=None):
 
 def get_available_versions(name):
     response = package(name)
-    return list(response['versions'])
+    return list(response["versions"])
diff --git a/core/dbt/clients/system.py b/core/dbt/clients/system.py
index b8a92285549..a974cb32109 100644
--- a/core/dbt/clients/system.py
+++ b/core/dbt/clients/system.py
@@ -134,7 +134,7 @@ def make_symlink(source: str, link_path: str) -> None:
     Create a symlink at `link_path` referring to `source`.
     """
     if not supports_symlinks():
-        dbt.exceptions.system_error('create a symbolic link')
+        dbt.exceptions.system_error("create a symbolic link")
 
     os.symlink(source, link_path)
 
@@ -211,7 +211,7 @@ def rmdir(path: str) -> None:
     cloned via git) can cause rmtree to throw a PermissionError exception
     """
     path = convert_path(path)
-    if sys.platform == 'win32':
+    if sys.platform == "win32":
         onerror = _windows_rmdir_readonly
     else:
         onerror = None
@@ -456,7 +456,7 @@ def download(
     path = convert_path(path)
     connection_timeout = timeout or float(os.getenv('DBT_HTTP_TIMEOUT', 10))
     response = requests.get(url, timeout=connection_timeout)
-    with open(path, 'wb') as handle:
+    with open(path, "wb") as handle:
         for block in response.iter_content(1024 * 64):
             handle.write(block)
 
diff --git a/core/dbt/exceptions.py b/core/dbt/exceptions.py
index acab1d41059..534fd9ccb17 100644
--- a/core/dbt/exceptions.py
+++ b/core/dbt/exceptions.py
@@ -754,17 +754,16 @@ def relation_wrong_type(relation, expected_type, model=None):
 
 
 def package_not_found(package_name):
-    raise_dependency_error(
-        "Package {} was not found in the package index".format(package_name))
+    raise_dependency_error("Package {} was not found in the package index".format(package_name))
 
 
 def package_version_not_found(package_name, version_range, available_versions):
-    base_msg = ('Could not find a matching version for package {}\n'
-                '  Requested range: {}\n'
-                '  Available versions: {}')
-    raise_dependency_error(base_msg.format(package_name,
-                                           version_range,
-                                           available_versions))
+    base_msg = (
+        "Could not find a matching version for package {}\n"
+        "  Requested range: {}\n"
+        "  Available versions: {}"
+    )
+    raise_dependency_error(base_msg.format(package_name, version_range, available_versions))
 
 
 def invalid_materialization_argument(name, argument):
@@ -778,7 +777,7 @@ def system_error(operation_name):
         "dbt encountered an error when attempting to {}. "
         "If this error persists, please create an issue at: \n\n"
         "https://github.com/dbt-labs/dbt-core"
-        .format(operation_name))
+    )
 
 
 class ConnectionException(Exception):
diff --git a/core/dbt/semver.py b/core/dbt/semver.py
index 71be4925af3..6d8c98872f2 100644
--- a/core/dbt/semver.py
+++ b/core/dbt/semver.py
@@ -39,7 +39,9 @@ class VersionSpecification(dbtClassMixin):
 (?P{num_no_leading_zeros})\.
 (?P{num_no_leading_zeros})\.
 (?P{num_no_leading_zeros})
-""".format(num_no_leading_zeros=_NUM_NO_LEADING_ZEROS)
+""".format(
+    num_no_leading_zeros=_NUM_NO_LEADING_ZEROS
+)
 
 _VERSION_EXTRA_REGEX = r"""
 (\-?
@@ -49,8 +51,8 @@ class VersionSpecification(dbtClassMixin):
   (?P
     {alpha}(\.{alpha})*))?
 """.format(
-    alpha_no_leading_zeros=_ALPHA_NO_LEADING_ZEROS,
-    alpha=_ALPHA)
+    alpha_no_leading_zeros=_ALPHA_NO_LEADING_ZEROS, alpha=_ALPHA
+)
 
 
 _VERSION_REGEX_PAT_STR = r"""
@@ -71,25 +73,21 @@ class VersionSpecification(dbtClassMixin):
 @dataclass
 class VersionSpecifier(VersionSpecification):
     def to_version_string(self, skip_matcher=False):
-        prerelease = ''
-        build = ''
-        matcher = ''
+        prerelease = ""
+        build = ""
+        matcher = ""
 
         if self.prerelease:
-            prerelease = '-' + self.prerelease
+            prerelease = "-" + self.prerelease
 
         if self.build:
-            build = '+' + self.build
+            build = "+" + self.build
 
         if not skip_matcher:
             matcher = self.matcher
-        return '{}{}.{}.{}{}{}'.format(
-            matcher,
-            self.major,
-            self.minor,
-            self.patch,
-            prerelease,
-            build)
+        return "{}{}.{}.{}{}{}".format(
+            matcher, self.major, self.minor, self.patch, prerelease, build
+        )
 
     @classmethod
     def from_version_string(cls, version_string):
@@ -97,7 +95,8 @@ def from_version_string(cls, version_string):
 
         if not match:
             raise dbt.exceptions.SemverException(
-                'Could not parse version "{}"'.format(version_string))
+                'Could not parse version "{}"'.format(version_string)
+            )
 
         matched = {k: v for k, v in match.groupdict().items() if v is not None}
 
@@ -114,17 +113,13 @@ def to_range(self):
             range_start = self
             range_end = self
 
-        elif self.matcher in [Matchers.GREATER_THAN,
-                              Matchers.GREATER_THAN_OR_EQUAL]:
+        elif self.matcher in [Matchers.GREATER_THAN, Matchers.GREATER_THAN_OR_EQUAL]:
             range_start = self
 
-        elif self.matcher in [Matchers.LESS_THAN,
-                              Matchers.LESS_THAN_OR_EQUAL]:
+        elif self.matcher in [Matchers.LESS_THAN, Matchers.LESS_THAN_OR_EQUAL]:
             range_end = self
 
-        return VersionRange(
-            start=range_start,
-            end=range_end)
+        return VersionRange(start=range_start, end=range_end)
 
     def compare(self, other):
         if self.is_unbounded or other.is_unbounded:
@@ -155,26 +150,35 @@ def compare(self, other):
                 elif packaging_version.parse(a) < packaging_version.parse(b):
                     return -1
 
-        equal = ((self.matcher == Matchers.GREATER_THAN_OR_EQUAL and
-                  other.matcher == Matchers.LESS_THAN_OR_EQUAL) or
-                 (self.matcher == Matchers.LESS_THAN_OR_EQUAL and
-                  other.matcher == Matchers.GREATER_THAN_OR_EQUAL))
+        equal = (
+            self.matcher == Matchers.GREATER_THAN_OR_EQUAL
+            and other.matcher == Matchers.LESS_THAN_OR_EQUAL
+        ) or (
+            self.matcher == Matchers.LESS_THAN_OR_EQUAL
+            and other.matcher == Matchers.GREATER_THAN_OR_EQUAL
+        )
         if equal:
             return 0
 
-        lt = ((self.matcher == Matchers.LESS_THAN and
-               other.matcher == Matchers.LESS_THAN_OR_EQUAL) or
-              (other.matcher == Matchers.GREATER_THAN and
-               self.matcher == Matchers.GREATER_THAN_OR_EQUAL) or
-              (self.is_upper_bound and other.is_lower_bound))
+        lt = (
+            (self.matcher == Matchers.LESS_THAN and other.matcher == Matchers.LESS_THAN_OR_EQUAL)
+            or (
+                other.matcher == Matchers.GREATER_THAN
+                and self.matcher == Matchers.GREATER_THAN_OR_EQUAL
+            )
+            or (self.is_upper_bound and other.is_lower_bound)
+        )
         if lt:
             return -1
 
-        gt = ((other.matcher == Matchers.LESS_THAN and
-               self.matcher == Matchers.LESS_THAN_OR_EQUAL) or
-              (self.matcher == Matchers.GREATER_THAN and
-               other.matcher == Matchers.GREATER_THAN_OR_EQUAL) or
-              (self.is_lower_bound and other.is_upper_bound))
+        gt = (
+            (other.matcher == Matchers.LESS_THAN and self.matcher == Matchers.LESS_THAN_OR_EQUAL)
+            or (
+                self.matcher == Matchers.GREATER_THAN
+                and other.matcher == Matchers.GREATER_THAN_OR_EQUAL
+            )
+            or (self.is_lower_bound and other.is_upper_bound)
+        )
         if gt:
             return 1
 
@@ -198,13 +202,11 @@ def is_unbounded(self):
 
     @property
     def is_lower_bound(self):
-        return self.matcher in [Matchers.GREATER_THAN,
-                                Matchers.GREATER_THAN_OR_EQUAL]
+        return self.matcher in [Matchers.GREATER_THAN, Matchers.GREATER_THAN_OR_EQUAL]
 
     @property
     def is_upper_bound(self):
-        return self.matcher in [Matchers.LESS_THAN,
-                                Matchers.LESS_THAN_OR_EQUAL]
+        return self.matcher in [Matchers.LESS_THAN, Matchers.LESS_THAN_OR_EQUAL]
 
     @property
     def is_exact(self):
@@ -368,16 +370,14 @@ def reduce_versions(*args):
                 version_specifiers.append(version.end)
 
         else:
-            version_specifiers.append(
-                VersionSpecifier.from_version_string(version))
+            version_specifiers.append(VersionSpecifier.from_version_string(version))
 
     for version_specifier in version_specifiers:
         if not isinstance(version_specifier, VersionSpecifier):
             raise Exception(version_specifier)
 
     if not version_specifiers:
-        return VersionRange(start=UnboundedVersionSpecifier(),
-                            end=UnboundedVersionSpecifier())
+        return VersionRange(start=UnboundedVersionSpecifier(), end=UnboundedVersionSpecifier())
 
     try:
         to_return = version_specifiers.pop().to_range()
@@ -386,8 +386,8 @@ def reduce_versions(*args):
             to_return = to_return.reduce(version_specifier.to_range())
     except VersionsNotCompatibleException:
         raise VersionsNotCompatibleException(
-            'Could not find a satisfactory version from options: {}'
-            .format([str(a) for a in args]))
+            "Could not find a satisfactory version from options: {}".format([str(a) for a in args])
+        )
 
     return to_return
 
@@ -409,9 +409,7 @@ def find_possible_versions(requested_range, available_versions):
     for version_string in available_versions:
         version = VersionSpecifier.from_version_string(version_string)
 
-        if(versions_compatible(version,
-                               requested_range.start,
-                               requested_range.end)):
+        if versions_compatible(version, requested_range.start, requested_range.end):
             possible_versions.append(version)
 
     sorted_versions = sorted(possible_versions, reverse=True)
@@ -425,9 +423,9 @@ def resolve_to_specific_version(requested_range, available_versions):
     for version_string in available_versions:
         version = VersionSpecifier.from_version_string(version_string)
 
-        if(versions_compatible(version,
+        if versions_compatible(version, requested_range.start, requested_range.end) and (
            requested_range.start, requested_range.end) and
-           (max_version is None or max_version.compare(version) < 0)):
+        ):
             max_version = version
             max_version_string = version_string
 
diff --git a/core/dbt/utils.py b/core/dbt/utils.py
index e19cb0502ab..32f44d20e1d 100644
--- a/core/dbt/utils.py
+++ b/core/dbt/utils.py
@@ -269,11 +269,12 @@ def flatten_nodes(dep_list):
 
 
 class memoized:
-    '''Decorator. Caches a function's return value each time it is called. If
+    """Decorator. Caches a function's return value each time it is called. If
     called later with the same arguments, the cached value is returned (not
     reevaluated).
 
-    Taken from https://wiki.python.org/moin/PythonDecoratorLibrary#Memoize'''
+    Taken from https://wiki.python.org/moin/PythonDecoratorLibrary#Memoize"""
+
     def __init__(self, func):
         self.func = func
         self.cache = {}
@@ -290,11 +291,11 @@ def __call__(self, *args):
         return value
 
     def __repr__(self):
-        '''Return the function's docstring.'''
+        """Return the function's docstring."""
         return self.func.__doc__
 
     def __get__(self, obj, objtype):
-        '''Support instance methods.'''
+        """Support instance methods."""
         return functools.partial(self.__call__, obj)
 
 

From b7a39922a45335666897fdd5448ebf12f694231a Mon Sep 17 00:00:00 2001
From: Emily Rockman 
Date: Fri, 17 Dec 2021 16:05:57 -0600
Subject: [PATCH 165/933] scrub message of secrets (#4507)

* scrub message of secrets

* update changelog

* use new scrubbing and scrub more places using git

* fixed small miss of string conv and missing raise

* fix bug with cloning error

* resolving message issues

* better, more specific scrubbing

automatic commit by git-black, original commits:
  7c46b784efb0d6cbe033df6a50f29904dcef994d
  9cc7a7a87fcfdc2c558b91c9316efb22bf3fdb27
---
 core/dbt/clients/git.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/core/dbt/clients/git.py b/core/dbt/clients/git.py
index d597578b6c7..6d3c484f371 100644
--- a/core/dbt/clients/git.py
+++ b/core/dbt/clients/git.py
@@ -155,9 +155,7 @@ def clone_and_checkout(
     else:
         matches = re.match("Cloning into '(.+)'", err.decode("utf-8"))
         if matches is None:
-            raise RuntimeException(
-                f'Error cloning {repo} - never saw "Cloning into ..." from git'
-            )
+            raise RuntimeException(f'Error cloning {repo} - never saw "Cloning into ..." from git')
         directory = matches.group(1)
         fire_event(GitProgressPullingNewDependency(dir=directory))
     full_path = os.path.join(cwd, directory)

From 9078c5518de5705e1521ce49e5957c0e2d5938a0 Mon Sep 17 00:00:00 2001
From: Benoit Perigaud <8754100+b-per@users.noreply.github.com>
Date: Mon, 8 Nov 2021 22:30:38 +1100
Subject: [PATCH 166/933] Fix/dbt deps retry none answer (#4225)

* Fix issue #4178
Allow retries when the answer is None

* Include fix for #4178
Allow retries when the answer from dbt deps is None

* Add link to the PR

* Update exception and shorten line size

* Add test when dbt deps returns None

automatic commit by git-black, original commits:
  f20e83a32b99540f15d1b8ab051a781f03243d2d
---
 core/dbt/clients/registry.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/dbt/clients/registry.py b/core/dbt/clients/registry.py
index 9347d0d3d20..7fed7f6294a 100644
--- a/core/dbt/clients/registry.py
+++ b/core/dbt/clients/registry.py
@@ -37,7 +37,7 @@ def _get(path, registry_base_url=None):
     # See https://github.com/dbt-labs/dbt-core/issues/4577
     if resp.json() is None:
         raise requests.exceptions.ContentDecodingError(
-            'Request error: The response is None', response=resp
+            "Request error: The response is None", response=resp
         )
     return resp.json()
 

From 59f55353907235a16f5a78a335c4150544081ff5 Mon Sep 17 00:00:00 2001
From: leahwicz <60146280+leahwicz@users.noreply.github.com>
Date: Tue, 24 Aug 2021 13:35:09 -0400
Subject: [PATCH 167/933] Retry GitHub download failures (#3729)

* Retry GitHub download failures

* Refactor and add tests

* Fixed linting and added comment

* Fixing unit test assertRaises

Co-authored-by: Kyle Wigley 

* Fixing casing

Co-authored-by: Kyle Wigley 

* Changing to use partial for function calls

Co-authored-by: Kyle Wigley 

automatic commit by git-black, original commits:
  09ea989d81e5272d9fe9e5b5b30b1a441e3da349
---
 core/dbt/clients/registry.py | 6 +++---
 core/dbt/utils.py            | 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/core/dbt/clients/registry.py b/core/dbt/clients/registry.py
index 7fed7f6294a..59c2b85305b 100644
--- a/core/dbt/clients/registry.py
+++ b/core/dbt/clients/registry.py
@@ -43,14 +43,14 @@ def _get(path, registry_base_url=None):
 
 
 def index(registry_base_url=None):
-    return _get_with_retries('api/v1/index.json', registry_base_url)
+    return _get_with_retries("api/v1/index.json", registry_base_url)
 
 
 index_cached = memoized(index)
 
 
 def packages(registry_base_url=None):
-    return _get_with_retries('api/v1/packages.json', registry_base_url)
+    return _get_with_retries("api/v1/packages.json", registry_base_url)
 
 
 def package(name, registry_base_url=None):
@@ -80,7 +80,7 @@ def package(name, registry_base_url=None):
 
 
 def package_version(name, version, registry_base_url=None):
-    return _get_with_retries('api/v1/{}/{}.json'.format(name, version), registry_base_url)
+    return _get_with_retries("api/v1/{}/{}.json".format(name, version), registry_base_url)
 
 
 def get_available_versions(name):
diff --git a/core/dbt/utils.py b/core/dbt/utils.py
index 32f44d20e1d..b8ac10e5da5 100644
--- a/core/dbt/utils.py
+++ b/core/dbt/utils.py
@@ -621,7 +621,7 @@ def _connection_exception_retry(fn, max_attempts: int, attempt: int = 0):
             time.sleep(1)
             _connection_exception_retry(fn, max_attempts, attempt + 1)
         else:
-            raise ConnectionException('External connection exception occurred: ' + str(exc))
+            raise ConnectionException("External connection exception occurred: " + str(exc))
 
 
 # This is used to serialize the args in the run_results and in the logs.

From d52dc72e8e0f20b23525287e01488f0da5b3729b Mon Sep 17 00:00:00 2001
From: Anna Filippova <7892219+annafil@users.noreply.github.com>
Date: Tue, 7 Sep 2021 08:53:02 -0700
Subject: [PATCH 168/933] Feature: Add support for Package name changes on the
 Hub (#3825)

* Add warning about new package name

* Update CHANGELOG.md

* make linter happy

* Add warning about new package name

* Update CHANGELOG.md

* make linter happy

* move warnings to deprecations

* Update core/dbt/clients/registry.py

Co-authored-by: leahwicz <60146280+leahwicz@users.noreply.github.com>

* add comments for posterity

* Update core/dbt/deprecations.py

Co-authored-by: Jeremy Cohen 

* add deprecation test

Co-authored-by: leahwicz <60146280+leahwicz@users.noreply.github.com>
Co-authored-by: Jeremy Cohen 

automatic commit by git-black, original commits:
  6393f5a5d71e51012d7c435c035c0b9e5171877a
---
 core/dbt/clients/registry.py | 18 +++++++++---------
 core/dbt/deprecations.py     |  8 ++++----
 2 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/core/dbt/clients/registry.py b/core/dbt/clients/registry.py
index 59c2b85305b..846b4f622c8 100644
--- a/core/dbt/clients/registry.py
+++ b/core/dbt/clients/registry.py
@@ -54,27 +54,27 @@ def packages(registry_base_url=None):
 
 
 def package(name, registry_base_url=None):
-    response = _get_with_retries('api/v1/{}.json'.format(name), registry_base_url)
+    response = _get_with_retries("api/v1/{}.json".format(name), registry_base_url)
 
     # Either redirectnamespace or redirectname in the JSON response indicate a redirect
     # redirectnamespace redirects based on package ownership
     # redirectname redirects based on package name
     # Both can be present at the same time, or neither. Fails gracefully to old name
 
-    if ('redirectnamespace' in response) or ('redirectname' in response):
+    if ("redirectnamespace" in response) or ("redirectname" in response):
 
-        if ('redirectnamespace' in response) and response['redirectnamespace'] is not None:
-            use_namespace = response['redirectnamespace']
+        if ("redirectnamespace" in response) and response["redirectnamespace"] is not None:
+            use_namespace = response["redirectnamespace"]
         else:
-            use_namespace = response['namespace']
+            use_namespace = response["namespace"]
 
-        if ('redirectname' in response) and response['redirectname'] is not None:
-            use_name = response['redirectname']
+        if ("redirectname" in response) and response["redirectname"] is not None:
+            use_name = response["redirectname"]
         else:
-            use_name = response['name']
+            use_name = response["name"]
 
         new_nwo = use_namespace + "/" + use_name
-        deprecations.warn('package-redirect', old_name=name, new_name=new_nwo)
+        deprecations.warn("package-redirect", old_name=name, new_name=new_nwo)
 
     return response
 
diff --git a/core/dbt/deprecations.py b/core/dbt/deprecations.py
index 8a42d4311fe..2bdb2b4957c 100644
--- a/core/dbt/deprecations.py
+++ b/core/dbt/deprecations.py
@@ -44,11 +44,11 @@ def show(self, *args, **kwargs) -> None:
 
 
 class PackageRedirectDeprecation(DBTDeprecation):
-    _name = 'package-redirect'
-    _description = '''\
+    _name = "package-redirect"
+    _description = """\
     The `{old_name}` package is deprecated in favor of `{new_name}`. Please update
     your `packages.yml` configuration to use `{new_name}` instead.
-    '''
+    """
 
 
 class PackageInstallPathDeprecation(DBTDeprecation):
@@ -116,7 +116,7 @@ def warn(name, *args, **kwargs):
     ConfigSourcePathDeprecation(),
     ConfigDataPathDeprecation(),
     PackageInstallPathDeprecation(),
-    PackageRedirectDeprecation()
+    PackageRedirectDeprecation(),
 ]
 
 deprecations: Dict[str, DBTDeprecation] = {

From 7b4ae237645ea1731071e3c4fc162329984fa0fb Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Tue, 7 Jul 2020 14:10:02 -0600
Subject: [PATCH 169/933] make native env rendering opt-in

automatic commit by git-black, original commits:
  84bf169458e897fccdaf50382772c2b7ba3fb597
  9cc7a7a87fcfdc2c558b91c9316efb22bf3fdb27
---
 core/dbt/clients/jinja.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/core/dbt/clients/jinja.py b/core/dbt/clients/jinja.py
index 18bd3c83e16..e0f5e004b20 100644
--- a/core/dbt/clients/jinja.py
+++ b/core/dbt/clients/jinja.py
@@ -7,10 +7,7 @@
 from ast import literal_eval
 from contextlib import contextmanager
 from itertools import chain, islice
-from typing import (
-    List, Union, Set, Optional, Dict, Any, Iterator, Type, NoReturn, Tuple,
-    Callable
-)
+from typing import List, Union, Set, Optional, Dict, Any, Iterator, Type, NoReturn, Tuple, Callable
 
 import jinja2
 import jinja2.ext

From b8b668a4781bf3f674381e673869ae879b7e32a2 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Tue, 17 Mar 2020 07:17:29 -0600
Subject: [PATCH 170/933] Add metadata to tests, add a native env Render test
 arguments in the native env and pass them along to the context added/fixed
 tests Update changelog

automatic commit by git-black, original commits:
  dc65118f176b24e704dc0ad5f3db7586f18d2ba8
---
 core/dbt/clients/jinja.py                | 6 +++---
 core/dbt/parser/generic_test_builders.py | 6 +++---
 core/dbt/parser/schemas.py               | 8 ++------
 3 files changed, 8 insertions(+), 12 deletions(-)

diff --git a/core/dbt/clients/jinja.py b/core/dbt/clients/jinja.py
index e0f5e004b20..f4774205505 100644
--- a/core/dbt/clients/jinja.py
+++ b/core/dbt/clients/jinja.py
@@ -17,7 +17,7 @@
 import jinja2.sandbox
 
 from dbt.utils import (
-    get_dbt_macro_name, get_docs_macro_name, get_materialization_macro_name,
+    get_dbt_macro_name,
     get_test_macro_name, deep_map_render
 )
 
@@ -632,7 +632,7 @@ def add_rendered_test_kwargs(
     renderer, then insert that value into the given context as the special test
     keyword arguments member.
     """
-    looks_like_func = r'^\s*(env_var|ref|var|source|doc)\s*\(.+\)\s*$'
+    looks_like_func = r"^\s*(env_var|ref|var|source|doc)\s*\(.+\)\s*$"
 
     def _convert_function(
         value: Any, keypath: Tuple[Union[str, int], ...]
@@ -645,7 +645,7 @@ def _convert_function(
 
             if re.match(looks_like_func, value) is not None:
                 # curly braces to make rendering happy
-                value = f'{{{{ {value} }}}}'
+                value = f"{{{{ {value} }}}}"
 
             value = get_rendered(
                 value, context, node, capture_macros=capture_macros,
diff --git a/core/dbt/parser/generic_test_builders.py b/core/dbt/parser/generic_test_builders.py
index 182d15e598f..121464d9339 100644
--- a/core/dbt/parser/generic_test_builders.py
+++ b/core/dbt/parser/generic_test_builders.py
@@ -25,7 +25,7 @@ def get_nice_generic_test_name(
     flat_args = []
     for arg_name in sorted(args):
         # the model is already embedded in the name, so skip it
-        if arg_name == 'model':
+        if arg_name == "model":
             continue
         arg_val = args[arg_name]
 
@@ -205,14 +205,14 @@ def __init__(
     ) -> None:
         test_name, test_args = self.extract_test_args(test, column_name)
         self.args: Dict[str, Any] = test_args
-        if 'model' in self.args:
+        if "model" in self.args:
             raise_compiler_error(
                 'Test arguments include "model", which is a reserved argument',
             )
         self.package_name: str = package_name
         self.target: Testable = target
 
-        self.args['model'] = self.build_model_str()
+        self.args["model"] = self.build_model_str()
 
         match = self.TEST_NAME_PATTERN.match(test_name)
         if match is None:
diff --git a/core/dbt/parser/schemas.py b/core/dbt/parser/schemas.py
index 387190df1fe..53e8062094e 100644
--- a/core/dbt/parser/schemas.py
+++ b/core/dbt/parser/schemas.py
@@ -60,9 +60,7 @@
     TestBlock, Testable
 )
 from dbt.ui import warning_tag
-from dbt.utils import (
-    get_pseudo_test_path, coerce_dict_str
-)
+from dbt.utils import get_pseudo_test_path, coerce_dict_str
 
 
 UnparsedSchemaYaml = Union[
@@ -453,9 +451,7 @@ def render_with_context(
         add_rendered_test_kwargs(context, node, capture_macros=True)
 
         # the parsed node is not rendered in the native context.
-        get_rendered(
-            node.raw_sql, context, node, capture_macros=True
-        )
+        get_rendered(node.raw_sql, context, node, capture_macros=True)
 
     def parse_test(
         self,

From b3825f634aef146dd0bcef730286853917092626 Mon Sep 17 00:00:00 2001
From: Gerda Shank 
Date: Mon, 8 Nov 2021 11:28:43 -0500
Subject: [PATCH 171/933] [#3885] Partially parse when environment variables in
 schema files change (#4162)

* [#3885] Partially parse when environment variables in schema files
change

* Add documentation for test kwargs

* Add test and fix for schema configs with env_var

automatic commit by git-black, original commits:
  bda70c988e34a29566ad45e98e210119d3b351d6
---
 core/dbt/clients/jinja.py          |  5 ++++-
 core/dbt/context/configured.py     |  4 ++--
 core/dbt/context/providers.py      |  2 +-
 core/dbt/exceptions.py             |  2 +-
 core/dbt/parser/partial.py         | 30 ++++++++++++++++--------------
 core/dbt/parser/schema_renderer.py | 22 +++++++++++-----------
 core/dbt/parser/schemas.py         | 21 +++++++++++----------
 core/dbt/utils.py                  |  2 +-
 8 files changed, 47 insertions(+), 41 deletions(-)

diff --git a/core/dbt/clients/jinja.py b/core/dbt/clients/jinja.py
index f4774205505..5bbc9d6cb57 100644
--- a/core/dbt/clients/jinja.py
+++ b/core/dbt/clients/jinja.py
@@ -18,7 +18,10 @@
 
 from dbt.utils import (
     get_dbt_macro_name,
-    get_test_macro_name, deep_map_render
+    get_docs_macro_name,
+    get_materialization_macro_name,
+    get_test_macro_name,
+    deep_map_render,
 )
 
 from dbt.clients._jinja_blocks import BlockIterator, BlockData, BlockTag
diff --git a/core/dbt/context/configured.py b/core/dbt/context/configured.py
index 7d87464d5f1..b1e847bb7bf 100644
--- a/core/dbt/context/configured.py
+++ b/core/dbt/context/configured.py
@@ -67,7 +67,7 @@ def __call__(self, var_name, default=Var._VAR_NOTSET):
         return self.get_missing_var(var_name)
 
 
-class SchemaYamlVars():
+class SchemaYamlVars:
     def __init__(self):
         self.env_vars = {}
         self.vars = {}
@@ -115,7 +115,7 @@ def var(self) -> ConfiguredVar:
 
 
 def generate_schema_yml_context(
-        config: AdapterRequiredConfig, project_name: str, schema_yaml_vars: SchemaYamlVars = None
+    config: AdapterRequiredConfig, project_name: str, schema_yaml_vars: SchemaYamlVars = None
 ) -> Dict[str, Any]:
     ctx = SchemaYamlContext(config, project_name, schema_yaml_vars)
     return ctx.to_dict()
diff --git a/core/dbt/context/providers.py b/core/dbt/context/providers.py
index 4748e5d2ece..24680920f77 100644
--- a/core/dbt/context/providers.py
+++ b/core/dbt/context/providers.py
@@ -1493,7 +1493,7 @@ def env_var(self, var: str, default: Optional[str] = None) -> str:
                 # the "model" should only be test nodes, but just in case, check
                 if self.model.resource_type == NodeType.Test and self.model.file_key_name:
                     source_file = self.manifest.files[self.model.file_id]
-                    (yaml_key, name) = self.model.file_key_name.split('.')
+                    (yaml_key, name) = self.model.file_key_name.split(".")
                     source_file.add_env_var(var, yaml_key, name)
             return return_value
         else:
diff --git a/core/dbt/exceptions.py b/core/dbt/exceptions.py
index 534fd9ccb17..245ecbbd5b4 100644
--- a/core/dbt/exceptions.py
+++ b/core/dbt/exceptions.py
@@ -246,7 +246,7 @@ class ParsingException(RuntimeException):
 
     @property
     def type(self):
-        return 'Parsing'
+        return "Parsing"
 
 
 class JSONValidationException(ValidationException):
diff --git a/core/dbt/parser/partial.py b/core/dbt/parser/partial.py
index d77f238b4ea..8be81326aaa 100644
--- a/core/dbt/parser/partial.py
+++ b/core/dbt/parser/partial.py
@@ -69,8 +69,10 @@ def __init__(self, saved_manifest: Manifest, new_files: MutableMapping[str, AnyS
         self.project_parser_files = {}
         self.deleted_manifest = Manifest()
         self.macro_child_map: Dict[str, List[str]] = {}
-        (self.env_vars_changed_source_files, self.env_vars_changed_schema_files) = \
-            self.build_env_vars_to_files()
+        (
+            self.env_vars_changed_source_files,
+            self.env_vars_changed_schema_files,
+        ) = self.build_env_vars_to_files()
         self.build_file_diff()
         self.processing_file = None
         self.deleted_special_override_macro = False
@@ -613,7 +615,7 @@ def handle_schema_file_changes(self, schema_file, saved_yaml_dict, new_yaml_dict
             # Handle schema file updates due to env_var changes
             if dict_key in env_var_changes and dict_key in new_yaml_dict:
                 for name in env_var_changes[dict_key]:
-                    if name in key_diff['changed_or_deleted_names']:
+                    if name in key_diff["changed_or_deleted_names"]:
                         continue
                     elem = self.get_schema_element(new_yaml_dict[dict_key], name)
                     if elem:
@@ -621,21 +623,21 @@ def handle_schema_file_changes(self, schema_file, saved_yaml_dict, new_yaml_dict
                         self.merge_patch(schema_file, dict_key, elem)
 
         # sources
-        dict_key = 'sources'
+        dict_key = "sources"
         source_diff = self.get_diff_for(dict_key, saved_yaml_dict, new_yaml_dict)
         if source_diff['changed']:
             for source in source_diff['changed']:
                 if 'overrides' in source:  # This is a source patch; need to re-parse orig source
                     self.remove_source_override_target(source)
                 self.delete_schema_source(schema_file, source)
-                self.remove_tests(schema_file, dict_key, source['name'])
+                self.remove_tests(schema_file, dict_key, source["name"])
                 self.merge_patch(schema_file, dict_key, source)
         if source_diff['deleted']:
             for source in source_diff['deleted']:
                 if 'overrides' in source:  # This is a source patch; need to re-parse orig source
                     self.remove_source_override_target(source)
                 self.delete_schema_source(schema_file, source)
-                self.remove_tests(schema_file, dict_key, source['name'])
+                self.remove_tests(schema_file, dict_key, source["name"])
         if source_diff['added']:
             for source in source_diff['added']:
                 if 'overrides' in source:  # This is a source patch; need to re-parse orig source
@@ -644,18 +646,18 @@ def handle_schema_file_changes(self, schema_file, saved_yaml_dict, new_yaml_dict
         # Handle schema file updates due to env_var changes
         if dict_key in env_var_changes and dict_key in new_yaml_dict:
             for name in env_var_changes[dict_key]:
-                if name in source_diff['changed_or_deleted_names']:
+                if name in source_diff["changed_or_deleted_names"]:
                     continue
                 source = self.get_schema_element(new_yaml_dict[dict_key], name)
                 if source:
-                    if 'overrides' in source:
+                    if "overrides" in source:
                         self.remove_source_override_target(source)
                     self.delete_schema_source(schema_file, source)
-                    self.remove_tests(schema_file, dict_key, source['name'])
+                    self.remove_tests(schema_file, dict_key, source["name"])
                     self.merge_patch(schema_file, dict_key, source)
 
         # macros
-        dict_key = 'macros'
+        dict_key = "macros"
         macro_diff = self.get_diff_for(dict_key, saved_yaml_dict, new_yaml_dict)
         if macro_diff['changed']:
             for macro in macro_diff['changed']:
@@ -670,7 +672,7 @@ def handle_schema_file_changes(self, schema_file, saved_yaml_dict, new_yaml_dict
         # Handle schema file updates due to env_var changes
         if dict_key in env_var_changes and dict_key in new_yaml_dict:
             for name in env_var_changes[dict_key]:
-                if name in macro_diff['changed_or_deleted_names']:
+                if name in macro_diff["changed_or_deleted_names"]:
                     continue
                 elem = self.get_schema_element(new_yaml_dict[dict_key], name)
                 if elem:
@@ -678,7 +680,7 @@ def handle_schema_file_changes(self, schema_file, saved_yaml_dict, new_yaml_dict
                     self.merge_patch(schema_file, dict_key, elem)
 
         # exposures
-        dict_key = 'exposures'
+        dict_key = "exposures"
         exposure_diff = self.get_diff_for(dict_key, saved_yaml_dict, new_yaml_dict)
         if exposure_diff['changed']:
             for exposure in exposure_diff['changed']:
@@ -693,7 +695,7 @@ def handle_schema_file_changes(self, schema_file, saved_yaml_dict, new_yaml_dict
         # Handle schema file updates due to env_var changes
         if dict_key in env_var_changes and dict_key in new_yaml_dict:
             for name in env_var_changes[dict_key]:
-                if name in exposure_diff['changed_or_deleted_names']:
+                if name in exposure_diff["changed_or_deleted_names"]:
                     continue
                 elem = self.get_schema_element(new_yaml_dict[dict_key], name)
                 if elem:
@@ -779,7 +781,7 @@ def merge_patch(self, schema_file, key, patch):
                     found = True
             if not found:
                 pp_dict[key].append(patch)
-        schema_file.delete_from_env_vars(key, patch['name'])
+        schema_file.delete_from_env_vars(key, patch["name"])
         self.add_to_pp_files(schema_file)
 
     # For model, seed, snapshot, analysis schema dictionary keys,
diff --git a/core/dbt/parser/schema_renderer.py b/core/dbt/parser/schema_renderer.py
index 1838ccf641f..82aab537751 100644
--- a/core/dbt/parser/schema_renderer.py
+++ b/core/dbt/parser/schema_renderer.py
@@ -18,7 +18,7 @@ def __init__(self, context: Dict[str, Any], key: str) -> None:
 
     @property
     def name(self):
-        return 'Rendering yaml'
+        return "Rendering yaml"
 
     def _is_norender_key(self, keypath: Keypath) -> bool:
         """
@@ -34,16 +34,16 @@ def _is_norender_key(self, keypath: Keypath) -> bool:
         Return True if it's tests or description - those aren't rendered now
         because they're rendered later in parse_generic_tests or process_docs.
         """
-        if len(keypath) >= 1 and keypath[0] in ('tests', 'description'):
+        if len(keypath) >= 1 and keypath[0] in ("tests", "description"):
             return True
 
-        if len(keypath) == 2 and keypath[1] in ('tests', 'description'):
+        if len(keypath) == 2 and keypath[1] in ("tests", "description"):
             return True
 
         if (
-            len(keypath) >= 3 and
-            keypath[0] == 'columns' and
-            keypath[2] in ('tests', 'description')
+            len(keypath) >= 3
+            and keypath[0] == "columns"
+            and keypath[2] in ("tests", "description")
         ):
             return True
 
@@ -54,14 +54,14 @@ def should_render_keypath(self, keypath: Keypath) -> bool:
         if len(keypath) < 1:
             return True
 
-        if self.key == 'sources':
-            if keypath[0] == 'description':
+        if self.key == "sources":
+            if keypath[0] == "description":
                 return False
-            if keypath[0] == 'tables':
+            if keypath[0] == "tables":
                 if self._is_norender_key(keypath[2:]):
                     return False
-        elif self.key == 'macros':
-            if keypath[0] == 'arguments':
+        elif self.key == "macros":
+            if keypath[0] == "arguments":
                 if self._is_norender_key(keypath[1:]):
                     return False
             elif self._is_norender_key(keypath[0:]):
diff --git a/core/dbt/parser/schemas.py b/core/dbt/parser/schemas.py
index 53e8062094e..9519b5bec6c 100644
--- a/core/dbt/parser/schemas.py
+++ b/core/dbt/parser/schemas.py
@@ -48,7 +48,7 @@
 from dbt.exceptions import (
     warn_invalid_patch, validator_error_message, JSONValidationException,
     raise_invalid_schema_yml_version, ValidationException,
-    ParsingException, raise_duplicate_patch_name,
+    JSONValidationException,
     raise_duplicate_macro_patch_name, InternalException,
     raise_duplicate_source_patch_name, warn_or_error,
 )
@@ -261,7 +261,7 @@ def get_hashable_md(
             'test_metadata': test_metadata,
             'column_name': column_name,
             'checksum': FileHash.empty().to_dict(omit_none=True),
-            'file_key_name': file_key_name,
+            "file_key_name": file_key_name,
         }
         try:
             ParsedGenericTestNode.validate(dct)
@@ -358,8 +358,8 @@ def store_env_vars(self, target, schema_file_id, env_vars):
             if isinstance(target, UnpatchedSourceDefinition):
                 search_name = target.source.name
                 yaml_key = target.source.yaml_key
-                if '.' in search_name:  # source file definitions
-                    (search_name, _) = search_name.split('.')
+                if "." in search_name:  # source file definitions
+                    (search_name, _) = search_name.split(".")
             else:
                 search_name = target.name
                 yaml_key = target.yaml_key
@@ -597,7 +597,7 @@ def __init__(
         self.render_ctx = generate_schema_yml_context(
             self.schema_parser.root_project,
             self.schema_parser.project.project_name,
-            self.schema_yaml_vars
+            self.schema_yaml_vars,
         )
         self.renderer = SchemaYamlRenderer(self.render_ctx, self.key)
 
@@ -640,7 +640,7 @@ def get_key_dicts(self) -> Iterable[Dict[str, Any]]:
                 )
                 raise ParsingException(msg)
 
-            if 'name' not in entry:
+            if "name" not in entry:
                 raise ParsingException("Entry did not contain a name")
 
             # Render the data (except for tests and descriptions).
@@ -651,7 +651,7 @@ def get_key_dicts(self) -> Iterable[Dict[str, Any]]:
                 schema_file = self.yaml.file
                 assert isinstance(schema_file, SchemaSourceFile)
                 for var in self.schema_yaml_vars.env_vars.keys():
-                    schema_file.add_env_var(var, self.key, entry['name'])
+                    schema_file.add_env_var(var, self.key, entry["name"])
                 self.schema_yaml_vars.env_vars = {}
 
             yield entry
@@ -662,8 +662,8 @@ def render_entry(self, dct):
             dct = self.renderer.render_data(dct)
         except ParsingException as exc:
             raise ParsingException(
-                f'Failed to render {self.yaml.file.path.original_file_path} from '
-                f'project {self.project.project_name}: {exc}'
+                f"Failed to render {self.yaml.file.path.original_file_path} from "
+                f"project {self.project.project_name}: {exc}"
             ) from exc
         return dct
 
@@ -823,7 +823,8 @@ def get_unparsed_target(self) -> Iterable[NonSourceTarget]:
     def normalize_meta_attribute(self, data, path):
         if 'meta' in data:
             if 'config' in data and 'meta' in data['config']:
-                raise ParsingException(f"""
+                raise ParsingException(
+                    f"""
                     In {path}: found meta dictionary in 'config' dictionary and as top-level key.
                     Remove the top-level key and define it under 'config' dictionary only.
                 """.strip())
diff --git a/core/dbt/utils.py b/core/dbt/utils.py
index b8ac10e5da5..7750e03b090 100644
--- a/core/dbt/utils.py
+++ b/core/dbt/utils.py
@@ -199,7 +199,7 @@ def _deep_map_render(
     return ret
 
 
-def deep_map_render(
+def deep_map_render(func: Callable[[Any, Tuple[Union[str, int], ...]], Any], value: Any) -> Any:
     func: Callable[[Any, Tuple[Union[str, int], ...]], Any],
     value: Any
 ) -> Any:

From 818c4360c424a98e643bbef5a01ca509c97835d2 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 5 Feb 2020 11:24:40 -0700
Subject: [PATCH 172/933] depends_on macros

Clean up macro generation a bit


automatic commit by git-black, original commits:
  20496651c5e02ab94e98549ff3ff6aeee4608186
---
 core/dbt/clients/jinja.py | 14 +++++---------
 1 file changed, 5 insertions(+), 9 deletions(-)

diff --git a/core/dbt/clients/jinja.py b/core/dbt/clients/jinja.py
index 5bbc9d6cb57..f6e2d1cc16e 100644
--- a/core/dbt/clients/jinja.py
+++ b/core/dbt/clients/jinja.py
@@ -28,7 +28,7 @@
 from dbt.contracts.graph.compiled import CompiledGenericTestNode
 from dbt.contracts.graph.parsed import ParsedGenericTestNode
 from dbt.exceptions import (
-    InternalException, raise_compiler_error, CompilationException,
+    InternalException,
     invalid_materialization_argument, MacroReturn, JinjaRenderingException,
     UndefinedMacroException
 )
@@ -276,7 +276,7 @@ def push(self, name):
     def pop(self, name):
         got = self.call_stack.pop()
         if got != name:
-            raise InternalException(f'popped {got}, expected {name}')
+            raise InternalException(f"popped {got}, expected {name}")
 
 
 class MacroGenerator(BaseMacroGenerator):
@@ -285,7 +285,7 @@ def __init__(
         macro,
         context: Optional[Dict[str, Any]] = None,
         node: Optional[Any] = None,
-        stack: Optional[MacroStack] = None
+        stack: Optional[MacroStack] = None,
     ) -> None:
         super().__init__(context)
         self.macro = macro
@@ -377,13 +377,9 @@ def parse(self, parser):
                 adapter_name = value.value
 
             else:
-                invalid_materialization_argument(
-                    materialization_name, target.name
-                )
+                invalid_materialization_argument(materialization_name, target.name)
 
-        node.name = get_materialization_macro_name(
-            materialization_name, adapter_name
-        )
+        node.name = get_materialization_macro_name(materialization_name, adapter_name)
 
         node.body = parser.parse_statements(('name:endmaterialization',),
                                             drop_needle=True)

From 387c77b12a90dba7022e0d96a48b18e0a4f7eae6 Mon Sep 17 00:00:00 2001
From: Josh Devlin 
Date: Wed, 19 May 2021 13:08:30 -0500
Subject: [PATCH 173/933] Add a better error for undefined macros (#3343)

* Add a better error for undefined macros

* Add check/error when installed packages < specified packages

* fix integration tests

* Fix issue with null packages

* Don't call _get_project_directories() twice

Co-authored-by: Jeremy Cohen 

* Fix some integration and unit tests

* Make mypy happy

Co-authored-by: Jeremy Cohen 

* Fix docs and rpc integration tests

* Fix (almost) all the rpc tests

Co-authored-by: Jeremy Cohen 

automatic commit by git-black, original commits:
  17555faaca70b4a9cd9e202097ababa1aca7839e
---
 core/dbt/clients/jinja.py  | 8 ++++++--
 core/dbt/config/runtime.py | 8 ++++----
 core/dbt/exceptions.py     | 9 +++++----
 3 files changed, 15 insertions(+), 10 deletions(-)

diff --git a/core/dbt/clients/jinja.py b/core/dbt/clients/jinja.py
index f6e2d1cc16e..beb5056fede 100644
--- a/core/dbt/clients/jinja.py
+++ b/core/dbt/clients/jinja.py
@@ -29,8 +29,12 @@
 from dbt.contracts.graph.parsed import ParsedGenericTestNode
 from dbt.exceptions import (
     InternalException,
-    invalid_materialization_argument, MacroReturn, JinjaRenderingException,
-    UndefinedMacroException
+    raise_compiler_error,
+    CompilationException,
+    invalid_materialization_argument,
+    MacroReturn,
+    JinjaRenderingException,
+    UndefinedMacroException,
 )
 from dbt import flags
 
diff --git a/core/dbt/config/runtime.py b/core/dbt/config/runtime.py
index f87c7346d15..a3bf020d148 100644
--- a/core/dbt/config/runtime.py
+++ b/core/dbt/config/runtime.py
@@ -339,11 +339,11 @@ def load_dependencies(self) -> Mapping[str, 'RuntimeConfig']:
             count_packages_installed = len(tuple(self._get_project_directories()))
             if count_packages_specified > count_packages_installed:
                 raise_compiler_error(
-                    f'dbt found {count_packages_specified} package(s) '
-                    f'specified in packages.yml, but only '
-                    f'{count_packages_installed} package(s) installed '
+                    f"dbt found {count_packages_specified} package(s) "
+                    f"specified in packages.yml, but only "
+                    f"{count_packages_installed} package(s) installed "
                     f'in {self.packages_install_path}. Run "dbt deps" to '
-                    f'install package dependencies.'
+                    f"install package dependencies."
                 )
             project_paths = itertools.chain(
                 internal_packages,
diff --git a/core/dbt/exceptions.py b/core/dbt/exceptions.py
index 245ecbbd5b4..c675c7c3328 100644
--- a/core/dbt/exceptions.py
+++ b/core/dbt/exceptions.py
@@ -297,12 +297,13 @@ class JinjaRenderingException(CompilationException):
 
 
 class UndefinedMacroException(CompilationException):
-
-    def __str__(self, prefix='! ') -> str:
+    def __str__(self, prefix="! ") -> str:
         msg = super().__str__(prefix)
-        return f'{msg}. This can happen when calling a macro that does ' \
-            'not exist. Check for typos and/or install package dependencies ' \
+        return (
+            f"{msg}. This can happen when calling a macro that does "
+            "not exist. Check for typos and/or install package dependencies "
             'with "dbt deps".'
+        )
 
 
 class UnknownAsyncIDException(Exception):

From 3adf62bffeba3119c9163531523dd3c58a1325fd Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Tue, 23 Jul 2019 17:07:55 -0600
Subject: [PATCH 174/933] Add environment variables for macro debugging flags

automatic commit by git-black, original commits:
  709ee2a0e84006bdc4a6e9d5c73a152c1dcdf1be
---
 core/dbt/clients/jinja.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/core/dbt/clients/jinja.py b/core/dbt/clients/jinja.py
index beb5056fede..ce18249bfc3 100644
--- a/core/dbt/clients/jinja.py
+++ b/core/dbt/clients/jinja.py
@@ -44,11 +44,11 @@ def _linecache_inject(source, write):
         # this is the only reliable way to accomplish this. Obviously, it's
         # really darn noisy and will fill your temporary directory
         tmp_file = tempfile.NamedTemporaryFile(
-            prefix='dbt-macro-compiled-',
-            suffix='.py',
+            prefix="dbt-macro-compiled-",
+            suffix=".py",
             delete=False,
-            mode='w+',
-            encoding='utf-8',
+            mode="w+",
+            encoding="utf-8",
         )
         tmp_file.write(source)
         filename = tmp_file.name

From c2f4f8e3e5c6b9927dea0fddd9eebecc5e58741a Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Tue, 17 Dec 2019 11:13:33 -0700
Subject: [PATCH 175/933] Fix mypy checking

Make mypy check our nested namespace packages by putting dbt in the mypy_path.
Fix a number of exposed mypy/type checker complaints. The checker mostly
passes now even if you add `--check-untyped-defs`, though there are a couple lingering issues so I'll leave that out of CI
Change the return type of RunOperation a bit - adds a couple fields to appease mypy

Also, bump the mypy version (it catches a few more issues).


automatic commit by git-black, original commits:
  709ee2a0e84006bdc4a6e9d5c73a152c1dcdf1be
  9cc7a7a87fcfdc2c558b91c9316efb22bf3fdb27
---
 core/dbt/clients/jinja.py | 7 +------
 1 file changed, 1 insertion(+), 6 deletions(-)

diff --git a/core/dbt/clients/jinja.py b/core/dbt/clients/jinja.py
index ce18249bfc3..ab6f8e2554a 100644
--- a/core/dbt/clients/jinja.py
+++ b/core/dbt/clients/jinja.py
@@ -59,12 +59,7 @@ def _linecache_inject(source, write):
         filename = rnd.decode("ascii")
 
     # put ourselves in the cache
-    cache_entry = (
-        len(source),
-        None,
-        [line + '\n' for line in source.splitlines()],
-        filename
-    )
+    cache_entry = (len(source), None, [line + "\n" for line in source.splitlines()], filename)
     # linecache does in fact have an attribute `cache`, thanks
     linecache.cache[filename] = cache_entry  # type: ignore
     return filename

From 8988feec4cc66c1be19ff1bd2f0ca00faceef854 Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Wed, 5 Feb 2020 11:24:40 -0700
Subject: [PATCH 176/933] depends_on macros

Clean up macro generation a bit


automatic commit by git-black, original commits:
  20496651c5e02ab94e98549ff3ff6aeee4608186
  82c75a5334f4bf413e12f7f1f0cc3ce0953d3a31
---
 core/dbt/clients/jinja.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/core/dbt/clients/jinja.py b/core/dbt/clients/jinja.py
index ab6f8e2554a..a2ef758ec83 100644
--- a/core/dbt/clients/jinja.py
+++ b/core/dbt/clients/jinja.py
@@ -72,8 +72,7 @@ def parse_macro(self):
         # modified to fuzz macros defined in the same file. this way
         # dbt can understand the stack of macros being called.
         #  - @cmcarthur
-        node.name = get_dbt_macro_name(
-            self.parse_assign_target(name_only=True).name)
+        node.name = get_dbt_macro_name(self.parse_assign_target(name_only=True).name)
 
         self.parse_signature(node)
         node.body = self.parse_statements(('name:endmacro',),

From cd0ee5d545917782a515e913d7c768bf298e6195 Mon Sep 17 00:00:00 2001
From: Connor McArthur 
Date: Fri, 21 Jul 2017 09:43:43 -0400
Subject: [PATCH 177/933] define materializations as jinja macros (#466)

automatic commit by git-black, original commits:
  82c75a5334f4bf413e12f7f1f0cc3ce0953d3a31
---
 core/dbt/clients/jinja.py | 19 ++++++++-----------
 core/dbt/exceptions.py    |  8 ++++----
 core/dbt/utils.py         |  7 +++----
 3 files changed, 15 insertions(+), 19 deletions(-)

diff --git a/core/dbt/clients/jinja.py b/core/dbt/clients/jinja.py
index a2ef758ec83..4e4a4dc58b2 100644
--- a/core/dbt/clients/jinja.py
+++ b/core/dbt/clients/jinja.py
@@ -75,8 +75,7 @@ def parse_macro(self):
         node.name = get_dbt_macro_name(self.parse_assign_target(name_only=True).name)
 
         self.parse_signature(node)
-        node.body = self.parse_statements(('name:endmacro',),
-                                          drop_needle=True)
+        node.body = self.parse_statements(("name:endmacro",), drop_needle=True)
         return node
 
 
@@ -356,21 +355,20 @@ class MaterializationExtension(jinja2.ext.Extension):
 
     def parse(self, parser):
         node = jinja2.nodes.Macro(lineno=next(parser.stream).lineno)
-        materialization_name = \
-            parser.parse_assign_target(name_only=True).name
+        materialization_name = parser.parse_assign_target(name_only=True).name
 
-        adapter_name = 'default'
+        adapter_name = "default"
         node.args = []
         node.defaults = []
 
-        while parser.stream.skip_if('comma'):
+        while parser.stream.skip_if("comma"):
             target = parser.parse_assign_target(name_only=True)
 
-            if target.name == 'default':
+            if target.name == "default":
                 pass
 
-            elif target.name == 'adapter':
-                parser.stream.expect('assign')
+            elif target.name == "adapter":
+                parser.stream.expect("assign")
                 value = parser.parse_expression()
                 adapter_name = value.value
 
@@ -379,8 +377,7 @@ def parse(self, parser):
 
         node.name = get_materialization_macro_name(materialization_name, adapter_name)
 
-        node.body = parser.parse_statements(('name:endmaterialization',),
-                                            drop_needle=True)
+        node.body = parser.parse_statements(("name:endmaterialization",), drop_needle=True)
 
         return node
 
diff --git a/core/dbt/exceptions.py b/core/dbt/exceptions.py
index c675c7c3328..6b4943a8284 100644
--- a/core/dbt/exceptions.py
+++ b/core/dbt/exceptions.py
@@ -692,7 +692,7 @@ def materialization_not_available(model, adapter_type):
     materialization = model.get_materialization()
 
     raise_compiler_error(
-        "Materialization '{}' is not available for {}!"
-        .format(materialization, adapter_type),
-        model)
+        "Materialization '{}' is not available for {}!".format(materialization, adapter_type),
+        model,
+    )
 
@@ -702,11 +702,13 @@ def missing_materialization(model, adapter_type):
 
     valid_types = "'default'"
 
-    if adapter_type != 'default':
+    if adapter_type != "default":
         valid_types = "'default' and '{}'".format(adapter_type)
 
     raise_compiler_error(
-        "No materialization '{}' was found for adapter {}! (searched types {})"
-        .format(materialization, adapter_type, valid_types),
-        model)
+        "No materialization '{}' was found for adapter {}! (searched types {})".format(
+            materialization, adapter_type, valid_types
+        ),
+        model,
+    )
 
@@ -723,7 +723,9 @@ def raise_cache_inconsistent(message):
 
 def missing_config(model, name):
     raise_compiler_error(
-        "Model '{}' does not define a required config parameter '{}'."
-        .format(model.unique_id, name),
-        model)
+        "Model '{}' does not define a required config parameter '{}'.".format(
+            model.unique_id, name
+        ),
+        model,
+    )
 
diff --git a/core/dbt/utils.py b/core/dbt/utils.py
index 7750e03b090..dcb821eb8a3 100644
--- a/core/dbt/utils.py
+++ b/core/dbt/utils.py
@@ -73,7 +73,7 @@ def get_model_name_or_none(model):
     return name
 
 
-MACRO_PREFIX = 'dbt_macro__'
+MACRO_PREFIX = "dbt_macro__"
 DOCS_PREFIX = 'dbt_docs__'
 
 
@@ -89,10 +89,9 @@ def get_dbt_docs_name(name):
     return f'{DOCS_PREFIX}{name}'
 
 
-def get_materialization_macro_name(materialization_name, adapter_type=None,
-                                   with_prefix=True):
+def get_materialization_macro_name(materialization_name, adapter_type=None, with_prefix=True):
     if adapter_type is None:
-        adapter_type = 'default'
+        adapter_type = "default"
     name = f'materialization_{materialization_name}_{adapter_type}'
     return get_dbt_macro_name(name) if with_prefix else name
 

From 96e23bb1932a09cd99efd8347514b5e9b6cbfc7d Mon Sep 17 00:00:00 2001
From: Jacob Beck 
Date: Tue, 23 Jun 2020 10:14:24 -0600
Subject: [PATCH 178/933] Try to make imports a little more sane, ordering-wise

consolidate dbt.ui, move non-rpc node_runners into their tasks
move parse_cli_vars into config.utils
get rid of logger/exceptions requirements in dbt.utils


automatic commit by git-black, original commits:
  32c559838d692c572a6a33446c5d71b63b88d257
---
 core/dbt/clients/jinja.py   |  4 +--
 core/dbt/config/runtime.py  |  2 +-
 core/dbt/config/utils.py    |  3 +-
 core/dbt/deprecations.py    |  2 +-
 core/dbt/deps/git.py        |  3 +-
 core/dbt/parser/manifest.py | 28 ++++-----------
 core/dbt/parser/sources.py  |  2 +-
 core/dbt/task/base.py       | 29 ++++++++--------
 core/dbt/task/debug.py      |  2 +-
 core/dbt/task/freshness.py  | 17 ++++------
 core/dbt/task/run.py        | 68 ++++++++++++++++---------------------
 core/dbt/task/seed.py       |  2 +-
 core/dbt/ui.py              | 28 +++++++--------
 13 files changed, 82 insertions(+), 108 deletions(-)

diff --git a/core/dbt/clients/jinja.py b/core/dbt/clients/jinja.py
index 4e4a4dc58b2..36e08375786 100644
--- a/core/dbt/clients/jinja.py
+++ b/core/dbt/clients/jinja.py
@@ -91,8 +91,8 @@ def _compile(self, source, filename):
         If the value is 'write', also write the files to disk.
         WARNING: This can write a ton of data if you aren't careful.
         """
-        if filename == '