diff --git a/.ci/RELEASE_NOTES_GUIDE.md b/.ci/RELEASE_NOTES_GUIDE.md index 217726d8a164..372af94a954f 100644 --- a/.ci/RELEASE_NOTES_GUIDE.md +++ b/.ci/RELEASE_NOTES_GUIDE.md @@ -93,12 +93,11 @@ Notes SHOULD: changes, deprecations, or new behavior. - Impersonal third person (no “I”, “you”, etc.) - Start with {{service}} if changing an existing resource (see below) +- List specific added or changed resources or fields -Notes, breaking changes, and features are exceptions. These are more free-form and left to -the discretion of the PR author and reviewer. The overarching goal should be a good user -experience when reading the changelog. - -See examples below for good release notes. +Notes and breaking changes are exceptions; these are more free-form and left to +the discretion of the PR author and/or reviewer. The changelog should be clear +and easy to understand for end users not familiar with provider internals. ### Examples: diff --git a/.ci/containers/downstream-builder/generate_downstream.sh b/.ci/containers/downstream-builder/generate_downstream.sh index 4838f13c06da..00cd2b74dddd 100755 --- a/.ci/containers/downstream-builder/generate_downstream.sh +++ b/.ci/containers/downstream-builder/generate_downstream.sh @@ -104,6 +104,7 @@ if [ "$REPO" == "terraform-google-conversion" ]; then rm -rf ./tfplan2cai/testdata/templates/ rm -rf ./tfplan2cai/testdata/generatedconvert/ rm -rf ./tfplan2cai/converters/google/provider + rm -rf ./tfplan2cai/converters/google/resources find ./tfplan2cai/test/** -type f -exec git rm {} \; popd diff --git a/.ci/containers/gcb-terraform-vcr-tester/test_terraform_vcr.sh b/.ci/containers/gcb-terraform-vcr-tester/test_terraform_vcr.sh index a006f41dab6e..11ee13c6859d 100755 --- a/.ci/containers/gcb-terraform-vcr-tester/test_terraform_vcr.sh +++ b/.ci/containers/gcb-terraform-vcr-tester/test_terraform_vcr.sh @@ -179,7 +179,7 @@ if [[ -n $FAILED_TESTS_PATTERN ]]; then export VCR_MODE=RECORDING FAILED_TESTS=$(grep "^--- FAIL: TestAcc" replaying_test$test_suffix.log | awk '{print $3}') # test_exit_code=0 - parallel --jobs 16 TF_LOG=DEBUG TF_LOG_PATH_MASK=$local_path/testlog/recording/%s.log TF_ACC=1 TF_SCHEMA_PANIC_ON_ERROR=1 go test $GOOGLE_TEST_DIRECTORY -parallel 1 -v -run="{}$" -timeout 120m -ldflags="-X=github.com/hashicorp/terraform-provider-google-beta/version.ProviderVersion=acc" ">" testlog/recording_build/{}_recording_test.log ::: $FAILED_TESTS + parallel --jobs 16 TF_LOG=DEBUG TF_LOG_PATH_MASK=$local_path/testlog/recording/%s.log TF_ACC=1 TF_SCHEMA_PANIC_ON_ERROR=1 go test $GOOGLE_TEST_DIRECTORY -parallel 1 -v -run="{}$" -timeout 240m -ldflags="-X=github.com/hashicorp/terraform-provider-google-beta/version.ProviderVersion=acc" ">" testlog/recording_build/{}_recording_test.log ::: $FAILED_TESTS test_exit_code=$?
diff --git a/.ci/containers/pre-build-validator/validate.sh b/.ci/containers/pre-build-validator/validate.sh index 33821aa530f1..9f10f7693ddd 100755 --- a/.ci/containers/pre-build-validator/validate.sh +++ b/.ci/containers/pre-build-validator/validate.sh @@ -3,28 +3,23 @@ set -e gh_repo=magic-modules -new_branch="auto-pr-$PR_NUMBER" -if [ $PR_NUMBER == 7874 ]; then - post_body=$(jq -n \ - --arg owner "GoogleCloudPlatform" \ - --arg repo "$gh_repo" \ - --arg branch "$new_branch" \ - --arg sha "$COMMIT_SHA" \ - '{ - ref: "main", - inputs: { - owner: $owner, - repo: $repo, - branch: $branch, - sha: $sha, - } - }') +post_body=$(jq -n \ + --arg owner "GoogleCloudPlatform" \ + --arg repo "$gh_repo" \ + --arg sha "$COMMIT_SHA" \ + '{ + ref: "main", + inputs: { + owner: $owner, + repo: $repo, + sha: $sha, + } + }') - curl \ - -X POST \ - -u "modular-magician:$GITHUB_TOKEN" \ - -H "Accept: application/vnd.github.v3+json" \ - "https://api.github.com/repos/GoogleCloudPlatform/magic-modules/actions/workflows/pre-build-validation.yml/dispatches" \ - -d "$post_body" -fi +curl \ + -X POST \ + -u "modular-magician:$GITHUB_TOKEN" \ + -H "Accept: application/vnd.github.v3+json" \ + "https://api.github.com/repos/GoogleCloudPlatform/magic-modules/actions/workflows/pre-build-validation.yml/dispatches" \ + -d "$post_body" diff --git a/.ci/infra/terraform/main.tf b/.ci/infra/terraform/main.tf index c77d8475a6d6..63c4dc01c84c 100644 --- a/.ci/infra/terraform/main.tf +++ b/.ci/infra/terraform/main.tf @@ -294,6 +294,7 @@ module "project-services" { "testing.googleapis.com", "tpu.googleapis.com", "trafficdirector.googleapis.com", + "vmwareengine.googleapis.com", "vpcaccess.googleapis.com", "websecurityscanner.googleapis.com", "workflowexecutions.googleapis.com", diff --git a/.github/workflows/pre-build-validation.yml b/.github/workflows/pre-build-validation.yml index 1be0494e28ef..c99bcf2a7e3d 100644 --- a/.github/workflows/pre-build-validation.yml +++ b/.github/workflows/pre-build-validation.yml @@ -19,11 +19,8 @@ on: description: 'The Base Repository to pull from' required: false default: 'magic-modules' - branch: - description: 'The branch or sha to execute against' - required: true sha: - description: "The commit SHA in magic-modules repository where the status result will be posted" + description: "The commit SHA in magic-modules repository to execute against and where the status result will be posted" required: true jobs: @@ -34,13 +31,14 @@ jobs: uses: actions/checkout@v2 with: repository: ${{ github.event.inputs.owner }}/${{ github.event.inputs.repo }} - ref: ${{ github.event.inputs.branch }} + ref: ${{ github.event.inputs.sha }} path: repo fetch-depth: 0 - name: Check for mmv1 product file changes id: pull_request run: | cd repo + git config user.name "modular-magician" git merge --no-ff origin/main yamlfiles=$(git diff --name-only origin/main -- mmv1/products) if [ -z "$yamlfiles" ]; then diff --git a/.github/workflows/test-tgc.yml b/.github/workflows/test-tgc.yml new file mode 100644 index 000000000000..b4d87709f1c2 --- /dev/null +++ b/.github/workflows/test-tgc.yml @@ -0,0 +1,98 @@ +name: TGC Build and Unit Test + +permissions: + actions: read + contents: read + statuses: write + +env: + status_suffix: "-build-and-unit-tests" + +on: + workflow_dispatch: + inputs: + owner: + description: 'The owner of the fork' + required: false + default: 'modular-magician' + repo: + description: 'The Base Repository to pull from' + required: false + default: 'terraform-google-conversion' + branch: + description: 'The 
branch or sha of the tgc execute against' + required: true + sha: + description: "The commit SHA in magic-modules repository where the status result will be posted" + required: true + +concurrency: + group: test-tgc-${{ github.event.inputs.owner }}-${{ github.event.inputs.repo }}-${{ github.event.inputs.branch }} + cancel-in-progress: true + +jobs: + build-and-unit-test: + runs-on: ubuntu-latest + timeout-minutes: 30 + steps: + - name: Checkout Repository + uses: actions/checkout@v2 + with: + repository: ${{ github.event.inputs.owner }}/${{ github.event.inputs.repo }} + ref: ${{ github.event.inputs.branch }} + path: tgc + fetch-depth: 2 + - name: Check for Code Changes + id: pull_request + run: | + cd tgc + gofiles=$(git diff --name-only HEAD~1 | { grep -e "\.go$" -e "go.mod$" -e "go.sum$" || test $? = 1; }) + if [ -z "$gofiles" ]; then + echo "has_changes=false" >> $GITHUB_OUTPUT + else + echo "has_changes=true" >> $GITHUB_OUTPUT + fi + - name: Get Job URL + if: ${{ !cancelled() }} + id: get_job + run: | + response=$(curl --get -Ss -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/${{ github.repository }}/actions/runs/${{ github.run_id }}/attempts/${{ github.run_attempt }}/jobs") + html_url=$(echo "$response" | jq -r --arg job_name "${{ github.job }}" '.jobs | map(select(.name == $job_name)) | .[0].html_url') + echo "url=${html_url}" >> $GITHUB_OUTPUT + - name: Post Pending Status to Pull Request + if: ${{ !cancelled() }} + run: | + curl -X POST -H "Authorization: token ${{secrets.GITHUB_TOKEN}}" \ + -H "Accept: application/vnd.github.v3+json" \ + "https://api.github.com/repos/GoogleCloudPlatform/magic-modules/statuses/${{github.event.inputs.sha}}" \ + -d '{ + "context": "${{ github.event.inputs.repo }}${{ env.status_suffix }}", + "target_url": "${{ steps.get_job.outputs.url }}", + "state": "pending" + }' + - name: Set up Go + if: ${{ !failure() && steps.pull_request.outputs.has_changes == 'true' }} + uses: actions/setup-go@v4 + with: + go-version: '^1.19.9' + - name: Build Terraform Google Conversion + if: ${{ !failure() && steps.pull_request.outputs.has_changes == 'true' }} + run: | + cd tgc + go build + - name: Run Unit Tests + if: ${{ !failure() && steps.pull_request.outputs.has_changes == 'true' }} + run: | + cd tgc + make test + - name: Post Result Status to Pull Request + if: ${{ !cancelled() }} + run: | + curl -X POST -H "Authorization: token ${{secrets.GITHUB_TOKEN}}" \ + -H "Accept: application/vnd.github.v3+json" \ + "https://api.github.com/repos/GoogleCloudPlatform/magic-modules/statuses/${{github.event.inputs.sha}}" \ + -d '{ + "context": "${{ github.event.inputs.repo }}${{ env.status_suffix }}", + "target_url": "${{ steps.get_job.outputs.url }}", + "state": "${{ job.status }}" + }' diff --git a/docs/config.toml b/docs/config.toml index 3765d0004d06..8d6712b0b96c 100644 --- a/docs/config.toml +++ b/docs/config.toml @@ -16,3 +16,4 @@ path = 'github.com/alex-shpak/hugo-book' BookCommitPath = 'commit' BookEditPath = 'edit/main/docs' BookComments = false + BookSection = '*' diff --git a/docs/content/_index.md b/docs/content/_index.md index 97e0283e9c48..66b385622db1 100644 --- a/docs/content/_index.md +++ b/docs/content/_index.md @@ -1,8 +1,5 @@ --- -title: "Home" -weight: 0 -type: "docs" -date: 2022-11-14T09:50:49-08:00 +title: "Overview" --- diff --git a/docs/content/docs/best-practices/_index.md b/docs/content/best-practices/_index.md similarity index 99% rename from 
docs/content/docs/best-practices/_index.md rename to docs/content/best-practices/_index.md index 7baf60071a22..192af1b155a4 100644 --- a/docs/content/docs/best-practices/_index.md +++ b/docs/content/best-practices/_index.md @@ -1,6 +1,8 @@ --- title: "Best practices" weight: 25 +aliases: + - /docs/best-practices --- # Best practices diff --git a/docs/content/docs/getting-started/_index.md b/docs/content/getting-started/_index.md similarity index 72% rename from docs/content/docs/getting-started/_index.md rename to docs/content/getting-started/_index.md index 4a7986f94403..922f70037b69 100644 --- a/docs/content/docs/getting-started/_index.md +++ b/docs/content/getting-started/_index.md @@ -1,5 +1,4 @@ --- title: "Getting Started" weight: 10 -isSection: true --- \ No newline at end of file diff --git a/docs/content/docs/getting-started/contributing.md b/docs/content/getting-started/contributing.md similarity index 97% rename from docs/content/docs/getting-started/contributing.md rename to docs/content/getting-started/contributing.md index 071b08dae875..b568636c1577 100644 --- a/docs/content/docs/getting-started/contributing.md +++ b/docs/content/getting-started/contributing.md @@ -1,6 +1,8 @@ --- title: "Contributing" weight: 50 +aliases: + - /docs/getting-started/contributing --- # General contributing steps @@ -27,6 +29,7 @@ weight: 50 1. [Generate the providers](/magic-modules/docs/getting-started/generate-providers/) that include your change. 1. [Run provider tests locally](/magic-modules/docs/getting-started/run-provider-tests/) that are relevant to the change you made. (Testing the PR locally and pushing the commit to the PR only after the tests pass locally may significantly reduce back-and-forth in review.) 1. Push your changes to your `magic-modules` repo fork and send a pull request from that branch to the main branch on `magic-modules`. A reviewer will be assigned automatically to your PR. +1. Get approval to start Cloud Build jobs from the reviewer if you're a community contributor. 1. Wait for the modules magician to generate downstream diffs (which should take about 15 mins after creating the PR) to make sure all changes are generated correctly in downstream repos. 1. Wait for the VCR test results.
{{< details "Get to know general workflow for VCR tests" >}} diff --git a/docs/content/docs/getting-started/generate-providers.md b/docs/content/getting-started/generate-providers.md similarity index 97% rename from docs/content/docs/getting-started/generate-providers.md rename to docs/content/getting-started/generate-providers.md index e24d96da7ac5..cd3da174324f 100644 --- a/docs/content/docs/getting-started/generate-providers.md +++ b/docs/content/getting-started/generate-providers.md @@ -1,6 +1,8 @@ --- title: Generate the providers weight: 20 +aliases: + - /docs/getting-started/generate-providers --- # Generate the provider diff --git a/docs/content/docs/getting-started/provider-documentation.md b/docs/content/getting-started/provider-documentation.md similarity index 98% rename from docs/content/docs/getting-started/provider-documentation.md rename to docs/content/getting-started/provider-documentation.md index 4fe66f9b3d25..6940793a19f9 100644 --- a/docs/content/docs/getting-started/provider-documentation.md +++ b/docs/content/getting-started/provider-documentation.md @@ -1,6 +1,8 @@ --- title: "Provider documentation" weight: 60 +aliases: + - /docs/getting-started/provider-documentation --- # Provider documentation diff --git a/docs/content/docs/getting-started/run-provider-tests.md b/docs/content/getting-started/run-provider-tests.md similarity index 98% rename from docs/content/docs/getting-started/run-provider-tests.md rename to docs/content/getting-started/run-provider-tests.md index 272594e2c3da..026dab366cad 100644 --- a/docs/content/docs/getting-started/run-provider-tests.md +++ b/docs/content/getting-started/run-provider-tests.md @@ -1,6 +1,8 @@ --- title: "Run provider tests" weight: 30 +aliases: + - /docs/getting-started/run-provider-tests --- # Run provider tests locally diff --git a/docs/content/docs/getting-started/setup.md b/docs/content/getting-started/setup.md similarity index 99% rename from docs/content/docs/getting-started/setup.md rename to docs/content/getting-started/setup.md index b252ff371f30..15d9e25bde95 100644 --- a/docs/content/docs/getting-started/setup.md +++ b/docs/content/getting-started/setup.md @@ -1,6 +1,8 @@ --- title: Set up your environment weight: 10 +aliases: + - /docs/getting-started/setup --- # Set up your environment diff --git a/docs/content/docs/getting-started/use-built-provider.md b/docs/content/getting-started/use-built-provider.md similarity index 99% rename from docs/content/docs/getting-started/use-built-provider.md rename to docs/content/getting-started/use-built-provider.md index ec15b53ccdba..0b2306abb469 100644 --- a/docs/content/docs/getting-started/use-built-provider.md +++ b/docs/content/getting-started/use-built-provider.md @@ -1,6 +1,8 @@ --- title: Use built provider weight: 40 +aliases: + - /docs/getting-started/use-built-provider --- # Use built provider locally diff --git a/docs/content/docs/how-to/_index.md b/docs/content/how-to/_index.md similarity index 70% rename from docs/content/docs/how-to/_index.md rename to docs/content/how-to/_index.md index bf8166b68bed..835df7d7e00c 100644 --- a/docs/content/docs/how-to/_index.md +++ b/docs/content/how-to/_index.md @@ -1,6 +1,8 @@ --- title: "How To" weight: 20 +aliases: + - /docs/how-to --- # How To diff --git a/docs/content/docs/how-to/add-handwritten-datasource-documentation.md b/docs/content/how-to/add-handwritten-datasource-documentation.md similarity index 99% rename from docs/content/docs/how-to/add-handwritten-datasource-documentation.md rename to 
docs/content/how-to/add-handwritten-datasource-documentation.md index e248030846ea..8f0bb8357fdc 100644 --- a/docs/content/docs/how-to/add-handwritten-datasource-documentation.md +++ b/docs/content/how-to/add-handwritten-datasource-documentation.md @@ -2,6 +2,8 @@ title: "Add documentation for a handwritten data source" summary: "New handwritten datasources require new handwritten documentation to be created." weight: 25 +aliases: + - /docs/how-to/add-handwritten-datasource-documentation --- # Add documentation for a handwritten data source diff --git a/docs/content/docs/how-to/add-handwritten-datasource.md b/docs/content/how-to/add-handwritten-datasource.md similarity index 97% rename from docs/content/docs/how-to/add-handwritten-datasource.md rename to docs/content/how-to/add-handwritten-datasource.md index eb887556af09..9cb661fab79c 100644 --- a/docs/content/docs/how-to/add-handwritten-datasource.md +++ b/docs/content/how-to/add-handwritten-datasource.md @@ -2,6 +2,8 @@ title: "Add a handwritten datasource" summary: "Datasources are like terraform resources except they don't *create* anything." weight: 22 +aliases: + - /docs/how-to/add-handwritten-datasource --- # Add a handwritten datasource diff --git a/docs/content/docs/how-to/add-handwritten-iam.md b/docs/content/how-to/add-handwritten-iam.md similarity index 98% rename from docs/content/docs/how-to/add-handwritten-iam.md rename to docs/content/how-to/add-handwritten-iam.md index 1218174b0301..007142109ffc 100644 --- a/docs/content/docs/how-to/add-handwritten-iam.md +++ b/docs/content/how-to/add-handwritten-iam.md @@ -3,6 +3,8 @@ title: "Add handwritten IAM resources" summary: "Handwritten IAM support is only recommended for resources that cannot be managed using [MMv1](/magic-modules/docs/how-to/add-mmv1-iam)." weight: 23 +aliases: + - /docs/how-to/add-handwritten-iam --- # Add handwritten IAM resources diff --git a/docs/content/docs/how-to/add-handwritten-test.md b/docs/content/how-to/add-handwritten-test.md similarity index 99% rename from docs/content/docs/how-to/add-handwritten-test.md rename to docs/content/how-to/add-handwritten-test.md index b84eda237a1e..cc5d974765e5 100644 --- a/docs/content/docs/how-to/add-handwritten-test.md +++ b/docs/content/how-to/add-handwritten-test.md @@ -3,6 +3,8 @@ title: "Add a handwritten test" summary: "For handwritten resources and generated resources that need to test update, handwritten tests must be added." weight: 21 +aliases: + - /docs/how-to/add-handwritten-test --- diff --git a/docs/content/docs/how-to/add-mmv1-iam.md b/docs/content/how-to/add-mmv1-iam.md similarity index 99% rename from docs/content/docs/how-to/add-mmv1-iam.md rename to docs/content/how-to/add-mmv1-iam.md index b3cd8a2ff078..e23a2220b8bf 100644 --- a/docs/content/docs/how-to/add-mmv1-iam.md +++ b/docs/content/how-to/add-mmv1-iam.md @@ -3,6 +3,8 @@ title: "Add MMv1 IAM resources" summary: "For resources implemented through the MMv1 engine, the majority of configuration for IAM support can be inferred based on the preexisting YAML specification file." 
weight: 11 +aliases: + - /docs/how-to/add-mmv1-iam --- # Add MMv1 IAM resources diff --git a/docs/content/docs/how-to/add-mmv1-resource.md b/docs/content/how-to/add-mmv1-resource.md similarity index 99% rename from docs/content/docs/how-to/add-mmv1-resource.md rename to docs/content/how-to/add-mmv1-resource.md index 894537d4a253..40196ba22b3e 100644 --- a/docs/content/docs/how-to/add-mmv1-resource.md +++ b/docs/content/how-to/add-mmv1-resource.md @@ -5,6 +5,8 @@ configured by editing definition files under the [`mmv1/products`](https://github.com/GoogleCloudPlatform/magic-modules/tree/master/mmv1/products) path." weight: 10 +aliases: + - /docs/how-to/add-mmv1-resource --- # Add an MMv1 resource diff --git a/docs/content/docs/how-to/add-mmv1-test.md b/docs/content/how-to/add-mmv1-test.md similarity index 99% rename from docs/content/docs/how-to/add-mmv1-test.md rename to docs/content/how-to/add-mmv1-test.md index 8a5d25f4c16a..527edf3f731c 100644 --- a/docs/content/docs/how-to/add-mmv1-test.md +++ b/docs/content/how-to/add-mmv1-test.md @@ -2,6 +2,8 @@ title: "Add an MMv1 test" summary: "An example terraform configuration can be used to generate docs and tests for a resource." weight: 12 +aliases: + - /docs/how-to/add-mmv1-test --- # Add an MMv1 test diff --git a/docs/content/docs/how-to/mmv1-resource-documentation.md b/docs/content/how-to/mmv1-resource-documentation.md similarity index 99% rename from docs/content/docs/how-to/mmv1-resource-documentation.md rename to docs/content/how-to/mmv1-resource-documentation.md index ba4bb94914a2..a9b236c26a39 100644 --- a/docs/content/docs/how-to/mmv1-resource-documentation.md +++ b/docs/content/how-to/mmv1-resource-documentation.md @@ -2,6 +2,8 @@ title: "Add and update MMv1 resource documentation" summary: "Generated resources have generated documentation. This page describes the generation process and what YAML inputs are used." weight: 13 +aliases: + - /docs/how-to/mmv1-resource-documentation --- # MMv1 resource documentation diff --git a/docs/content/docs/how-to/types-of-resources.md b/docs/content/how-to/types-of-resources.md similarity index 97% rename from docs/content/docs/how-to/types-of-resources.md rename to docs/content/how-to/types-of-resources.md index 2849242d0a4e..271bddf140a1 100644 --- a/docs/content/docs/how-to/types-of-resources.md +++ b/docs/content/how-to/types-of-resources.md @@ -2,6 +2,8 @@ title: "Types of resources" summary: "Check the header in the Go source to determine what type of resource it is. If there is no header, it is likely handwritten." weight: 1 +aliases: + - /docs/how-to/types-of-resources --- # Types of resources diff --git a/docs/content/docs/how-to/update-handwritten-documentation.md b/docs/content/how-to/update-handwritten-documentation.md similarity index 98% rename from docs/content/docs/how-to/update-handwritten-documentation.md rename to docs/content/how-to/update-handwritten-documentation.md index 1420825954f2..2568e5a0aef5 100644 --- a/docs/content/docs/how-to/update-handwritten-documentation.md +++ b/docs/content/how-to/update-handwritten-documentation.md @@ -2,6 +2,8 @@ title: "Update handwritten provider documentation" summary: "Handwritten resources and datasources have handwritten documentation that needs to be updated in PRs."
weight: 24 +aliases: + - /docs/how-to/update-handwritten-documentation --- # Update handwritten provider documentation (for handwritten resource or datasource) diff --git a/docs/content/docs/how-to/update-handwritten-resource.md b/docs/content/how-to/update-handwritten-resource.md similarity index 99% rename from docs/content/docs/how-to/update-handwritten-resource.md rename to docs/content/how-to/update-handwritten-resource.md index d69e542d83ba..fb4d4743667e 100644 --- a/docs/content/docs/how-to/update-handwritten-resource.md +++ b/docs/content/how-to/update-handwritten-resource.md @@ -2,6 +2,8 @@ title: "Update a handwritten resource" summary: "The Google providers for Terraform have a large number of handwritten go files, primarily for resources written before Magic Modules was used with them. Most handwritten files are expected to stay handwritten indefinitely, although conversion to a generator may be possible for a limited subset of them." weight: 20 +aliases: + - /docs/how-to/update-handwritten-resource --- # Update a handwritten resource diff --git a/docs/content/docs/reference/_index.md b/docs/content/reference/_index.md similarity index 69% rename from docs/content/docs/reference/_index.md rename to docs/content/reference/_index.md index fe5c90d5711d..12f01a6bd041 100644 --- a/docs/content/docs/reference/_index.md +++ b/docs/content/reference/_index.md @@ -1,5 +1,4 @@ --- title: "Reference" weight: 30 -isSection: true --- \ No newline at end of file diff --git a/docs/content/docs/reference/resource-name-yaml-resource.md b/docs/content/reference/resource-name-yaml-resource.md similarity index 100% rename from docs/content/docs/reference/resource-name-yaml-resource.md rename to docs/content/reference/resource-name-yaml-resource.md diff --git a/docs/layouts/partials/docs/inject/menu-before.html b/docs/layouts/partials/docs/inject/menu-before.html new file mode 100644 index 000000000000..6041b619cb44 --- /dev/null +++ b/docs/layouts/partials/docs/inject/menu-before.html @@ -0,0 +1,10 @@ +{{ with .Site.GetPage "/" }} + {{ $current := eq $ .Page }} + +{{ end }} \ No newline at end of file diff --git a/mmv1/compile/core.rb b/mmv1/compile/core.rb index 3f40c4903318..e90cc9c73462 100644 --- a/mmv1/compile/core.rb +++ b/mmv1/compile/core.rb @@ -211,6 +211,11 @@ def compile_string(ctx, source) end end + def hashicorp_copyright_header(lang, pwd) + Thread.current[:autogen] = true + comment_block(compile("#{pwd}/templates/hashicorp_copyright_header.erb").split("\n"), lang) + end + def autogen_notice(lang, pwd) Thread.current[:autogen] = true comment_block(compile("#{pwd}/templates/autogen_notice.erb").split("\n"), lang) diff --git a/mmv1/products/accesscontextmanager/AccessLevelCondition.yaml b/mmv1/products/accesscontextmanager/AccessLevelCondition.yaml index 72d37ccdbe90..b9b95de73366 100644 --- a/mmv1/products/accesscontextmanager/AccessLevelCondition.yaml +++ b/mmv1/products/accesscontextmanager/AccessLevelCondition.yaml @@ -58,7 +58,7 @@ docs: !ruby/object:Provider::Terraform::Docs Your account must have the `serviceusage.services.use` permission on the `billing_project` you defined. 
async: !ruby/object:Provider::Terraform::PollAsync - check_response_func_existence: PollCheckForExistence + check_response_func_existence: transport_tpg.PollCheckForExistence actions: ['create'] autogen_async: true exclude_validator: true diff --git a/mmv1/products/accesscontextmanager/EgressPolicy.yaml b/mmv1/products/accesscontextmanager/EgressPolicy.yaml index b26535edbbde..2c5389d903db 100644 --- a/mmv1/products/accesscontextmanager/EgressPolicy.yaml +++ b/mmv1/products/accesscontextmanager/EgressPolicy.yaml @@ -31,13 +31,7 @@ nested_query: !ruby/object:Api::Resource::NestedQuery references: !ruby/object:Api::Resource::ReferenceLinks api: 'https://cloud.google.com/access-context-manager/docs/reference/rest/v1/accessPolicies.servicePerimeters#egresspolicy' description: | - EgressPolicies match requests based on egressFrom and egressTo stanzas. - For an EgressPolicy to match, both egressFrom and egressTo stanzas must be matched. - If an EgressPolicy matches a request, the request is allowed to span the ServicePerimeter - boundary. For example, an EgressPolicy can be used to allow VMs on networks - within the ServicePerimeter to access a defined set of projects outside the - perimeter in certain contexts (e.g. to read data from a Cloud Storage bucket - or query against a BigQuery dataset). + This resource has been deprecated, please refer to ServicePerimeterEgressPolicy. autogen_async: true exclude_validator: true # Skipping the sweeper due to the non-standard base_url and because this is fine-grained under ServicePerimeter/IngressPolicy diff --git a/mmv1/products/accesscontextmanager/IngressPolicy.yaml b/mmv1/products/accesscontextmanager/IngressPolicy.yaml index a67a40ca30b8..901b11b4632f 100644 --- a/mmv1/products/accesscontextmanager/IngressPolicy.yaml +++ b/mmv1/products/accesscontextmanager/IngressPolicy.yaml @@ -31,14 +31,7 @@ nested_query: !ruby/object:Api::Resource::NestedQuery references: !ruby/object:Api::Resource::ReferenceLinks api: 'https://cloud.google.com/access-context-manager/docs/reference/rest/v1/accessPolicies.servicePerimeters#ingresspolicy' description: | - IngressPolicies match requests based on ingressFrom and ingressTo stanzas. For an ingress policy to match, - both the ingressFrom and ingressTo stanzas must be matched. If an IngressPolicy matches a request, - the request is allowed through the perimeter boundary from outside the perimeter. - For example, access from the internet can be allowed either based on an AccessLevel or, - for traffic hosted on Google Cloud, the project of the source network. - For access from private networks, using the project of the hosting network is required. - Individual ingress policies can be limited by restricting which services and/ - or actions they match using the ingressTo field. + This resource has been deprecated, please refer to ServicePerimeterIngressPolicy. autogen_async: true exclude_validator: true # Skipping the sweeper due to the non-standard base_url and because this is fine-grained under ServicePerimeter/IngressPolicy diff --git a/mmv1/products/accesscontextmanager/ServicePerimeterEgressPolicy.yaml b/mmv1/products/accesscontextmanager/ServicePerimeterEgressPolicy.yaml new file mode 100644 index 000000000000..337d6ca3932e --- /dev/null +++ b/mmv1/products/accesscontextmanager/ServicePerimeterEgressPolicy.yaml @@ -0,0 +1,136 @@ +# Copyright 2018 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +--- !ruby/object:Api::Resource +name: 'ServicePerimeterEgressPolicy' +create_url: '{{perimeter}}' +base_url: '' +self_link: '{{perimeter}}' +create_verb: :PATCH +delete_verb: :PATCH +update_mask: true +identity: + - egressFrom + - egressTo +nested_query: !ruby/object:Api::Resource::NestedQuery + modify_by_patch: true + is_list_of_ids: false + keys: + - status + - egressPolicies +references: !ruby/object:Api::Resource::ReferenceLinks + api: 'https://cloud.google.com/access-context-manager/docs/reference/rest/v1/accessPolicies.servicePerimeters#egresspolicy' +description: | + EgressPolicies match requests based on egressFrom and egressTo stanzas. + For an EgressPolicy to match, both egressFrom and egressTo stanzas must be matched. + If an EgressPolicy matches a request, the request is allowed to span the ServicePerimeter + boundary. For example, an EgressPolicy can be used to allow VMs on networks + within the ServicePerimeter to access a defined set of projects outside the + perimeter in certain contexts (e.g. to read data from a Cloud Storage bucket + or query against a BigQuery dataset). +autogen_async: true +exclude_validator: true +# Skipping the sweeper due to the non-standard base_url and because this is fine-grained under ServicePerimeter +skip_sweeper: true +id_format: '{{perimeter}}' +import_format: ['{{perimeter}}'] +mutex: '{{perimeter}}' +custom_code: !ruby/object:Provider::Terraform::CustomCode + custom_import: templates/terraform/custom_import/access_context_manager_service_perimeter_ingress_policy.go.erb +parameters: + - !ruby/object:Api::Type::ResourceRef + name: 'perimeter' + resource: 'ServicePerimeter' + imports: 'name' + description: | + The name of the Service Perimeter to add this resource to. + required: true + immutable: true + url_param_only: true +properties: + - !ruby/object:Api::Type::NestedObject + name: 'egressFrom' + description: | + Defines conditions on the source of a request causing this `EgressPolicy` to apply. + properties: + - !ruby/object:Api::Type::Enum + name: 'identityType' + description: | + Specifies the type of identities that are allowed access to outside the + perimeter. If left unspecified, then members of `identities` field will + be allowed access. + values: + - :ANY_IDENTITY + - :ANY_USER_ACCOUNT + - :ANY_SERVICE_ACCOUNT + - !ruby/object:Api::Type::Array + name: 'identities' + description: | + A list of identities that are allowed access through this `EgressPolicy`. + Should be in the format of email address. The email address should + represent individual user or service account only. + item_type: Api::Type::String + - !ruby/object:Api::Type::NestedObject + name: 'egressTo' + description: | + Defines the conditions on the `ApiOperation` and destination resources that + cause this `EgressPolicy` to apply. + properties: + - !ruby/object:Api::Type::Array + name: 'resources' + item_type: Api::Type::String + description: | + A list of resources, currently only projects in the form + `projects/`, that match this to stanza. A request matches + if it contains a resource in this list. 
If * is specified for resources, + then this `EgressTo` rule will authorize access to all resources outside + the perimeter. + - !ruby/object:Api::Type::Array + name: 'externalResources' + item_type: Api::Type::String + description: | + A list of external resources that are allowed to be accessed. A request + matches if it contains an external resource in this list (Example: + s3://bucket/path). Currently '*' is not allowed. + - !ruby/object:Api::Type::Array + name: 'operations' + description: | + A list of `ApiOperations` that this egress rule applies to. A request matches + if it contains an operation/service in this list. + item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::String + name: 'serviceName' + description: | + The name of the API whose methods or permissions the `IngressPolicy` or + `EgressPolicy` want to allow. A single `ApiOperation` with serviceName + field set to `*` will allow all methods AND permissions for all services. + - !ruby/object:Api::Type::Array + name: 'methodSelectors' + description: | + API methods or permissions to allow. Method or permission must belong + to the service specified by `serviceName` field. A single MethodSelector + entry with `*` specified for the `method` field will allow all methods + AND permissions for the service specified in `serviceName`. + item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::String + name: 'method' + description: | + Value for `method` should be a valid method name for the corresponding + `serviceName` in `ApiOperation`. If `*` used as value for method, + then ALL methods and permissions are allowed. + - !ruby/object:Api::Type::String + name: 'permission' + description: | + Value for permission should be a valid Cloud IAM permission for the + corresponding `serviceName` in `ApiOperation`. diff --git a/mmv1/products/accesscontextmanager/ServicePerimeterIngressPolicy.yaml b/mmv1/products/accesscontextmanager/ServicePerimeterIngressPolicy.yaml new file mode 100644 index 000000000000..f292102fbdf4 --- /dev/null +++ b/mmv1/products/accesscontextmanager/ServicePerimeterIngressPolicy.yaml @@ -0,0 +1,160 @@ +# Copyright 2018 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +--- !ruby/object:Api::Resource +name: 'ServicePerimeterIngressPolicy' +create_url: '{{perimeter}}' +base_url: '' +self_link: '{{perimeter}}' +create_verb: :PATCH +delete_verb: :PATCH +update_mask: true +identity: + - ingressFrom + - ingressTo +nested_query: !ruby/object:Api::Resource::NestedQuery + modify_by_patch: true + is_list_of_ids: false + keys: + - status + - ingressPolicies +references: !ruby/object:Api::Resource::ReferenceLinks + api: 'https://cloud.google.com/access-context-manager/docs/reference/rest/v1/accessPolicies.servicePerimeters#ingresspolicy' +description: | + IngressPolicies match requests based on ingressFrom and ingressTo stanzas. For an ingress policy to match, + both the ingressFrom and ingressTo stanzas must be matched. 
If an IngressPolicy matches a request, + the request is allowed through the perimeter boundary from outside the perimeter. + For example, access from the internet can be allowed either based on an AccessLevel or, + for traffic hosted on Google Cloud, the project of the source network. + For access from private networks, using the project of the hosting network is required. + Individual ingress policies can be limited by restricting which services and/ + or actions they match using the ingressTo field. +autogen_async: true +exclude_validator: true +# Skipping the sweeper due to the non-standard base_url and because this is fine-grained under ServicePerimeter +skip_sweeper: true +id_format: '{{perimeter}}' +import_format: ['{{perimeter}}'] +mutex: '{{perimeter}}' +custom_code: !ruby/object:Provider::Terraform::CustomCode + custom_import: templates/terraform/custom_import/access_context_manager_service_perimeter_ingress_policy.go.erb +parameters: + - !ruby/object:Api::Type::ResourceRef + name: 'perimeter' + resource: 'ServicePerimeter' + imports: 'name' + description: | + The name of the Service Perimeter to add this resource to. + required: true + immutable: true + url_param_only: true +properties: + - !ruby/object:Api::Type::NestedObject + name: 'ingressFrom' + description: | + Defines the conditions on the source of a request causing this `IngressPolicy` + to apply. + properties: + - !ruby/object:Api::Type::Enum + name: 'identityType' + description: | + Specifies the type of identities that are allowed access from outside the + perimeter. If left unspecified, then members of `identities` field will be + allowed access. + values: + - :ANY_IDENTITY + - :ANY_USER_ACCOUNT + - :ANY_SERVICE_ACCOUNT + - !ruby/object:Api::Type::Array + name: 'identities' + item_type: Api::Type::String + description: | + A list of identities that are allowed access through this ingress policy. + Should be in the format of email address. The email address should represent + individual user or service account only. + - !ruby/object:Api::Type::Array + name: 'sources' + description: | + Sources that this `IngressPolicy` authorizes access from. + item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::String + name: 'accessLevel' + description: | + An `AccessLevel` resource name that allow resources within the + `ServicePerimeters` to be accessed from the internet. `AccessLevels` listed + must be in the same policy as this `ServicePerimeter`. Referencing a nonexistent + `AccessLevel` will cause an error. If no `AccessLevel` names are listed, + resources within the perimeter can only be accessed via Google Cloud calls + with request origins within the perimeter. + Example `accessPolicies/MY_POLICY/accessLevels/MY_LEVEL.` + If * is specified, then all IngressSources will be allowed. + - !ruby/object:Api::Type::String + name: 'resource' + description: | + A Google Cloud resource that is allowed to ingress the perimeter. + Requests from these resources will be allowed to access perimeter data. + Currently only projects are allowed. Format `projects/{project_number}` + The project may be in any Google Cloud organization, not just the + organization that the perimeter is defined in. `*` is not allowed, the case + of allowing all Google Cloud resources only is not supported. + - !ruby/object:Api::Type::NestedObject + name: 'ingressTo' + description: | + Defines the conditions on the `ApiOperation` and request destination that cause + this `IngressPolicy` to apply. 
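For orientation, a user-facing configuration for the two fine-grained resources these new files generate might look roughly like the sketch below. The resource types and argument names are not stated in this PR; they are assumed to follow the provider's usual snake_case mapping of the YAML fields above, and the perimeter, access level, and service names are placeholders.

```hcl
# Illustrative sketch only: names and arguments assume the standard MMv1
# snake_case mapping of the YAML fields above; they are not copied from this PR.
resource "google_access_context_manager_service_perimeter_egress_policy" "egress" {
  # Assumed pre-existing service perimeter.
  perimeter = "accessPolicies/123456789/servicePerimeters/my_perimeter"

  egress_from {
    identity_type = "ANY_IDENTITY"
  }

  egress_to {
    resources = ["*"]

    operations {
      service_name = "bigquery.googleapis.com"

      method_selectors {
        method = "*"
      }
    }
  }
}

resource "google_access_context_manager_service_perimeter_ingress_policy" "ingress" {
  perimeter = "accessPolicies/123456789/servicePerimeters/my_perimeter"

  ingress_from {
    identity_type = "ANY_IDENTITY"

    sources {
      # Assumed pre-existing access level.
      access_level = "accessPolicies/123456789/accessLevels/my_level"
    }
  }

  ingress_to {
    resources = ["*"]

    operations {
      service_name = "storage.googleapis.com"

      method_selectors {
        method = "google.storage.objects.get"
      }
    }
  }
}
```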
+ properties: + - !ruby/object:Api::Type::Array + name: 'resources' + item_type: Api::Type::String + description: | + A list of resources, currently only projects in the form + `projects/`, protected by this `ServicePerimeter` + that are allowed to be accessed by sources defined in the + corresponding `IngressFrom`. A request matches if it contains + a resource in this list. If `*` is specified for resources, + then this `IngressTo` rule will authorize access to all + resources inside the perimeter, provided that the request + also matches the `operations` field. + - !ruby/object:Api::Type::Array + name: 'operations' + description: | + A list of `ApiOperations` the sources specified in corresponding `IngressFrom` + are allowed to perform in this `ServicePerimeter`. + item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::String + name: 'serviceName' + description: | + The name of the API whose methods or permissions the `IngressPolicy` or + `EgressPolicy` want to allow. A single `ApiOperation` with `serviceName` + field set to `*` will allow all methods AND permissions for all services. + - !ruby/object:Api::Type::Array + name: 'methodSelectors' + description: | + API methods or permissions to allow. Method or permission must belong to + the service specified by serviceName field. A single `MethodSelector` entry + with `*` specified for the method field will allow all methods AND + permissions for the service specified in `serviceName`. + item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::String + name: 'method' + description: | + Value for method should be a valid method name for the corresponding + serviceName in `ApiOperation`. If `*` used as value for `method`, then + ALL methods and permissions are allowed. + - !ruby/object:Api::Type::String + name: 'permission' + description: | + Value for permission should be a valid Cloud IAM permission for the + corresponding `serviceName` in `ApiOperation`. diff --git a/mmv1/products/appengine/FirewallRule.yaml b/mmv1/products/appengine/FirewallRule.yaml index 24bd7627e3a8..ab7851a668fe 100644 --- a/mmv1/products/appengine/FirewallRule.yaml +++ b/mmv1/products/appengine/FirewallRule.yaml @@ -28,7 +28,7 @@ collection_url_key: 'ingressRules' import_format: ['apps/{{project}}/firewall/ingressRules/{{priority}}'] mutex: 'apps/{{project}}' async: !ruby/object:Provider::Terraform::PollAsync - check_response_func_existence: PollCheckForExistence + check_response_func_existence: transport_tpg.PollCheckForExistence actions: ['create'] skip_sweeper: true examples: diff --git a/mmv1/products/artifactregistry/Repository.yaml b/mmv1/products/artifactregistry/Repository.yaml index ae2907dfbfc5..7f02acfe3512 100644 --- a/mmv1/products/artifactregistry/Repository.yaml +++ b/mmv1/products/artifactregistry/Repository.yaml @@ -119,7 +119,7 @@ properties: [alpha user group](https://cloud.google.com/artifact-registry/docs/supported-formats#alpha-access). 
required: true immutable: true - diff_suppress_func: 'compareCaseInsensitive' + diff_suppress_func: 'tpgresource.CompareCaseInsensitive' - !ruby/object:Api::Type::String name: description description: |- diff --git a/mmv1/products/bigquery/Job.yaml b/mmv1/products/bigquery/Job.yaml index a4b8687dcaf1..cf12ae9fdf3d 100644 --- a/mmv1/products/bigquery/Job.yaml +++ b/mmv1/products/bigquery/Job.yaml @@ -34,7 +34,7 @@ import_format: skip_delete: true id_format: projects/{{project}}/jobs/{{job_id}} async: !ruby/object:Provider::Terraform::PollAsync - check_response_func_existence: PollCheckForExistence + check_response_func_existence: transport_tpg.PollCheckForExistence actions: ['create'] examples: - !ruby/object:Provider::Terraform::Examples diff --git a/mmv1/products/bigqueryanalyticshub/Listing.yaml b/mmv1/products/bigqueryanalyticshub/Listing.yaml index 359facf908f1..a4be04d1c723 100644 --- a/mmv1/products/bigqueryanalyticshub/Listing.yaml +++ b/mmv1/products/bigqueryanalyticshub/Listing.yaml @@ -37,6 +37,8 @@ import_format: [ 'projects/{{project}}/locations/{{location}}/dataExchanges/{{data_exchange_id}}/listings/{{listing_id}}', ] +# Skipping the sweeper due to the non-standard base_url +skip_sweeper: true examples: - !ruby/object:Provider::Terraform::Examples name: 'bigquery_analyticshub_listing_basic' diff --git a/mmv1/products/binaryauthorization/Policy.yaml b/mmv1/products/binaryauthorization/Policy.yaml index b9a2073cb685..1c565b622f09 100644 --- a/mmv1/products/binaryauthorization/Policy.yaml +++ b/mmv1/products/binaryauthorization/Policy.yaml @@ -141,7 +141,7 @@ properties: } at := copy["require_attestations_by"].(*schema.Set) if at != nil { - t := convertAndMapStringArr(at.List(), tpgresource.GetResourceNameFromSelfLink) + t := tpgresource.ConvertAndMapStringArr(at.List(), tpgresource.GetResourceNameFromSelfLink) copy["require_attestations_by"] = schema.NewSet(tpgresource.SelfLinkNameHash, tpgresource.ConvertStringArrToInterface(t)) } var buf bytes.Buffer diff --git a/mmv1/products/certificatemanager/CertificateIssuanceConfig.yaml b/mmv1/products/certificatemanager/CertificateIssuanceConfig.yaml new file mode 100644 index 000000000000..9d32abc0849f --- /dev/null +++ b/mmv1/products/certificatemanager/CertificateIssuanceConfig.yaml @@ -0,0 +1,132 @@ +# Copyright 2023 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +--- !ruby/object:Api::Resource +name: 'CertificateIssuanceConfig' +base_url: 'projects/{{project}}/locations/{{location}}/certificateIssuanceConfigs' +create_url: 'projects/{{project}}/locations/{{location}}/certificateIssuanceConfigs?certificateIssuanceConfigId={{name}}' +references: !ruby/object:Api::Resource::ReferenceLinks + guides: + 'Manage certificate issuance configs': 'https://cloud.google.com/certificate-manager/docs/issuance-configs' + api: 'https://cloud.google.com/certificate-manager/docs/reference/certificate-manager/rest/v1/projects.locations.certificateIssuanceConfigs' +immutable: true +description: | + A CertificateIssuanceConfig defines the CA pool, key algorithm, certificate lifetime, and rotation window used when Certificate Manager issues managed certificates. +async: !ruby/object:Api::OpAsync + operation: !ruby/object:Api::OpAsync::Operation + path: 'name' + base_url: '{{op_id}}' + wait_ms: 1000 + result: !ruby/object:Api::OpAsync::Result + path: 'response' + status: !ruby/object:Api::OpAsync::Status + path: 'done' + complete: true + allowed: + - true + - false + error: !ruby/object:Api::OpAsync::Error + path: 'error' + message: 'message' +docs: !ruby/object:Provider::Terraform::Docs +autogen_async: true +import_format: + [ + 'projects/{{project}}/locations/{{location}}/certificateIssuanceConfigs/{{name}}', + ] +examples: + - !ruby/object:Provider::Terraform::Examples + name: 'certificate_manager_certificate_issuance_config' + primary_resource_id: 'default' + vars: + ca_name: 'my-ca' + pool_name: 'my-ca-pool' + +parameters: + - !ruby/object:Api::Type::String + name: 'name' + required: true + description: | + A user-defined name of the certificate issuance config. + CertificateIssuanceConfig names must be unique globally. + url_param_only: true + - !ruby/object:Api::Type::String + name: 'location' + description: | + The Certificate Manager location. If not specified, "global" is used. + default_value: global + url_param_only: true +properties: + - !ruby/object:Api::Type::String + name: 'description' + description: | + One or more paragraphs of text description of a CertificateIssuanceConfig. + - !ruby/object:Api::Type::Integer + name: 'rotationWindowPercentage' + description: | + It specifies the percentage of elapsed time of the certificate lifetime to wait before renewing the certificate. + Must be a number between 1-99, inclusive. + You must set the rotation window percentage in relation to the certificate lifetime so that certificate renewal occurs at least 7 days after + the certificate has been issued and at least 7 days before it expires. + required: true + - !ruby/object:Api::Type::Enum + name: keyAlgorithm + description: | + Key algorithm to use when generating the private key. + values: + - :RSA_2048 + - :ECDSA_P256 + required: true + - !ruby/object:Api::Type::String + name: 'lifetime' + description: | + Lifetime of issued certificates. A duration in seconds with up to nine fractional digits, ending with 's'. + Example: "1814400s". Valid values are from 21 days (1814400s) to 30 days (2592000s) + required: true + - !ruby/object:Api::Type::String + name: 'createTime' + description: | + The creation timestamp of a CertificateIssuanceConfig. Timestamp is in RFC3339 UTC "Zulu" format, + accurate to nanoseconds with up to nine fractional digits. + Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". + output: true + - !ruby/object:Api::Type::Time + name: 'updateTime' + description: | + The last update timestamp of a CertificateIssuanceConfig.
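A minimal sketch of how the resource generated from this file might be used, assuming the standard snake_case mapping of the fields above; the resource type name and the CA pool path are illustrative placeholders, not taken from this PR.

```hcl
# Illustrative sketch only; field values stay within the limits documented above
# (lifetime 21-30 days, rotation window 1-99 percent).
resource "google_certificate_manager_certificate_issuance_config" "default" {
  name        = "issuance-config"
  description = "sample issuance config"
  location    = "global"

  rotation_window_percentage = 34
  key_algorithm              = "ECDSA_P256"
  lifetime                   = "1814400s"

  certificate_authority_config {
    certificate_authority_service_config {
      # Assumed pre-existing CA Service pool, given as the relative resource path.
      ca_pool = "projects/my-project/locations/us-central1/caPools/my-pool"
    }
  }
}
```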
Timestamp is in RFC3339 UTC "Zulu" format, + accurate to nanoseconds with up to nine fractional digits. + Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". + output: true + - !ruby/object:Api::Type::KeyValuePairs + name: 'labels' + description: | + 'Set of label tags associated with the CertificateIssuanceConfig resource. + An object containing a list of "key": value pairs. Example: { "name": "wrench", "count": "3" }. + - !ruby/object:Api::Type::NestedObject + name: 'certificateAuthorityConfig' + description: | + The CA that issues the workload certificate. It includes the CA address, type, authentication to CA service, etc. + required: true + properties: + - !ruby/object:Api::Type::NestedObject + name: 'certificateAuthorityServiceConfig' + description: Defines a CertificateAuthorityServiceConfig. + properties: + - !ruby/object:Api::Type::String + name: 'caPool' + required: true + description: | + A CA pool resource used to issue a certificate. + The CA pool string has a relative resource path following the form + "projects/{project}/locations/{location}/caPools/{caPool}". + diff_suppress_func: 'tpgresource.CompareResourceNames' diff --git a/mmv1/products/certificatemanager/DnsAuthorization.yaml b/mmv1/products/certificatemanager/DnsAuthorization.yaml index 9f9c8d7e1d78..fa6e636e7691 100644 --- a/mmv1/products/certificatemanager/DnsAuthorization.yaml +++ b/mmv1/products/certificatemanager/DnsAuthorization.yaml @@ -47,6 +47,7 @@ examples: vars: dns_auth_name: 'dns-auth' zone_name: 'my-zone' + subdomain: 'subdomain' parameters: - !ruby/object:Api::Type::String name: 'name' diff --git a/mmv1/products/cloudbuild/BitbucketServerConfig.yaml b/mmv1/products/cloudbuild/BitbucketServerConfig.yaml index 5ecd00674e3b..e93c0022c803 100644 --- a/mmv1/products/cloudbuild/BitbucketServerConfig.yaml +++ b/mmv1/products/cloudbuild/BitbucketServerConfig.yaml @@ -49,14 +49,19 @@ examples: - !ruby/object:Provider::Terraform::Examples name: 'cloudbuild_bitbucket_server_config' primary_resource_id: 'bbs-config' + vars: + config_id: 'bbs-config' - !ruby/object:Provider::Terraform::Examples name: 'cloudbuild_bitbucket_server_config_repositories' primary_resource_id: 'bbs-config-with-repos' + vars: + config_id: 'bbs-config' skip_test: true - !ruby/object:Provider::Terraform::Examples name: 'cloudbuild_bitbucket_server_config_peered_network' primary_resource_id: 'bbs-config-with-peered-network' vars: + config_id: 'bbs-config' network_name: 'vpc-network' test_vars_overrides: network_name: 'BootstrapSharedTestNetwork(t, "peered-network")' diff --git a/mmv1/products/cloudbuild/Trigger.yaml b/mmv1/products/cloudbuild/Trigger.yaml index 4f0a0307e5a0..daed97d6ef92 100644 --- a/mmv1/products/cloudbuild/Trigger.yaml +++ b/mmv1/products/cloudbuild/Trigger.yaml @@ -93,6 +93,8 @@ examples: primary_resource_id: 'allow-exit-codes-trigger' - !ruby/object:Provider::Terraform::Examples name: "cloudbuild_trigger_pubsub_with_repo" + # TODO: https://github.com/hashicorp/terraform-provider-google/issues/14390 + skip_test: true min_version: beta primary_resource_id: "pubsub-with-repo-trigger" vars: diff --git a/mmv1/products/cloudidentity/Group.yaml b/mmv1/products/cloudidentity/Group.yaml index 71e9068437e5..a27a53223689 100644 --- a/mmv1/products/cloudidentity/Group.yaml +++ b/mmv1/products/cloudidentity/Group.yaml @@ -25,8 +25,8 @@ references: !ruby/object:Api::Resource::ReferenceLinks 'Official Documentation': 'https://cloud.google.com/identity/docs/how-to/setup' api: 
'https://cloud.google.com/identity/docs/reference/rest/v1beta1/groups' async: !ruby/object:Provider::Terraform::PollAsync - check_response_func_existence: PollCheckForExistenceWith403 - check_response_func_absence: PollCheckForAbsenceWith403 + check_response_func_existence: transport_tpg.PollCheckForExistenceWith403 + check_response_func_absence: transport_tpg.PollCheckForAbsenceWith403 target_occurrences: 10 actions: ['create', 'update', 'delete'] docs: !ruby/object:Provider::Terraform::Docs diff --git a/mmv1/products/compute/Disk.yaml b/mmv1/products/compute/Disk.yaml index db196e0d787c..e980e8d10faa 100644 --- a/mmv1/products/compute/Disk.yaml +++ b/mmv1/products/compute/Disk.yaml @@ -62,6 +62,7 @@ custom_code: !ruby/object:Provider::Terraform::CustomCode encoder: templates/terraform/encoders/disk.erb decoder: templates/terraform/decoders/disk.erb resource_definition: templates/terraform/resource_definition/disk.erb + update_encoder: templates/terraform/update_encoder/hyper_disk.go.erb examples: - !ruby/object:Provider::Terraform::Examples name: 'disk_basic' @@ -429,8 +430,11 @@ properties: name: 'provisionedIops' description: | Indicates how many IOPS must be provisioned for the disk. + Note: Update currently only supported by hyperdisk skus, allowing for an update of IOPS every 4 hours required: false default_from_api: true + update_verb: :PATCH + update_url: 'projects/{{project}}/zones/{{zone}}/disks/{{name}}?paths=provisionedIops' - !ruby/object:Api::Type::NestedObject name: 'asyncPrimaryDisk' min_version: 'beta' diff --git a/mmv1/products/compute/Firewall.yaml b/mmv1/products/compute/Firewall.yaml index c3a8f548d5fa..13f862ca273f 100644 --- a/mmv1/products/compute/Firewall.yaml +++ b/mmv1/products/compute/Firewall.yaml @@ -105,7 +105,7 @@ properties: icmp, esp, ah, sctp, ipip, all), or the IP protocol number. api_name: 'IPProtocol' required: true - diff_suppress_func: 'compareCaseInsensitive' + diff_suppress_func: 'tpgresource.CompareCaseInsensitive' - !ruby/object:Api::Type::Array item_type: Api::Type::String name: 'ports' @@ -144,7 +144,7 @@ properties: one of the following well known protocol strings (tcp, udp, icmp, esp, ah, sctp, ipip, all), or the IP protocol number. api_name: 'IPProtocol' - diff_suppress_func: 'compareCaseInsensitive' + diff_suppress_func: 'tpgresource.CompareCaseInsensitive' required: true - !ruby/object:Api::Type::Array item_type: Api::Type::String diff --git a/mmv1/products/compute/ForwardingRule.yaml b/mmv1/products/compute/ForwardingRule.yaml index c70701be2be2..1564567b8d43 100644 --- a/mmv1/products/compute/ForwardingRule.yaml +++ b/mmv1/products/compute/ForwardingRule.yaml @@ -162,7 +162,6 @@ examples: - 'ip_address' - !ruby/object:Provider::Terraform::Examples name: 'forwarding_rule_vpc_psc' - min_version: 'beta' primary_resource_id: 'default' vars: forwarding_rule_name: 'psc-endpoint' @@ -588,5 +587,4 @@ properties: description: This is used in PSC consumer ForwardingRule to control whether the PSC endpoint can be accessed from another region. - min_version: beta send_empty_value: true diff --git a/mmv1/products/compute/MachineImage.yaml b/mmv1/products/compute/MachineImage.yaml index 9bc1ae8c916e..e4fa8a754657 100644 --- a/mmv1/products/compute/MachineImage.yaml +++ b/mmv1/products/compute/MachineImage.yaml @@ -124,7 +124,7 @@ properties: name: 'kmsKeyName' description: | The name of the encryption key that is stored in Google Cloud KMS. 
- diff_suppress_func: compareCryptoKeyVersions + diff_suppress_func: tpgresource.CompareCryptoKeyVersions - !ruby/object:Api::Type::String name: 'kmsKeyServiceAccount' description: | diff --git a/mmv1/products/dataform/Repository.yaml b/mmv1/products/dataform/Repository.yaml index 57f193d1e743..03b2ba514217 100644 --- a/mmv1/products/dataform/Repository.yaml +++ b/mmv1/products/dataform/Repository.yaml @@ -74,3 +74,16 @@ properties: output: true description: | Indicates the status of the Git access token. https://cloud.google.com/dataform/reference/rest/v1beta1/projects.locations.repositories#TokenStatus + - !ruby/object:Api::Type::NestedObject + name: 'workspaceCompilationOverrides' + description: Optional. If set, fields of workspaceCompilationOverrides override the default compilation settings that are specified in dataform.json when creating workspace-scoped compilation results. + properties: + - !ruby/object:Api::Type::String + name: defaultDatabase + description: Optional. The default database (Google Cloud project ID). + - !ruby/object:Api::Type::String + name: 'schemaSuffix' + description: Optional. The suffix that should be appended to all schema (BigQuery dataset ID) names. + - !ruby/object:Api::Type::String + name: 'tablePrefix' + description: Optional. The prefix that should be prepended to all table names. \ No newline at end of file diff --git a/mmv1/products/dataplex/Datascan.yaml b/mmv1/products/dataplex/Datascan.yaml new file mode 100644 index 000000000000..e788f3f39dbd --- /dev/null +++ b/mmv1/products/dataplex/Datascan.yaml @@ -0,0 +1,796 @@ +# Copyright 2023 Google Inc. +# Licensed under the Apache License, Version 2.0 (the License); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- !ruby/object:Api::Resource +name: 'Datascan' +base_url: 'projects/{{project}}/locations/{{location}}/dataScans' +self_link: 'projects/{{project}}/locations/{{location}}/dataScans/{{data_scan_id}}' +create_url: 'projects/{{project}}/locations/{{location}}/dataScans?dataScanId={{data_scan_id}}' +update_verb: :PATCH +update_mask: true +import_format: + [ + 'projects/{{project}}/locations/{{location}}/dataScans/{{data_scan_id}}', + '{{data_scan_id}}', + ] +async: !ruby/object:Api::OpAsync + operation: !ruby/object:Api::OpAsync::Operation + path: 'name' + base_url: '{{op_id}}' + wait_ms: 1000 + timeouts: !ruby/object:Api::Timeouts + insert_minutes: 5 + update_minutes: 5 + delete_minutes: 5 + result: !ruby/object:Api::OpAsync::Result + path: 'response' + status: !ruby/object:Api::OpAsync::Status + path: 'done' + complete: true + allowed: + - true + - false + error: !ruby/object:Api::OpAsync::Error + path: 'error' + message: 'message' +autogen_async: true +read_query_params: '?view=FULL' +description: | + Represents a user-visible job which provides the insights for the related data source. 
+iam_policy: !ruby/object:Api::Resource::IamPolicy + method_name_separator: ':' + parent_resource_attribute: 'data_scan_id' + fetch_iam_policy_verb: :GET + import_format: + [ + 'projects/{{project}}/locations/{{location}}/dataScans/{{data_scan_id}}', + '{{data_scan_id}}', + ] +references: !ruby/object:Api::Resource::ReferenceLinks + guides: + 'Official Documentation': 'https://cloud.google.com/dataplex/docs' + api: 'https://cloud.google.com/dataplex/docs/reference/rest' +examples: + - !ruby/object:Provider::Terraform::Examples + name: 'dataplex_datascan_basic_profile' + primary_resource_id: 'basic_profile' + primary_resource_name: + 'fmt.Sprintf("tf-test-datascan%s", context["random_suffix"])' + test_env_vars: + project_name: :PROJECT_NAME + - !ruby/object:Provider::Terraform::Examples + name: 'dataplex_datascan_full_profile' + primary_resource_id: 'full_profile' + primary_resource_name: + 'fmt.Sprintf("tf-test-datascan%s", context["random_suffix"])' + test_env_vars: + project_name: :PROJECT_NAME + - !ruby/object:Provider::Terraform::Examples + name: 'dataplex_datascan_basic_quality' + primary_resource_id: 'basic_quality' + primary_resource_name: + 'fmt.Sprintf("tf-test-datascan%s", context["random_suffix"])' + test_env_vars: + project_name: :PROJECT_NAME + - !ruby/object:Provider::Terraform::Examples + name: 'dataplex_datascan_full_quality' + primary_resource_id: 'full_quality' + primary_resource_name: + 'fmt.Sprintf("tf-test-datascan%s", context["random_suffix"])' + test_env_vars: + project_name: :PROJECT_NAME +parameters: + - !ruby/object:Api::Type::String + name: 'location' + immutable: true + required: true + url_param_only: true + description: | + The location where the data scan should reside. + - !ruby/object:Api::Type::String + name: 'dataScanId' + immutable: true + required: true + url_param_only: true + description: | + DataScan identifier. Must contain only lowercase letters, numbers and hyphens. Must start with a letter. Must end with a number or a letter. +properties: + - !ruby/object:Api::Type::String + name: 'name' + output: true + description: | + The relative resource name of the scan, of the form: projects/{project}/locations/{locationId}/dataScans/{datascan_id}, where project refers to a project_id or project_number and locationId refers to a GCP region. + - !ruby/object:Api::Type::String + name: 'uid' + output: true + description: | + System generated globally unique ID for the scan. This ID will be different if the scan is deleted and re-created with the same name. + - !ruby/object:Api::Type::String + name: 'description' + description: | + Description of the scan. + - !ruby/object:Api::Type::String + name: 'displayName' + description: | + User friendly display name. + - !ruby/object:Api::Type::KeyValuePairs + name: 'labels' + description: | + User-defined labels for the scan. A list of key->value pairs. + - !ruby/object:Api::Type::Enum + name: 'state' + output: true + description: | + Current state of the DataScan. + values: + - :STATE_UNSPECIFIED + - :ACTIVE + - :CREATING + - :DELETING + - :ACTION_REQUIRED + - !ruby/object:Api::Type::String + name: 'createTime' + output: true + description: | + The time when the scan was created. + - !ruby/object:Api::Type::String + name: 'updateTime' + output: true + description: | + The time when the scan was last updated. + - !ruby/object:Api::Type::NestedObject + name: 'data' + required: true + immutable: true + description: | + The data source for DataScan. 
+ properties: + - !ruby/object:Api::Type::String + name: 'entity' + immutable: true + exactly_one_of: + - data.0.entity + - data.0.resource + description: | + The Dataplex entity that represents the data source(e.g. BigQuery table) for Datascan. + - !ruby/object:Api::Type::String + name: 'resource' + immutable: true + exactly_one_of: + - data.0.entity + - data.0.resource + description: | + The service-qualified full resource name of the cloud resource for a DataScan job to scan against. The field could be: + (Cloud Storage bucket for DataDiscoveryScan)BigQuery table of type "TABLE" for DataProfileScan/DataQualityScan. + - !ruby/object:Api::Type::NestedObject + name: 'executionSpec' + required: true + description: | + DataScan execution settings. + properties: + - !ruby/object:Api::Type::NestedObject + name: 'trigger' + required: true + description: | + Spec related to how often and when a scan should be triggered. + properties: + - !ruby/object:Api::Type::NestedObject + name: 'onDemand' + allow_empty_object: true + send_empty_value: true + exactly_one_of: + - execution_spec.0.trigger.0.on_demand + - execution_spec.0.trigger.0.schedule + description: | + The scan runs once via dataScans.run API. + properties: [] + - !ruby/object:Api::Type::NestedObject + name: 'schedule' + description: | + The scan is scheduled to run periodically. + exactly_one_of: + - execution_spec.0.trigger.0.on_demand + - execution_spec.0.trigger.0.schedule + properties: + - !ruby/object:Api::Type::String + name: 'cron' + required: true + description: + Cron schedule for running scans periodically. This field is + required for Schedule scans. + - !ruby/object:Api::Type::String + name: 'field' + immutable: true + description: | + The unnested field (of type Date or Timestamp) that contains values which monotonically increase over time. If not specified, a data scan will run for all data in the table. + - !ruby/object:Api::Type::NestedObject + name: 'executionStatus' + output: true + description: | + Status of the data scan execution. + properties: + - !ruby/object:Api::Type::String + name: 'latestJobEndTime' + output: true + description: | + The time when the latest DataScanJob started. + - !ruby/object:Api::Type::String + name: 'latestJobStartTime' + output: true + description: | + The time when the latest DataScanJob ended. + - !ruby/object:Api::Type::Enum + name: 'type' + output: true + description: | + The type of DataScan. + values: + - :DATA_SCAN_TYPE_UNSPECIFIED + - :DATA_QUALITY + - :DATA_PROFILE + - !ruby/object:Api::Type::NestedObject + name: 'dataQualitySpec' + exactly_one_of: + - data_quality_spec + - data_profile_spec + description: | + DataQualityScan related setting. + properties: + - !ruby/object:Api::Type::Double + name: 'samplingPercent' + description: | + The percentage of the records to be selected from the dataset for DataScan. + - !ruby/object:Api::Type::String + name: 'rowFilter' + description: | + A filter applied to all rows in a single DataScan job. The filter needs to be a valid SQL expression for a WHERE clause in BigQuery standard SQL syntax. Example: col1 >= 0 AND col2 < 10 + - !ruby/object:Api::Type::Array + name: 'rules' + min_size: 1 + description: | + The list of rules to evaluate against a data source. At least one rule is required. + item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::String + name: 'column' + description: | + The unnested column which this rule is evaluated against. 
+ - !ruby/object:Api::Type::Boolean + name: 'ignoreNull' + description: | + Rows with null values will automatically fail a rule, unless ignoreNull is true. In that case, such null rows are trivially considered passing. Only applicable to ColumnMap rules. + - !ruby/object:Api::Type::String + name: 'dimension' + required: true + description: | + The dimension a rule belongs to. Results are also aggregated at the dimension level. Supported dimensions are ["COMPLETENESS", "ACCURACY", "CONSISTENCY", "VALIDITY", "UNIQUENESS", "INTEGRITY"] + - !ruby/object:Api::Type::Double + name: 'threshold' + description: | + The minimum ratio of passing_rows / total_rows required to pass this rule, with a range of [0.0, 1.0]. 0 indicates default value (i.e. 1.0). + - !ruby/object:Api::Type::NestedObject + name: 'rangeExpectation' + description: | + ColumnMap rule which evaluates whether each column value lies between a specified range. + properties: + - !ruby/object:Api::Type::String + name: 'minValue' + description: | + The minimum column value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided. + - !ruby/object:Api::Type::String + name: 'maxValue' + description: | + The maximum column value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided. + - !ruby/object:Api::Type::Boolean + name: 'strictMinEnabled' + default_value: false + description: | + Whether each value needs to be strictly greater than ('>') the minimum, or if equality is allowed. + Only relevant if a minValue has been defined. Default = false. + - !ruby/object:Api::Type::Boolean + name: 'strictMaxEnabled' + default_value: false + description: | + Whether each value needs to be strictly lesser than ('<') the maximum, or if equality is allowed. + Only relevant if a maxValue has been defined. Default = false. + - !ruby/object:Api::Type::NestedObject + name: 'nonNullExpectation' + allow_empty_object: true + send_empty_value: true + description: | + ColumnMap rule which evaluates whether each column value is null. + properties: [] + - !ruby/object:Api::Type::NestedObject + name: 'setExpectation' + description: | + ColumnMap rule which evaluates whether each column value is contained by a specified set. + properties: + - !ruby/object:Api::Type::Array + name: 'values' + required: true + description: | + Expected values for the column value. + item_type: Api::Type::String + - !ruby/object:Api::Type::NestedObject + name: 'regexExpectation' + description: | + ColumnMap rule which evaluates whether each column value matches a specified regex. + properties: + - !ruby/object:Api::Type::String + name: 'regex' + required: true + description: | + A regular expression the column value is expected to match. + - !ruby/object:Api::Type::NestedObject + name: 'uniquenessExpectation' + allow_empty_object: true + send_empty_value: true + description: | + ColumnAggregate rule which evaluates whether the column has duplicates. + properties: [] + - !ruby/object:Api::Type::NestedObject + name: 'statisticRangeExpectation' + description: | + ColumnAggregate rule which evaluates whether the column aggregate statistic lies between a specified range. + properties: + - !ruby/object:Api::Type::Enum + name: 'statistic' + required: true + description: | + column statistics. 
+ values: + - :STATISTIC_UNDEFINED + - :MEAN + - :MIN + - :MAX + - !ruby/object:Api::Type::String + name: 'minValue' + description: | + The minimum column statistic value allowed for a row to pass this validation. + At least one of minValue and maxValue need to be provided. + - !ruby/object:Api::Type::String + name: 'maxValue' + description: | + The maximum column statistic value allowed for a row to pass this validation. + At least one of minValue and maxValue need to be provided. + - !ruby/object:Api::Type::Boolean + name: 'strictMinEnabled' + default_value: false + description: | + Whether column statistic needs to be strictly greater than ('>') the minimum, or if equality is allowed. + Only relevant if a minValue has been defined. Default = false. + - !ruby/object:Api::Type::Boolean + name: 'strictMaxEnabled' + default_value: false + description: | + Whether column statistic needs to be strictly lesser than ('<') the maximum, or if equality is allowed. + Only relevant if a maxValue has been defined. Default = false. + - !ruby/object:Api::Type::NestedObject + name: 'rowConditionExpectation' + description: | + Table rule which evaluates whether each row passes the specified condition. + properties: + - !ruby/object:Api::Type::String + name: 'sqlExpression' + required: true + description: | + The SQL expression. + - !ruby/object:Api::Type::NestedObject + name: 'tableConditionExpectation' + description: | + Table rule which evaluates whether the provided expression is true. + properties: + - !ruby/object:Api::Type::String + name: 'sqlExpression' + required: true + description: | + The SQL expression. + - !ruby/object:Api::Type::NestedObject + name: 'dataProfileSpec' + allow_empty_object: true + send_empty_value: true + exactly_one_of: + - data_quality_spec + - data_profile_spec + description: | + DataProfileScan related setting. + properties: + - !ruby/object:Api::Type::Double + name: 'samplingPercent' + description: | + The percentage of the records to be selected from the dataset for DataScan. + - !ruby/object:Api::Type::String + name: 'rowFilter' + description: | + A filter applied to all rows in a single DataScan job. The filter needs to be a valid SQL expression for a WHERE clause in BigQuery standard SQL syntax. Example: col1 >= 0 AND col2 < 10 + - !ruby/object:Api::Type::NestedObject + name: 'dataQualityResult' + output: true + description: | + The result of the data quality scan. + properties: + - !ruby/object:Api::Type::Boolean + name: 'passed' + output: true + description: | + Overall data quality result -- true if all rules passed. + - !ruby/object:Api::Type::Array + name: 'dimensions' + description: | + A list of results at the dimension level. + item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::Boolean + name: 'passed' + description: | + Whether the dimension passed or failed. + - !ruby/object:Api::Type::Array + name: 'rules' + output: true + description: | + A list of all the rules in a job, and their results. + item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::NestedObject + name: 'rule' + output: true + description: | + The rule specified in the DataQualitySpec, as is. + properties: + - !ruby/object:Api::Type::String + name: 'column' + description: | + The unnested column which this rule is evaluated against. + - !ruby/object:Api::Type::Boolean + name: 'ignoreNull' + description: | + Rows with null values will automatically fail a rule, unless ignoreNull is true. 
In that case, such null rows are trivially considered passing. Only applicable to ColumnMap rules. + - !ruby/object:Api::Type::String + name: 'dimension' + description: | + The dimension a rule belongs to. Results are also aggregated at the dimension level. Supported dimensions are ["COMPLETENESS", "ACCURACY", "CONSISTENCY", "VALIDITY", "UNIQUENESS", "INTEGRITY"] + - !ruby/object:Api::Type::Integer + name: 'threshold' + description: | + The minimum ratio of passing_rows / total_rows required to pass this rule, with a range of [0.0, 1.0]. 0 indicates default value (i.e. 1.0). + - !ruby/object:Api::Type::NestedObject + name: 'rangeExpectation' + output: true + description: | + ColumnMap rule which evaluates whether each column value lies between a specified range. + properties: + - !ruby/object:Api::Type::String + name: 'minValue' + description: | + The minimum column value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided. + - !ruby/object:Api::Type::String + name: maxValue + description: | + The maximum column value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided. + - !ruby/object:Api::Type::Boolean + name: 'strictMinEnabled' + default_value: false + description: | + Whether each value needs to be strictly greater than ('>') the minimum, or if equality is allowed. + Only relevant if a minValue has been defined. Default = false. + - !ruby/object:Api::Type::Boolean + name: 'strictMaxEnabled' + default_value: false + description: | + Whether each value needs to be strictly lesser than ('<') the maximum, or if equality is allowed. + Only relevant if a maxValue has been defined. Default = false. + - !ruby/object:Api::Type::NestedObject + name: 'nonNullExpectation' + output: true + allow_empty_object: true + description: | + ColumnMap rule which evaluates whether each column value is null. + properties: [] + - !ruby/object:Api::Type::NestedObject + name: 'setExpectation' + output: true + description: | + ColumnMap rule which evaluates whether each column value is contained by a specified set. + properties: + - !ruby/object:Api::Type::Array + name: 'values' + description: | + Expected values for the column value. + item_type: Api::Type::String + - !ruby/object:Api::Type::NestedObject + name: 'regexExpectation' + output: true + description: | + ColumnMap rule which evaluates whether each column value matches a specified regex. + properties: + - !ruby/object:Api::Type::String + name: 'regex' + description: | + A regular expression the column value is expected to match. + - !ruby/object:Api::Type::NestedObject + name: 'uniquenessExpectation' + output: true + allow_empty_object: true + description: | + ColumnAggregate rule which evaluates whether the column has duplicates. + properties: [] + - !ruby/object:Api::Type::NestedObject + name: 'statisticRangeExpectation' + output: true + description: | + ColumnAggregate rule which evaluates whether the column aggregate statistic lies between a specified range. + properties: + - !ruby/object:Api::Type::Enum + name: 'statistic' + description: | + column statistics. + values: + - :STATISTIC_UNDEFINED + - :MEAN + - :MIN + - :MAX + - !ruby/object:Api::Type::String + name: 'minValue' + description: | + The minimum column statistic value allowed for a row to pass this validation. + At least one of minValue and maxValue need to be provided. 
+ - !ruby/object:Api::Type::String + name: 'maxValue' + description: | + The maximum column statistic value allowed for a row to pass this validation. + At least one of minValue and maxValue need to be provided. + - !ruby/object:Api::Type::Boolean + name: 'strictMinEnabled' + description: | + Whether column statistic needs to be strictly greater than ('>') the minimum, or if equality is allowed. + Only relevant if a minValue has been defined. Default = false. + - !ruby/object:Api::Type::Boolean + name: 'strictMaxEnabled' + description: | + Whether column statistic needs to be strictly lesser than ('<') the maximum, or if equality is allowed. + Only relevant if a maxValue has been defined. Default = false. + - !ruby/object:Api::Type::NestedObject + name: 'rowConditionExpectation' + output: true + description: | + Table rule which evaluates whether each row passes the specified condition. + properties: + - !ruby/object:Api::Type::String + name: 'sqlExpression' + description: | + The SQL expression. + - !ruby/object:Api::Type::NestedObject + name: 'tableConditionExpectation' + output: true + description: | + Table rule which evaluates whether the provided expression is true. + properties: + - !ruby/object:Api::Type::String + name: 'sqlExpression' + description: | + The SQL expression. + - !ruby/object:Api::Type::Boolean + name: 'passed' + output: true + description: | + Whether the rule passed or failed. + - !ruby/object:Api::Type::String + name: 'evaluatedCount' + output: true + description: | + The number of rows a rule was evaluated against. This field is only valid for ColumnMap type rules. + Evaluated count can be configured to either + 1. include all rows (default) - with null rows automatically failing rule evaluation, or + 2. exclude null rows from the evaluatedCount, by setting ignore_nulls = true. + - !ruby/object:Api::Type::String + name: 'passedCount' + output: true + description: | + The number of rows which passed a rule evaluation. This field is only valid for ColumnMap type rules. + - !ruby/object:Api::Type::String + name: 'nullCount' + output: true + description: | + The number of rows with null values in the specified column. + - !ruby/object:Api::Type::Integer + name: 'passRatio' + output: true + description: | + The ratio of passedCount / evaluatedCount. This field is only valid for ColumnMap type rules. + - !ruby/object:Api::Type::String + name: 'failingRowsQuery' + output: true + description: | + The query to find rows that did not pass this rule. Only applies to ColumnMap and RowCondition rules. + - !ruby/object:Api::Type::String + name: 'rowCount' + output: true + description: | + The count of rows processed. + - !ruby/object:Api::Type::NestedObject + name: 'scannedData' + output: true + description: | + The data scanned for this result. + properties: + - !ruby/object:Api::Type::NestedObject + name: 'incrementalField' + description: | + The range denoted by values of an incremental field + properties: + - !ruby/object:Api::Type::String + name: 'field' + description: | + The field that contains values which monotonically increases over time (e.g. a timestamp column). + - !ruby/object:Api::Type::String + name: 'start' + description: | + Value that marks the start of the range. + - !ruby/object:Api::Type::String + name: 'end' + description: Value that marks the end of the range. + - !ruby/object:Api::Type::NestedObject + name: 'dataProfileResult' + output: true + description: | + The result of the data profile scan. 
+ properties: + - !ruby/object:Api::Type::String + name: 'rowCount' + description: | + The count of rows scanned. + - !ruby/object:Api::Type::NestedObject + name: 'profile' + output: true + description: | + The profile information per field. + properties: + - !ruby/object:Api::Type::Array + name: 'fields' + description: | + List of fields with structural and profile information for each field. + item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::String + name: 'name' + description: | + The name of the field. + - !ruby/object:Api::Type::String + name: 'type' + description: | + The field data type. + - !ruby/object:Api::Type::String + name: 'mode' + description: | + The mode of the field. Possible values include: + 1. REQUIRED, if it is a required field. + 2. NULLABLE, if it is an optional field. + 3. REPEATED, if it is a repeated field. + - !ruby/object:Api::Type::NestedObject + name: 'profile' + description: | + Profile information for the corresponding field. + properties: + - !ruby/object:Api::Type::Integer + name: 'nullRatio' + output: true + description: | + Ratio of rows with null value against total scanned rows. + - !ruby/object:Api::Type::Integer + name: 'distinctRatio' + description: | + Ratio of rows with distinct values against total scanned rows. Not available for complex non-groupable field type RECORD and fields with REPEATABLE mode. + - !ruby/object:Api::Type::NestedObject + name: 'topNValues' + description: | + The list of top N non-null values and number of times they occur in the scanned data. N is 10 or equal to the number of distinct values in the field, whichever is smaller. Not available for complex non-groupable field type RECORD and fields with REPEATABLE mode. + properties: + - !ruby/object:Api::Type::String + name: 'value' + description: | + String value of a top N non-null value. + - !ruby/object:Api::Type::String + name: 'count' + description: | + Count of the corresponding value in the scanned data. + - !ruby/object:Api::Type::NestedObject + name: 'stringProfile' + output: true + description: | + String type field information. + properties: + - !ruby/object:Api::Type::String + name: 'minLength' + description: | + Minimum length of non-null values in the scanned data. + - !ruby/object:Api::Type::String + name: 'maxLength' + description: | + Maximum length of non-null values in the scanned data. + - !ruby/object:Api::Type::Integer + name: 'averageLength' + description: | + Average length of non-null values in the scanned data. + - !ruby/object:Api::Type::NestedObject + name: 'integerProfile' + output: true + description: | + Integer type field information. + properties: + - !ruby/object:Api::Type::Integer + name: 'average' + description: | + Average of non-null values in the scanned data. NaN, if the field has a NaN. + - !ruby/object:Api::Type::Integer + name: 'standardDeviation' + description: | + Standard deviation of non-null values in the scanned data. NaN, if the field has a NaN. + - !ruby/object:Api::Type::String + name: 'min' + description: | + Minimum of non-null values in the scanned data. NaN, if the field has a NaN. + - !ruby/object:Api::Type::String + name: 'quartiles' + description: | + A quartile divides the number of data points into four parts, or quarters, of more-or-less equal size. Three main quartiles used are: The first quartile (Q1) splits off the lowest 25% of data from the highest 75%. It is also known as the lower or 25th empirical quartile, as 25% of the data is below this point. 
The second quartile (Q2) is the median of a data set. So, 50% of the data lies below this point. The third quartile (Q3) splits off the highest 25% of data from the lowest 75%. It is known as the upper or 75th empirical quartile, as 75% of the data lies below this point. Here, the quartiles is provided as an ordered list of quartile values for the scanned data, occurring in order Q1, median, Q3. + - !ruby/object:Api::Type::String + name: 'max' + description: | + Maximum of non-null values in the scanned data. NaN, if the field has a NaN. + - !ruby/object:Api::Type::NestedObject + name: 'doubleProfile' + output: true + description: | + Double type field information. + properties: + - !ruby/object:Api::Type::Integer + name: 'average' + description: | + Average of non-null values in the scanned data. NaN, if the field has a NaN. + - !ruby/object:Api::Type::Integer + name: 'standardDeviation' + description: | + Standard deviation of non-null values in the scanned data. NaN, if the field has a NaN. + - !ruby/object:Api::Type::String + name: 'min' + description: | + Minimum of non-null values in the scanned data. NaN, if the field has a NaN. + - !ruby/object:Api::Type::String + name: 'quartiles' + description: | + A quartile divides the number of data points into four parts, or quarters, of more-or-less equal size. Three main quartiles used are: The first quartile (Q1) splits off the lowest 25% of data from the highest 75%. It is also known as the lower or 25th empirical quartile, as 25% of the data is below this point. The second quartile (Q2) is the median of a data set. So, 50% of the data lies below this point. The third quartile (Q3) splits off the highest 25% of data from the lowest 75%. It is known as the upper or 75th empirical quartile, as 75% of the data lies below this point. Here, the quartiles is provided as an ordered list of quartile values for the scanned data, occurring in order Q1, median, Q3. + - !ruby/object:Api::Type::String + name: 'max' + description: | + Maximum of non-null values in the scanned data. NaN, if the field has a NaN. + - !ruby/object:Api::Type::NestedObject + name: 'scannedData' + output: true + description: The data scanned for this result. + properties: + - !ruby/object:Api::Type::NestedObject + name: 'incrementalField' + description: | + The range denoted by values of an incremental field + properties: + - !ruby/object:Api::Type::String + name: 'field' + description: | + The field that contains values which monotonically increases over time (e.g. a timestamp column). + - !ruby/object:Api::Type::String + name: 'start' + description: | + Value that marks the start of the range. + - !ruby/object:Api::Type::String + name: 'end' + description: Value that marks the end of the range. 
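The new Datascan.yaml above describes the data quality rule semantics only in prose: `threshold` is the minimum passing_rows / total_rows ratio in the range [0.0, 1.0], with 0 meaning the default of 1.0, and the scan result reports `passedCount`, `evaluatedCount` and `passRatio` (passedCount / evaluatedCount). The following Go sketch is illustrative only — it is not code from this change and the type and function names are invented — it simply restates how those fields relate under the stated assumptions.

// Illustrative sketch, not provider code from this PR: restates the
// rule-evaluation semantics described in Datascan.yaml above.
package main

import "fmt"

// ruleResult loosely mirrors the dataQualityResult rule fields added above
// (evaluatedCount, passedCount, threshold); the struct itself is hypothetical.
type ruleResult struct {
	EvaluatedCount int64
	PassedCount    int64
	Threshold      float64 // 0 means "use the default of 1.0" per the YAML description
}

// passRatio is passedCount / evaluatedCount, as described for the passRatio output field.
func (r ruleResult) passRatio() float64 {
	if r.EvaluatedCount == 0 {
		return 0
	}
	return float64(r.PassedCount) / float64(r.EvaluatedCount)
}

// passed applies the documented threshold rule: the ratio must meet or exceed
// the threshold, where a threshold of 0 indicates the default value of 1.0.
func (r ruleResult) passed() bool {
	threshold := r.Threshold
	if threshold == 0 {
		threshold = 1.0
	}
	return r.passRatio() >= threshold
}

func main() {
	r := ruleResult{EvaluatedCount: 1000, PassedCount: 995, Threshold: 0.99}
	fmt.Printf("passRatio=%.3f passed=%v\n", r.passRatio(), r.passed())
}

For example, with 995 of 1000 rows passing and a threshold of 0.99 the rule passes; with the threshold left at 0 (interpreted as 1.0) the same counts would fail it.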
diff --git a/mmv1/products/dlp/DeidentifyTemplate.yaml b/mmv1/products/dlp/DeidentifyTemplate.yaml index 02181d246656..224b31ff2c93 100644 --- a/mmv1/products/dlp/DeidentifyTemplate.yaml +++ b/mmv1/products/dlp/DeidentifyTemplate.yaml @@ -42,8 +42,18 @@ examples: primary_resource_id: 'basic' test_env_vars: project: :PROJECT_NAME + - !ruby/object:Provider::Terraform::Examples + name: 'dlp_deidentify_template_with_template_id' + primary_resource_id: 'with_template_id' + vars: + name: 'my-template' + test_env_vars: + project: :PROJECT_NAME + skip_docs: true custom_code: !ruby/object:Provider::Terraform::CustomCode - encoder: templates/terraform/encoders/wrap_object.go.erb + decoder: templates/terraform/decoders/dlp_template_id.go.erb + encoder: templates/terraform/encoders/wrap_object_with_template_id.go.erb + update_encoder: templates/terraform/encoders/wrap_object.go.erb custom_import: templates/terraform/custom_import/dlp_import.go.erb parameters: - !ruby/object:Api::Type::String @@ -83,6 +93,15 @@ properties: description: | The last update timestamp of an deidentifyTemplate. Set by the server. output: true + - !ruby/object:Api::Type::String + name: 'templateId' + description: | + The template id can contain uppercase and lowercase letters, numbers, and hyphens; + that is, it must match the regular expression: [a-zA-Z\d-_]+. The maximum length is + 100 characters. Can be empty to allow the system to generate one. + immutable: true + default_from_api: true + url_param_only: true - !ruby/object:Api::Type::NestedObject name: 'deidentifyConfig' required: true @@ -565,399 +584,30 @@ properties: description: | Words or phrases defining the dictionary. The dictionary must contain at least one phrase and every phrase must contain at least 2 characters that are letters or digits. item_type: Api::Type::String - - !ruby/object:Api::Type::NestedObject - name: 'recordTransformations' - description: - Treat the dataset as structured. Transformations can be applied to - specific locations within structured datasets, such as transforming a - column within a table. - exactly_one_of: - - deidentify_config.0.info_type_transformations - - deidentify_config.0.record_transformations - - deidentify_config.0.image_transformations - properties: - - !ruby/object:Api::Type::Array - name: 'fieldTransformations' - description: - Transform the record by applying various field transformations. - at_least_one_of: - - deidentify_config.0.record_transformations.0.field_transformations - - deidentify_config.0.record_transformations.0.record_suppressions - item_type: !ruby/object:Api::Type::NestedObject - properties: - - !ruby/object:Api::Type::Array - name: fields - description: | - Input field(s) to apply the transformation to. When you have columns that reference their position within a list, omit the index from the FieldId. - FieldId name matching ignores the index. For example, instead of "contact.nums[0].type", use "contact.nums.type". - required: true - item_type: !ruby/object:Api::Type::NestedObject - properties: - - !ruby/object:Api::Type::String - name: name - description: Name describing the field. - - !ruby/object:Api::Type::NestedObject - name: condition - description: | - Only apply the transformation if the condition evaluates to true for the given RecordCondition. The conditions are allowed to reference fields that are not used in the actual transformation. - Example Use Cases: - - Apply a different bucket transformation to an age column if the zip code column for the same record is within a specific range. 
- - Redact a field if the date of birth field is greater than 85. - properties: - !ruby/object:Api::Type::NestedObject - name: expressions - description: An expression. + name: 'dateShiftConfig' + description: | + Shifts dates by random number of days, with option to be consistent for the same context. properties: - - !ruby/object:Api::Type::Enum - name: logicalOperator - description: - The operator to apply to the result of conditions. - Default and currently only supported value is AND - default_value: :AND - values: - - :AND - !ruby/object:Api::Type::NestedObject - name: conditions - description: Conditions to apply to the expression. + name: context + description: | + Points to the field that contains the context, for example, an entity id. + If set, must also set cryptoKey. If set, shift will be consistent for the given context. properties: - - !ruby/object:Api::Type::Array - name: conditions - description: A collection of conditions. - item_type: !ruby/object:Api::Type::NestedObject - properties: - - !ruby/object:Api::Type::NestedObject - name: field - description: - Field within the record this condition is - evaluated against. - required: true - properties: - - !ruby/object:Api::Type::String - name: name - description: Name describing the field. - - !ruby/object:Api::Type::Enum - name: operator - description: - Operator used to compare the field or - infoType to the value. - required: true - values: - - :EQUAL_TO - - :NOT_EQUAL_TO - - :GREATER_THAN - - :LESS_THAN - - :GREATER_THAN_OR_EQUALS - - :LESS_THAN_OR_EQUALS - - :EXISTS - - !ruby/object:Api::Type::NestedObject - name: value - description: | - Value to compare against. - The `value` block must only contain one argument. For example when a condition is evaluated against a string-type field, only `string_value` should be set. - This argument is mandatory, except for conditions using the `EXISTS` operator. - properties: - - !ruby/object:Api::Type::String - name: integerValue - description: - An integer value (int64 format) - - !ruby/object:Api::Type::Double - name: floatValue - description: A float value. - - !ruby/object:Api::Type::String - name: stringValue - description: A string value. - - !ruby/object:Api::Type::Boolean - name: booleanValue - description: A boolean value. - - !ruby/object:Api::Type::String - name: timestampValue - description: | - A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". - validation: - !ruby/object:Provider::Terraform::Validation - function: 'verify.ValidateRFC3339Date' - - !ruby/object:Api::Type::NestedObject - name: timeValue - description: Represents a time of day. - properties: - - !ruby/object:Api::Type::Integer - name: hours - description: | - Hours of day in 24 hour format. Should be from 0 to 23. An API may choose to allow the value "24:00:00" for scenarios like business closing time. - validation: - !ruby/object:Provider::Terraform::Validation - function: - 'validation.IntBetween(0, 24)' # "An API may choose to allow the value "24:00:00" for scenarios like business closing time." - - !ruby/object:Api::Type::Integer - name: minutes - description: - Minutes of hour of day. Must be - from 0 to 59. - validation: - !ruby/object:Provider::Terraform::Validation - function: - 'validation.IntBetween(0, 59)' - - !ruby/object:Api::Type::Integer - name: seconds - description: - Seconds of minutes of the time. - Must normally be from 0 to 59. 
An - API may allow the value 60 if it - allows leap-seconds. - validation: - !ruby/object:Provider::Terraform::Validation - function: - 'validation.IntBetween(0, 60)' # "An API may allow the value 60 if it allows leap-seconds." - - !ruby/object:Api::Type::Integer - name: nanos - description: - Fractions of seconds in - nanoseconds. Must be from 0 to - 999,999,999. - validation: - !ruby/object:Provider::Terraform::Validation - function: - 'validation.IntBetween(0, - 999999999)' - - !ruby/object:Api::Type::NestedObject - name: dateValue - description: - Represents a whole or partial calendar - date. - properties: - - !ruby/object:Api::Type::Integer - name: year - description: - Year of the date. Must be from 1 - to 9999, or 0 to specify a date - without a year. - validation: - !ruby/object:Provider::Terraform::Validation - function: - 'validation.IntBetween(1, 9999)' - - !ruby/object:Api::Type::Integer - name: month - description: - Month of a year. Must be from 1 to - 12, or 0 to specify a year without - a month and day. - validation: - !ruby/object:Provider::Terraform::Validation - function: - 'validation.IntBetween(0, 12)' - - !ruby/object:Api::Type::Integer - name: day - description: - Day of a month. Must be from 1 to - 31 and valid for the year and - month, or 0 to specify a year by - itself or a year and month where - the day isn't significant. - validation: - !ruby/object:Provider::Terraform::Validation - function: - 'validation.IntBetween(0, 31)' - - !ruby/object:Api::Type::Enum - name: dayOfWeekValue - description: - Represents a day of the week. - values: - - :MONDAY - - :TUESDAY - - :WEDNESDAY - - :THURSDAY - - :FRIDAY - - :SATURDAY - - :SUNDAY - - !ruby/object:Api::Type::NestedObject - name: primitiveTransformation - required: true - description: | - Apply the transformation to the entire field. - The `primitive_transformation` block must only contain one argument, corresponding to the type of transformation. - properties: - - !ruby/object:Api::Type::NestedObject - name: replaceConfig - description: Replace with a specified value. - properties: + - !ruby/object:Api::Type::String + name: name + required: true + description: Name describing the field. - !ruby/object:Api::Type::NestedObject - name: 'newValue' - required: true + name: 'cryptoKey' description: | - Replace each input value with a given value. - The `new_value` block must only contain one argument. For example when replacing the contents of a string-type field, only `string_value` should be set. + The key used by the encryption function. properties: - - !ruby/object:Api::Type::String - name: integerValue - description: An integer value (int64 format) - - !ruby/object:Api::Type::Double - name: floatValue - description: A float value. - - !ruby/object:Api::Type::String - name: stringValue - description: A string value. - - !ruby/object:Api::Type::Boolean - name: booleanValue - description: A boolean value. - - !ruby/object:Api::Type::String - name: timestampValue - description: | - A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". - validation: - !ruby/object:Provider::Terraform::Validation - function: 'verify.ValidateRFC3339Date' - !ruby/object:Api::Type::NestedObject - name: timeValue - description: Represents a time of day. - properties: - - !ruby/object:Api::Type::Integer - name: hours - description: | - Hours of day in 24 hour format. Should be from 0 to 23. 
An API may choose to allow the value "24:00:00" for scenarios like business closing time. - validation: - !ruby/object:Provider::Terraform::Validation - function: 'validation.IntBetween(0, 24)' # "An API may choose to allow the value "24:00:00" for scenarios like business closing time." - - !ruby/object:Api::Type::Integer - name: minutes - description: - Minutes of hour of day. Must be from 0 to - 59. - validation: - !ruby/object:Provider::Terraform::Validation - function: 'validation.IntBetween(0, 59)' - - !ruby/object:Api::Type::Integer - name: seconds - description: - Seconds of minutes of the time. Must - normally be from 0 to 59. An API may allow - the value 60 if it allows leap-seconds. - validation: - !ruby/object:Provider::Terraform::Validation - function: 'validation.IntBetween(0, 60)' # "An API may allow the value 60 if it allows leap-seconds." - - !ruby/object:Api::Type::Integer - name: nanos - description: - Fractions of seconds in nanoseconds. Must be - from 0 to 999,999,999. - validation: - !ruby/object:Provider::Terraform::Validation - function: - 'validation.IntBetween(0, 999999999)' - - !ruby/object:Api::Type::NestedObject - name: dateValue - description: - Represents a whole or partial calendar date. - properties: - - !ruby/object:Api::Type::Integer - name: year - description: - Year of the date. Must be from 1 to 9999, or - 0 to specify a date without a year. - validation: - !ruby/object:Provider::Terraform::Validation - function: 'validation.IntBetween(1, 9999)' - - !ruby/object:Api::Type::Integer - name: month - description: - Month of a year. Must be from 1 to 12, or 0 - to specify a year without a month and day. - validation: - !ruby/object:Provider::Terraform::Validation - function: 'validation.IntBetween(0, 12)' - - !ruby/object:Api::Type::Integer - name: day - description: - Day of a month. Must be from 1 to 31 and - valid for the year and month, or 0 to - specify a year by itself or a year and month - where the day isn't significant. - validation: - !ruby/object:Provider::Terraform::Validation - function: 'validation.IntBetween(0, 31)' - - !ruby/object:Api::Type::Enum - name: dayOfWeekValue - description: Represents a day of the week. - values: - - :MONDAY - - :TUESDAY - - :WEDNESDAY - - :THURSDAY - - :FRIDAY - - :SATURDAY - - :SUNDAY - - !ruby/object:Api::Type::NestedObject - name: redactConfig - description: | - Redact a given value. For example, if used with an InfoTypeTransformation transforming PHONE_NUMBER, and input 'My phone number is 206-555-0123', the output would be 'My phone number is '. - properties: [] # Meant to be an empty object with no properties - see here : https://cloud.google.com/dlp/docs/reference/rest/v2/projects.deidentifyTemplates#redactconfig - # The fields below are necessary to include the "redactConfig" transformation in the payload - # A side-effect is null values when the field is unused, see: https://github.com/hashicorp/terraform-provider-google/issues/13201 - send_empty_value: true - allow_empty_object: true - - !ruby/object:Api::Type::NestedObject - name: characterMaskConfig - description: | - Partially mask a string by replacing a given number of characters with a fixed character. Masking can start from the beginning or end of the string. This can be used on data of any type (numbers, longs, and so on) and when de-identifying structured data we'll attempt to preserve the original data's type. (This allows you to take a long like 123 and modify it to a string like **3). 
- properties: - - !ruby/object:Api::Type::String - name: 'maskingCharacter' - description: | - Character to use to mask the sensitive values—for example, * for an alphabetic string such as a name, or 0 for a numeric string - such as ZIP code or credit card number. This string must have a length of 1. If not supplied, this value defaults to * for - strings, and 0 for digits. - - !ruby/object:Api::Type::Integer - name: 'numberToMask' - description: | - Number of characters to mask. If not set, all matching chars will be masked. Skipped characters do not count towards this tally. - If number_to_mask is negative, this denotes inverse masking. Cloud DLP masks all but a number of characters. For example, suppose you have the following values: - - `masking_character` is * - - `number_to_mask` is -4 - - `reverse_order` is false - - `characters_to_ignore` includes - - - Input string is 1234-5678-9012-3456 - - The resulting de-identified string is ****-****-****-3456. Cloud DLP masks all but the last four characters. If reverseOrder is true, all but the first four characters are masked as 1234-****-****-****. - - !ruby/object:Api::Type::Boolean - name: 'reverseOrder' - description: | - Mask characters in reverse order. For example, if masking_character is 0, number_to_mask is 14, and reverse_order is `false`, then the - input string `1234-5678-9012-3456` is masked as `00000000000000-3456`. - - !ruby/object:Api::Type::Array - name: 'charactersToIgnore' - description: | - Characters to skip when doing de-identification of a value. These will be left alone and skipped. - item_type: !ruby/object:Api::Type::NestedObject - properties: - - !ruby/object:Api::Type::String - name: 'charactersToSkip' - description: | - Characters to not transform when masking. - - !ruby/object:Api::Type::Enum - name: 'commonCharactersToIgnore' - description: | - Common characters to not transform when masking. Useful to avoid removing punctuation. - values: - - :NUMERIC - - :ALPHA_UPPER_CASE - - :ALPHA_LOWER_CASE - - :PUNCTUATION - - :WHITESPACE - - !ruby/object:Api::Type::NestedObject - name: 'cryptoReplaceFfxFpeConfig' - description: | - Replaces an identifier with a surrogate using Format Preserving Encryption (FPE) with the FFX mode of operation; however when used in the `content.reidentify` API method, it serves the opposite function by reversing the surrogate back into the original identifier. The identifier must be encoded as ASCII. For a given crypto key and context, the same identifier will be replaced with the same surrogate. Identifiers must be at least two characters long. In the case that the identifier is the empty string, it will be skipped. See [https://cloud.google.com/dlp/docs/pseudonymization](https://cloud.google.com/dlp/docs/pseudonymization) to learn more. - - Note: We recommend using CryptoDeterministicConfig for all use cases which do not require preserving the input alphabet space and size, plus warrant referential integrity. - properties: - - !ruby/object:Api::Type::NestedObject - name: 'cryptoKey' - description: | - The key used by the encryption algorithm. - properties: - - !ruby/object:Api::Type::NestedObject - name: 'transient' - description: | - Transient crypto key. Use this to have a random data crypto key generated. It will be discarded after the request finishes. + name: 'transient' + description: | + Transient crypto key. Use this to have a random data crypto key generated. It will be discarded after the request finishes. 
properties: - !ruby/object:Api::Type::String name: 'name' @@ -974,7 +624,6 @@ properties: required: true description: | A 128/192/256 bit key. - A base64-encoded string. - !ruby/object:Api::Type::NestedObject name: 'kmsWrapped' @@ -989,75 +638,23 @@ properties: required: true description: | The wrapped data crypto key. - A base64-encoded string. - !ruby/object:Api::Type::String name: 'cryptoKeyName' required: true description: | The resource name of the KMS CryptoKey to use for unwrapping. - - !ruby/object:Api::Type::NestedObject - name: 'context' - description: | - The 'tweak', a context may be used for higher security since the same identifier in two different contexts won't be given the same surrogate. If the context is not set, a default tweak will be used. - - If the context is set but: - - 1. there is no record present when transforming a given value or - 2. the field is not present when transforming a given value, - - a default tweak will be used. - - Note that case (1) is expected when an `InfoTypeTransformation` is applied to both structured and non-structured `ContentItem`s. Currently, the referenced field may be of value type integer or string. - - The tweak is constructed as a sequence of bytes in big endian byte order such that: - - * a 64 bit integer is encoded followed by a single byte of value 1 - * a string is encoded in UTF-8 format followed by a single byte of value 2 - properties: - - !ruby/object:Api::Type::String - name: 'name' - description: | - Name describing the field. - - !ruby/object:Api::Type::NestedObject - name: 'surrogateInfoType' - description: | - The custom infoType to annotate the surrogate with. This annotation will be applied to the surrogate by prefixing it with the name of the custom infoType followed by the number of characters comprising the surrogate. The following scheme defines the format: info\_type\_name(surrogate\_character\_count):surrogate - - For example, if the name of custom infoType is 'MY\_TOKEN\_INFO\_TYPE' and the surrogate is 'abc', the full replacement value will be: 'MY\_TOKEN\_INFO\_TYPE(3):abc' - - This annotation identifies the surrogate when inspecting content using the custom infoType [`SurrogateType`](https://cloud.google.com/dlp/docs/reference/rest/v2/InspectConfig#surrogatetype). This facilitates reversal of the surrogate when it occurs in free text. - - In order for inspection to work properly, the name of this infoType must not occur naturally anywhere in your data; otherwise, inspection may find a surrogate that does not correspond to an actual identifier. Therefore, choose your custom infoType name carefully after considering what your data looks like. One way to select a name that has a high chance of yielding reliable detection is to include one or more unicode characters that are highly improbable to exist in your data. For example, assuming your data is entered from a regular ASCII keyboard, the symbol with the hex code point 29DD might be used like so: ⧝MY\_TOKEN\_TYPE - properties: - - !ruby/object:Api::Type::String - name: 'name' - description: | - Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at [https://cloud.google.com/dlp/docs/infotypes-reference](https://cloud.google.com/dlp/docs/infotypes-reference) when specifying a built-in type. When sending Cloud DLP results to Data Catalog, infoType names should conform to the pattern `[A-Za-z0-9$-_]{1,64}`. 
- - !ruby/object:Api::Type::String - name: 'version' - description: | - Optional version name for this InfoType. - - !ruby/object:Api::Type::Enum - name: 'commonAlphabet' - description: | - Common alphabets. - values: - - :FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED # Unused. - - :NUMERIC # [0-9] (radix of 10) - - :HEXADECIMAL # [0-9A-F] (radix of 16) - - :UPPER_CASE_ALPHA_NUMERIC # [0-9A-Z] (radix of 36) - - :ALPHA_NUMERIC # [0-9A-Za-z] (radix of 62) - - !ruby/object:Api::Type::String - name: 'customAlphabet' + - !ruby/object:Api::Type::Integer + name: upperBoundDays + required: true description: | - This is supported by mapping these to the alphanumeric characters that the FFX mode natively supports. This happens before/after encryption/decryption. Each character listed must appear only once. Number of characters must be in the range \[2, 95\]. This must be encoded as ASCII. The order of characters does not matter. The full list of allowed characters is: - - ``0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz ~`!@#$%^&*()_-+={[}]|:;"'<,>.?/`` + Range of shift in days. Actual shift will be selected at random within this range (inclusive ends). + Negative means shift to earlier in time. Must not be more than 365250 days (1000 years) each direction. - !ruby/object:Api::Type::Integer - name: 'radix' + name: lowerBoundDays + required: true description: | - The native way to select the alphabet. Must be in the range \[2, 95\]. + Range of shift in days. Negative means shift to earlier in time. - !ruby/object:Api::Type::NestedObject name: fixedSizeBucketingConfig description: | @@ -1077,6 +674,7 @@ properties: Lower bound value of buckets. All values less than lower_bound are grouped together into a single bucket; for example if lower_bound = 10, then all values less than 10 are replaced with the value "-10". The `lower_bound` block must only contain one argument. See the `fixed_size_bucketing_config` block description for more information about choosing a data type. + required: true properties: - !ruby/object:Api::Type::String name: integerValue @@ -1084,155 +682,20 @@ properties: - !ruby/object:Api::Type::Double name: floatValue description: A float value. - - !ruby/object:Api::Type::String - name: stringValue - description: A string value. - - !ruby/object:Api::Type::Boolean - name: booleanValue - description: A boolean value. - - !ruby/object:Api::Type::String - name: timestampValue - description: | - A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". - - !ruby/object:Api::Type::NestedObject - name: timeValue - description: Represents a time of day. - properties: - - !ruby/object:Api::Type::Integer - name: hours - description: | - Hours of day in 24 hour format. Should be from 0 to 23. An API may choose to allow the value "24:00:00" for scenarios like business closing time. - - !ruby/object:Api::Type::Integer - name: minutes - description: - Minutes of hour of day. Must be from 0 to - 59. - - !ruby/object:Api::Type::Integer - name: seconds - description: - Seconds of minutes of the time. Must - normally be from 0 to 59. An API may allow - the value 60 if it allows leap-seconds. - - !ruby/object:Api::Type::Integer - name: nanos - description: - Fractions of seconds in nanoseconds. Must be - from 0 to 999,999,999. - - !ruby/object:Api::Type::NestedObject - name: dateValue - description: - Represents a whole or partial calendar date. 
- properties: - - !ruby/object:Api::Type::Integer - name: year - description: - Year of the date. Must be from 1 to 9999, or - 0 to specify a date without a year. - - !ruby/object:Api::Type::Integer - name: month - description: - Month of a year. Must be from 1 to 12, or 0 - to specify a year without a month and day. - - !ruby/object:Api::Type::Integer - name: day - description: - Day of a month. Must be from 1 to 31 and - valid for the year and month, or 0 to - specify a year by itself or a year and month - where the day isn't significant. - - !ruby/object:Api::Type::Enum - name: dayOfWeekValue - description: Represents a day of the week. - values: - - :MONDAY - - :TUESDAY - - :WEDNESDAY - - :THURSDAY - - :FRIDAY - - :SATURDAY - - :SUNDAY - required: true - - !ruby/object:Api::Type::NestedObject - name: upperBound - description: | - Upper bound value of buckets. - All values greater than upper_bound are grouped together into a single bucket; for example if upper_bound = 89, then all values greater than 89 are replaced with the value "89+". - The `upper_bound` block must only contain one argument. See the `fixed_size_bucketing_config` block description for more information about choosing a data type. - required: true - properties: + - !ruby/object:Api::Type::NestedObject + name: upperBound + description: | + Upper bound value of buckets. + All values greater than upper_bound are grouped together into a single bucket; for example if upper_bound = 89, then all values greater than 89 are replaced with the value "89+". + The `upper_bound` block must only contain one argument. See the `fixed_size_bucketing_config` block description for more information about choosing a data type. + required: true + properties: - !ruby/object:Api::Type::String name: integerValue description: An integer value (int64 format) - !ruby/object:Api::Type::Double name: floatValue description: A float value. - - !ruby/object:Api::Type::String - name: stringValue - description: A string value. - - !ruby/object:Api::Type::Boolean - name: booleanValue - description: A boolean value. - - !ruby/object:Api::Type::String - name: timestampValue - description: | - A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". - - !ruby/object:Api::Type::NestedObject - name: timeValue - description: Represents a time of day. - properties: - - !ruby/object:Api::Type::Integer - name: hours - description: | - Hours of day in 24 hour format. Should be from 0 to 23. An API may choose to allow the value "24:00:00" for scenarios like business closing time. - - !ruby/object:Api::Type::Integer - name: minutes - description: - Minutes of hour of day. Must be from 0 to - 59. - - !ruby/object:Api::Type::Integer - name: seconds - description: - Seconds of minutes of the time. Must - normally be from 0 to 59. An API may allow - the value 60 if it allows leap-seconds. - - !ruby/object:Api::Type::Integer - name: nanos - description: - Fractions of seconds in nanoseconds. Must be - from 0 to 999,999,999. - - !ruby/object:Api::Type::NestedObject - name: dateValue - description: - Represents a whole or partial calendar date. - properties: - - !ruby/object:Api::Type::Integer - name: year - description: - Year of the date. Must be from 1 to 9999, or - 0 to specify a date without a year. - - !ruby/object:Api::Type::Integer - name: month - description: - Month of a year. Must be from 1 to 12, or 0 - to specify a year without a month and day. 
- - !ruby/object:Api::Type::Integer - name: day - description: - Day of a month. Must be from 1 to 31 and - valid for the year and month, or 0 to - specify a year by itself or a year and month - where the day isn't significant. - - !ruby/object:Api::Type::Enum - name: dayOfWeekValue - description: Represents a day of the week. - values: - - :MONDAY - - :TUESDAY - - :WEDNESDAY - - :THURSDAY - - :FRIDAY - - :SATURDAY - - :SUNDAY - !ruby/object:Api::Type::Double name: bucketSize description: | @@ -1270,9 +733,6 @@ properties: - !ruby/object:Api::Type::String name: stringValue description: A string value. - - !ruby/object:Api::Type::Boolean - name: booleanValue - description: A boolean value. - !ruby/object:Api::Type::String name: timestampValue description: | @@ -1354,9 +814,6 @@ properties: - !ruby/object:Api::Type::String name: stringValue description: A string value. - - !ruby/object:Api::Type::Boolean - name: booleanValue - description: A boolean value. - !ruby/object:Api::Type::String name: timestampValue description: | @@ -1439,9 +896,6 @@ properties: - !ruby/object:Api::Type::String name: stringValue description: A string value. - - !ruby/object:Api::Type::Boolean - name: booleanValue - description: A boolean value. - !ruby/object:Api::Type::String name: timestampValue description: | @@ -1518,254 +972,2161 @@ properties: name: partToExtract description: The part of the time to keep. values: - - :YEAR # [0-9999] - - :MONTH # [1-12] - - :DAY_OF_MONTH # [1-31] - - :DAY_OF_WEEK # [1-7] - - :WEEK_OF_YEAR # [1-53] - - :HOUR_OF_DAY # [0-23] + - :YEAR # [0-9999] + - :MONTH # [1-12] + - :DAY_OF_MONTH # [1-31] + - :DAY_OF_WEEK # [1-7] + - :WEEK_OF_YEAR # [1-53] + - :HOUR_OF_DAY # [0-23] - !ruby/object:Api::Type::NestedObject - name: cryptoHashConfig + name: redactConfig description: | - Pseudonymization method that generates surrogates via cryptographic hashing. Uses SHA-256. The key size must be either 32 or 64 bytes. - Outputs a base64 encoded representation of the hashed output (for example, L7k0BHmF1ha5U3NfGykjro4xWi1MPVQPjhMAZbSV9mM=). - Currently, only string and integer values can be hashed. - See https://cloud.google.com/dlp/docs/pseudonymization to learn more. - properties: - - !ruby/object:Api::Type::NestedObject - name: 'cryptoKey' - description: | - The key used by the encryption function. - properties: - - !ruby/object:Api::Type::NestedObject - name: 'transient' - description: | - Transient crypto key. Use this to have a random data crypto key generated. It will be discarded after the request finishes. - properties: - - !ruby/object:Api::Type::String - name: 'name' - required: true - description: | - Name of the key. This is an arbitrary string used to differentiate different keys. A unique key is generated per name: two separate `TransientCryptoKey` protos share the same generated key if their names are the same. When the data crypto key is generated, this name is not used in any way (repeating the api call will result in a different key being generated). - - !ruby/object:Api::Type::NestedObject - name: 'unwrapped' - description: | - Unwrapped crypto key. Using raw keys is prone to security risks due to accidentally leaking the key. Choose another type of key if possible. - properties: - - !ruby/object:Api::Type::String - name: 'key' - required: true - description: | - A 128/192/256 bit key. - - A base64-encoded string. - - !ruby/object:Api::Type::NestedObject - name: 'kmsWrapped' - description: | - KMS wrapped key. - Include to use an existing data crypto key wrapped by KMS. 
The wrapped key must be a 128-, 192-, or 256-bit key. Authorization requires the following IAM permissions when sending a request to perform a crypto transformation using a KMS-wrapped crypto key: dlp.kms.encrypt - For more information, see [Creating a wrapped key](https://cloud.google.com/dlp/docs/create-wrapped-key). - Note: When you use Cloud KMS for cryptographic operations, [charges apply](https://cloud.google.com/kms/pricing). - properties: - - !ruby/object:Api::Type::String - name: 'wrappedKey' - required: true - description: | - The wrapped data crypto key. - - A base64-encoded string. - - !ruby/object:Api::Type::String - name: 'cryptoKeyName' - required: true - description: | - The resource name of the KMS CryptoKey to use for unwrapping. + Redact a given value. For example, if used with an InfoTypeTransformation transforming PHONE_NUMBER, and input 'My phone number is 206-555-0123', the output would be 'My phone number is '. + properties: [] # Meant to be an empty object with no properties - see here : https://cloud.google.com/dlp/docs/reference/rest/v2/projects.deidentifyTemplates#redactconfig + # The fields below are necessary to include the "redactConfig" transformation in the payload + # A side-effect is null values when the field is unused, see: https://github.com/hashicorp/terraform-provider-google/issues/13201 + send_empty_value: true + allow_empty_object: true + - !ruby/object:Api::Type::NestedObject + name: 'recordTransformations' + description: + Treat the dataset as structured. Transformations can be applied to + specific locations within structured datasets, such as transforming a + column within a table. + exactly_one_of: + - deidentify_config.0.info_type_transformations + - deidentify_config.0.record_transformations + - deidentify_config.0.image_transformations + properties: + - !ruby/object:Api::Type::Array + name: 'fieldTransformations' + description: + Transform the record by applying various field transformations. + at_least_one_of: + - deidentify_config.0.record_transformations.0.field_transformations + - deidentify_config.0.record_transformations.0.record_suppressions + item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::Array + name: fields + description: | + Input field(s) to apply the transformation to. When you have columns that reference their position within a list, omit the index from the FieldId. + FieldId name matching ignores the index. For example, instead of "contact.nums[0].type", use "contact.nums.type". + required: true + item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::String + name: name + description: Name describing the field. + - !ruby/object:Api::Type::NestedObject + name: condition + description: | + Only apply the transformation if the condition evaluates to true for the given RecordCondition. The conditions are allowed to reference fields that are not used in the actual transformation. + Example Use Cases: + - Apply a different bucket transformation to an age column if the zip code column for the same record is within a specific range. + - Redact a field if the date of birth field is greater than 85. + properties: - !ruby/object:Api::Type::NestedObject - name: dateShiftConfig - description: | - Shifts dates by random number of days, with option to be consistent for the same context. See https://cloud.google.com/dlp/docs/concepts-date-shifting to learn more. + name: expressions + description: An expression. 
properties: - - !ruby/object:Api::Type::Integer - name: upperBoundDays - required: true - description: | - Range of shift in days. Actual shift will be selected at random within this range (inclusive ends). Negative means shift to earlier in time. Must not be more than 365250 days (1000 years) each direction. - - For example, 3 means shift date to at most 3 days into the future. - - !ruby/object:Api::Type::Integer - name: lowerBoundDays - required: true - description: | - For example, -5 means shift date to at most 5 days back in the past. + - !ruby/object:Api::Type::Enum + name: logicalOperator + description: + The operator to apply to the result of conditions. + Default and currently only supported value is AND + default_value: :AND + values: + - :AND - !ruby/object:Api::Type::NestedObject - name: 'context' - description: | - Points to the field that contains the context, for example, an entity id. - If set, must also set cryptoKey. If set, shift will be consistent for the given context. + name: conditions + description: Conditions to apply to the expression. properties: - - !ruby/object:Api::Type::String - name: 'name' + - !ruby/object:Api::Type::Array + name: conditions + description: A collection of conditions. + item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::NestedObject + name: field + description: + Field within the record this condition is + evaluated against. + required: true + properties: + - !ruby/object:Api::Type::String + name: name + description: Name describing the field. + - !ruby/object:Api::Type::Enum + name: operator + description: + Operator used to compare the field or + infoType to the value. + required: true + values: + - :EQUAL_TO + - :NOT_EQUAL_TO + - :GREATER_THAN + - :LESS_THAN + - :GREATER_THAN_OR_EQUALS + - :LESS_THAN_OR_EQUALS + - :EXISTS + - !ruby/object:Api::Type::NestedObject + name: value + description: | + Value to compare against. + The `value` block must only contain one argument. For example when a condition is evaluated against a string-type field, only `string_value` should be set. + This argument is mandatory, except for conditions using the `EXISTS` operator. + properties: + - !ruby/object:Api::Type::String + name: integerValue + description: + An integer value (int64 format) + - !ruby/object:Api::Type::Double + name: floatValue + description: A float value. + - !ruby/object:Api::Type::String + name: stringValue + description: A string value. + - !ruby/object:Api::Type::Boolean + name: booleanValue + description: A boolean value. + - !ruby/object:Api::Type::String + name: timestampValue + description: | + A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". + validation: + !ruby/object:Provider::Terraform::Validation + function: 'verify.ValidateRFC3339Date' + - !ruby/object:Api::Type::NestedObject + name: timeValue + description: Represents a time of day. + properties: + - !ruby/object:Api::Type::Integer + name: hours + description: | + Hours of day in 24 hour format. Should be from 0 to 23. An API may choose to allow the value "24:00:00" for scenarios like business closing time. + validation: + !ruby/object:Provider::Terraform::Validation + function: + 'validation.IntBetween(0, 24)' # "An API may choose to allow the value "24:00:00" for scenarios like business closing time." + - !ruby/object:Api::Type::Integer + name: minutes + description: + Minutes of hour of day. 
Must be + from 0 to 59. + validation: + !ruby/object:Provider::Terraform::Validation + function: + 'validation.IntBetween(0, 59)' + - !ruby/object:Api::Type::Integer + name: seconds + description: + Seconds of minutes of the time. + Must normally be from 0 to 59. An + API may allow the value 60 if it + allows leap-seconds. + validation: + !ruby/object:Provider::Terraform::Validation + function: + 'validation.IntBetween(0, 60)' # "An API may allow the value 60 if it allows leap-seconds." + - !ruby/object:Api::Type::Integer + name: nanos + description: + Fractions of seconds in + nanoseconds. Must be from 0 to + 999,999,999. + validation: + !ruby/object:Provider::Terraform::Validation + function: + 'validation.IntBetween(0, + 999999999)' + - !ruby/object:Api::Type::NestedObject + name: dateValue + description: + Represents a whole or partial calendar + date. + properties: + - !ruby/object:Api::Type::Integer + name: year + description: + Year of the date. Must be from 1 + to 9999, or 0 to specify a date + without a year. + validation: + !ruby/object:Provider::Terraform::Validation + function: + 'validation.IntBetween(1, 9999)' + - !ruby/object:Api::Type::Integer + name: month + description: + Month of a year. Must be from 1 to + 12, or 0 to specify a year without + a month and day. + validation: + !ruby/object:Provider::Terraform::Validation + function: + 'validation.IntBetween(0, 12)' + - !ruby/object:Api::Type::Integer + name: day + description: + Day of a month. Must be from 1 to + 31 and valid for the year and + month, or 0 to specify a year by + itself or a year and month where + the day isn't significant. + validation: + !ruby/object:Provider::Terraform::Validation + function: + 'validation.IntBetween(0, 31)' + - !ruby/object:Api::Type::Enum + name: dayOfWeekValue + description: + Represents a day of the week. + values: + - :MONDAY + - :TUESDAY + - :WEDNESDAY + - :THURSDAY + - :FRIDAY + - :SATURDAY + - :SUNDAY + - !ruby/object:Api::Type::NestedObject + name: primitiveTransformation + description: | + Apply the transformation to the entire field. + The `primitive_transformation` block must only contain one argument, corresponding to the type of transformation. + Only one of `primitive_transformation` or `info_type_transformations` must be specified. + properties: + - !ruby/object:Api::Type::NestedObject + name: replaceConfig + description: Replace with a specified value. + properties: + - !ruby/object:Api::Type::NestedObject + name: 'newValue' + required: true + description: | + Replace each input value with a given value. + The `new_value` block must only contain one argument. For example when replacing the contents of a string-type field, only `string_value` should be set. + properties: + - !ruby/object:Api::Type::String + name: integerValue + description: An integer value (int64 format) + - !ruby/object:Api::Type::Double + name: floatValue + description: A float value. + - !ruby/object:Api::Type::String + name: stringValue + description: A string value. + - !ruby/object:Api::Type::Boolean + name: booleanValue + description: A boolean value. + - !ruby/object:Api::Type::String + name: timestampValue + description: | + A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". + validation: + !ruby/object:Provider::Terraform::Validation + function: 'verify.ValidateRFC3339Date' + - !ruby/object:Api::Type::NestedObject + name: timeValue + description: Represents a time of day. 
+ properties: + - !ruby/object:Api::Type::Integer + name: hours + description: | + Hours of day in 24 hour format. Should be from 0 to 23. An API may choose to allow the value "24:00:00" for scenarios like business closing time. + validation: + !ruby/object:Provider::Terraform::Validation + function: 'validation.IntBetween(0, 24)' # "An API may choose to allow the value "24:00:00" for scenarios like business closing time." + - !ruby/object:Api::Type::Integer + name: minutes + description: + Minutes of hour of day. Must be from 0 to + 59. + validation: + !ruby/object:Provider::Terraform::Validation + function: 'validation.IntBetween(0, 59)' + - !ruby/object:Api::Type::Integer + name: seconds + description: + Seconds of minutes of the time. Must + normally be from 0 to 59. An API may allow + the value 60 if it allows leap-seconds. + validation: + !ruby/object:Provider::Terraform::Validation + function: 'validation.IntBetween(0, 60)' # "An API may allow the value 60 if it allows leap-seconds." + - !ruby/object:Api::Type::Integer + name: nanos + description: + Fractions of seconds in nanoseconds. Must be + from 0 to 999,999,999. + validation: + !ruby/object:Provider::Terraform::Validation + function: + 'validation.IntBetween(0, 999999999)' + - !ruby/object:Api::Type::NestedObject + name: dateValue + description: + Represents a whole or partial calendar date. + properties: + - !ruby/object:Api::Type::Integer + name: year + description: + Year of the date. Must be from 1 to 9999, or + 0 to specify a date without a year. + validation: + !ruby/object:Provider::Terraform::Validation + function: 'validation.IntBetween(1, 9999)' + - !ruby/object:Api::Type::Integer + name: month + description: + Month of a year. Must be from 1 to 12, or 0 + to specify a year without a month and day. + validation: + !ruby/object:Provider::Terraform::Validation + function: 'validation.IntBetween(0, 12)' + - !ruby/object:Api::Type::Integer + name: day + description: + Day of a month. Must be from 1 to 31 and + valid for the year and month, or 0 to + specify a year by itself or a year and month + where the day isn't significant. + validation: + !ruby/object:Provider::Terraform::Validation + function: 'validation.IntBetween(0, 31)' + - !ruby/object:Api::Type::Enum + name: dayOfWeekValue + description: Represents a day of the week. + values: + - :MONDAY + - :TUESDAY + - :WEDNESDAY + - :THURSDAY + - :FRIDAY + - :SATURDAY + - :SUNDAY + - !ruby/object:Api::Type::NestedObject + name: redactConfig + description: | + Redact a given value. For example, if used with an InfoTypeTransformation transforming PHONE_NUMBER, and input 'My phone number is 206-555-0123', the output would be 'My phone number is '. + properties: [] # Meant to be an empty object with no properties - see here : https://cloud.google.com/dlp/docs/reference/rest/v2/projects.deidentifyTemplates#redactconfig + # The fields below are necessary to include the "redactConfig" transformation in the payload + # A side-effect is null values when the field is unused, see: https://github.com/hashicorp/terraform-provider-google/issues/13201 + send_empty_value: true + allow_empty_object: true + - !ruby/object:Api::Type::NestedObject + name: characterMaskConfig + description: | + Partially mask a string by replacing a given number of characters with a fixed character. Masking can start from the beginning or end of the string. 
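As a rough illustration of how the `record_transformations` schema above is exercised from Terraform, the fragment below (not part of this diff) sketches a field transformation that replaces a field only when a record-level condition holds. It assumes the enclosing `google_data_loss_prevention_deidentify_template` resource and its `deidentify_config` block; the column names and values are illustrative only, and the age check mirrors the example use case given in the `condition` description.

deidentify_config {
  record_transformations {
    field_transformations {
      fields { name = "details" }   # illustrative column name
      # Only transform records where the age column is greater than 85,
      # per the example use case in the condition description above.
      condition {
        expressions {
          conditions {
            conditions {
              field { name = "age" }
              operator = "GREATER_THAN"
              value { integer_value = "85" }   # integer_value is a string (int64 format)
            }
          }
        }
      }
      primitive_transformation {
        replace_config {
          new_value { string_value = "[REDACTED]" }
        }
      }
    }
  }
}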
This can be used on data of any type (numbers, longs, and so on) and when de-identifying structured data we'll attempt to preserve the original data's type. (This allows you to take a long like 123 and modify it to a string like **3). + properties: + - !ruby/object:Api::Type::String + name: 'maskingCharacter' + description: | + Character to use to mask the sensitive values—for example, * for an alphabetic string such as a name, or 0 for a numeric string + such as ZIP code or credit card number. This string must have a length of 1. If not supplied, this value defaults to * for + strings, and 0 for digits. + - !ruby/object:Api::Type::Integer + name: 'numberToMask' + description: | + Number of characters to mask. If not set, all matching chars will be masked. Skipped characters do not count towards this tally. + If number_to_mask is negative, this denotes inverse masking. Cloud DLP masks all but a number of characters. For example, suppose you have the following values: + - `masking_character` is * + - `number_to_mask` is -4 + - `reverse_order` is false + - `characters_to_ignore` includes - + - Input string is 1234-5678-9012-3456 + + The resulting de-identified string is ****-****-****-3456. Cloud DLP masks all but the last four characters. If reverseOrder is true, all but the first four characters are masked as 1234-****-****-****. + - !ruby/object:Api::Type::Boolean + name: 'reverseOrder' + description: | + Mask characters in reverse order. For example, if masking_character is 0, number_to_mask is 14, and reverse_order is `false`, then the + input string `1234-5678-9012-3456` is masked as `00000000000000-3456`. + - !ruby/object:Api::Type::Array + name: 'charactersToIgnore' + description: | + Characters to skip when doing de-identification of a value. These will be left alone and skipped. + item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::String + name: 'charactersToSkip' + description: | + Characters to not transform when masking. + - !ruby/object:Api::Type::Enum + name: 'commonCharactersToIgnore' + description: | + Common characters to not transform when masking. Useful to avoid removing punctuation. + values: + - :NUMERIC + - :ALPHA_UPPER_CASE + - :ALPHA_LOWER_CASE + - :PUNCTUATION + - :WHITESPACE + - !ruby/object:Api::Type::NestedObject + name: 'cryptoReplaceFfxFpeConfig' + description: | + Replaces an identifier with a surrogate using Format Preserving Encryption (FPE) with the FFX mode of operation; however when used in the `content.reidentify` API method, it serves the opposite function by reversing the surrogate back into the original identifier. The identifier must be encoded as ASCII. For a given crypto key and context, the same identifier will be replaced with the same surrogate. Identifiers must be at least two characters long. In the case that the identifier is the empty string, it will be skipped. See [https://cloud.google.com/dlp/docs/pseudonymization](https://cloud.google.com/dlp/docs/pseudonymization) to learn more. + + Note: We recommend using CryptoDeterministicConfig for all use cases which do not require preserving the input alphabet space and size, plus warrant referential integrity. + properties: + - !ruby/object:Api::Type::NestedObject + name: 'cryptoKey' + description: | + The key used by the encryption algorithm. + properties: + - !ruby/object:Api::Type::NestedObject + name: 'transient' + description: | + Transient crypto key. Use this to have a random data crypto key generated. 
It will be discarded after the request finishes. + properties: + - !ruby/object:Api::Type::String + name: 'name' + required: true + description: | + Name of the key. This is an arbitrary string used to differentiate different keys. A unique key is generated per name: two separate `TransientCryptoKey` protos share the same generated key if their names are the same. When the data crypto key is generated, this name is not used in any way (repeating the api call will result in a different key being generated). + - !ruby/object:Api::Type::NestedObject + name: 'unwrapped' + description: | + Unwrapped crypto key. Using raw keys is prone to security risks due to accidentally leaking the key. Choose another type of key if possible. + properties: + - !ruby/object:Api::Type::String + name: 'key' + required: true + description: | + A 128/192/256 bit key. + + A base64-encoded string. + - !ruby/object:Api::Type::NestedObject + name: 'kmsWrapped' + description: | + KMS wrapped key. + Include to use an existing data crypto key wrapped by KMS. The wrapped key must be a 128-, 192-, or 256-bit key. Authorization requires the following IAM permissions when sending a request to perform a crypto transformation using a KMS-wrapped crypto key: dlp.kms.encrypt + For more information, see [Creating a wrapped key](https://cloud.google.com/dlp/docs/create-wrapped-key). + Note: When you use Cloud KMS for cryptographic operations, [charges apply](https://cloud.google.com/kms/pricing). + properties: + - !ruby/object:Api::Type::String + name: 'wrappedKey' + required: true + description: | + The wrapped data crypto key. + + A base64-encoded string. + - !ruby/object:Api::Type::String + name: 'cryptoKeyName' + required: true + description: | + The resource name of the KMS CryptoKey to use for unwrapping. + - !ruby/object:Api::Type::NestedObject + name: 'context' + description: | + The 'tweak', a context may be used for higher security since the same identifier in two different contexts won't be given the same surrogate. If the context is not set, a default tweak will be used. + + If the context is set but: + + 1. there is no record present when transforming a given value or + 2. the field is not present when transforming a given value, + + a default tweak will be used. + + Note that case (1) is expected when an `InfoTypeTransformation` is applied to both structured and non-structured `ContentItem`s. Currently, the referenced field may be of value type integer or string. + + The tweak is constructed as a sequence of bytes in big endian byte order such that: + + * a 64 bit integer is encoded followed by a single byte of value 1 + * a string is encoded in UTF-8 format followed by a single byte of value 2 + properties: + - !ruby/object:Api::Type::String + name: 'name' + description: | + Name describing the field. + - !ruby/object:Api::Type::NestedObject + name: 'surrogateInfoType' + description: | + The custom infoType to annotate the surrogate with. This annotation will be applied to the surrogate by prefixing it with the name of the custom infoType followed by the number of characters comprising the surrogate. 
The following scheme defines the format: info\_type\_name(surrogate\_character\_count):surrogate + + For example, if the name of custom infoType is 'MY\_TOKEN\_INFO\_TYPE' and the surrogate is 'abc', the full replacement value will be: 'MY\_TOKEN\_INFO\_TYPE(3):abc' + + This annotation identifies the surrogate when inspecting content using the custom infoType [`SurrogateType`](https://cloud.google.com/dlp/docs/reference/rest/v2/InspectConfig#surrogatetype). This facilitates reversal of the surrogate when it occurs in free text. + + In order for inspection to work properly, the name of this infoType must not occur naturally anywhere in your data; otherwise, inspection may find a surrogate that does not correspond to an actual identifier. Therefore, choose your custom infoType name carefully after considering what your data looks like. One way to select a name that has a high chance of yielding reliable detection is to include one or more unicode characters that are highly improbable to exist in your data. For example, assuming your data is entered from a regular ASCII keyboard, the symbol with the hex code point 29DD might be used like so: ⧝MY\_TOKEN\_TYPE + properties: + - !ruby/object:Api::Type::String + name: 'name' + description: | + Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at [https://cloud.google.com/dlp/docs/infotypes-reference](https://cloud.google.com/dlp/docs/infotypes-reference) when specifying a built-in type. When sending Cloud DLP results to Data Catalog, infoType names should conform to the pattern `[A-Za-z0-9$-_]{1,64}`. + - !ruby/object:Api::Type::String + name: 'version' + description: | + Optional version name for this InfoType. + - !ruby/object:Api::Type::Enum + name: 'commonAlphabet' + description: | + Common alphabets. + values: + - :FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED # Unused. + - :NUMERIC # [0-9] (radix of 10) + - :HEXADECIMAL # [0-9A-F] (radix of 16) + - :UPPER_CASE_ALPHA_NUMERIC # [0-9A-Z] (radix of 36) + - :ALPHA_NUMERIC # [0-9A-Za-z] (radix of 62) + - !ruby/object:Api::Type::String + name: 'customAlphabet' + description: | + This is supported by mapping these to the alphanumeric characters that the FFX mode natively supports. This happens before/after encryption/decryption. Each character listed must appear only once. Number of characters must be in the range \[2, 95\]. This must be encoded as ASCII. The order of characters does not matter. The full list of allowed characters is: + + ``0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz ~`!@#$%^&*()_-+={[}]|:;"'<,>.?/`` + - !ruby/object:Api::Type::Integer + name: 'radix' + description: | + The native way to select the alphabet. Must be in the range \[2, 95\]. + - !ruby/object:Api::Type::NestedObject + name: fixedSizeBucketingConfig + description: | + Buckets values based on fixed size ranges. The Bucketing transformation can provide all of this functionality, but requires more configuration. This message is provided as a convenience to the user for simple bucketing strategies. + + The transformed value will be a hyphenated string of {lower_bound}-{upper_bound}. For example, if lower_bound = 10 and upper_bound = 20, all values that are within this bucket will be replaced with "10-20". + + This can be used on data of type: double, long. 
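A comparable illustrative sketch for the `crypto_replace_ffx_fpe_config` transformation described above, assuming the same `deidentify_config` / `record_transformations` / `field_transformations` nesting as the earlier fragment; the transient key name is a placeholder and the surrogate name reuses the one from the description text.

primitive_transformation {
  crypto_replace_ffx_fpe_config {
    crypto_key {
      transient { name = "example-transient-key" }   # placeholder key name
    }
    common_alphabet     = "ALPHA_NUMERIC"
    surrogate_info_type { name = "MY_TOKEN_INFO_TYPE" }
  }
}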
+ + If the bound Value type differs from the type of data being transformed, we will first attempt converting the type of the data to be transformed to match the type of the bound before comparing. + + See https://cloud.google.com/dlp/docs/concepts-bucketing to learn more. + properties: + - !ruby/object:Api::Type::NestedObject + name: lowerBound + description: | + Lower bound value of buckets. + All values less than lower_bound are grouped together into a single bucket; for example if lower_bound = 10, then all values less than 10 are replaced with the value "-10". + The `lower_bound` block must only contain one argument. See the `fixed_size_bucketing_config` block description for more information about choosing a data type. + properties: + - !ruby/object:Api::Type::String + name: integerValue + description: An integer value (int64 format) + - !ruby/object:Api::Type::Double + name: floatValue + description: A float value. + - !ruby/object:Api::Type::String + name: stringValue + description: A string value. + - !ruby/object:Api::Type::Boolean + name: booleanValue + description: A boolean value. + - !ruby/object:Api::Type::String + name: timestampValue + description: | + A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". + - !ruby/object:Api::Type::NestedObject + name: timeValue + description: Represents a time of day. + properties: + - !ruby/object:Api::Type::Integer + name: hours + description: | + Hours of day in 24 hour format. Should be from 0 to 23. An API may choose to allow the value "24:00:00" for scenarios like business closing time. + - !ruby/object:Api::Type::Integer + name: minutes + description: + Minutes of hour of day. Must be from 0 to + 59. + - !ruby/object:Api::Type::Integer + name: seconds + description: + Seconds of minutes of the time. Must + normally be from 0 to 59. An API may allow + the value 60 if it allows leap-seconds. + - !ruby/object:Api::Type::Integer + name: nanos + description: + Fractions of seconds in nanoseconds. Must be + from 0 to 999,999,999. + - !ruby/object:Api::Type::NestedObject + name: dateValue + description: + Represents a whole or partial calendar date. + properties: + - !ruby/object:Api::Type::Integer + name: year + description: + Year of the date. Must be from 1 to 9999, or + 0 to specify a date without a year. + - !ruby/object:Api::Type::Integer + name: month + description: + Month of a year. Must be from 1 to 12, or 0 + to specify a year without a month and day. + - !ruby/object:Api::Type::Integer + name: day + description: + Day of a month. Must be from 1 to 31 and + valid for the year and month, or 0 to + specify a year by itself or a year and month + where the day isn't significant. + - !ruby/object:Api::Type::Enum + name: dayOfWeekValue + description: Represents a day of the week. + values: + - :MONDAY + - :TUESDAY + - :WEDNESDAY + - :THURSDAY + - :FRIDAY + - :SATURDAY + - :SUNDAY + required: true + - !ruby/object:Api::Type::NestedObject + name: upperBound + description: | + Upper bound value of buckets. + All values greater than upper_bound are grouped together into a single bucket; for example if upper_bound = 89, then all values greater than 89 are replaced with the value "89+". + The `upper_bound` block must only contain one argument. See the `fixed_size_bucketing_config` block description for more information about choosing a data type. 
+ required: true + properties: + - !ruby/object:Api::Type::String + name: integerValue + description: An integer value (int64 format) + - !ruby/object:Api::Type::Double + name: floatValue + description: A float value. + - !ruby/object:Api::Type::String + name: stringValue + description: A string value. + - !ruby/object:Api::Type::Boolean + name: booleanValue + description: A boolean value. + - !ruby/object:Api::Type::String + name: timestampValue + description: | + A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". + - !ruby/object:Api::Type::NestedObject + name: timeValue + description: Represents a time of day. + properties: + - !ruby/object:Api::Type::Integer + name: hours + description: | + Hours of day in 24 hour format. Should be from 0 to 23. An API may choose to allow the value "24:00:00" for scenarios like business closing time. + - !ruby/object:Api::Type::Integer + name: minutes + description: + Minutes of hour of day. Must be from 0 to + 59. + - !ruby/object:Api::Type::Integer + name: seconds + description: + Seconds of minutes of the time. Must + normally be from 0 to 59. An API may allow + the value 60 if it allows leap-seconds. + - !ruby/object:Api::Type::Integer + name: nanos + description: + Fractions of seconds in nanoseconds. Must be + from 0 to 999,999,999. + - !ruby/object:Api::Type::NestedObject + name: dateValue + description: + Represents a whole or partial calendar date. + properties: + - !ruby/object:Api::Type::Integer + name: year + description: + Year of the date. Must be from 1 to 9999, or + 0 to specify a date without a year. + - !ruby/object:Api::Type::Integer + name: month + description: + Month of a year. Must be from 1 to 12, or 0 + to specify a year without a month and day. + - !ruby/object:Api::Type::Integer + name: day + description: + Day of a month. Must be from 1 to 31 and + valid for the year and month, or 0 to + specify a year by itself or a year and month + where the day isn't significant. + - !ruby/object:Api::Type::Enum + name: dayOfWeekValue + description: Represents a day of the week. + values: + - :MONDAY + - :TUESDAY + - :WEDNESDAY + - :THURSDAY + - :FRIDAY + - :SATURDAY + - :SUNDAY + - !ruby/object:Api::Type::Double + name: bucketSize + description: | + Size of each bucket (except for minimum and maximum buckets). + So if lower_bound = 10, upper_bound = 89, and bucketSize = 10, then the following buckets would be used: -10, 10-20, 20-30, 30-40, 40-50, 50-60, 60-70, 70-80, 80-89, 89+. + Precision up to 2 decimals works. + required: true + - !ruby/object:Api::Type::NestedObject + name: bucketingConfig + description: | + Generalization function that buckets values based on ranges. The ranges and replacement values are dynamically provided by the user for custom behavior, such as 1-30 -> LOW 31-65 -> MEDIUM 66-100 -> HIGH + This can be used on data of type: number, long, string, timestamp. + If the provided value type differs from the type of data being transformed, we will first attempt converting the type of the data to be transformed to match the type of the bound before comparing. + See https://cloud.google.com/dlp/docs/concepts-bucketing to learn more. + properties: + - !ruby/object:Api::Type::Array + name: buckets + description: | + Set of buckets. Ranges must be non-overlapping. + Bucket is represented as a range, along with replacement values. 
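The `fixed_size_bucketing_config` block defined above could be sketched in Terraform roughly as follows (an illustrative fragment under the same `field_transformations` assumptions as before); the bounds and bucket size reuse the 10 / 89 / 10 example from the `bucket_size` description, and `integer_value` is a string because the schema declares it in int64 string format.

primitive_transformation {
  fixed_size_bucketing_config {
    lower_bound { integer_value = "10" }
    upper_bound { integer_value = "89" }
    bucket_size = 10
  }
}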
+ item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::NestedObject + name: min + description: | + Lower bound of the range, inclusive. Type should be the same as max if used. + The `min` block must only contain one argument. See the `bucketing_config` block description for more information about choosing a data type. + properties: + - !ruby/object:Api::Type::String + name: integerValue + description: An integer value (int64 format) + - !ruby/object:Api::Type::Double + name: floatValue + description: A float value. + - !ruby/object:Api::Type::String + name: stringValue + description: A string value. + - !ruby/object:Api::Type::Boolean + name: booleanValue + description: A boolean value. + - !ruby/object:Api::Type::String + name: timestampValue + description: | + A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". + - !ruby/object:Api::Type::NestedObject + name: timeValue + description: Represents a time of day. + properties: + - !ruby/object:Api::Type::Integer + name: hours + description: | + Hours of day in 24 hour format. Should be from 0 to 23. An API may choose to allow the value "24:00:00" for scenarios like business closing time. + - !ruby/object:Api::Type::Integer + name: minutes + description: + Minutes of hour of day. Must be from 0 + to 59. + - !ruby/object:Api::Type::Integer + name: seconds + description: + Seconds of minutes of the time. Must + normally be from 0 to 59. An API may + allow the value 60 if it allows + leap-seconds. + - !ruby/object:Api::Type::Integer + name: nanos + description: + Fractions of seconds in nanoseconds. + Must be from 0 to 999,999,999. + - !ruby/object:Api::Type::NestedObject + name: dateValue + description: + Represents a whole or partial calendar + date. + properties: + - !ruby/object:Api::Type::Integer + name: year + description: + Year of the date. Must be from 1 to + 9999, or 0 to specify a date without a + year. + - !ruby/object:Api::Type::Integer + name: month + description: + Month of a year. Must be from 1 to 12, + or 0 to specify a year without a month + and day. + - !ruby/object:Api::Type::Integer + name: day + description: + Day of a month. Must be from 1 to 31 + and valid for the year and month, or 0 + to specify a year by itself or a year + and month where the day isn't + significant. + - !ruby/object:Api::Type::Enum + name: dayOfWeekValue + description: Represents a day of the week. + values: + - :MONDAY + - :TUESDAY + - :WEDNESDAY + - :THURSDAY + - :FRIDAY + - :SATURDAY + - :SUNDAY + - !ruby/object:Api::Type::NestedObject + name: max + description: | + Upper bound of the range, exclusive; type must match min. + The `max` block must only contain one argument. See the `bucketing_config` block description for more information about choosing a data type. + properties: + - !ruby/object:Api::Type::String + name: integerValue + description: An integer value (int64 format) + - !ruby/object:Api::Type::Double + name: floatValue + description: A float value. + - !ruby/object:Api::Type::String + name: stringValue + description: A string value. + - !ruby/object:Api::Type::Boolean + name: booleanValue + description: A boolean value. + - !ruby/object:Api::Type::String + name: timestampValue + description: | + A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". 
+ - !ruby/object:Api::Type::NestedObject + name: timeValue + description: Represents a time of day. + properties: + - !ruby/object:Api::Type::Integer + name: hours + description: | + Hours of day in 24 hour format. Should be from 0 to 23. An API may choose to allow the value "24:00:00" for scenarios like business closing time. + - !ruby/object:Api::Type::Integer + name: minutes + description: + Minutes of hour of day. Must be from 0 + to 59. + - !ruby/object:Api::Type::Integer + name: seconds + description: + Seconds of minutes of the time. Must + normally be from 0 to 59. An API may + allow the value 60 if it allows + leap-seconds. + - !ruby/object:Api::Type::Integer + name: nanos + description: + Fractions of seconds in nanoseconds. + Must be from 0 to 999,999,999. + - !ruby/object:Api::Type::NestedObject + name: dateValue + description: + Represents a whole or partial calendar + date. + properties: + - !ruby/object:Api::Type::Integer + name: year + description: + Year of the date. Must be from 1 to + 9999, or 0 to specify a date without a + year. + - !ruby/object:Api::Type::Integer + name: month + description: + Month of a year. Must be from 1 to 12, + or 0 to specify a year without a month + and day. + - !ruby/object:Api::Type::Integer + name: day + description: + Day of a month. Must be from 1 to 31 + and valid for the year and month, or 0 + to specify a year by itself or a year + and month where the day isn't + significant. + - !ruby/object:Api::Type::Enum + name: dayOfWeekValue + description: Represents a day of the week. + values: + - :MONDAY + - :TUESDAY + - :WEDNESDAY + - :THURSDAY + - :FRIDAY + - :SATURDAY + - :SUNDAY + - !ruby/object:Api::Type::NestedObject + name: replacementValue + required: true + description: | + Replacement value for this bucket. + The `replacement_value` block must only contain one argument. + properties: + - !ruby/object:Api::Type::String + name: integerValue + description: An integer value (int64 format) + - !ruby/object:Api::Type::Double + name: floatValue + description: A float value. + - !ruby/object:Api::Type::String + name: stringValue + description: A string value. + - !ruby/object:Api::Type::Boolean + name: booleanValue + description: A boolean value. + - !ruby/object:Api::Type::String + name: timestampValue + description: | + A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". + - !ruby/object:Api::Type::NestedObject + name: timeValue + description: Represents a time of day. + properties: + - !ruby/object:Api::Type::Integer + name: hours + description: | + Hours of day in 24 hour format. Should be from 0 to 23. An API may choose to allow the value "24:00:00" for scenarios like business closing time. + - !ruby/object:Api::Type::Integer + name: minutes + description: + Minutes of hour of day. Must be from 0 + to 59. + - !ruby/object:Api::Type::Integer + name: seconds + description: + Seconds of minutes of the time. Must + normally be from 0 to 59. An API may + allow the value 60 if it allows + leap-seconds. + - !ruby/object:Api::Type::Integer + name: nanos + description: + Fractions of seconds in nanoseconds. + Must be from 0 to 999,999,999. + - !ruby/object:Api::Type::NestedObject + name: dateValue + description: + Represents a whole or partial calendar + date. + properties: + - !ruby/object:Api::Type::Integer + name: year + description: + Year of the date. Must be from 1 to + 9999, or 0 to specify a date without a + year. 
+ - !ruby/object:Api::Type::Integer + name: month + description: + Month of a year. Must be from 1 to 12, + or 0 to specify a year without a month + and day. + - !ruby/object:Api::Type::Integer + name: day + description: + Day of a month. Must be from 1 to 31 + and valid for the year and month, or 0 + to specify a year by itself or a year + and month where the day isn't + significant. + - !ruby/object:Api::Type::Enum + name: dayOfWeekValue + description: Represents a day of the week. + values: + - :MONDAY + - :TUESDAY + - :WEDNESDAY + - :THURSDAY + - :FRIDAY + - :SATURDAY + - :SUNDAY + - !ruby/object:Api::Type::NestedObject + name: timePartConfig + description: + For use with Date, Timestamp, and TimeOfDay, extract or + preserve a portion of the value. + properties: + - !ruby/object:Api::Type::Enum + name: partToExtract + description: The part of the time to keep. + values: + - :YEAR # [0-9999] + - :MONTH # [1-12] + - :DAY_OF_MONTH # [1-31] + - :DAY_OF_WEEK # [1-7] + - :WEEK_OF_YEAR # [1-53] + - :HOUR_OF_DAY # [0-23] + - !ruby/object:Api::Type::NestedObject + name: cryptoHashConfig + description: | + Pseudonymization method that generates surrogates via cryptographic hashing. Uses SHA-256. The key size must be either 32 or 64 bytes. + Outputs a base64 encoded representation of the hashed output (for example, L7k0BHmF1ha5U3NfGykjro4xWi1MPVQPjhMAZbSV9mM=). + Currently, only string and integer values can be hashed. + See https://cloud.google.com/dlp/docs/pseudonymization to learn more. + properties: + - !ruby/object:Api::Type::NestedObject + name: 'cryptoKey' + description: | + The key used by the encryption function. + properties: + - !ruby/object:Api::Type::NestedObject + name: 'transient' + description: | + Transient crypto key. Use this to have a random data crypto key generated. It will be discarded after the request finishes. + properties: + - !ruby/object:Api::Type::String + name: 'name' + required: true + description: | + Name of the key. This is an arbitrary string used to differentiate different keys. A unique key is generated per name: two separate `TransientCryptoKey` protos share the same generated key if their names are the same. When the data crypto key is generated, this name is not used in any way (repeating the api call will result in a different key being generated). + - !ruby/object:Api::Type::NestedObject + name: 'unwrapped' + description: | + Unwrapped crypto key. Using raw keys is prone to security risks due to accidentally leaking the key. Choose another type of key if possible. + properties: + - !ruby/object:Api::Type::String + name: 'key' + required: true + description: | + A 128/192/256 bit key. + + A base64-encoded string. + - !ruby/object:Api::Type::NestedObject + name: 'kmsWrapped' + description: | + KMS wrapped key. + Include to use an existing data crypto key wrapped by KMS. The wrapped key must be a 128-, 192-, or 256-bit key. Authorization requires the following IAM permissions when sending a request to perform a crypto transformation using a KMS-wrapped crypto key: dlp.kms.encrypt + For more information, see [Creating a wrapped key](https://cloud.google.com/dlp/docs/create-wrapped-key). + Note: When you use Cloud KMS for cryptographic operations, [charges apply](https://cloud.google.com/kms/pricing). + properties: + - !ruby/object:Api::Type::String + name: 'wrappedKey' + required: true + description: | + The wrapped data crypto key. + + A base64-encoded string. 
+ - !ruby/object:Api::Type::String + name: 'cryptoKeyName' + required: true + description: | + The resource name of the KMS CryptoKey to use for unwrapping. + - !ruby/object:Api::Type::NestedObject + name: dateShiftConfig + description: | + Shifts dates by random number of days, with option to be consistent for the same context. See https://cloud.google.com/dlp/docs/concepts-date-shifting to learn more. + properties: + - !ruby/object:Api::Type::Integer + name: upperBoundDays + required: true + description: | + Range of shift in days. Actual shift will be selected at random within this range (inclusive ends). Negative means shift to earlier in time. Must not be more than 365250 days (1000 years) each direction. + + For example, 3 means shift date to at most 3 days into the future. + - !ruby/object:Api::Type::Integer + name: lowerBoundDays + required: true + description: | + For example, -5 means shift date to at most 5 days back in the past. + - !ruby/object:Api::Type::NestedObject + name: 'context' + description: | + Points to the field that contains the context, for example, an entity id. + If set, must also set cryptoKey. If set, shift will be consistent for the given context. + properties: + - !ruby/object:Api::Type::String + name: 'name' + description: | + Name describing the field. + - !ruby/object:Api::Type::NestedObject + name: 'cryptoKey' + description: | + Causes the shift to be computed based on this key and the context. This results in the same shift for the same context and cryptoKey. If set, must also set context. Can only be applied to table items. + properties: + - !ruby/object:Api::Type::NestedObject + name: 'transient' + description: | + Transient crypto key. Use this to have a random data crypto key generated. It will be discarded after the request finishes. + properties: + - !ruby/object:Api::Type::String + name: 'name' + required: true + description: | + Name of the key. This is an arbitrary string used to differentiate different keys. A unique key is generated per name: two separate `TransientCryptoKey` protos share the same generated key if their names are the same. When the data crypto key is generated, this name is not used in any way (repeating the api call will result in a different key being generated). + - !ruby/object:Api::Type::NestedObject + name: 'unwrapped' + description: | + Unwrapped crypto key. Using raw keys is prone to security risks due to accidentally leaking the key. Choose another type of key if possible. + properties: + - !ruby/object:Api::Type::String + name: 'key' + required: true + description: | + A 128/192/256 bit key. + + A base64-encoded string. + - !ruby/object:Api::Type::NestedObject + name: 'kmsWrapped' + description: | + KMS wrapped key. + Include to use an existing data crypto key wrapped by KMS. The wrapped key must be a 128-, 192-, or 256-bit key. Authorization requires the following IAM permissions when sending a request to perform a crypto transformation using a KMS-wrapped crypto key: dlp.kms.encrypt + For more information, see [Creating a wrapped key](https://cloud.google.com/dlp/docs/create-wrapped-key). + Note: When you use Cloud KMS for cryptographic operations, [charges apply](https://cloud.google.com/kms/pricing). + properties: + - !ruby/object:Api::Type::String + name: 'wrappedKey' + required: true + description: | + The wrapped data crypto key. + + A base64-encoded string. 
+ - !ruby/object:Api::Type::String + name: 'cryptoKeyName' + required: true + description: | + The resource name of the KMS CryptoKey to use for unwrapping. + - !ruby/object:Api::Type::NestedObject + name: cryptoDeterministicConfig + description: | + Pseudonymization method that generates deterministic encryption for the given input. Outputs a base64 encoded representation of the encrypted output. Uses AES-SIV based on the RFC [https://tools.ietf.org/html/rfc5297](https://tools.ietf.org/html/rfc5297). + properties: + - !ruby/object:Api::Type::NestedObject + name: 'cryptoKey' + description: | + The key used by the encryption function. For deterministic encryption using AES-SIV, the provided key is internally expanded to 64 bytes prior to use. + properties: + - !ruby/object:Api::Type::NestedObject + name: 'transient' + description: | + Transient crypto key. Use this to have a random data crypto key generated. It will be discarded after the request finishes. + properties: + - !ruby/object:Api::Type::String + name: 'name' + required: true + description: | + Name of the key. This is an arbitrary string used to differentiate different keys. A unique key is generated per name: two separate `TransientCryptoKey` protos share the same generated key if their names are the same. When the data crypto key is generated, this name is not used in any way (repeating the api call will result in a different key being generated). + - !ruby/object:Api::Type::NestedObject + name: 'unwrapped' + description: | + Unwrapped crypto key. Using raw keys is prone to security risks due to accidentally leaking the key. Choose another type of key if possible. + properties: + - !ruby/object:Api::Type::String + name: 'key' + required: true + description: | + A 128/192/256 bit key. + + A base64-encoded string. + - !ruby/object:Api::Type::NestedObject + name: 'kmsWrapped' + description: | + KMS wrapped key. + Include to use an existing data crypto key wrapped by KMS. The wrapped key must be a 128-, 192-, or 256-bit key. Authorization requires the following IAM permissions when sending a request to perform a crypto transformation using a KMS-wrapped crypto key: dlp.kms.encrypt + For more information, see [Creating a wrapped key](https://cloud.google.com/dlp/docs/create-wrapped-key). + Note: When you use Cloud KMS for cryptographic operations, [charges apply](https://cloud.google.com/kms/pricing). + properties: + - !ruby/object:Api::Type::String + name: 'wrappedKey' + required: true + description: | + The wrapped data crypto key. + + A base64-encoded string. + - !ruby/object:Api::Type::String + name: 'cryptoKeyName' + required: true + description: | + The resource name of the KMS CryptoKey to use for unwrapping. + - !ruby/object:Api::Type::NestedObject + name: 'surrogateInfoType' + description: | + The custom info type to annotate the surrogate with. This annotation will be applied to the surrogate by prefixing it with the name of the custom info type followed by the number of characters comprising the surrogate. The following scheme defines the format: {info type name}({surrogate character count}):{surrogate} + + For example, if the name of custom info type is 'MY\_TOKEN\_INFO\_TYPE' and the surrogate is 'abc', the full replacement value will be: 'MY\_TOKEN\_INFO\_TYPE(3):abc' + + This annotation identifies the surrogate when inspecting content using the custom info type 'Surrogate'. This facilitates reversal of the surrogate when it occurs in free text. 
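For the `crypto_deterministic_config` transformation above, a minimal illustrative fragment might look like the following, under the same assumptions as the earlier sketches; the wrapped key and KMS key name are placeholders, and the surrogate name again comes from the description text.

primitive_transformation {
  crypto_deterministic_config {
    crypto_key {
      kms_wrapped {
        wrapped_key     = "BASE64_WRAPPED_KEY_PLACEHOLDER"   # base64-encoded wrapped key (placeholder)
        crypto_key_name = "projects/my-project/locations/global/keyRings/my-ring/cryptoKeys/my-key"   # placeholder
      }
    }
    surrogate_info_type { name = "MY_TOKEN_INFO_TYPE" }
  }
}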
+ + Note: For record transformations where the entire cell in a table is being transformed, surrogates are not mandatory. Surrogates are used to denote the location of the token and are necessary for re-identification in free form text. + + In order for inspection to work properly, the name of this info type must not occur naturally anywhere in your data; otherwise, inspection may either + + * reverse a surrogate that does not correspond to an actual identifier + * be unable to parse the surrogate and result in an error + + Therefore, choose your custom info type name carefully after considering what your data looks like. One way to select a name that has a high chance of yielding reliable detection is to include one or more unicode characters that are highly improbable to exist in your data. For example, assuming your data is entered from a regular ASCII keyboard, the symbol with the hex code point 29DD might be used like so: ⧝MY\_TOKEN\_TYPE. + properties: + - !ruby/object:Api::Type::String + name: 'name' + description: | + Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at [https://cloud.google.com/dlp/docs/infotypes-reference](https://cloud.google.com/dlp/docs/infotypes-reference) when specifying a built-in type. When sending Cloud DLP results to Data Catalog, infoType names should conform to the pattern `[A-Za-z0-9$-_]{1,64}`. + - !ruby/object:Api::Type::String + name: 'version' + description: | + Optional version name for this InfoType. + - !ruby/object:Api::Type::NestedObject + name: 'context' + description: | + A context may be used for higher security and maintaining referential integrity such that the same identifier in two different contexts will be given a distinct surrogate. The context is appended to plaintext value being encrypted. On decryption the provided context is validated against the value used during encryption. If a context was provided during encryption, same context must be provided during decryption as well. + + If the context is not set, plaintext would be used as is for encryption. If the context is set but: + + 1. there is no record present when transforming a given value or + 2. the field is not present when transforming a given value, + + plaintext would be used as is for encryption. + + Note that case (1) is expected when an InfoTypeTransformation is applied to both structured and unstructured ContentItems. + properties: + - !ruby/object:Api::Type::String + name: 'name' description: | Name describing the field. + - !ruby/object:Api::Type::NestedObject + name: replaceDictionaryConfig + description: + Replace with a value randomly drawn (with replacement) + from a dictionary. + properties: - !ruby/object:Api::Type::NestedObject - name: 'cryptoKey' + name: wordList description: | - Causes the shift to be computed based on this key and the context. This results in the same shift for the same context and cryptoKey. If set, must also set context. Can only be applied to table items. + A list of words to select from for random replacement. The [limits](https://cloud.google.com/dlp/limits) page contains details about the size limits of dictionaries. properties: - - !ruby/object:Api::Type::NestedObject - name: 'transient' + - !ruby/object:Api::Type::Array + name: words + required: true description: | - Transient crypto key. Use this to have a random data crypto key generated. It will be discarded after the request finishes. + Words or phrases defining the dictionary. 
The dictionary must contain at least one phrase and every phrase must contain at least 2 characters that are letters or digits. + item_type: Api::Type::String + # infoTypeTransformations inside the recordTransformations + - !ruby/object:Api::Type::NestedObject + name: infoTypeTransformations + description: | + Treat the contents of the field as free text, and selectively transform content that matches an InfoType. + Only one of `primitive_transformation` or `info_type_transformations` must be specified. + properties: + - !ruby/object:Api::Type::Array + name: transformations + required: true + description: | + Transformation for each infoType. Cannot specify more than one for a given infoType. + item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::Array + name: infoTypes + description: | + InfoTypes to apply the transformation to. Leaving this empty will apply the transformation to apply to + all findings that correspond to infoTypes that were requested in InspectConfig. + item_type: !ruby/object:Api::Type::NestedObject properties: - !ruby/object:Api::Type::String - name: 'name' + name: name required: true description: | - Name of the key. This is an arbitrary string used to differentiate different keys. A unique key is generated per name: two separate `TransientCryptoKey` protos share the same generated key if their names are the same. When the data crypto key is generated, this name is not used in any way (repeating the api call will result in a different key being generated). - - !ruby/object:Api::Type::NestedObject - name: 'unwrapped' - description: | - Unwrapped crypto key. Using raw keys is prone to security risks due to accidentally leaking the key. Choose another type of key if possible. - properties: + Name of the information type. - !ruby/object:Api::Type::String - name: 'key' - required: true + name: version description: | - A 128/192/256 bit key. + Version name for this InfoType. + - !ruby/object:Api::Type::NestedObject + name: primitiveTransformation + required: true + description: | + Apply the transformation to the entire field. + The `primitive_transformation` block must only contain one argument, corresponding to the type of transformation. + properties: + - !ruby/object:Api::Type::NestedObject + name: replaceConfig + description: | + Replace each input value with a given value. + properties: + - !ruby/object:Api::Type::NestedObject + name: newValue + required: true + description: | + Replace each input value with a given value. + The `new_value` block must only contain one argument. For example when replacing the contents of a string-type field, only `string_value` should be set. + properties: + - !ruby/object:Api::Type::String + name: integerValue + description: + An integer value (int64 format) + - !ruby/object:Api::Type::Double + name: floatValue + description: A float value. + - !ruby/object:Api::Type::String + name: stringValue + description: A string value. + - !ruby/object:Api::Type::Boolean + name: booleanValue + description: A boolean value. + - !ruby/object:Api::Type::String + name: timestampValue + description: | + A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. + Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". + validation: + !ruby/object:Provider::Terraform::Validation + function: 'verify.ValidateRFC3339Date' + - !ruby/object:Api::Type::NestedObject + name: timeValue + description: Represents a time of day. 
+ properties: + - !ruby/object:Api::Type::Integer + name: hours + description: | + Hours of day in 24 hour format. Should be from 0 to 23. An API may choose to allow the value "24:00:00" for scenarios like business closing time. + validation: + !ruby/object:Provider::Terraform::Validation + function: + 'validation.IntBetween(0, 24)' # "An API may choose to allow the value "24:00:00" for scenarios like business closing time." + - !ruby/object:Api::Type::Integer + name: minutes + description: | + Minutes of hour of day. Must be from 0 to 59. + validation: + !ruby/object:Provider::Terraform::Validation + function: + 'validation.IntBetween(0, 59)' + - !ruby/object:Api::Type::Integer + name: seconds + description: | + Seconds of minutes of the time. Must normally be from 0 to 59. An API may allow the value 60 if it allows leap-seconds. + validation: + !ruby/object:Provider::Terraform::Validation + function: + 'validation.IntBetween(0, 60)' # "An API may allow the value 60 if it allows leap-seconds." + - !ruby/object:Api::Type::Integer + name: nanos + description: | + Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999. + validation: + !ruby/object:Provider::Terraform::Validation + function: + 'validation.IntBetween(0, + 999999999)' + - !ruby/object:Api::Type::NestedObject + name: dateValue + description: + Represents a whole or partial calendar + date. + properties: + - !ruby/object:Api::Type::Integer + name: year + description: | + Year of the date. Must be from 1 to 9999, or 0 to specify a date without a year. + validation: + !ruby/object:Provider::Terraform::Validation + function: + 'validation.IntBetween(1, 9999)' + - !ruby/object:Api::Type::Integer + name: month + description: | + Month of a year. Must be from 1 to 12, or 0 to specify a year without a month and day. + validation: + !ruby/object:Provider::Terraform::Validation + function: + 'validation.IntBetween(0, 12)' + - !ruby/object:Api::Type::Integer + name: day + description: | + Day of a month. Must be from 1 to 31 and valid for the year and month, or 0 to specify a year by itself or a year and month where the day isn't significant. + validation: + !ruby/object:Provider::Terraform::Validation + function: + 'validation.IntBetween(0, 31)' + - !ruby/object:Api::Type::Enum + name: dayOfWeekValue + description: + Represents a day of the week. + values: + - :MONDAY + - :TUESDAY + - :WEDNESDAY + - :THURSDAY + - :FRIDAY + - :SATURDAY + - :SUNDAY + - !ruby/object:Api::Type::NestedObject + name: redactConfig + description: | + Redact a given value. For example, if used with an InfoTypeTransformation transforming PHONE_NUMBER, and input 'My phone number is 206-555-0123', the output would be 'My phone number is '. + properties: [] # Meant to be an empty object with no properties - see here : https://cloud.google.com/dlp/docs/reference/rest/v2/projects.deidentifyTemplates#redactconfig + # The fields below are necessary to include the "redactConfig" transformation in the payload + # A side-effect is null values when the field is unused, see: https://github.com/hashicorp/terraform-provider-google/issues/13201 + send_empty_value: true + allow_empty_object: true + - !ruby/object:Api::Type::NestedObject + name: characterMaskConfig + description: | + Partially mask a string by replacing a given number of characters with a fixed character. Masking can start from the beginning or end of the string. 
This can be used on data of any type (numbers, longs, and so on) and when de-identifying structured data we'll attempt to preserve the original data's type. (This allows you to take a long like 123 and modify it to a string like **3). + properties: + - !ruby/object:Api::Type::String + name: maskingCharacter + description: | + Character to use to mask the sensitive values—for example, * for an alphabetic string such as a name, or 0 for a numeric string + such as ZIP code or credit card number. This string must have a length of 1. If not supplied, this value defaults to * for + strings, and 0 for digits. + - !ruby/object:Api::Type::Integer + name: numberToMask + description: | + Number of characters to mask. If not set, all matching chars will be masked. Skipped characters do not count towards this tally. + If number_to_mask is negative, this denotes inverse masking. Cloud DLP masks all but a number of characters. For example, suppose you have the following values: + - `masking_character` is * + - `number_to_mask` is -4 + - `reverse_order` is false + - `characters_to_ignore` includes - + - Input string is 1234-5678-9012-3456 + + The resulting de-identified string is ****-****-****-3456. Cloud DLP masks all but the last four characters. If reverseOrder is true, all but the first four characters are masked as 1234-****-****-****. + - !ruby/object:Api::Type::Boolean + name: reverseOrder + description: | + Mask characters in reverse order. For example, if masking_character is 0, number_to_mask is 14, and reverse_order is `false`, then the + input string `1234-5678-9012-3456` is masked as `00000000000000-3456`. + - !ruby/object:Api::Type::Array + name: charactersToIgnore + description: | + Characters to skip when doing de-identification of a value. These will be left alone and skipped. + item_type: + !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::String + name: charactersToSkip + description: | + Characters to not transform when masking. Only one of this or `common_characters_to_ignore` must be specified. + - !ruby/object:Api::Type::Enum + name: commonCharactersToIgnore + description: | + Common characters to not transform when masking. Useful to avoid removing punctuation. Only one of this or `characters_to_skip` must be specified. + values: + - :NUMERIC + - :ALPHA_UPPER_CASE + - :ALPHA_LOWER_CASE + - :PUNCTUATION + - :WHITESPACE + - !ruby/object:Api::Type::NestedObject + name: cryptoReplaceFfxFpeConfig + description: | + Replaces an identifier with a surrogate using Format Preserving Encryption (FPE) with the FFX mode of operation; however when used in the `content.reidentify` API method, it serves the opposite function by reversing the surrogate back into the original identifier. The identifier must be encoded as ASCII. For a given crypto key and context, the same identifier will be replaced with the same surrogate. Identifiers must be at least two characters long. In the case that the identifier is the empty string, it will be skipped. See [https://cloud.google.com/dlp/docs/pseudonymization](https://cloud.google.com/dlp/docs/pseudonymization) to learn more. + + Note: We recommend using CryptoDeterministicConfig for all use cases which do not require preserving the input alphabet space and size, plus warrant referential integrity. + properties: + - !ruby/object:Api::Type::NestedObject + name: cryptoKey + required: true + description: | + The key used by the encryption algorithm. 
+ properties: + - !ruby/object:Api::Type::NestedObject + name: transient + description: | + Transient crypto key. Use this to have a random data crypto key generated. It will be discarded after the request finishes. Only one of this, `unwrapped` or `kms_wrapped` must be specified. + properties: + - !ruby/object:Api::Type::String + name: name + required: true + description: | + Name of the key. This is an arbitrary string used to differentiate different keys. A unique key is generated per name: two separate `TransientCryptoKey` protos share the same generated key if their names are the same. When the data crypto key is generated, this name is not used in any way (repeating the api call will result in a different key being generated). + - !ruby/object:Api::Type::NestedObject + name: unwrapped + description: | + Unwrapped crypto key. Using raw keys is prone to security risks due to accidentally leaking the key. Choose another type of key if possible. Only one of this, `transient` or `kms_wrapped` must be specified. + properties: + - !ruby/object:Api::Type::String + name: key + required: true + sensitive: true + description: | + A 128/192/256 bit key. + + A base64-encoded string. + - !ruby/object:Api::Type::NestedObject + name: kmsWrapped + description: | + KMS wrapped key. + Include to use an existing data crypto key wrapped by KMS. The wrapped key must be a 128-, 192-, or 256-bit key. Authorization requires the following IAM permissions when sending a request to perform a crypto transformation using a KMS-wrapped crypto key: dlp.kms.encrypt + For more information, see [Creating a wrapped key](https://cloud.google.com/dlp/docs/create-wrapped-key). Only one of this, `transient` or `unwrapped` must be specified. + Note: When you use Cloud KMS for cryptographic operations, [charges apply](https://cloud.google.com/kms/pricing). + properties: + - !ruby/object:Api::Type::String + name: wrappedKey + required: true + description: | + The wrapped data crypto key. + + A base64-encoded string. + - !ruby/object:Api::Type::String + name: cryptoKeyName + required: true + description: | + The resource name of the KMS CryptoKey to use for unwrapping. + - !ruby/object:Api::Type::NestedObject + name: context + description: | + The 'tweak', a context may be used for higher security since the same identifier in two different contexts won't be given the same surrogate. If the context is not set, a default tweak will be used. + + If the context is set but: + + 1. there is no record present when transforming a given value or + 2. the field is not present when transforming a given value, + + a default tweak will be used. + + Note that case (1) is expected when an `InfoTypeTransformation` is applied to both structured and non-structured `ContentItem`s. Currently, the referenced field may be of value type integer or string. + + The tweak is constructed as a sequence of bytes in big endian byte order such that: + + * a 64 bit integer is encoded followed by a single byte of value 1 + * a string is encoded in UTF-8 format followed by a single byte of value 2 + properties: + - !ruby/object:Api::Type::String + name: name + required: true + description: | + Name describing the field. + - !ruby/object:Api::Type::NestedObject + name: surrogateInfoType + description: | + The custom infoType to annotate the surrogate with. This annotation will be applied to the surrogate by prefixing it with the name of the custom infoType followed by the number of characters comprising the surrogate. 
The following scheme defines the format: info\_type\_name(surrogate\_character\_count):surrogate + + For example, if the name of custom infoType is 'MY\_TOKEN\_INFO\_TYPE' and the surrogate is 'abc', the full replacement value will be: 'MY\_TOKEN\_INFO\_TYPE(3):abc' + + This annotation identifies the surrogate when inspecting content using the custom infoType [`SurrogateType`](https://cloud.google.com/dlp/docs/reference/rest/v2/InspectConfig#surrogatetype). This facilitates reversal of the surrogate when it occurs in free text. + + In order for inspection to work properly, the name of this infoType must not occur naturally anywhere in your data; otherwise, inspection may find a surrogate that does not correspond to an actual identifier. Therefore, choose your custom infoType name carefully after considering what your data looks like. One way to select a name that has a high chance of yielding reliable detection is to include one or more unicode characters that are highly improbable to exist in your data. For example, assuming your data is entered from a regular ASCII keyboard, the symbol with the hex code point 29DD might be used like so: ⧝MY\_TOKEN\_TYPE + properties: + - !ruby/object:Api::Type::String + name: name + required: true + description: | + Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at [https://cloud.google.com/dlp/docs/infotypes-reference](https://cloud.google.com/dlp/docs/infotypes-reference) when specifying a built-in type. When sending Cloud DLP results to Data Catalog, infoType names should conform to the pattern `[A-Za-z0-9$-_]{1,64}`. + - !ruby/object:Api::Type::String + name: version + description: | + Optional version name for this InfoType. + - !ruby/object:Api::Type::Enum + name: commonAlphabet + description: | + Common alphabets. Only one of this, `custom_alphabet` or `radix` must be specified. + values: + - :NUMERIC # [0-9] (radix of 10) + - :HEXADECIMAL # [0-9A-F] (radix of 16) + - :UPPER_CASE_ALPHA_NUMERIC # [0-9A-Z] (radix of 36) + - :ALPHA_NUMERIC # [0-9A-Za-z] (radix of 62) + - !ruby/object:Api::Type::String + name: customAlphabet + description: | + This is supported by mapping these to the alphanumeric characters that the FFX mode natively supports. This happens before/after encryption/decryption. Each character listed must appear only once. Number of characters must be in the range \[2, 95\]. This must be encoded as ASCII. The order of characters does not matter. The full list of allowed characters is: + + ``0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz ~`!@#$%^&*()_-+={[}]|:;"'<,>.?/``. Only one of this, `common_alphabet` or `radix` must be specified. + - !ruby/object:Api::Type::Integer + name: radix + description: | + The native way to select the alphabet. Must be in the range \[2, 95\]. Only one of this, `custom_alphabet` or `common_alphabet` must be specified. + - !ruby/object:Api::Type::NestedObject + name: fixedSizeBucketingConfig + description: | + Buckets values based on fixed size ranges. The Bucketing transformation can provide all of this functionality, but requires more configuration. This message is provided as a convenience to the user for simple bucketing strategies. + + The transformed value will be a hyphenated string of {lower_bound}-{upper_bound}. For example, if lower_bound = 10 and upper_bound = 20, all values that are within this bucket will be replaced with "10-20". - A base64-encoded string. 
- - !ruby/object:Api::Type::NestedObject - name: 'kmsWrapped' - description: | - KMS wrapped key. - Include to use an existing data crypto key wrapped by KMS. The wrapped key must be a 128-, 192-, or 256-bit key. Authorization requires the following IAM permissions when sending a request to perform a crypto transformation using a KMS-wrapped crypto key: dlp.kms.encrypt - For more information, see [Creating a wrapped key](https://cloud.google.com/dlp/docs/create-wrapped-key). - Note: When you use Cloud KMS for cryptographic operations, [charges apply](https://cloud.google.com/kms/pricing). - properties: - - !ruby/object:Api::Type::String - name: 'wrappedKey' - required: true - description: | - The wrapped data crypto key. + This can be used on data of type: double, long. - A base64-encoded string. - - !ruby/object:Api::Type::String - name: 'cryptoKeyName' - required: true - description: | - The resource name of the KMS CryptoKey to use for unwrapping. - - !ruby/object:Api::Type::NestedObject - name: cryptoDeterministicConfig - description: | - Pseudonymization method that generates deterministic encryption for the given input. Outputs a base64 encoded representation of the encrypted output. Uses AES-SIV based on the RFC [https://tools.ietf.org/html/rfc5297](https://tools.ietf.org/html/rfc5297). - properties: - - !ruby/object:Api::Type::NestedObject - name: 'cryptoKey' - description: | - The key used by the encryption function. For deterministic encryption using AES-SIV, the provided key is internally expanded to 64 bytes prior to use. - properties: - - !ruby/object:Api::Type::NestedObject - name: 'transient' - description: | - Transient crypto key. Use this to have a random data crypto key generated. It will be discarded after the request finishes. - properties: - - !ruby/object:Api::Type::String - name: 'name' - required: true - description: | - Name of the key. This is an arbitrary string used to differentiate different keys. A unique key is generated per name: two separate `TransientCryptoKey` protos share the same generated key if their names are the same. When the data crypto key is generated, this name is not used in any way (repeating the api call will result in a different key being generated). - - !ruby/object:Api::Type::NestedObject - name: 'unwrapped' - description: | - Unwrapped crypto key. Using raw keys is prone to security risks due to accidentally leaking the key. Choose another type of key if possible. - properties: - - !ruby/object:Api::Type::String - name: 'key' - required: true - description: | - A 128/192/256 bit key. + If the bound Value type differs from the type of data being transformed, we will first attempt converting the type of the data to be transformed to match the type of the bound before comparing. - A base64-encoded string. - - !ruby/object:Api::Type::NestedObject - name: 'kmsWrapped' - description: | - KMS wrapped key. - Include to use an existing data crypto key wrapped by KMS. The wrapped key must be a 128-, 192-, or 256-bit key. Authorization requires the following IAM permissions when sending a request to perform a crypto transformation using a KMS-wrapped crypto key: dlp.kms.encrypt - For more information, see [Creating a wrapped key](https://cloud.google.com/dlp/docs/create-wrapped-key). - Note: When you use Cloud KMS for cryptographic operations, [charges apply](https://cloud.google.com/kms/pricing). - properties: - - !ruby/object:Api::Type::String - name: 'wrappedKey' - required: true - description: | - The wrapped data crypto key. 
+ See https://cloud.google.com/dlp/docs/concepts-bucketing to learn more. + properties: + - !ruby/object:Api::Type::NestedObject + name: lowerBound + required: true + description: | + Lower bound value of buckets. + All values less than lower_bound are grouped together into a single bucket; for example if lower_bound = 10, then all values less than 10 are replaced with the value "-10". + The `lower_bound` block must only contain one argument. See the `fixed_size_bucketing_config` block description for more information about choosing a data type. + properties: + - !ruby/object:Api::Type::String + name: integerValue + description: + An integer value (int64 format) + - !ruby/object:Api::Type::Double + name: floatValue + description: A float value. + - !ruby/object:Api::Type::NestedObject + name: upperBound + required: true + description: | + Upper bound value of buckets. + All values greater than upper_bound are grouped together into a single bucket; for example if upper_bound = 89, then all values greater than 89 are replaced with the value "89+". + The `upper_bound` block must only contain one argument. See the `fixed_size_bucketing_config` block description for more information about choosing a data type. + properties: + - !ruby/object:Api::Type::String + name: integerValue + description: + An integer value (int64 format) + - !ruby/object:Api::Type::Double + name: floatValue + description: A float value. + - !ruby/object:Api::Type::Double + name: bucketSize + required: true + description: | + Size of each bucket (except for minimum and maximum buckets). + So if lower_bound = 10, upper_bound = 89, and bucketSize = 10, then the following buckets would be used: -10, 10-20, 20-30, 30-40, 40-50, 50-60, 60-70, 70-80, 80-89, 89+. + Precision up to 2 decimals works. + - !ruby/object:Api::Type::NestedObject + name: bucketingConfig + description: | + Generalization function that buckets values based on ranges. The ranges and replacement values are dynamically provided by the user for custom behavior, such as 1-30 -> LOW 31-65 -> MEDIUM 66-100 -> HIGH + This can be used on data of type: number, long, string, timestamp. + If the provided value type differs from the type of data being transformed, we will first attempt converting the type of the data to be transformed to match the type of the bound before comparing. + See https://cloud.google.com/dlp/docs/concepts-bucketing to learn more. + properties: + - !ruby/object:Api::Type::Array + name: buckets + required: true + description: | + Set of buckets. Ranges must be non-overlapping. + Bucket is represented as a range, along with replacement values. + item_type: + !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::NestedObject + name: min + description: | + Lower bound of the range, inclusive. Type should be the same as max if used. + The `min` block must only contain one argument. See the `bucketing_config` block description for more information about choosing a data type. + properties: + - !ruby/object:Api::Type::String + name: integerValue + description: + An integer value (int64 format) + - !ruby/object:Api::Type::Double + name: floatValue + description: A float value. + - !ruby/object:Api::Type::String + name: stringValue + description: A string value. + - !ruby/object:Api::Type::String + name: timestampValue + description: | + A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". 
+ - !ruby/object:Api::Type::NestedObject + name: timeValue + description: + Represents a time of day. + properties: + - !ruby/object:Api::Type::Integer + name: hours + description: | + Hours of day in 24 hour format. Should be from 0 to 23. An API may choose to allow the value "24:00:00" for scenarios like business closing time. + - !ruby/object:Api::Type::Integer + name: minutes + description: + Minutes of hour of day. Must + be from 0 to 59. + - !ruby/object:Api::Type::Integer + name: seconds + description: + Seconds of minutes of the + time. Must normally be from + 0 to 59. An API may allow + the value 60 if it allows + leap-seconds. + - !ruby/object:Api::Type::Integer + name: nanos + description: + Fractions of seconds in + nanoseconds. Must be from 0 + to 999,999,999. + - !ruby/object:Api::Type::NestedObject + name: dateValue + description: + Represents a whole or partial + calendar date. + properties: + - !ruby/object:Api::Type::Integer + name: year + description: + Year of the date. Must be + from 1 to 9999, or 0 to + specify a date without a + year. + - !ruby/object:Api::Type::Integer + name: month + description: + Month of a year. Must be + from 1 to 12, or 0 to + specify a year without a + month and day. + - !ruby/object:Api::Type::Integer + name: day + description: + Day of a month. Must be from + 1 to 31 and valid for the + year and month, or 0 to + specify a year by itself or + a year and month where the + day isn't significant. + - !ruby/object:Api::Type::Enum + name: dayOfWeekValue + description: + Represents a day of the week. + values: + - :MONDAY + - :TUESDAY + - :WEDNESDAY + - :THURSDAY + - :FRIDAY + - :SATURDAY + - :SUNDAY + - !ruby/object:Api::Type::NestedObject + name: max + description: | + Upper bound of the range, exclusive; type must match min. + The `max` block must only contain one argument. See the `bucketing_config` block description for more information about choosing a data type. + properties: + - !ruby/object:Api::Type::String + name: integerValue + description: + An integer value (int64 format) + - !ruby/object:Api::Type::Double + name: floatValue + description: A float value. + - !ruby/object:Api::Type::String + name: stringValue + description: A string value. + - !ruby/object:Api::Type::String + name: timestampValue + description: | + A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". + - !ruby/object:Api::Type::NestedObject + name: timeValue + description: + Represents a time of day. + properties: + - !ruby/object:Api::Type::Integer + name: hours + description: | + Hours of day in 24 hour format. Should be from 0 to 23. An API may choose to allow the value "24:00:00" for scenarios like business closing time. + - !ruby/object:Api::Type::Integer + name: minutes + description: + Minutes of hour of day. Must + be from 0 to 59. + - !ruby/object:Api::Type::Integer + name: seconds + description: + Seconds of minutes of the + time. Must normally be from + 0 to 59. An API may allow + the value 60 if it allows + leap-seconds. + - !ruby/object:Api::Type::Integer + name: nanos + description: + Fractions of seconds in + nanoseconds. Must be from 0 + to 999,999,999. + - !ruby/object:Api::Type::NestedObject + name: dateValue + description: + Represents a whole or partial + calendar date. + properties: + - !ruby/object:Api::Type::Integer + name: year + description: + Year of the date. 
Must be + from 1 to 9999, or 0 to + specify a date without a + year. + - !ruby/object:Api::Type::Integer + name: month + description: + Month of a year. Must be + from 1 to 12, or 0 to + specify a year without a + month and day. + - !ruby/object:Api::Type::Integer + name: day + description: + Day of a month. Must be from + 1 to 31 and valid for the + year and month, or 0 to + specify a year by itself or + a year and month where the + day isn't significant. + - !ruby/object:Api::Type::Enum + name: dayOfWeekValue + description: + Represents a day of the week. + values: + - :MONDAY + - :TUESDAY + - :WEDNESDAY + - :THURSDAY + - :FRIDAY + - :SATURDAY + - :SUNDAY + - !ruby/object:Api::Type::NestedObject + name: replacementValue + required: true + description: | + Replacement value for this bucket. + The `replacement_value` block must only contain one argument. + properties: + - !ruby/object:Api::Type::String + name: integerValue + description: + An integer value (int64 format) + - !ruby/object:Api::Type::Double + name: floatValue + description: A float value. + - !ruby/object:Api::Type::String + name: stringValue + description: A string value. + - !ruby/object:Api::Type::String + name: timestampValue + description: | + A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". + - !ruby/object:Api::Type::NestedObject + name: timeValue + description: + Represents a time of day. + properties: + - !ruby/object:Api::Type::Integer + name: hours + description: | + Hours of day in 24 hour format. Should be from 0 to 23. An API may choose to allow the value "24:00:00" for scenarios like business closing time. + - !ruby/object:Api::Type::Integer + name: minutes + description: + Minutes of hour of day. Must + be from 0 to 59. + - !ruby/object:Api::Type::Integer + name: seconds + description: + Seconds of minutes of the + time. Must normally be from + 0 to 59. An API may allow + the value 60 if it allows + leap-seconds. + - !ruby/object:Api::Type::Integer + name: nanos + description: + Fractions of seconds in + nanoseconds. Must be from 0 + to 999,999,999. + - !ruby/object:Api::Type::NestedObject + name: dateValue + description: + Represents a whole or partial + calendar date. + properties: + - !ruby/object:Api::Type::Integer + name: year + description: + Year of the date. Must be + from 1 to 9999, or 0 to + specify a date without a + year. + - !ruby/object:Api::Type::Integer + name: month + description: + Month of a year. Must be + from 1 to 12, or 0 to + specify a year without a + month and day. + - !ruby/object:Api::Type::Integer + name: day + description: + Day of a month. Must be from + 1 to 31 and valid for the + year and month, or 0 to + specify a year by itself or + a year and month where the + day isn't significant. + - !ruby/object:Api::Type::Enum + name: dayOfWeekValue + description: + Represents a day of the week. + values: + - :MONDAY + - :TUESDAY + - :WEDNESDAY + - :THURSDAY + - :FRIDAY + - :SATURDAY + - :SUNDAY + - !ruby/object:Api::Type::NestedObject + name: replaceWithInfoTypeConfig + description: | + Replace each matching finding with the name of the info type. 
+ properties: [] # Meant to be an empty object with no properties - see here : https://cloud.google.com/dlp/docs/reference/rest/v2/projects.deidentifyTemplates#DeidentifyTemplate.ReplaceWithInfoTypeConfig + # The fields below are necessary to include the "replaceWithInfoTypeConfig" transformation in the payload + # A side-effect is null values when the field is unused, see: https://github.com/hashicorp/terraform-provider-google/issues/13201 + send_empty_value: true + allow_empty_object: true + - !ruby/object:Api::Type::NestedObject + name: timePartConfig + description: + For use with Date, Timestamp, and TimeOfDay, + extract or preserve a portion of the value. + properties: + - !ruby/object:Api::Type::Enum + name: partToExtract + required: true + description: The part of the time to keep. + values: + - :YEAR # [0-9999] + - :MONTH # [1-12] + - :DAY_OF_MONTH # [1-31] + - :DAY_OF_WEEK # [1-7] + - :WEEK_OF_YEAR # [1-53] + - :HOUR_OF_DAY # [0-23] + - !ruby/object:Api::Type::NestedObject + name: cryptoHashConfig + description: | + Pseudonymization method that generates surrogates via cryptographic hashing. Uses SHA-256. The key size must be either 32 or 64 bytes. + Outputs a base64 encoded representation of the hashed output (for example, L7k0BHmF1ha5U3NfGykjro4xWi1MPVQPjhMAZbSV9mM=). + Currently, only string and integer values can be hashed. + See https://cloud.google.com/dlp/docs/pseudonymization to learn more. + properties: + - !ruby/object:Api::Type::NestedObject + name: cryptoKey + required: true + description: | + The key used by the encryption function. + properties: + - !ruby/object:Api::Type::NestedObject + name: transient + description: | + Transient crypto key. Use this to have a random data crypto key generated. It will be discarded after the request finishes. Only one of this, `unwrapped` or `kms_wrapped` must be specified. + properties: + - !ruby/object:Api::Type::String + name: name + required: true + description: | + Name of the key. This is an arbitrary string used to differentiate different keys. A unique key is generated per name: two separate `TransientCryptoKey` protos share the same generated key if their names are the same. When the data crypto key is generated, this name is not used in any way (repeating the api call will result in a different key being generated). + - !ruby/object:Api::Type::NestedObject + name: unwrapped + description: | + Unwrapped crypto key. Using raw keys is prone to security risks due to accidentally leaking the key. Choose another type of key if possible. Only one of this, `transient` or `kms_wrapped` must be specified. + properties: + - !ruby/object:Api::Type::String + name: key + required: true + sensitive: true + description: | + A 128/192/256 bit key. - A base64-encoded string. - - !ruby/object:Api::Type::String - name: 'cryptoKeyName' - required: true - description: | - The resource name of the KMS CryptoKey to use for unwrapping. - - !ruby/object:Api::Type::NestedObject - name: 'surrogateInfoType' - description: | - The custom info type to annotate the surrogate with. This annotation will be applied to the surrogate by prefixing it with the name of the custom info type followed by the number of characters comprising the surrogate. The following scheme defines the format: {info type name}({surrogate character count}):{surrogate} + A base64-encoded string. + - !ruby/object:Api::Type::NestedObject + name: kmsWrapped + description: | + KMS wrapped key. + Include to use an existing data crypto key wrapped by KMS. 
The wrapped key must be a 128-, 192-, or 256-bit key. Authorization requires the following IAM permissions when sending a request to perform a crypto transformation using a KMS-wrapped crypto key: dlp.kms.encrypt + For more information, see [Creating a wrapped key](https://cloud.google.com/dlp/docs/create-wrapped-key). Only one of this, `transient` or `unwrapped` must be specified. + Note: When you use Cloud KMS for cryptographic operations, [charges apply](https://cloud.google.com/kms/pricing). + properties: + - !ruby/object:Api::Type::String + name: wrappedKey + required: true + description: | + The wrapped data crypto key. - For example, if the name of custom info type is 'MY\_TOKEN\_INFO\_TYPE' and the surrogate is 'abc', the full replacement value will be: 'MY\_TOKEN\_INFO\_TYPE(3):abc' + A base64-encoded string. + - !ruby/object:Api::Type::String + name: cryptoKeyName + required: true + description: | + The resource name of the KMS CryptoKey to use for unwrapping. + - !ruby/object:Api::Type::NestedObject + name: dateShiftConfig + description: | + Shifts dates by random number of days, with option to be consistent for the same context. See https://cloud.google.com/dlp/docs/concepts-date-shifting to learn more. + properties: + - !ruby/object:Api::Type::Integer + name: upperBoundDays + required: true + description: | + Range of shift in days. Actual shift will be selected at random within this range (inclusive ends). Negative means shift to earlier in time. Must not be more than 365250 days (1000 years) each direction. - This annotation identifies the surrogate when inspecting content using the custom info type 'Surrogate'. This facilitates reversal of the surrogate when it occurs in free text. + For example, 3 means shift date to at most 3 days into the future. + - !ruby/object:Api::Type::Integer + name: lowerBoundDays + required: true + description: | + For example, -5 means shift date to at most 5 days back in the past. + - !ruby/object:Api::Type::NestedObject + name: context + description: | + Points to the field that contains the context, for example, an entity id. + If set, must also set cryptoKey. If set, shift will be consistent for the given context. + properties: + - !ruby/object:Api::Type::String + name: name + required: true + description: | + Name describing the field. + - !ruby/object:Api::Type::NestedObject + name: cryptoKey + description: | + Causes the shift to be computed based on this key and the context. This results in the same shift for the same context and cryptoKey. If set, must also set context. Can only be applied to table items. + properties: + - !ruby/object:Api::Type::NestedObject + name: transient + description: | + Transient crypto key. Use this to have a random data crypto key generated. It will be discarded after the request finishes. Only one of this, `unwrapped` or `kms_wrapped` must be specified. + properties: + - !ruby/object:Api::Type::String + name: name + required: true + description: | + Name of the key. This is an arbitrary string used to differentiate different keys. A unique key is generated per name: two separate `TransientCryptoKey` protos share the same generated key if their names are the same. When the data crypto key is generated, this name is not used in any way (repeating the api call will result in a different key being generated). + - !ruby/object:Api::Type::NestedObject + name: unwrapped + description: | + Unwrapped crypto key. Using raw keys is prone to security risks due to accidentally leaking the key. 
Choose another type of key if possible. Only one of this, `transient` or `kms_wrapped` must be specified. + properties: + - !ruby/object:Api::Type::String + name: key + required: true + sensitive: true + description: | + A 128/192/256 bit key. - Note: For record transformations where the entire cell in a table is being transformed, surrogates are not mandatory. Surrogates are used to denote the location of the token and are necessary for re-identification in free form text. + A base64-encoded string. + - !ruby/object:Api::Type::NestedObject + name: kmsWrapped + description: | + KMS wrapped key. + Include to use an existing data crypto key wrapped by KMS. The wrapped key must be a 128-, 192-, or 256-bit key. Authorization requires the following IAM permissions when sending a request to perform a crypto transformation using a KMS-wrapped crypto key: dlp.kms.encrypt + For more information, see [Creating a wrapped key](https://cloud.google.com/dlp/docs/create-wrapped-key). Only one of this, `transient` or `unwrapped` must be specified. + Note: When you use Cloud KMS for cryptographic operations, [charges apply](https://cloud.google.com/kms/pricing). + properties: + - !ruby/object:Api::Type::String + name: wrappedKey + required: true + description: | + The wrapped data crypto key. - In order for inspection to work properly, the name of this info type must not occur naturally anywhere in your data; otherwise, inspection may either + A base64-encoded string. + - !ruby/object:Api::Type::String + name: cryptoKeyName + required: true + description: | + The resource name of the KMS CryptoKey to use for unwrapping. + - !ruby/object:Api::Type::NestedObject + name: cryptoDeterministicConfig + description: | + Pseudonymization method that generates deterministic encryption for the given input. Outputs a base64 encoded representation of the encrypted output. Uses AES-SIV based on the RFC [https://tools.ietf.org/html/rfc5297](https://tools.ietf.org/html/rfc5297). + properties: + - !ruby/object:Api::Type::NestedObject + name: cryptoKey + required: true + description: | + The key used by the encryption function. For deterministic encryption using AES-SIV, the provided key is internally expanded to 64 bytes prior to use. + properties: + - !ruby/object:Api::Type::NestedObject + name: transient + description: | + Transient crypto key. Use this to have a random data crypto key generated. It will be discarded after the request finishes. Only one of this, `unwrapped` or `kms_wrapped` must be specified. + properties: + - !ruby/object:Api::Type::String + name: name + required: true + description: | + Name of the key. This is an arbitrary string used to differentiate different keys. A unique key is generated per name: two separate `TransientCryptoKey` protos share the same generated key if their names are the same. When the data crypto key is generated, this name is not used in any way (repeating the api call will result in a different key being generated). + - !ruby/object:Api::Type::NestedObject + name: unwrapped + description: | + Unwrapped crypto key. Using raw keys is prone to security risks due to accidentally leaking the key. Choose another type of key if possible. Only one of this, `transient` or `kms_wrapped` must be specified. + properties: + - !ruby/object:Api::Type::String + name: key + required: true + sensitive: true + description: | + A 128/192/256 bit key. 
- * reverse a surrogate that does not correspond to an actual identifier - * be unable to parse the surrogate and result in an error + A base64-encoded string. + - !ruby/object:Api::Type::NestedObject + name: kmsWrapped + description: | + KMS wrapped key. + Include to use an existing data crypto key wrapped by KMS. The wrapped key must be a 128-, 192-, or 256-bit key. Authorization requires the following IAM permissions when sending a request to perform a crypto transformation using a KMS-wrapped crypto key: dlp.kms.encrypt + For more information, see [Creating a wrapped key](https://cloud.google.com/dlp/docs/create-wrapped-key). Only one of this, `transient` or `unwrapped` must be specified. + Note: When you use Cloud KMS for cryptographic operations, [charges apply](https://cloud.google.com/kms/pricing). + properties: + - !ruby/object:Api::Type::String + name: wrappedKey + required: true + description: | + The wrapped data crypto key. - Therefore, choose your custom info type name carefully after considering what your data looks like. One way to select a name that has a high chance of yielding reliable detection is to include one or more unicode characters that are highly improbable to exist in your data. For example, assuming your data is entered from a regular ASCII keyboard, the symbol with the hex code point 29DD might be used like so: ⧝MY\_TOKEN\_TYPE. - properties: - - !ruby/object:Api::Type::String - name: 'name' - description: | - Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at [https://cloud.google.com/dlp/docs/infotypes-reference](https://cloud.google.com/dlp/docs/infotypes-reference) when specifying a built-in type. When sending Cloud DLP results to Data Catalog, infoType names should conform to the pattern `[A-Za-z0-9$-_]{1,64}`. - - !ruby/object:Api::Type::String - name: 'version' - description: | - Optional version name for this InfoType. - - !ruby/object:Api::Type::NestedObject - name: 'context' - description: | - A context may be used for higher security and maintaining referential integrity such that the same identifier in two different contexts will be given a distinct surrogate. The context is appended to plaintext value being encrypted. On decryption the provided context is validated against the value used during encryption. If a context was provided during encryption, same context must be provided during decryption as well. + A base64-encoded string. + - !ruby/object:Api::Type::String + name: cryptoKeyName + required: true + description: | + The resource name of the KMS CryptoKey to use for unwrapping. + - !ruby/object:Api::Type::NestedObject + name: surrogateInfoType + required: true + description: | + The custom info type to annotate the surrogate with. This annotation will be applied to the surrogate by prefixing it with the name of the custom info type followed by the number of characters comprising the surrogate. The following scheme defines the format: {info type name}({surrogate character count}):{surrogate} - If the context is not set, plaintext would be used as is for encryption. If the context is set but: + For example, if the name of custom info type is 'MY\_TOKEN\_INFO\_TYPE' and the surrogate is 'abc', the full replacement value will be: 'MY\_TOKEN\_INFO\_TYPE(3):abc' - 1. there is no record present when transforming a given value or - 2. 
the field is not present when transforming a given value, + This annotation identifies the surrogate when inspecting content using the custom info type 'Surrogate'. This facilitates reversal of the surrogate when it occurs in free text. - plaintext would be used as is for encryption. + Note: For record transformations where the entire cell in a table is being transformed, surrogates are not mandatory. Surrogates are used to denote the location of the token and are necessary for re-identification in free form text. - Note that case (1) is expected when an InfoTypeTransformation is applied to both structured and unstructured ContentItems. - properties: - - !ruby/object:Api::Type::String - name: 'name' - description: | - Name describing the field. - - !ruby/object:Api::Type::NestedObject - name: replaceDictionaryConfig - description: - Replace with a value randomly drawn (with replacement) - from a dictionary. - properties: - - !ruby/object:Api::Type::NestedObject - name: wordList - description: | - A list of words to select from for random replacement. The [limits](https://cloud.google.com/dlp/limits) page contains details about the size limits of dictionaries. - properties: - - !ruby/object:Api::Type::Array - name: words - required: true - description: | - Words or phrases defining the dictionary. The dictionary must contain at least one phrase and every phrase must contain at least 2 characters that are letters or digits. - item_type: Api::Type::String + In order for inspection to work properly, the name of this info type must not occur naturally anywhere in your data; otherwise, inspection may either + + * reverse a surrogate that does not correspond to an actual identifier + * be unable to parse the surrogate and result in an error + + Therefore, choose your custom info type name carefully after considering what your data looks like. One way to select a name that has a high chance of yielding reliable detection is to include one or more unicode characters that are highly improbable to exist in your data. For example, assuming your data is entered from a regular ASCII keyboard, the symbol with the hex code point 29DD might be used like so: ⧝MY\_TOKEN\_TYPE. + properties: + - !ruby/object:Api::Type::String + name: name + required: true + description: | + Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at [https://cloud.google.com/dlp/docs/infotypes-reference](https://cloud.google.com/dlp/docs/infotypes-reference) when specifying a built-in type. When sending Cloud DLP results to Data Catalog, infoType names should conform to the pattern `[A-Za-z0-9$-_]{1,64}`. + - !ruby/object:Api::Type::String + name: version + description: | + Optional version name for this InfoType. + - !ruby/object:Api::Type::NestedObject + name: context + description: | + A context may be used for higher security and maintaining referential integrity such that the same identifier in two different contexts will be given a distinct surrogate. The context is appended to plaintext value being encrypted. On decryption the provided context is validated against the value used during encryption. If a context was provided during encryption, same context must be provided during decryption as well. + + If the context is not set, plaintext would be used as is for encryption. If the context is set but: + + 1. there is no record present when transforming a given value or + 2. 
the field is not present when transforming a given value, + + plaintext would be used as is for encryption. + + Note that case (1) is expected when an InfoTypeTransformation is applied to both structured and unstructured ContentItems. + properties: + - !ruby/object:Api::Type::String + name: name + required: true + description: | + Name describing the field. + - !ruby/object:Api::Type::NestedObject + name: replaceDictionaryConfig + description: + Replace with a value randomly drawn (with + replacement) from a dictionary. + properties: + - !ruby/object:Api::Type::NestedObject + name: wordList + required: true + description: | + A list of words to select from for random replacement. The [limits](https://cloud.google.com/dlp/limits) page contains details about the size limits of dictionaries. + properties: + - !ruby/object:Api::Type::Array + name: words + required: true + description: | + Words or phrases defining the dictionary. The dictionary must contain at least one phrase and every phrase must contain at least 2 characters that are letters or digits. + item_type: Api::Type::String - !ruby/object:Api::Type::Array name: 'recordSuppressions' description: diff --git a/mmv1/products/dlp/InspectTemplate.yaml b/mmv1/products/dlp/InspectTemplate.yaml index 317139e77fee..a18d15740e46 100644 --- a/mmv1/products/dlp/InspectTemplate.yaml +++ b/mmv1/products/dlp/InspectTemplate.yaml @@ -45,8 +45,18 @@ examples: primary_resource_id: 'custom_type_surrogate' test_env_vars: project: :PROJECT_NAME + - !ruby/object:Provider::Terraform::Examples + name: 'dlp_inspect_template_with_template_id' + primary_resource_id: 'with_template_id' + vars: + name: 'my-template' + test_env_vars: + project: :PROJECT_NAME + skip_docs: true custom_code: !ruby/object:Provider::Terraform::CustomCode - encoder: templates/terraform/encoders/wrap_object.go.erb + decoder: templates/terraform/decoders/dlp_template_id.go.erb + encoder: templates/terraform/encoders/wrap_object_with_template_id.go.erb + update_encoder: templates/terraform/encoders/wrap_object.go.erb custom_import: templates/terraform/custom_import/dlp_import.go.erb parameters: - !ruby/object:Api::Type::String @@ -76,6 +86,15 @@ properties: name: 'displayName' description: | User set display name of the inspect template. + - !ruby/object:Api::Type::String + name: 'templateId' + description: | + The template id can contain uppercase and lowercase letters, numbers, and hyphens; + that is, it must match the regular expression: [a-zA-Z\d-_]+. The maximum length is + 100 characters. Can be empty to allow the system to generate one. + immutable: true + default_from_api: true + url_param_only: true - !ruby/object:Api::Type::NestedObject name: 'inspectConfig' description: The core content of the template. diff --git a/mmv1/products/dlp/JobTrigger.yaml b/mmv1/products/dlp/JobTrigger.yaml index 469284d74b71..6211b6726d8f 100644 --- a/mmv1/products/dlp/JobTrigger.yaml +++ b/mmv1/products/dlp/JobTrigger.yaml @@ -180,7 +180,6 @@ properties: - !ruby/object:Api::Type::String name: 'inspectTemplateName' description: The name of the template to run when this job is triggered. - required: true - !ruby/object:Api::Type::NestedObject name: 'inspectConfig' description: The core content of the template. @@ -828,6 +827,29 @@ properties: required: true description: | Name of a BigQuery field to be returned with the findings. + - !ruby/object:Api::Type::Array + name: 'includedFields' + description: | + Limit scanning only to these fields. 
+ item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::String + name: 'name' + required: true + description: | + Name describing the field to which scanning is limited. + - !ruby/object:Api::Type::Array + name: 'excludedFields' + description: | + References to fields excluded from scanning. + This allows you to skip inspection of entire columns which you know have no findings. + item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::String + name: 'name' + required: true + description: | + Name describing the field excluded from scanning. - !ruby/object:Api::Type::NestedObject name: 'hybridOptions' allow_empty_object: true @@ -883,7 +905,6 @@ properties: * `"pipeline" : "etl"` - !ruby/object:Api::Type::Array name: 'actions' - required: true description: | A task to execute on the completion of a job. item_type: !ruby/object:Api::Type::NestedObject diff --git a/mmv1/products/dlp/StoredInfoType.yaml b/mmv1/products/dlp/StoredInfoType.yaml index 5f4064b8e933..a9facc2f8200 100644 --- a/mmv1/products/dlp/StoredInfoType.yaml +++ b/mmv1/products/dlp/StoredInfoType.yaml @@ -25,7 +25,7 @@ references: !ruby/object:Api::Resource::ReferenceLinks 'Official Documentation': 'https://cloud.google.com/dlp/docs/creating-stored-infotypes' api: 'https://cloud.google.com/dlp/docs/reference/rest/v2/projects.storedInfoTypes' async: !ruby/object:Provider::Terraform::PollAsync - check_response_func_existence: PollCheckForExistence + check_response_func_existence: transport_tpg.PollCheckForExistence actions: ['create'] id_format: '{{parent}}/storedInfoTypes/{{name}}' examples: @@ -47,11 +47,19 @@ examples: object_name: tf-test-object test_env_vars: project: :PROJECT_NAME + - !ruby/object:Provider::Terraform::Examples + name: 'dlp_stored_info_type_with_id' + primary_resource_id: 'with_stored_info_type_id' + vars: + name: 'id-' + test_env_vars: + project: :PROJECT_NAME custom_code: !ruby/object:Provider::Terraform::CustomCode resource_definition: templates/terraform/resource_definition/dlp_stored_info_type.go.erb constants: templates/terraform/constants/dlp_stored_info_type.go.erb decoder: templates/terraform/decoders/dlp_stored_info_type.go.erb encoder: templates/terraform/encoders/dlp_stored_info_type.go.erb + update_encoder: templates/terraform/update_encoder/dlp_stored_info_type.go.erb custom_import: templates/terraform/custom_import/dlp_import.go.erb parameters: - !ruby/object:Api::Type::String @@ -81,6 +89,15 @@ properties: name: 'displayName' description: | User set display name of the info type. + - !ruby/object:Api::Type::String + name: 'storedInfoTypeId' + description: | + The storedInfoType ID can contain uppercase and lowercase letters, numbers, and hyphens; + that is, it must match the regular expression: [a-zA-Z\d-_]+. The maximum length is 100 + characters. Can be empty to allow the system to generate one. + immutable: true + default_from_api: true + url_param_only: true - !ruby/object:Api::Type::NestedObject name: 'regex' description: Regular expression which defines the rule. 
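To make the DLP additions above more concrete, here is a rough Terraform sketch of how the generated google_data_loss_prevention_deidentify_template resource might look with the new record-level info_type_transformations and character_mask_config fields. It assumes the usual camelCase-to-snake_case field mapping and the pre-existing record_transformations/field_transformations nesting; the project, template name, and the "comments" column are placeholders, and the block is illustrative rather than validated against the generated schema.

resource "google_data_loss_prevention_deidentify_template" "record_masking" {
  parent       = "projects/my-project"
  display_name = "mask-card-numbers"
  description  = "Masks credit card numbers found in the comments column."

  deidentify_config {
    record_transformations {
      field_transformations {
        fields {
          name = "comments"
        }

        # New in this change: InfoType-scoped transformations nested under
        # record_transformations.field_transformations.
        info_type_transformations {
          transformations {
            info_types {
              name = "CREDIT_CARD_NUMBER"
            }
            primitive_transformation {
              character_mask_config {
                masking_character = "*"
                # Negative value means inverse masking: mask all but the last four characters.
                number_to_mask = -4
                characters_to_ignore {
                  characters_to_skip = "-"
                }
              }
            }
          }
        }
      }
    }
  }
}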
diff --git a/mmv1/products/gkeonprem/BareMetalCluster.yaml b/mmv1/products/gkeonprem/BareMetalCluster.yaml index 1b3d026c06d5..97e2ba289e2c 100644 --- a/mmv1/products/gkeonprem/BareMetalCluster.yaml +++ b/mmv1/products/gkeonprem/BareMetalCluster.yaml @@ -28,6 +28,8 @@ taint_resource_on_failed_create: true examples: - !ruby/object:Provider::Terraform::Examples name: "gkeonprem_bare_metal_cluster_basic" + # TODO: https://github.com/hashicorp/terraform-provider-google/issues/14417 + skip_test: true min_version: beta primary_resource_id: "cluster-basic" vars: @@ -36,6 +38,8 @@ examples: project: "fake-backend-360322" - !ruby/object:Provider::Terraform::Examples name: "gkeonprem_bare_metal_cluster_manuallb" + # TODO: https://github.com/hashicorp/terraform-provider-google/issues/14417 + skip_test: true min_version: beta primary_resource_id: "cluster-manuallb" vars: @@ -44,6 +48,8 @@ examples: project: "fake-backend-360322" - !ruby/object:Provider::Terraform::Examples name: "gkeonprem_bare_metal_cluster_bgplb" + # TODO: https://github.com/hashicorp/terraform-provider-google/issues/14417 + skip_test: true min_version: beta primary_resource_id: "cluster-bgplb" vars: @@ -71,7 +77,7 @@ properties: description: | The Admin Cluster this Bare Metal User Cluster belongs to. This is the full resource name of the Admin Cluster's hub membership. - diff_suppress_func: ProjectNumberDiffSuppress + diff_suppress_func: tpgresource.ProjectNumberDiffSuppress - !ruby/object:Api::Type::String name: "description" description: | diff --git a/mmv1/products/gkeonprem/BareMetalNodePool.yaml b/mmv1/products/gkeonprem/BareMetalNodePool.yaml index 3eff20444c85..3c93a869c834 100644 --- a/mmv1/products/gkeonprem/BareMetalNodePool.yaml +++ b/mmv1/products/gkeonprem/BareMetalNodePool.yaml @@ -31,6 +31,8 @@ taint_resource_on_failed_create: true examples: - !ruby/object:Provider::Terraform::Examples name: 'gkeonprem_bare_metal_node_pool_basic' + # TODO: https://github.com/hashicorp/terraform-provider-google/issues/14417 + skip_test: true primary_resource_id: 'nodepool-basic' min_version: beta vars: @@ -39,6 +41,8 @@ examples: project: 'fake-backend-360322' - !ruby/object:Provider::Terraform::Examples name: 'gkeonprem_bare_metal_node_pool_full' + # TODO: https://github.com/hashicorp/terraform-provider-google/issues/14417 + skip_test: true primary_resource_id: 'nodepool-full' min_version: beta vars: diff --git a/mmv1/products/gkeonprem/VmwareCluster.yaml b/mmv1/products/gkeonprem/VmwareCluster.yaml index 3155b1498617..cdd945d7a18d 100644 --- a/mmv1/products/gkeonprem/VmwareCluster.yaml +++ b/mmv1/products/gkeonprem/VmwareCluster.yaml @@ -34,6 +34,8 @@ timeouts: !ruby/object:Api::Timeouts examples: - !ruby/object:Provider::Terraform::Examples name: 'gkeonprem_vmware_cluster_basic' + # TODO: https://github.com/hashicorp/terraform-provider-google/issues/14417 + skip_test: true min_version: beta primary_resource_id: 'cluster-basic' vars: @@ -42,6 +44,8 @@ examples: project: 'fake-backend-360322' - !ruby/object:Provider::Terraform::Examples name: 'gkeonprem_vmware_cluster_f5lb' + # TODO: https://github.com/hashicorp/terraform-provider-google/issues/14417 + skip_test: true min_version: beta primary_resource_id: 'cluster-f5lb' vars: @@ -50,6 +54,8 @@ examples: project: 'fake-backend-360322' - !ruby/object:Provider::Terraform::Examples name: 'gkeonprem_vmware_cluster_manuallb' + # TODO: https://github.com/hashicorp/terraform-provider-google/issues/14417 + skip_test: true min_version: beta primary_resource_id: 'cluster-manuallb' vars: diff 
--git a/mmv1/products/gkeonprem/VmwareNodePool.yaml b/mmv1/products/gkeonprem/VmwareNodePool.yaml index a2767afef417..3734fc87ace7 100644 --- a/mmv1/products/gkeonprem/VmwareNodePool.yaml +++ b/mmv1/products/gkeonprem/VmwareNodePool.yaml @@ -31,6 +31,8 @@ timeouts: !ruby/object:Api::Timeouts examples: - !ruby/object:Provider::Terraform::Examples name: "gkeonprem_vmware_node_pool_basic" + # TODO: https://github.com/hashicorp/terraform-provider-google/issues/14417 + skip_test: true min_version: beta primary_resource_id: "nodepool-basic" vars: @@ -39,6 +41,8 @@ examples: project: "fake-backend-360322" - !ruby/object:Provider::Terraform::Examples name: "gkeonprem_vmware_node_pool_full" + # TODO: https://github.com/hashicorp/terraform-provider-google/issues/14417 + skip_test: true min_version: beta primary_resource_id: "nodepool-full" vars: diff --git a/mmv1/products/iam2/AccessBoundaryPolicy.yaml b/mmv1/products/iam2/AccessBoundaryPolicy.yaml index 34be3043287e..8fa285c90248 100644 --- a/mmv1/products/iam2/AccessBoundaryPolicy.yaml +++ b/mmv1/products/iam2/AccessBoundaryPolicy.yaml @@ -20,6 +20,8 @@ description: | **NOTE**: This is a private feature and users should contact GCP support if they would like to test it. autogen_async: true +# Skipping sweeper since this is a child resource +skip_sweeper: true import_format: ['{{parent}}/{{name}}'] id_format: '{{parent}}/{{name}}' examples: diff --git a/mmv1/products/iap/Brand.yaml b/mmv1/products/iap/Brand.yaml index e48fa6b062b1..e738122b2057 100644 --- a/mmv1/products/iap/Brand.yaml +++ b/mmv1/products/iap/Brand.yaml @@ -33,7 +33,7 @@ references: !ruby/object:Api::Resource::ReferenceLinks 'Setting up IAP Brand': 'https://cloud.google.com/iap/docs/tutorial-gce#set_up_iap' api: 'https://cloud.google.com/iap/docs/reference/rest/v1/projects.brands' async: !ruby/object:Provider::Terraform::PollAsync - check_response_func_existence: PollCheckForExistence + check_response_func_existence: transport_tpg.PollCheckForExistence target_occurrences: 5 actions: ['create'] id_format: '{{name}}' diff --git a/mmv1/products/kms/CryptoKey.yaml b/mmv1/products/kms/CryptoKey.yaml index 926d81a39d7b..db9386e3f9d7 100644 --- a/mmv1/products/kms/CryptoKey.yaml +++ b/mmv1/products/kms/CryptoKey.yaml @@ -83,18 +83,14 @@ properties: name: 'labels' description: | Labels with user-defined metadata to apply to this resource. - - !ruby/object:Api::Type::Enum + - !ruby/object:Api::Type::String name: 'purpose' description: | The immutable purpose of this CryptoKey. See the [purpose reference](https://cloud.google.com/kms/docs/reference/rest/v1/projects.locations.keyRings.cryptoKeys#CryptoKeyPurpose) for possible inputs. - values: - - 'ENCRYPT_DECRYPT' - - 'ASYMMETRIC_SIGN' - - 'ASYMMETRIC_DECRYPT' - - 'MAC' - default_value: :ENCRYPT_DECRYPT + Default value is "ENCRYPT_DECRYPT". + default_value: 'ENCRYPT_DECRYPT' immutable: true - !ruby/object:Api::Type::String name: 'rotationPeriod' diff --git a/mmv1/products/monitoring/MetricDescriptor.yaml b/mmv1/products/monitoring/MetricDescriptor.yaml index 592e4ab66f3e..d83d04bc820b 100644 --- a/mmv1/products/monitoring/MetricDescriptor.yaml +++ b/mmv1/products/monitoring/MetricDescriptor.yaml @@ -26,8 +26,8 @@ description: deleting or altering it stops data collection and makes the metric type's existing data unusable. 
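Since the kms/CryptoKey.yaml hunk above relaxes purpose from a closed enum to a plain string, key purposes newer than the provider's previous hard-coded list can now be expressed directly. A minimal sketch, assuming an existing key ring referenced as google_kms_key_ring.example; HMAC_SHA256 is used only as one example of a MAC-compatible algorithm:

resource "google_kms_crypto_key" "mac_key" {
  name     = "my-mac-key"
  key_ring = google_kms_key_ring.example.id

  # Now accepted as a free-form string; previously limited to the enum
  # ENCRYPT_DECRYPT / ASYMMETRIC_SIGN / ASYMMETRIC_DECRYPT / MAC.
  purpose = "MAC"

  version_template {
    algorithm = "HMAC_SHA256"
  }
}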
async: !ruby/object:Provider::Terraform::PollAsync - check_response_func_existence: PollCheckForExistence - check_response_func_absence: PollCheckForAbsence + check_response_func_existence: transport_tpg.PollCheckForExistence + check_response_func_absence: transport_tpg.PollCheckForAbsence target_occurrences: 20 actions: ['create', 'update', 'delete'] id_format: '{{name}}' diff --git a/mmv1/products/networkservices/Gateway.yaml b/mmv1/products/networkservices/Gateway.yaml index 90e478e8f81f..a4c0d763cbdf 100644 --- a/mmv1/products/networkservices/Gateway.yaml +++ b/mmv1/products/networkservices/Gateway.yaml @@ -61,6 +61,48 @@ examples: primary_resource_id: 'default' vars: resource_name: 'my-gateway' + - !ruby/object:Provider::Terraform::Examples + min_version: beta + name: 'network_services_gateway_secure_web_proxy' + primary_resource_id: 'default' + vars: + certificate_name: 'my-certificate' + network_name: 'my-network' + subnetwork_name: 'my-subnetwork-name' + proxy_only_subnetwork_name: 'my-proxy-only-subnetwork' + policy_name: 'my-policy-name' + policy_rule_name: 'my-policyrule-name' + gateway_name_1: 'my-gateway1' + scope_1: 'my-default-scope1' + ignore_read_extra: + - 'delete_swg_autogen_router_on_destroy' + - !ruby/object:Provider::Terraform::Examples + min_version: beta + name: 'network_services_gateway_multiple_swp_same_network' + primary_resource_id: 'default' + vars: + certificate_name: 'my-certificate' + network_name: 'my-network' + subnetwork_name: 'my-subnetwork-name' + proxy_only_subnetwork_name: 'my-proxy-only-subnetwork' + policy_name: 'my-policy-name' + policy_rule_name: 'my-policyrule-name' + gateway_name_1: 'my-gateway1' + gateway_name_2: 'my-gateway2' + scope_1: 'my-default-scope1' + scope_2: 'my-default-scope2' + ignore_read_extra: + - 'delete_swg_autogen_router_on_destroy' +virtual_fields: + - !ruby/object:Api::Type::Boolean + name: 'delete_swg_autogen_router_on_destroy' + default_value: false + description: | + When deleting a gateway of type 'SECURE_WEB_GATEWAY', this boolean option will also delete auto generated router by the gateway creation. + If there is no other gateway of type 'SECURE_WEB_GATEWAY' remaining for that region and network it will be deleted. +custom_code: !ruby/object:Provider::Terraform::CustomCode + post_delete: 'templates/terraform/post_delete/network_services_gateway.go.erb' + constants: 'templates/terraform/constants/network_services_gateway.go.erb' parameters: - !ruby/object:Api::Type::String name: 'name' @@ -120,6 +162,7 @@ properties: - !ruby/object:Api::Type::String name: 'scope' required: true + immutable: true description: | Immutable. Scope determines how configuration across multiple Gateway instances are merged. The configuration for multiple Gateway instances with the same scope will be merged as presented as @@ -130,3 +173,39 @@ properties: description: | A fully-qualified ServerTLSPolicy URL reference. Specifies how TLS traffic is terminated. If empty, TLS termination is disabled. + - !ruby/object:Api::Type::Array + name: 'addresses' + immutable: true + description: | + Zero or one IPv4-address on which the Gateway will receive the traffic. When no address is provided, + an IP from the subnetwork is allocated This field only applies to gateways of type 'SECURE_WEB_GATEWAY'. + Gateways of type 'OPEN_MESH' listen on 0.0.0.0. + item_type: Api::Type::String + - !ruby/object:Api::Type::String + name: 'subnetwork' + immutable: true + description: | + The relative resource name identifying the subnetwork in which this SWG is allocated. 
+ For example: `projects/*/regions/us-central1/subnetworks/network-1`. + Currently, this field is specific to gateways of type 'SECURE_WEB_GATEWAY. + - !ruby/object:Api::Type::String + name: 'network' + immutable: true + description: | + The relative resource name identifying the VPC network that is using this configuration. + For example: `projects/*/global/networks/network-1`. + Currently, this field is specific to gateways of type 'SECURE_WEB_GATEWAY'. + - !ruby/object:Api::Type::String + name: 'gatewaySecurityPolicy' + immutable: true + description: | + A fully-qualified GatewaySecurityPolicy URL reference. Defines how a server should apply security policy to inbound (VM to Proxy) initiated connections. + For example: `projects/*/locations/*/gatewaySecurityPolicies/swg-policy`. + This policy is specific to gateways of type 'SECURE_WEB_GATEWAY'. + - !ruby/object:Api::Type::Array + name: 'certificateUrls' + immutable: true + description: | + A fully-qualified Certificates URL reference. The proxy presents a Certificate (selected based on SNI) when establishing a TLS connection. + This feature only applies to gateways of type 'SECURE_WEB_GATEWAY'. + item_type: Api::Type::String diff --git a/mmv1/products/pubsub/Schema.yaml b/mmv1/products/pubsub/Schema.yaml index 68203f4981da..4da0d8608bc2 100644 --- a/mmv1/products/pubsub/Schema.yaml +++ b/mmv1/products/pubsub/Schema.yaml @@ -24,8 +24,8 @@ references: !ruby/object:Api::Resource::ReferenceLinks base_url: projects/{{project}}/schemas create_url: projects/{{project}}/schemas?schemaId={{name}} async: !ruby/object:Provider::Terraform::PollAsync - check_response_func_existence: PollCheckForExistence - check_response_func_absence: PollCheckForAbsence + check_response_func_existence: transport_tpg.PollCheckForExistence + check_response_func_absence: transport_tpg.PollCheckForAbsence target_occurrences: 10 actions: ['delete'] examples: diff --git a/mmv1/products/pubsub/Subscription.yaml b/mmv1/products/pubsub/Subscription.yaml index f668bf9acd35..5cb4fe561fa2 100644 --- a/mmv1/products/pubsub/Subscription.yaml +++ b/mmv1/products/pubsub/Subscription.yaml @@ -26,7 +26,7 @@ update_verb: :PATCH update_mask: true update_url: projects/{{project}}/subscriptions/{{name}} async: !ruby/object:Provider::Terraform::PollAsync - check_response_func_existence: PollCheckForExistence + check_response_func_existence: transport_tpg.PollCheckForExistence actions: ['create'] suppress_error: true examples: diff --git a/mmv1/products/pubsub/Topic.yaml b/mmv1/products/pubsub/Topic.yaml index 744ac09ecddb..6bb9290a058f 100644 --- a/mmv1/products/pubsub/Topic.yaml +++ b/mmv1/products/pubsub/Topic.yaml @@ -38,7 +38,7 @@ docs: !ruby/object:Provider::Terraform::Docs # resource until it exists and the negative cached result goes away. # Context: hashicorp/terraform-provider-google#4993 async: !ruby/object:Provider::Terraform::PollAsync - check_response_func_existence: PollCheckForExistence + check_response_func_existence: transport_tpg.PollCheckForExistence actions: ['create'] suppress_error: true error_retry_predicates: ['transport_tpg.PubsubTopicProjectNotReady'] @@ -130,11 +130,9 @@ properties: The value of this field will be _deleted-schema_ if the schema has been deleted. required: true - immutable: true - !ruby/object:Api::Type::Enum name: 'encoding' description: The encoding of messages validated against schema. 
- immutable: true default_value: :ENCODING_UNSPECIFIED values: - :ENCODING_UNSPECIFIED diff --git a/mmv1/products/storage/HmacKey.yaml b/mmv1/products/storage/HmacKey.yaml index 824e843f6ff7..589b8a8ab11b 100644 --- a/mmv1/products/storage/HmacKey.yaml +++ b/mmv1/products/storage/HmacKey.yaml @@ -30,7 +30,7 @@ description: | id_format: 'projects/{{project}}/hmacKeys/{{access_id}}' import_format: ['projects/{{project}}/hmacKeys/{{access_id}}'] async: !ruby/object:Provider::Terraform::PollAsync - check_response_func_existence: PollCheckForExistence + check_response_func_existence: transport_tpg.PollCheckForExistence actions: ['create'] skip_sweeper: true examples: diff --git a/mmv1/products/tags/TagKey.yaml b/mmv1/products/tags/TagKey.yaml index d58d6f9e610d..2604c6acd375 100644 --- a/mmv1/products/tags/TagKey.yaml +++ b/mmv1/products/tags/TagKey.yaml @@ -55,6 +55,7 @@ properties: Input only. The resource name of the new TagKey's parent. Must be of the form organizations/{org_id} or projects/{project_id_or_number}. immutable: true required: true + diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' - !ruby/object:Api::Type::String name: shortName description: | diff --git a/mmv1/products/vertexai/FeaturestoreEntitytypeFeature.yaml b/mmv1/products/vertexai/FeaturestoreEntitytypeFeature.yaml index 83d0417513b6..4152ab0e6327 100644 --- a/mmv1/products/vertexai/FeaturestoreEntitytypeFeature.yaml +++ b/mmv1/products/vertexai/FeaturestoreEntitytypeFeature.yaml @@ -66,6 +66,9 @@ examples: custom_code: !ruby/object:Provider::Terraform::CustomCode pre_create: templates/terraform/constants/vertex_ai_featurestore_entitytype_feature.go.erb pre_delete: templates/terraform/constants/vertex_ai_featurestore_entitytype_feature.go.erb + custom_import: templates/terraform/custom_import/vertex_ai_featurestore_entitytype_feature.go.erb + extra_schema_entry: templates/terraform/extra_schema_entry/vertex_ai_featurestore_entitytype_feature.go.erb + encoder: templates/terraform/encoders/vertex_ai_featurestore_entitytype_feature.go.erb parameters: - !ruby/object:Api::Type::String name: entitytype diff --git a/mmv1/products/vertexai/IndexEndpoint.yaml b/mmv1/products/vertexai/IndexEndpoint.yaml new file mode 100644 index 000000000000..9fa5710d210d --- /dev/null +++ b/mmv1/products/vertexai/IndexEndpoint.yaml @@ -0,0 +1,99 @@ +# Copyright 2023 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +--- !ruby/object:Api::Resource +name: IndexEndpoint +base_url: projects/{{project}}/locations/{{region}}/indexEndpoints +create_url: projects/{{project}}/locations/{{region}}/indexEndpoints +self_link: projects/{{project}}/locations/{{region}}/indexEndpoints/{{name}} +update_verb: :PATCH +update_mask: true +create_verb: :POST +references: !ruby/object:Api::Resource::ReferenceLinks + api: https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.indexEndpoints/ +async: !ruby/object:Api::OpAsync + actions: + - create + - delete + operation: !ruby/object:Api::OpAsync::Operation + path: 'name' + base_url: '{{op_id}}' + wait_ms: 1000 + result: !ruby/object:Api::OpAsync::Result + path: 'response' + resource_inside_response: true + status: !ruby/object:Api::OpAsync::Status + path: 'done' + complete: True + allowed: + - True + - False + error: !ruby/object:Api::OpAsync::Error + path: 'error' + message: 'message' +description: |- + An endpoint indexes are deployed into. An index endpoint can have multiple deployed indexes. +autogen_async: false +examples: + - !ruby/object:Provider::Terraform::Examples + name: "vertex_ai_index_endpoint" + primary_resource_id: "index_endpoint" + vars: + address_name: "address-name" + network_name: "network-name" + test_vars_overrides: + network_name: 'BootstrapSharedTestNetwork(t, "vertex-ai-index-endpoint")' +parameters: + - !ruby/object:Api::Type::String + name: region + description: The region of the index endpoint. eg us-central1 + url_param_only: true + immutable: true +properties: + # Intentionally deployedIndexes[] is not included because it's an output-only field and another terraform resource will manage a deployed index. + - !ruby/object:Api::Type::String + name: 'name' + description: The resource name of the Index. + output: true + custom_flatten: templates/terraform/custom_flatten/name_from_self_link.erb + - !ruby/object:Api::Type::String + name: 'displayName' + description: The display name of the Index. The name can be up to 128 characters long and can consist of any UTF-8 characters. + required: true + - !ruby/object:Api::Type::String + name: 'description' + description: The description of the Index. + - !ruby/object:Api::Type::String + name: 'etag' + description: Used to perform consistent read-modify-write updates. + output: true + ignore_read: true + - !ruby/object:Api::Type::KeyValuePairs + name: 'labels' + description: The labels with user-defined metadata to organize your Indexes. + - !ruby/object:Api::Type::String + name: 'createTime' + output: true + description: The timestamp of when the Index was created in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. + - !ruby/object:Api::Type::String + name: 'updateTime' + output: true + description: The timestamp of when the Index was last updated in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. + - !ruby/object:Api::Type::String + name: network + description: |- + The full name of the Google Compute Engine [network](https://cloud.google.com//compute/docs/networks-and-firewalls#networks) to which the index endpoint should be peered. + Private services access must already be configured for the network. If left unspecified, the index endpoint is not peered with any network. + [Format](https://cloud.google.com/compute/docs/reference/rest/v1/networks/insert): `projects/{project}/global/networks/{network}`. + Where `{project}` is a project number, as in `12345`, and `{network}` is network name. 
+ immutable: true \ No newline at end of file diff --git a/mmv1/products/vertexai/MetadataStore.yaml b/mmv1/products/vertexai/MetadataStore.yaml index acd98267558f..07299e8e0df5 100644 --- a/mmv1/products/vertexai/MetadataStore.yaml +++ b/mmv1/products/vertexai/MetadataStore.yaml @@ -27,6 +27,8 @@ async: !ruby/object:Api::OpAsync path: 'name' base_url: '{{op_id}}' wait_ms: 1000 + timeouts: !ruby/object:Api::Timeouts + insert_minutes: 40 result: !ruby/object:Api::OpAsync::Result path: 'response' resource_inside_response: true diff --git a/mmv1/products/vmwareengine/Network.yaml b/mmv1/products/vmwareengine/Network.yaml new file mode 100644 index 000000000000..16c4dee0f92f --- /dev/null +++ b/mmv1/products/vmwareengine/Network.yaml @@ -0,0 +1,127 @@ +# Copyright 2023 Google Inc. +# Licensed under the Apache License, Version 2.0 (the License); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +!ruby/object:Api::Resource +name: "Network" +min_version: beta +base_url: "projects/{{project}}/locations/{{location}}/vmwareEngineNetworks" +self_link: "projects/{{project}}/locations/{{location}}/vmwareEngineNetworks/{{name}}" +create_url: "projects/{{project}}/locations/{{location}}/vmwareEngineNetworks?vmwareEngineNetworkId={{name}}" +update_mask: true +update_verb: :PATCH +references: !ruby/object:Api::Resource::ReferenceLinks + api: "https://cloud.google.com/vmware-engine/docs/reference/rest/v1/projects.locations.vmwareEngineNetworks" +description: | + Provides connectivity for VMware Engine private clouds. +async: !ruby/object:Api::OpAsync + operation: !ruby/object:Api::OpAsync::Operation + path: "name" + base_url: "{{op_id}}" + wait_ms: 1000 + result: !ruby/object:Api::OpAsync::Result + path: "response" + status: !ruby/object:Api::OpAsync::Status + path: "done" + complete: true + allowed: + - true + - false + error: !ruby/object:Api::OpAsync::Error + path: "error" + message: "message" + +import_format: + ["projects/{{project}}/locations/{{location}}/vmwareEngineNetworks/{{name}}"] +autogen_async: true +examples: + - !ruby/object:Provider::Terraform::Examples + name: "vmware_engine_network_legacy" + pull_external: true + min_version: beta + primary_resource_id: "vmw-engine-network" + vars: + proj_id: "vmw-proj" + test_env_vars: + location: :REGION + org_id: :ORG_ID + billing_account: :BILLING_ACCT +parameters: + - !ruby/object:Api::Type::String + name: "location" + immutable: true + required: true + url_param_only: true + description: | + The location where the VMwareEngineNetwork should reside. + - !ruby/object:Api::Type::String + name: "name" + immutable: true + required: true + url_param_only: true + description: | + The ID of the VMwareEngineNetwork. +properties: + - !ruby/object:Api::Type::String + name: "description" + description: | + User-provided description for this VMware Engine network. + - !ruby/object:Api::Type::Array + name: "vpcNetworks" + description: | + VMware Engine service VPC networks that provide connectivity from a private cloud to customer projects, + the internet, and other Google Cloud services. 
+ output: true + item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::Enum + name: "type" + description: | + Type of VPC network (INTRANET, INTERNET, or GOOGLE_CLOUD) + output: true + values: + - :INTRANET + - :INTERNET + - :GOOGLE_CLOUD + + - !ruby/object:Api::Type::String + name: "network" + output: true + description: | + The relative resource name of the service VPC network this VMware Engine network is attached to. + For example: projects/123123/global/networks/my-network + - !ruby/object:Api::Type::Enum + name: "state" + description: | + State of the VMware Engine network. + output: true + values: + - :CREATING + - :ACTIVE + - :UPDATING + - :DELETING + + - !ruby/object:Api::Type::Enum + name: "type" + required: true + immutable: true + description: | + VMware Engine network type. + values: + - :LEGACY + + - !ruby/object:Api::Type::String + name: "uid" + output: true + description: | + System-generated unique identifier for the resource. diff --git a/mmv1/products/vmwareengine/product.yaml b/mmv1/products/vmwareengine/product.yaml new file mode 100644 index 000000000000..5d6e08562005 --- /dev/null +++ b/mmv1/products/vmwareengine/product.yaml @@ -0,0 +1,26 @@ +# Copyright 2023 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- !ruby/object:Api::Product +name: Vmwareengine +display_name: Cloud VMware Engine +versions: + - !ruby/object:Api::Product::Version + name: beta + base_url: https://vmwareengine.googleapis.com/v1/ +scopes: + - https://www.googleapis.com/auth/cloud-platform +apis_required: + - !ruby/object:Api::Product::ApiReference + name: VMwareEngine API + url: https://console.cloud.google.com/apis/library/vmwareengine.googleapis.com/ diff --git a/mmv1/products/vpcaccess/Connector.yaml b/mmv1/products/vpcaccess/Connector.yaml index ed5a9c6be1d8..c2034fc2bf3a 100644 --- a/mmv1/products/vpcaccess/Connector.yaml +++ b/mmv1/products/vpcaccess/Connector.yaml @@ -136,6 +136,12 @@ properties: The fully qualified name of this VPC connector output: true ignore_read: true + - !ruby/object:Api::Type::Array + name: connectedProjects + description: | + List of projects using the connector. + output: true + item_type: Api::Type::String - !ruby/object:Api::Type::NestedObject name: 'subnet' immutable: true diff --git a/mmv1/products/workstations/WorkstationConfig.yaml b/mmv1/products/workstations/WorkstationConfig.yaml index b81fac6d3307..f0d1fa929559 100644 --- a/mmv1/products/workstations/WorkstationConfig.yaml +++ b/mmv1/products/workstations/WorkstationConfig.yaml @@ -268,7 +268,6 @@ properties: description: | Directories to persist across workstation sessions. default_from_api: true - immutable: true item_type: !ruby/object:Api::Type::NestedObject properties: - !ruby/object:Api::Type::String @@ -280,41 +279,39 @@ properties: - !ruby/object:Api::Type::NestedObject name: 'gcePd' description: | - PersistentDirectory backed by a Compute Engine regional persistent disk. 
+ A directory to persist across workstation sessions, backed by a Compute Engine regional persistent disk. Can only be updated if not empty during creation. default_from_api: true - immutable: true properties: - !ruby/object:Api::Type::String name: 'fsType' description: | - Type of file system that the disk should be formatted with. The workstation image must support this file system type. Must be empty if sourceSnapshot is set. + Type of file system that the disk should be formatted with. The workstation image must support this file system type. Must be empty if `sourceSnapshot` is set. Defaults to `ext4`. default_from_api: true immutable: true - !ruby/object:Api::Type::String name: 'diskType' description: | - Type of the disk to use. + The type of the persistent disk for the home directory. Defaults to `pd-standard`. default_from_api: true immutable: true - !ruby/object:Api::Type::Integer name: 'sizeGb' description: |- - Size of the disk in GB. Must be empty if sourceSnapshot is set. + The GB capacity of a persistent home directory for each workstation created with this configuration. Must be empty if `sourceSnapshot` is set. + Valid values are `10`, `50`, `100`, `200`, `500`, or `1000`. Defaults to `200`. If less than `200` GB, the `diskType` must be `pd-balanced` or `pd-ssd`. default_from_api: true immutable: true - !ruby/object:Api::Type::Enum name: 'reclaimPolicy' description: | - What should happen to the disk after the workstation is deleted. Defaults to DELETE. - immutable: true + Whether the persistent disk should be deleted when the workstation is deleted. Valid values are `DELETE` and `RETAIN`. Defaults to `DELETE`. values: - :DELETE - :RETAIN - !ruby/object:Api::Type::String name: 'sourceSnapshot' description: | - The snapshot to use as the source for the disk. This can be the snapshot's `self_link`, `id`, or a string in the format of `projects/{project}/global/snapshots/{snapshot}`. If set, sizeGb and fsType must be empty. - immutable: true + Name of the snapshot to use as the source for the disk. This can be the snapshot's `self_link`, `id`, or a string in the format of `projects/{project}/global/snapshots/{snapshot}`. If set, `sizeGb` and `fsType` must be empty. Can only be updated if it has an existing value. # TODO(esu): Add conflicting fields once complex lists are supported. - !ruby/object:Api::Type::NestedObject name: 'container' diff --git a/mmv1/provider/core.rb b/mmv1/provider/core.rb index f013e8b0b9f5..c6b33c0e92d3 100644 --- a/mmv1/provider/core.rb +++ b/mmv1/provider/core.rb @@ -143,6 +143,7 @@ def copy_file_list(output_folder, files) FileUtils.copy_entry source, target_file + add_hashicorp_copyright_header(output_folder, target) if File.extname(target) == '.go' replace_import_path(output_folder, target) if File.extname(target) == '.go' end end.map(&:join) @@ -190,12 +191,99 @@ def compile_file_list(output_folder, files, file_template, pwd = Dir.pwd) Google::LOGGER.debug "Compiling #{source} => #{target}" file_template.generate(pwd, source, target, self) + add_hashicorp_copyright_header(output_folder, target) replace_import_path(output_folder, target) end end.map(&:join) Dir.chdir pwd end + def add_hashicorp_copyright_header(output_folder, target) + unless expected_output_folder?(output_folder) + Google::LOGGER.info "Unexpected output folder (#{output_folder}) detected " \ + 'when deciding to add HashiCorp copyright headers. 
' \ + 'Watch out for unexpected changes to copied files' + end + # only add copyright headers when generating TPG and TPGB + return unless output_folder.end_with?('terraform-provider-google') || + output_folder.end_with?('terraform-provider-google-beta') + + # Prevent adding copyright header to files with paths or names matching the strings below + # NOTE: these entries need to match the content of the .copywrite.hcl file originally + # created in https://github.com/GoogleCloudPlatform/magic-modules/pull/7336 + # The test-fixtures folder is not included here as it's copied as a whole, + # not file by file (see common~copy.yaml) + ignored_folders = [ + '.release/', + '.changelog/', + 'examples/', + 'scripts/', + 'META.d/' + ] + ignored_files = [ + 'go.mod', + '.goreleaser.yml', + '.golangci.yml', + 'terraform-registry-manifest.json' + ] + should_add_header = true + ignored_folders.each do |folder| + # folder will be path leading to file + next unless target.start_with? folder + + Google::LOGGER.debug 'Not adding HashiCorp copyright headers in ' \ + "ignored folder #{folder} : #{target}" + should_add_header = false + end + return unless should_add_header + + ignored_files.each do |file| + # file will be the filename and extension, with no preceding path + next unless target.end_with? file + + Google::LOGGER.debug 'Not adding HashiCorp copyright headers to ' \ + "ignored file #{file} : #{target}" + should_add_header = false + end + return unless should_add_header + + Google::LOGGER.debug "Adding HashiCorp copyright header to : #{target}" + data = File.read("#{output_folder}/#{target}") + + copyright_header = ['Copyright (c) HashiCorp, Inc.', 'SPDX-License-Identifier: MPL-2.0'] + lang = language_from_filename(target) + + # Some file types we don't want to add headers to + # e.g. .sh where headers are functional + # Also, this guards against new filetypes being added and triggering build errors + return unless lang != :unsupported + + # File is not ignored and is appropriate file type to add header to + header = comment_block(copyright_header, lang) + File.write("#{output_folder}/#{target}", header) + + File.write("#{output_folder}/#{target}", data, mode: 'a') # append mode + end + + def expected_output_folder?(output_folder) + expected_folders = %w[ + terraform-provider-google + terraform-provider-google-beta + terraform-next + terraform-google-conversion + tfplan2cai + ] + folder_name = output_folder.split('/')[-1] # Possible issue with Windows OS + is_expected = false + expected_folders.each do |folder| + next unless folder_name == folder + + is_expected = true + break + end + is_expected + end + def replace_import_path(output_folder, target) return unless @target_version_name != 'ga' @@ -379,5 +467,35 @@ def effective_copyright_year(out_file) end Time.now.year end + + # Adapted from the method used in templating + # See: mmv1/compile/core.rb + def comment_block(text, lang) + case lang + when :ruby, :python, :yaml, :git, :gemfile + header = text.map { |t| t&.empty? ? '#' : "# #{t}" } + when :go + header = text.map { |t| t&.empty? ? 
'//' : "// #{t}" } + else + raise "Unknown language for comment: #{lang}" + end + + header_string = header.join("\n") + "#{header_string}\n" # add trailing newline to returned value + end + + def language_from_filename(filename) + extension = filename.split('.')[-1] + case extension + when 'go' + :go + when 'rb' + :ruby + when 'yaml', 'yml' + :yaml + else + :unsupported + end + end end end diff --git a/mmv1/provider/file_template.rb b/mmv1/provider/file_template.rb index e35f8fb9b48a..33845a370a56 100644 --- a/mmv1/provider/file_template.rb +++ b/mmv1/provider/file_template.rb @@ -56,6 +56,7 @@ def generate(pwd, template, path, provider) end ctx.local_variable_set('pwd', pwd) + ctx.local_variable_set('hc_downstream', provider.generating_hashicorp_repo?) # check if the parent folder exists, and make it if not parent_path = File.dirname(path) diff --git a/mmv1/provider/terraform.rb b/mmv1/provider/terraform.rb index 5be740e1ee85..623cf5d4985f 100644 --- a/mmv1/provider/terraform.rb +++ b/mmv1/provider/terraform.rb @@ -31,6 +31,13 @@ class Terraform < Provider::AbstractCore include Provider::Terraform::SubTemplate include Google::GolangUtils + def generating_hashicorp_repo? + # The default Provider is used to generate TPG and TPGB in HashiCorp-owned repos. + # The compiler deviates from the default behaviour with a -f flag to produce + # non-HashiCorp downstreams. + true + end + # ProductFileTemplate with Terraform specific fields class TerraformProductFileTemplate < Provider::ProductFileTemplate # The async object used for making operations. @@ -187,10 +194,13 @@ def folder_name(version) # GCP Resource on Terraform. def generate_resource(pwd, data, generate_code, generate_docs) if generate_code - FileUtils.mkpath folder_name(data.version) + # @api.api_name is the service folder name + product_name = @api.api_name + target_folder = File.join(folder_name(data.version), 'services', product_name) + FileUtils.mkpath target_folder data.generate(pwd, '/templates/terraform/resource.erb', - "#{folder_name(data.version)}/resource_#{full_resource_name(data)}.go", + "#{target_folder}/resource_#{full_resource_name(data)}.go", self) end @@ -244,16 +254,18 @@ def generate_resource_sweepers(pwd, data) def generate_operation(pwd, output_folder, _types) return if @api.objects.select(&:autogen_async).empty? 
- product_name = @api.name.underscore + product_name = @api.api_name + product_name_underscore = @api.name.underscore data = build_object_data(pwd, @api.objects.first, output_folder, @target_version_name) data.object = @api.objects.select(&:autogen_async).first data.async = data.object.async - FileUtils.mkpath folder_name(data.version) + target_folder = File.join(folder_name(data.version), 'services', product_name) + FileUtils.mkpath target_folder data.generate(pwd, 'templates/terraform/operation.go.erb', - "#{folder_name(data.version)}/#{product_name}_operation.go", + "#{target_folder}/#{product_name_underscore}_operation.go", self) end @@ -263,10 +275,12 @@ def generate_iam_policy(pwd, data, generate_code, generate_docs) if generate_code \ && (!data.object.iam_policy.min_version \ || data.object.iam_policy.min_version >= data.version) - FileUtils.mkpath folder_name(data.version) + product_name = @api.api_name + target_folder = File.join(folder_name(data.version), 'services', product_name) + FileUtils.mkpath target_folder data.generate(pwd, 'templates/terraform/iam_policy.go.erb', - "#{folder_name(data.version)}/iam_#{full_resource_name(data)}.go", + "#{target_folder}/iam_#{full_resource_name(data)}.go", self) # Only generate test if testable examples exist. diff --git a/mmv1/provider/terraform/async.rb b/mmv1/provider/terraform/async.rb index 33b5b36b1b7b..91eab2866005 100644 --- a/mmv1/provider/terraform/async.rb +++ b/mmv1/provider/terraform/async.rb @@ -44,7 +44,8 @@ def validate super check :check_response_func_existence, type: String, required: true - check :check_response_func_absence, type: String, default: 'PollCheckForAbsence' + check :check_response_func_absence, type: String, + default: 'transport_tpg.PollCheckForAbsence' check :custom_poll_read, type: String check :suppress_error, type: :boolean, default: false check :target_occurrences, type: Integer, default: 1 diff --git a/mmv1/provider/terraform/common~compile.yaml b/mmv1/provider/terraform/common~compile.yaml index 827ddac295b0..e2e628c41520 100644 --- a/mmv1/provider/terraform/common~compile.yaml +++ b/mmv1/provider/terraform/common~compile.yaml @@ -107,4 +107,5 @@ 'go.mod': 'third_party/terraform/go.mod.erb' '.goreleaser.yml': 'third_party/terraform/.goreleaser.yml.erb' 'terraform-registry-manifest.json': 'third_party/terraform/terraform-registry-manifest.json.erb' -'.release/release-metadata.hcl': 'third_party/terraform/release-metadata.hcl.erb' \ No newline at end of file +'.release/release-metadata.hcl': 'third_party/terraform/release-metadata.hcl.erb' +'.copywrite.hcl': 'third_party/terraform/.copywrite.hcl.erb' \ No newline at end of file diff --git a/mmv1/provider/terraform/common~copy.yaml b/mmv1/provider/terraform/common~copy.yaml index 0f1034f39e33..34119677d956 100644 --- a/mmv1/provider/terraform/common~copy.yaml +++ b/mmv1/provider/terraform/common~copy.yaml @@ -16,6 +16,8 @@ dir = version_name == 'ga' ? 'google' : "google-#{version_name}" -%> +<% if generate_code -%> + # Copy all of the terraform resources that are still hand written into the specific service folder <% Dir["third_party/terraform/services/**"].each do |product_path| product_name = product_path.split('/')[-1] @@ -28,7 +30,6 @@ # Handwritten acceptance tests for autogenerated resources. # Adding them here allows updating the tests as part of a MM pull request. 
-<% if generate_code -%> <% Dir["third_party/terraform/tests/*.go"].each do |file_path| fname = file_path.split('/')[-1] -%> @@ -111,5 +112,5 @@ 'website/<%= fname -%>': 'third_party/terraform/website/<%= fname -%>' <% end -%> <% end -%> -'version': 'third_party/terraform/version' +'version/version.go': 'third_party/terraform/version/version.go' 'go.sum': 'third_party/terraform/go.sum' diff --git a/mmv1/provider/terraform/custom_code.rb b/mmv1/provider/terraform/custom_code.rb index 7b5417eef5e8..30d781b20199 100644 --- a/mmv1/provider/terraform/custom_code.rb +++ b/mmv1/provider/terraform/custom_code.rb @@ -102,6 +102,10 @@ class CustomCode < Api::Object # Just like the encoder, it is only used if object.input is # false. attr_reader :post_update + # This code replaces the entire contents of the Update call. It + # should be used for resources that don't have normal update + # semantics that cannot be supported well by other MM features. + attr_reader :custom_update # This code is run just before the Delete call happens. It's # useful to prepare an object for deletion, e.g. by detaching # a disk before deleting it. @@ -140,6 +144,7 @@ def validate check :pre_read, type: String check :pre_update, type: String check :post_update, type: String + check :custom_update, type: String check :pre_delete, type: String check :custom_import, type: String check :post_import, type: String diff --git a/mmv1/provider/terraform_kcc.rb b/mmv1/provider/terraform_kcc.rb index 87d56dd017c4..682f9c5bc640 100644 --- a/mmv1/provider/terraform_kcc.rb +++ b/mmv1/provider/terraform_kcc.rb @@ -43,6 +43,11 @@ class TerraformKCC < Provider::Terraform Ssl: 'SSL', Url: 'URL' }.freeze + def generating_hashicorp_repo? + # This code is not used when generating TPG/TPGB + false + end + def generate(output_folder, types, _product_path, _dump_yaml, generate_code, generate_docs) @base_url = @version.base_url generate_objects(output_folder, types, generate_code, generate_docs) diff --git a/mmv1/provider/terraform_oics.rb b/mmv1/provider/terraform_oics.rb index 2bf7924bab92..a4e1a6c16d7d 100644 --- a/mmv1/provider/terraform_oics.rb +++ b/mmv1/provider/terraform_oics.rb @@ -17,6 +17,11 @@ module Provider # Code generator for runnable Terraform examples that can be run via an # Open in Cloud Shell link. class TerraformOiCS < Provider::Terraform + def generating_hashicorp_repo? + # This code is not used when generating TPG/TPGB + false + end + # We don't want *any* static generation, so we override generate to only # generate objects. def generate(output_folder, types, _product_path, _dump_yaml, generate_code, generate_docs) diff --git a/mmv1/provider/terraform_validator.rb b/mmv1/provider/terraform_validator.rb index 733eafd01882..187fcff583ef 100644 --- a/mmv1/provider/terraform_validator.rb +++ b/mmv1/provider/terraform_validator.rb @@ -17,6 +17,11 @@ module Provider # Code generator for a library converting terraform state to gcp objects. class TerraformValidator < Provider::Terraform + def generating_hashicorp_repo? + # This code is not used when generating TPG/TPGB + false + end + def generate(output_folder, types, _product_path, _dump_yaml, generate_code, generate_docs) # Temporary shim to generate the missing resources directory. Can be removed # once the folder exists downstream. 
@@ -42,10 +47,14 @@ def generate_object(object, output_folder, version_name, generate_code, generate end def generate_resource(pwd, data, _generate_code, _generate_docs) - output_folder = data.output_folder product_name = data.object.__product.name.downcase + output_folder = File.join( + data.output_folder, + 'converters/google/resources/services', + product_name + ) object_name = data.object.name.underscore - target = "converters/google/resources/#{product_name}_#{object_name}.go" + target = "#{product_name}_#{object_name}.go" data.generate(pwd, 'templates/validator/resource_converter.go.erb', File.join(output_folder, target), @@ -145,35 +154,42 @@ def compile_common_files(output_folder, products, _common_compile_file) file_template ) - compile_file_list(output_folder, [ - ['converters/google/resources/compute_operation.go', - 'third_party/terraform/utils/compute_operation.go.erb'], - ['converters/google/resources/transport/config.go', - 'third_party/terraform/transport/config.go.erb'], - ['converters/google/resources/config_test_utils.go', - 'third_party/terraform/utils/config_test_utils.go.erb'], - ['converters/google/resources/iam.go', - 'third_party/terraform/utils/iam.go.erb'], - ['converters/google/resources/tpgiamresource/iam.go', - 'third_party/terraform/tpgiamresource/iam.go.erb'], - ['converters/google/resources/compute_instance_helpers.go', - 'third_party/terraform/utils/compute_instance_helpers.go.erb'], - ['converters/google/resources/transport/provider_handwritten_endpoint.go', - 'third_party/terraform/transport/provider_handwritten_endpoint.go.erb'], - ['converters/google/resources/resource_converters.go', - 'templates/validator/resource_converters.go.erb'], - ['converters/google/resources/mappers.go', - 'templates/validator/mappers/mappers.go.erb'], - ['converters/google/resources/iam_kms_key_ring.go', - 'third_party/terraform/utils/iam_kms_key_ring.go.erb'], - ['converters/google/resources/iam_kms_crypto_key.go', - 'third_party/terraform/utils/iam_kms_crypto_key.go.erb'], - ['converters/google/resources/metadata.go', - 'third_party/terraform/utils/metadata.go.erb'], - ['converters/google/resources/compute_instance.go', - 'third_party/validator/compute_instance.go.erb'] - ], - file_template) + compile_file_list( + output_folder, + [ + ['converters/google/resources/compute_operation.go', + 'third_party/terraform/utils/compute_operation.go.erb'], + ['converters/google/resources/services/compute/compute_operation.go', + 'third_party/terraform/services/compute/compute_operation.go.erb'], + ['converters/google/resources/transport/config.go', + 'third_party/terraform/transport/config.go.erb'], + ['converters/google/resources/config_test_utils.go', + 'third_party/terraform/utils/config_test_utils.go.erb'], + ['converters/google/resources/iam.go', + 'third_party/terraform/utils/iam.go.erb'], + ['converters/google/resources/tpgiamresource/iam.go', + 'third_party/terraform/tpgiamresource/iam.go.erb'], + ['converters/google/resources/tpgresource/common_diff_suppress.go', + 'third_party/terraform/tpgresource/common_diff_suppress.go.erb'], + ['converters/google/resources/services/compute/compute_instance_helpers.go', + 'third_party/terraform/services/compute/compute_instance_helpers.go.erb'], + ['converters/google/resources/transport/provider_handwritten_endpoint.go', + 'third_party/terraform/transport/provider_handwritten_endpoint.go.erb'], + ['converters/google/resources/resource_converters.go', + 'templates/validator/resource_converters.go.erb'], + 
['converters/google/resources/mappers.go', + 'templates/validator/mappers/mappers.go.erb'], + ['converters/google/resources/services/kms/iam_kms_key_ring.go', + 'third_party/terraform/services/kms/iam_kms_key_ring.go.erb'], + ['converters/google/resources/services/kms/iam_kms_crypto_key.go', + 'third_party/terraform/services/kms/iam_kms_crypto_key.go.erb'], + ['converters/google/resources/services/compute/metadata.go', + 'third_party/terraform/services/compute/metadata.go.erb'], + ['converters/google/resources/services/compute/compute_instance.go', + 'third_party/validator/compute_instance.go.erb'] + ], + file_template + ) end def copy_common_files(output_folder, generate_code, _generate_docs) @@ -191,12 +207,16 @@ def copy_common_files(output_folder, generate_code, _generate_docs) ) copy_file_list(output_folder, [ + ['converters/google/resources/tpgresource/constants.go', + 'third_party/validator/tpgresource/constants.go'], ['converters/google/resources/constants.go', 'third_party/validator/constants.go'], ['converters/google/resources/cai.go', 'third_party/validator/cai.go'], - ['converters/google/resources/cai_test.go', - 'third_party/validator/cai_test.go'], + ['converters/google/resources/tpgresource/cai.go', + 'third_party/validator/tpgresource/cai.go'], + ['converters/google/resources/tpgresource/cai_test.go', + 'third_party/validator/tpgresource/cai_test.go'], ['converters/google/resources/org_policy_policy.go', 'third_party/validator/org_policy_policy.go'], ['converters/google/resources/getconfig.go', @@ -207,6 +227,8 @@ def copy_common_files(output_folder, generate_code, _generate_docs) 'third_party/validator/getconfig_test.go'], ['converters/google/resources/transport/config_test_utils.go', 'third_party/terraform/transport/config_test_utils.go'], + ['converters/google/resources/tpgresource/json_map.go', + 'third_party/validator/tpgresource/json_map.go'], ['converters/google/resources/json_map.go', 'third_party/validator/json_map.go'], ['converters/google/resources/project.go', @@ -227,10 +249,12 @@ def copy_common_files(output_folder, generate_code, _generate_docs) 'third_party/validator/bigtable_cluster.go'], ['converters/google/resources/bigtable_instance.go', 'third_party/validator/bigtable_instance.go'], + ['converters/google/resources/tpgiamresource/iam_helpers.go', + 'third_party/validator/tpgiamresource/iam_helpers.go'], ['converters/google/resources/iam_helpers.go', 'third_party/validator/iam_helpers.go'], - ['converters/google/resources/iam_helpers_test.go', - 'third_party/validator/iam_helpers_test.go'], + ['converters/google/resources/tpgiamresource/iam_helpers_test.go', + 'third_party/validator/tpgiamresource/iam_helpers_test.go'], ['converters/google/resources/organization_iam.go', 'third_party/validator/organization_iam.go'], ['converters/google/resources/project_iam.go', @@ -245,18 +269,18 @@ def copy_common_files(output_folder, generate_code, _generate_docs) 'third_party/validator/container.go'], ['converters/google/resources/project_service.go', 'third_party/validator/project_service.go'], - ['converters/google/resources/monitoring_slo_helper.go', + ['converters/google/resources/services/monitoring/monitoring_slo_helper.go', 'third_party/validator/monitoring_slo_helper.go'], ['converters/google/resources/service_account.go', 'third_party/validator/service_account.go'], - ['converters/google/resources/image.go', - 'third_party/terraform/utils/image.go'], + ['converters/google/resources/services/compute/image.go', + 
'third_party/terraform/services/compute/image.go'], ['converters/google/resources/import.go', 'third_party/terraform/utils/import.go'], ['converters/google/resources/tpgresource/import.go', 'third_party/terraform/tpgresource/import.go'], - ['converters/google/resources/disk_type.go', - 'third_party/terraform/utils/disk_type.go'], + ['converters/google/resources/services/compute/disk_type.go', + 'third_party/terraform/services/compute/disk_type.go'], ['converters/google/resources/verify/validation.go', 'third_party/terraform/verify/validation.go'], ['converters/google/resources/validation.go', @@ -285,22 +309,22 @@ def copy_common_files(output_folder, generate_code, _generate_docs) 'third_party/terraform/utils/convert.go'], ['converters/google/resources/tpgresource/convert.go', 'third_party/terraform/tpgresource/convert.go'], - ['converters/google/resources/extract.go', - 'third_party/terraform/utils/extract.go'], + ['converters/google/resources/services/logging/extract.go', + 'third_party/terraform/services/logging/extract.go'], ['converters/google/resources/service_scope.go', 'third_party/terraform/utils/service_scope.go'], ['converters/google/resources/tpgresource/service_scope.go', 'third_party/terraform/tpgresource/service_scope.go'], - ['converters/google/resources/kms_utils.go', - 'third_party/terraform/utils/kms_utils.go'], + ['converters/google/resources/services/kms/kms_utils.go', + 'third_party/terraform/services/kms/kms_utils.go'], ['converters/google/resources/transport/batcher.go', 'third_party/terraform/transport/batcher.go'], ['converters/google/resources/transport/retry_utils.go', 'third_party/terraform/transport/retry_utils.go'], ['converters/google/resources/retry_utils.go', 'third_party/terraform/utils/retry_utils.go'], - ['converters/google/resources/source_repo_utils.go', - 'third_party/terraform/utils/source_repo_utils.go'], + ['converters/google/resources/services/sourcerepo/source_repo_utils.go', + 'third_party/terraform/services/sourcerepo/source_repo_utils.go'], ['converters/google/resources/transport/retry_transport.go', 'third_party/terraform/transport/retry_transport.go'], ['converters/google/resources/transport/transport.go', @@ -309,10 +333,12 @@ def copy_common_files(output_folder, generate_code, _generate_docs) 'third_party/terraform/transport/error_retry_predicates.go'], ['converters/google/resources/error_retry_predicates.go', 'third_party/terraform/utils/error_retry_predicates.go'], - ['converters/google/resources/pubsub_utils.go', - 'third_party/terraform/utils/pubsub_utils.go'], + ['converters/google/resources/services/pubsub/pubsub_utils.go', + 'third_party/terraform/services/pubsub/pubsub_utils.go'], ['converters/google/resources/sqladmin_operation.go', 'third_party/terraform/utils/sqladmin_operation.go'], + ['converters/google/resources/services/sql/sqladmin_operation.go', + 'third_party/terraform/services/sql/sqladmin_operation.go'], ['converters/google/resources/verify/path_or_contents.go', 'third_party/terraform/verify/path_or_contents.go'], ['converters/google/resources/path_or_contents.go', @@ -329,8 +355,8 @@ def copy_common_files(output_folder, generate_code, _generate_docs) 'third_party/terraform/utils/iam_folder.go'], ['converters/google/resources/iam_project.go', 'third_party/terraform/utils/iam_project.go'], - ['converters/google/resources/privateca_utils.go', - 'third_party/terraform/utils/privateca_utils.go'], + ['converters/google/resources/services/privateca/privateca_utils.go', + 
'third_party/terraform/services/privateca/privateca_utils.go'], ['converters/google/resources/utils.go', 'third_party/terraform/utils/utils.go'], ['converters/google/resources/tpgresource/utils.go', @@ -398,19 +424,24 @@ def generate_iam_policy(pwd, data, generate_code, _generate_docs) return unless generate_code return if data.object.iam_policy.exclude_validator - output_folder = File.join(data.output_folder, 'converters/google/resources') name = data.object.filename_override || data.object.name.underscore - product_name = data.product.name.underscore + product_name = data.product.name.downcase + product_name_underscore = product_name.underscore + output_folder = File.join( + data.output_folder, + 'converters/google/resources/services', + product_name + ) FileUtils.mkpath output_folder - target = "#{product_name}_#{name}_iam.go" + target = "#{product_name_underscore}_#{name}_iam.go" data.generate(pwd, 'templates/validator/resource_converter_iam.go.erb', File.join(output_folder, target), self) replace_import_path(output_folder, target) - target = "iam_#{product_name}_#{name}.go" + target = "iam_#{product_name_underscore}_#{name}.go" data.generate(pwd, 'templates/terraform/iam_policy.go.erb', File.join(output_folder, target), diff --git a/mmv1/spec/hashicorp_copyright_spec.rb b/mmv1/spec/hashicorp_copyright_spec.rb new file mode 100644 index 000000000000..eae3af9f00b3 --- /dev/null +++ b/mmv1/spec/hashicorp_copyright_spec.rb @@ -0,0 +1,57 @@ +# Copyright 2023 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +require 'spec_helper' +require 'provider/core' + +describe 'Provider::Core.expected_output_folder?' 
do + # Inputs for tests + config = 'foo' + api = Api::Compiler.new(File.read('spec/data/good-file.yaml')).run + version_name = 'ga' + start_time = Time.now + + provider = Provider::Core.new(config, api, version_name, start_time) + + # rubocop:disable Layout/LineLength + it 'should identify `terraform-provider-google` as an expected output folder' do + path = '/User/PersonsName/go/src/github.com/hashicorp/terraform-provider-google' + expect(provider.expected_output_folder?(path)).to eq true + end + + it 'should identify `terraform-provider-google-beta` as an expected output folder' do + path = '/User/PersonsName/go/src/github.com/hashicorp/terraform-provider-google-beta' + expect(provider.expected_output_folder?(path)).to eq true + end + + it 'should identify `terraform-next` as an expected output folder' do + path = '/User/PersonsName/go/src/github.com/GoogleCloudPlatform/terraform-next' + expect(provider.expected_output_folder?(path)).to eq true + end + + it 'should identify `terraform-google-conversion` as an expected output folder' do + path = '/User/PersonsName/go/src/github.com/GoogleCloudPlatform/terraform-google-conversion' + expect(provider.expected_output_folder?(path)).to eq true + end + + it 'should identify suffixed versions of expected folder names as unexpected' do + path = '/User/PersonsName/go/src/github.com/hashicorp/terraform-provider-google-unexpected-suffix' + expect(provider.expected_output_folder?(path)).to eq false + end + + it 'should identify prefixed versions of expected folder names as unexpected' do + path = '/User/PersonsName/go/src/github.com/hashicorp/unexpected-prefix-terraform-provider-google' + expect(provider.expected_output_folder?(path)).to eq false + end + # rubocop:enable Layout/LineLength +end diff --git a/mmv1/templates/hashicorp_copyright_header.erb b/mmv1/templates/hashicorp_copyright_header.erb new file mode 100644 index 000000000000..7c130f6016c4 --- /dev/null +++ b/mmv1/templates/hashicorp_copyright_header.erb @@ -0,0 +1,2 @@ +Copyright (c) HashiCorp, Inc. 
+SPDX-License-Identifier: MPL-2.0 diff --git a/mmv1/templates/terraform/constants/datastream_stream.go.erb b/mmv1/templates/terraform/constants/datastream_stream.go.erb index 76c324046214..d81ec0d71de0 100644 --- a/mmv1/templates/terraform/constants/datastream_stream.go.erb +++ b/mmv1/templates/terraform/constants/datastream_stream.go.erb @@ -18,7 +18,7 @@ func resourceDatastreamStreamCustomDiffFunc(diff tpgresource.TerraformResourceDi oldState := old.(string) newState := new.(string) - if isNewResource(diff) { + if tpgresource.IsNewResource(diff) { if newState != "NOT_STARTED" && newState != "RUNNING" { return fmt.Errorf("`desired_state` can only be set to `NOT_STARTED` or `RUNNING` when creating a new Stream") } diff --git a/mmv1/templates/terraform/constants/disk.erb b/mmv1/templates/terraform/constants/disk.erb index 96f66f30c817..25a44ca4e622 100644 --- a/mmv1/templates/terraform/constants/disk.erb +++ b/mmv1/templates/terraform/constants/disk.erb @@ -1,3 +1,22 @@ + +<% unless compiler == "terraformvalidator-codegen" -%> +// diffsupress for hyperdisk provisioned_iops +func hyperDiskIopsUpdateDiffSupress(_ context.Context, d *schema.ResourceDiff, meta interface{}) error { + if !strings.Contains(d.Get("type").(string), "hyperdisk") { + resourceSchema := ResourceComputeDisk().Schema + for field := range resourceSchema { + if field == "provisioned_iops" && d.HasChange(field) { + if err := d.ForceNew(field); err != nil { + return err + } + } + } + } + + return nil +} +<% end -%> + // diffsupress for beta and to check change in source_disk attribute func sourceDiskDiffSupress(_, old, new string, _ *schema.ResourceData) bool { s1 := strings.TrimPrefix(old, "https://www.googleapis.com/compute/beta") @@ -9,7 +28,7 @@ func sourceDiskDiffSupress(_, old, new string, _ *schema.ResourceData) bool { } // Is the new disk size smaller than the old one? -func isDiskShrinkage(_ context.Context, old, new, _ interface{}) bool { +func IsDiskShrinkage(_ context.Context, old, new, _ interface{}) bool { // It's okay to remove size entirely. if old == nil || new == nil { return false @@ -107,7 +126,7 @@ func DiskImageDiffSuppress(_, old, new string, _ *schema.ResourceData) bool { func diskImageProjectNameEquals(project1, project2 string) bool { // Convert short project name to full name // For instance, centos => centos-cloud - fullProjectName, ok := imageMap[project2] + fullProjectName, ok := ImageMap[project2] if ok { project2 = fullProjectName } diff --git a/mmv1/templates/terraform/constants/firewall.erb b/mmv1/templates/terraform/constants/firewall.erb index d464bfb1b1a6..ca97156f569d 100644 --- a/mmv1/templates/terraform/constants/firewall.erb +++ b/mmv1/templates/terraform/constants/firewall.erb @@ -31,10 +31,6 @@ func resourceComputeFirewallRuleHash(v interface{}) int { return tpgresource.Hashcode(buf.String()) } -func compareCaseInsensitive(k, old, new string, d *schema.ResourceData) bool { - return strings.ToLower(old) == strings.ToLower(new) -} - func diffSuppressEnableLogging(k, old, new string, d *schema.ResourceData) bool { if k == "log_config.#" { if new == "0" && d.Get("enable_logging").(bool) { diff --git a/mmv1/templates/terraform/constants/network_services_gateway.go.erb b/mmv1/templates/terraform/constants/network_services_gateway.go.erb new file mode 100644 index 000000000000..83af49eab04d --- /dev/null +++ b/mmv1/templates/terraform/constants/network_services_gateway.go.erb @@ -0,0 +1,128 @@ +<%# The license inside this block applies to this file + # Copyright 2023 Google Inc. 
+ # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. +-%> +// Checks if there is another gateway under the same location. +func gatewaysSameLocation(d *schema.ResourceData, config *transport_tpg.Config, billingProject, userAgent string) ([]interface{}, error) { + log.Print("[DEBUG] Looking for gateways under the same location.") + var gateways []interface{} + + gatewaysUrl, err := tpgresource.ReplaceVars(d, config, "{{NetworkServicesBasePath}}projects/{{project}}/locations/{{location}}/gateways") + if err != nil { + return gateways, err + } + + resp, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: gatewaysUrl, + UserAgent: userAgent, + }) + if err != nil { + return gateways, err + } + + data, ok := resp["gateways"] + if !ok || data == nil { + log.Print("[DEBUG] No gateways under the same location found.") + return gateways, nil + } + + gateways = data.([]interface{}) + + log.Printf("[DEBUG] There are still gateways under the same location: %#v", gateways) + + return gateways, nil +} + +// Checks if the given list of gateways contains a gateway of type SECURE_WEB_GATEWAY. +func isLastSWGGateway(gateways []interface{}, network string) bool { + log.Print("[DEBUG] Checking if this is the last gateway of type SECURE_WEB_GATEWAY.") + for _, itemRaw := range gateways { + if itemRaw == nil { + continue + } + item := itemRaw.(map[string]interface{}) + + gType, ok := item["type"] + if !ok || gType == nil { + continue + } + + gNetwork, ok := item["network"] + if !ok || gNetwork == nil { + continue + } + + if gType.(string) == "SECURE_WEB_GATEWAY" && gNetwork.(string) == network { + return false + } + } + + log.Print("[DEBUG] There is no other gateway of type SECURE_WEB_GATEWAY.") + // no gateways of type SWG found. + return true +} + +//Deletes the swg-autogen-router if the current gateway being deleted is the type of swg so there is no other gateway using it. 
+func deleteSWGAutoGenRouter(d *schema.ResourceData, config *transport_tpg.Config, billingProject, userAgent string) error { + log.Printf("[DEBUG] Searching the network id by name %q.", d.Get("network")) + + networkPath := fmt.Sprintf("{{ComputeBasePath}}%s", d.Get("network")) + networkUrl, err := tpgresource.ReplaceVars(d, config, networkPath) + if err != nil { + return err + } + + resp, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: networkUrl, + UserAgent: userAgent, + }) + if err != nil { + return err + } + + // The name of swg auto generated router is in the following format: swg-autogen-router-{NETWORK-ID} + routerId := fmt.Sprintf("swg-autogen-router-%s", resp["id"]) + log.Printf("[DEBUG] Deleting the auto generated router %q.", routerId) + + routerPath := fmt.Sprintf("{{ComputeBasePath}}projects/{{project}}/regions/{{location}}/routers/%s", routerId) + routerUrl, err := tpgresource.ReplaceVars(d, config, routerPath) + if err != nil { + return err + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "DELETE", + Project: billingProject, + RawURL: routerUrl, + UserAgent: userAgent, + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsSwgAutogenRouterRetryable}, + }) + if err != nil { + if transport_tpg.IsGoogleApiErrorWithCode(err, 404) { + // The swg auto gen router may have already been deleted. + // No further action needed. + return nil + } + + return err + } + + return nil +} diff --git a/mmv1/templates/terraform/constants/privateca_certificate_authority.go.erb b/mmv1/templates/terraform/constants/privateca_certificate_authority.go.erb index 161f90315082..97e7a44e3700 100644 --- a/mmv1/templates/terraform/constants/privateca_certificate_authority.go.erb +++ b/mmv1/templates/terraform/constants/privateca_certificate_authority.go.erb @@ -2,7 +2,7 @@ func resourcePrivateCaCACustomDiff(_ context.Context, diff *schema.ResourceDiff, if diff.HasChange("desired_state") { _, new := diff.GetChange("desired_state") - if isNewResource(diff) { + if tpgresource.IsNewResource(diff) { if diff.Get("type").(string) == "SUBORDINATE" { return fmt.Errorf("`desired_state` can not be specified when creating a SUBORDINATE CA") } @@ -16,9 +16,4 @@ func resourcePrivateCaCACustomDiff(_ context.Context, diff *schema.ResourceDiff, } } return nil -} - -func isNewResource(diff tpgresource.TerraformResourceDiff) bool { - name := diff.Get("name") - return name.(string) == "" -} +} \ No newline at end of file diff --git a/mmv1/templates/terraform/constants/tpu_node.erb b/mmv1/templates/terraform/constants/tpu_node.erb index 0c28ce031af2..716bdc9adb7e 100644 --- a/mmv1/templates/terraform/constants/tpu_node.erb +++ b/mmv1/templates/terraform/constants/tpu_node.erb @@ -36,7 +36,7 @@ func tpuNodeCustomizeDiff(_ context.Context, diff *schema.ResourceDiff, meta int if networkLinkRegex.MatchString(old.(string)) { parts := networkLinkRegex.FindStringSubmatch(old.(string)) - i, err := StringToFixed64(parts[1]) + i, err := tpgresource.StringToFixed64(parts[1]) if err == nil { if project.ProjectNumber == i { if err := diff.SetNew("network", old); err != nil { diff --git a/mmv1/templates/terraform/custom_check_destroy/firestore_field.go.erb b/mmv1/templates/terraform/custom_check_destroy/firestore_field.go.erb index e3aaa562100c..1d41cfd4f205 100644 --- a/mmv1/templates/terraform/custom_check_destroy/firestore_field.go.erb +++ 
b/mmv1/templates/terraform/custom_check_destroy/firestore_field.go.erb @@ -4,7 +4,7 @@ config := GoogleProviderConfig(t) -url, err := replaceVarsForTest(config, rs, "{{FirestoreBasePath}}projects/{{project}}/databases/{{database}}/collectionGroups/{{collection}}/fields/{{field}}") +url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{FirestoreBasePath}}projects/{{project}}/databases/{{database}}/collectionGroups/{{collection}}/fields/{{field}}") if err != nil { return err } diff --git a/mmv1/templates/terraform/custom_delete/firestore_field_delete.go.erb b/mmv1/templates/terraform/custom_delete/firestore_field_delete.go.erb index 9a54f7b6cde1..7e78f7c53e0d 100644 --- a/mmv1/templates/terraform/custom_delete/firestore_field_delete.go.erb +++ b/mmv1/templates/terraform/custom_delete/firestore_field_delete.go.erb @@ -4,13 +4,13 @@ log.Printf("[DEBUG] Deleting Field %q", d.Id()) billingProject := "" -project, err := getProject(d, config) +project, err := tpgresource.GetProject(d, config) if err != nil { return fmt.Errorf("Error fetching project for App: %s", err) } billingProject = project -url, err := ReplaceVars(d, config, "{{FirestoreBasePath}}{{name}}") +url, err := tpgresource.ReplaceVars(d, config, "{{FirestoreBasePath}}{{name}}") if err != nil { return err } @@ -24,7 +24,7 @@ if err != nil { // err == nil indicates that the billing_project value was found -if bp, err := getBillingProject(d, config); err == nil { +if bp, err := tpgresource.GetBillingProject(d, config); err == nil { billingProject = bp } diff --git a/mmv1/templates/terraform/custom_delete/per_instance_config.go.erb b/mmv1/templates/terraform/custom_delete/per_instance_config.go.erb index 7bd004a7873b..ce069c2ff827 100644 --- a/mmv1/templates/terraform/custom_delete/per_instance_config.go.erb +++ b/mmv1/templates/terraform/custom_delete/per_instance_config.go.erb @@ -82,7 +82,7 @@ } // PerInstanceConfig goes into "DELETING" state while the instance is actually deleted - err = PollingWaitTime(resourceComputePerInstanceConfigPollRead(d, meta), PollCheckInstanceConfigDeleted, "Deleting PerInstanceConfig", d.Timeout(schema.TimeoutDelete), 1) + err = transport_tpg.PollingWaitTime(resourceComputePerInstanceConfigPollRead(d, meta), PollCheckInstanceConfigDeleted, "Deleting PerInstanceConfig", d.Timeout(schema.TimeoutDelete), 1) if err != nil { return fmt.Errorf("Error waiting for delete on PerInstanceConfig %q: %s", d.Id(), err) } diff --git a/mmv1/templates/terraform/custom_delete/region_per_instance_config.go.erb b/mmv1/templates/terraform/custom_delete/region_per_instance_config.go.erb index 5210a6d76135..72caafa23452 100644 --- a/mmv1/templates/terraform/custom_delete/region_per_instance_config.go.erb +++ b/mmv1/templates/terraform/custom_delete/region_per_instance_config.go.erb @@ -83,7 +83,7 @@ } // RegionPerInstanceConfig goes into "DELETING" state while the instance is actually deleted - err = PollingWaitTime(resourceComputeRegionPerInstanceConfigPollRead(d, meta), PollCheckInstanceConfigDeleted, "Deleting RegionPerInstanceConfig", d.Timeout(schema.TimeoutDelete), 1) + err = transport_tpg.PollingWaitTime(resourceComputeRegionPerInstanceConfigPollRead(d, meta), PollCheckInstanceConfigDeleted, "Deleting RegionPerInstanceConfig", d.Timeout(schema.TimeoutDelete), 1) if err != nil { return fmt.Errorf("Error waiting for delete on RegionPerInstanceConfig %q: %s", d.Id(), err) } diff --git a/mmv1/templates/terraform/custom_expand/binaryauthorization_attestors.erb 
b/mmv1/templates/terraform/custom_expand/binaryauthorization_attestors.erb index dbe29c3e2ba7..ca30fc413f74 100644 --- a/mmv1/templates/terraform/custom_expand/binaryauthorization_attestors.erb +++ b/mmv1/templates/terraform/custom_expand/binaryauthorization_attestors.erb @@ -30,7 +30,7 @@ func expand<%= prefix -%><%= titlelize_property(property) -%>(v interface{}, d t } } - return convertAndMapStringArr(v.(*schema.Set).List(), func(s string) string { + return tpgresource.ConvertAndMapStringArr(v.(*schema.Set).List(), func(s string) string { if r.MatchString(s) { return s } diff --git a/mmv1/templates/terraform/custom_expand/firestore_field_index_config.go.erb b/mmv1/templates/terraform/custom_expand/firestore_field_index_config.go.erb index 2873ccc4b101..7b0a5363c4f8 100644 --- a/mmv1/templates/terraform/custom_expand/firestore_field_index_config.go.erb +++ b/mmv1/templates/terraform/custom_expand/firestore_field_index_config.go.erb @@ -30,15 +30,15 @@ func expand<%= prefix -%><%= titlelize_property(property) -%>(v interface{}, d t transformed := make(map[string]interface{}) transformedField := make(map[string]interface{}) - if val := reflect.ValueOf(original["query_scope"]); val.IsValid() && !isEmptyValue(val) { + if val := reflect.ValueOf(original["query_scope"]); val.IsValid() && !tpgresource.IsEmptyValue(val) { transformed["queryScope"] = original["query_scope"] } - if val := reflect.ValueOf(original["order"]); val.IsValid() && !isEmptyValue(val) { + if val := reflect.ValueOf(original["order"]); val.IsValid() && !tpgresource.IsEmptyValue(val) { transformedField["order"] = original["order"] } - if val := reflect.ValueOf(original["array_config"]); val.IsValid() && !isEmptyValue(val) { + if val := reflect.ValueOf(original["array_config"]); val.IsValid() && !tpgresource.IsEmptyValue(val) { transformedField["arrayConfig"] = original["array_config"] } transformed["fields"] = [1]interface{}{ diff --git a/mmv1/templates/terraform/custom_flatten/default_if_empty.erb b/mmv1/templates/terraform/custom_flatten/default_if_empty.erb index 5a5dd7fc566e..d78b3fc4b48c 100644 --- a/mmv1/templates/terraform/custom_flatten/default_if_empty.erb +++ b/mmv1/templates/terraform/custom_flatten/default_if_empty.erb @@ -19,7 +19,7 @@ func flatten<%= prefix -%><%= titlelize_property(property) -%>(v interface{}, d <%- if property.is_a?(Api::Type::Integer) -%> // Handles the string fixed64 format if strVal, ok := v.(string); ok { - if intVal, err := StringToFixed64(strVal); err == nil { + if intVal, err := tpgresource.StringToFixed64(strVal); err == nil { return intVal } // let terraform core handle it if we can't convert the string to an int. 
} diff --git a/mmv1/templates/terraform/custom_flatten/guard_self_link_array.go.erb b/mmv1/templates/terraform/custom_flatten/guard_self_link_array.go.erb index 0b9d6edddd10..0e925671ba7f 100644 --- a/mmv1/templates/terraform/custom_flatten/guard_self_link_array.go.erb +++ b/mmv1/templates/terraform/custom_flatten/guard_self_link_array.go.erb @@ -17,5 +17,5 @@ func flatten<%= prefix -%><%= titlelize_property(property) -%>(v interface{}, d if v == nil { return v } - return convertAndMapStringArr(v.([]interface{}), tpgresource.ConvertSelfLinkToV1) + return tpgresource.ConvertAndMapStringArr(v.([]interface{}), tpgresource.ConvertSelfLinkToV1) } diff --git a/mmv1/templates/terraform/custom_flatten/nat_rules_ip_set.erb b/mmv1/templates/terraform/custom_flatten/nat_rules_ip_set.erb index 98a4c0f34444..f943b72efa9c 100644 --- a/mmv1/templates/terraform/custom_flatten/nat_rules_ip_set.erb +++ b/mmv1/templates/terraform/custom_flatten/nat_rules_ip_set.erb @@ -2,5 +2,5 @@ func flatten<%= prefix -%><%= titlelize_property(property) -%>(v interface{}, d if v == nil { return v } - return schema.NewSet(computeRouterNatIPsHash, tpgresource.ConvertStringArrToInterface(convertAndMapStringArr(v.([]interface{}), tpgresource.ConvertSelfLinkToV1))) + return schema.NewSet(computeRouterNatIPsHash, tpgresource.ConvertStringArrToInterface(tpgresource.ConvertAndMapStringArr(v.([]interface{}), tpgresource.ConvertSelfLinkToV1))) } \ No newline at end of file diff --git a/mmv1/templates/terraform/custom_flatten/os_config_patch_deployment_recurring_schedule_time_of_day.go.erb b/mmv1/templates/terraform/custom_flatten/os_config_patch_deployment_recurring_schedule_time_of_day.go.erb index 12d07898a058..5eda6def7d10 100644 --- a/mmv1/templates/terraform/custom_flatten/os_config_patch_deployment_recurring_schedule_time_of_day.go.erb +++ b/mmv1/templates/terraform/custom_flatten/os_config_patch_deployment_recurring_schedule_time_of_day.go.erb @@ -32,7 +32,7 @@ func flatten<%= prefix -%><%= titlelize_property(property) -%>(v interface{}, d func flattenOSConfigPatchDeploymentRecurringScheduleTimeOfDayHours(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { // Handles the string fixed64 format if strVal, ok := v.(string); ok { - if intVal, err := StringToFixed64(strVal); err == nil { + if intVal, err := tpgresource.StringToFixed64(strVal); err == nil { return intVal } } @@ -49,7 +49,7 @@ func flattenOSConfigPatchDeploymentRecurringScheduleTimeOfDayHours(v interface{} func flattenOSConfigPatchDeploymentRecurringScheduleTimeOfDayMinutes(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { // Handles the string fixed64 format if strVal, ok := v.(string); ok { - if intVal, err := StringToFixed64(strVal); err == nil { + if intVal, err := tpgresource.StringToFixed64(strVal); err == nil { return intVal } } @@ -66,7 +66,7 @@ func flattenOSConfigPatchDeploymentRecurringScheduleTimeOfDayMinutes(v interface func flattenOSConfigPatchDeploymentRecurringScheduleTimeOfDaySeconds(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { // Handles the string fixed64 format if strVal, ok := v.(string); ok { - if intVal, err := StringToFixed64(strVal); err == nil { + if intVal, err := tpgresource.StringToFixed64(strVal); err == nil { return intVal } } @@ -83,7 +83,7 @@ func flattenOSConfigPatchDeploymentRecurringScheduleTimeOfDaySeconds(v interface func flattenOSConfigPatchDeploymentRecurringScheduleTimeOfDayNanos(v interface{}, d *schema.ResourceData, config 
*transport_tpg.Config) interface{} { // Handles the string fixed64 format if strVal, ok := v.(string); ok { - if intVal, err := StringToFixed64(strVal); err == nil { + if intVal, err := tpgresource.StringToFixed64(strVal); err == nil { return intVal } } } diff --git a/mmv1/templates/terraform/custom_import/access_context_manager_service_perimeter_egress_policy.go.erb b/mmv1/templates/terraform/custom_import/access_context_manager_service_perimeter_egress_policy.go.erb index b2b39af2a8b1..269d4812fbdd 100644 --- a/mmv1/templates/terraform/custom_import/access_context_manager_service_perimeter_egress_policy.go.erb +++ b/mmv1/templates/terraform/custom_import/access_context_manager_service_perimeter_egress_policy.go.erb @@ -15,15 +15,12 @@ config := meta.(*transport_tpg.Config) // current import_formats can't import fields with forward slashes in their value - parts, err := tpgresource.GetImportIdQualifiers([]string{"accessPolicies/(?P<accessPolicy>[^/]+)/servicePerimeters/(?P<perimeter>[^/]+)/(?P<resource>.+)"}, d, config, d.Id()) + parts, err := tpgresource.GetImportIdQualifiers([]string{"accessPolicies/(?P<accessPolicy>[^/]+)/servicePerimeters/(?P<perimeter>[^/]+)"}, d, config, d.Id()) if err != nil { return nil, err } - if err := d.Set("egress_policy_name", fmt.Sprintf("accessPolicies/%s/servicePerimeters/%s", parts["accessPolicy"], parts["perimeter"])); err != nil { - return nil, fmt.Errorf("Error setting egress_policy_name: %s", err) - } - if err := d.Set("resource", parts["resource"]); err != nil { - return nil, fmt.Errorf("Error setting resource: %s", err) + if err := d.Set("perimeter", fmt.Sprintf("accessPolicies/%s/servicePerimeters/%s", parts["accessPolicy"], parts["perimeter"])); err != nil { + return nil, fmt.Errorf("Error setting perimeter: %s", err) } return []*schema.ResourceData{d}, nil diff --git a/mmv1/templates/terraform/custom_import/access_context_manager_service_perimeter_ingress_policy.go.erb b/mmv1/templates/terraform/custom_import/access_context_manager_service_perimeter_ingress_policy.go.erb index e01799b4305a..269d4812fbdd 100644 --- a/mmv1/templates/terraform/custom_import/access_context_manager_service_perimeter_ingress_policy.go.erb +++ b/mmv1/templates/terraform/custom_import/access_context_manager_service_perimeter_ingress_policy.go.erb @@ -15,15 +15,12 @@ config := meta.(*transport_tpg.Config) // current import_formats can't import fields with forward slashes in their value - parts, err := tpgresource.GetImportIdQualifiers([]string{"accessPolicies/(?P<accessPolicy>[^/]+)/servicePerimeters/(?P<perimeter>[^/]+)/(?P<resource>.+)"}, d, config, d.Id()) + parts, err := tpgresource.GetImportIdQualifiers([]string{"accessPolicies/(?P<accessPolicy>[^/]+)/servicePerimeters/(?P<perimeter>[^/]+)"}, d, config, d.Id()) if err != nil { return nil, err } - if err := d.Set("ingress_policy_name", fmt.Sprintf("accessPolicies/%s/servicePerimeters/%s", parts["accessPolicy"], parts["perimeter"])); err != nil { - return nil, fmt.Errorf("Error setting ingress_policy_name: %s", err) - } - if err := d.Set("resource", parts["resource"]); err != nil { - return nil, fmt.Errorf("Error setting resource: %s", err) + if err := d.Set("perimeter", fmt.Sprintf("accessPolicies/%s/servicePerimeters/%s", parts["accessPolicy"], parts["perimeter"])); err != nil { + return nil, fmt.Errorf("Error setting perimeter: %s", err) } return []*schema.ResourceData{d}, nil diff --git a/mmv1/templates/terraform/custom_import/vertex_ai_featurestore_entitytype_feature.go.erb b/mmv1/templates/terraform/custom_import/vertex_ai_featurestore_entitytype_feature.go.erb new file mode 100644 index 000000000000..f0e2a695facc --- /dev/null +++
b/mmv1/templates/terraform/custom_import/vertex_ai_featurestore_entitytype_feature.go.erb @@ -0,0 +1,22 @@ +config := meta.(*transport_tpg.Config) +if err := tpgresource.ParseImportId([]string{ + "(?P<entitytype>.+)/features/(?P<name>[^/]+)", +}, d, config); err != nil { + return nil, err +} + +// Replace import id for the resource id +id, err := tpgresource.ReplaceVars(d, config, "{{entitytype}}/features/{{name}}") +if err != nil { + return nil, fmt.Errorf("Error constructing id: %s", err) +} +d.SetId(id) + +entitytype := d.Get("entitytype").(string) + +re := regexp.MustCompile("^projects/(.+)/locations/(.+)/featurestores/(.+)/entityTypes/(.+)$") +if parts := re.FindStringSubmatch(entitytype); parts != nil { + d.Set("region", parts[2]) +} + +return []*schema.ResourceData{d}, nil diff --git a/mmv1/templates/terraform/custom_update/.gitkeep b/mmv1/templates/terraform/custom_update/.gitkeep new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/mmv1/templates/terraform/decoders/avoid_meaningless_project_update.erb b/mmv1/templates/terraform/decoders/avoid_meaningless_project_update.erb index daf61cb8fe06..533c57ce50fb 100644 --- a/mmv1/templates/terraform/decoders/avoid_meaningless_project_update.erb +++ b/mmv1/templates/terraform/decoders/avoid_meaningless_project_update.erb @@ -34,7 +34,7 @@ // If it's a project ID var oldProjId int64 var newProjId int64 - if oldVal, err := StringToFixed64(old); err == nil { + if oldVal, err := tpgresource.StringToFixed64(old); err == nil { log.Printf("[DEBUG] The old value was a real number: %d", oldVal) oldProjId = oldVal } else { @@ -44,7 +44,7 @@ } oldProjId = pOld.ProjectNumber } - if newVal, err := StringToFixed64(new); err == nil { + if newVal, err := tpgresource.StringToFixed64(new); err == nil { log.Printf("[DEBUG] The new value was a real number: %d", newVal) newProjId = newVal } else { diff --git a/mmv1/templates/terraform/decoders/dlp_stored_info_type.go.erb b/mmv1/templates/terraform/decoders/dlp_stored_info_type.go.erb index 42e6bbe5a82a..65407883b9b8 100644 --- a/mmv1/templates/terraform/decoders/dlp_stored_info_type.go.erb +++ b/mmv1/templates/terraform/decoders/dlp_stored_info_type.go.erb @@ -16,4 +16,9 @@ config := configRaw.(map[string]interface{}) // Name comes back on the top level, so set here config["name"] = name +configMeta := meta.(*transport_tpg.Config) +if err := d.Set("stored_info_type_id", flattenDataLossPreventionStoredInfoTypeName(res["name"], d, configMeta)); err != nil { + return nil, fmt.Errorf("Error reading StoredInfoType: %s", err) +} + return config, nil diff --git a/mmv1/templates/terraform/decoders/dlp_template_id.go.erb b/mmv1/templates/terraform/decoders/dlp_template_id.go.erb new file mode 100644 index 000000000000..dad4696aca4e --- /dev/null +++ b/mmv1/templates/terraform/decoders/dlp_template_id.go.erb @@ -0,0 +1,19 @@ +<%# The license inside this block applies to this file. + # Copyright 2023 Google Inc. + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License.
+-%> +config := meta.(*transport_tpg.Config) +if err := d.Set("template_id", flattenDataLossPrevention<%= object.name -%>Name(res["name"], d, config)); err != nil { + return nil, fmt.Errorf("Error reading <%= object.name -%>: %s", err) +} +return res, nil diff --git a/mmv1/templates/terraform/encoders/disk.erb b/mmv1/templates/terraform/encoders/disk.erb index 86637638e7d1..55dd63d40757 100644 --- a/mmv1/templates/terraform/encoders/disk.erb +++ b/mmv1/templates/terraform/encoders/disk.erb @@ -42,7 +42,7 @@ end if v, ok := d.GetOk("image"); ok { log.Printf("[DEBUG] Resolving image name: %s", v.(string)) - imageUrl, err := resolveImage(config, project, v.(string), userAgent) + imageUrl, err := ResolveImage(config, project, v.(string), userAgent) if err != nil { return nil, fmt.Errorf( "Error resolving image name '%s': %s", diff --git a/mmv1/templates/terraform/encoders/dlp_stored_info_type.go.erb b/mmv1/templates/terraform/encoders/dlp_stored_info_type.go.erb index 7750fb8e12c7..03d85bbd04ce 100644 --- a/mmv1/templates/terraform/encoders/dlp_stored_info_type.go.erb +++ b/mmv1/templates/terraform/encoders/dlp_stored_info_type.go.erb @@ -14,4 +14,8 @@ -%> newObj := make(map[string]interface{}) newObj["config"] = obj + storedInfoTypeIdProp, ok := d.GetOk("stored_info_type_id") + if ok && storedInfoTypeIdProp != nil { + newObj["storedInfoTypeId"] = storedInfoTypeIdProp + } return newObj, nil diff --git a/mmv1/templates/terraform/encoders/location_from_region.go.erb b/mmv1/templates/terraform/encoders/location_from_region.go.erb index 068652c3b68d..b9c4d6c7be29 100644 --- a/mmv1/templates/terraform/encoders/location_from_region.go.erb +++ b/mmv1/templates/terraform/encoders/location_from_region.go.erb @@ -14,7 +14,7 @@ -%> config := meta.(*transport_tpg.Config) if _, ok := d.GetOk("location"); !ok { - location, err := getRegionFromSchema("region", "zone", d, config) + location, err := tpgresource.GetRegionFromSchema("region", "zone", d, config) if err != nil { return nil, fmt.Errorf("Cannot determine location: set in this resource, or set provider-level 'region' or 'zone'.") } diff --git a/mmv1/templates/terraform/encoders/vertex_ai_featurestore_entitytype_feature.go.erb b/mmv1/templates/terraform/encoders/vertex_ai_featurestore_entitytype_feature.go.erb new file mode 100644 index 000000000000..450c499ba865 --- /dev/null +++ b/mmv1/templates/terraform/encoders/vertex_ai_featurestore_entitytype_feature.go.erb @@ -0,0 +1,8 @@ +if v, ok := d.GetOk("entitytype"); ok { + re := regexp.MustCompile("^projects/(.+)/locations/(.+)/featurestores/(.+)/entityTypes/(.+)$") + if parts := re.FindStringSubmatch(v.(string)); parts != nil { + d.Set("region", parts[2]) + } +} + +return obj, nil diff --git a/mmv1/templates/terraform/encoders/wrap_object_with_template_id.go.erb b/mmv1/templates/terraform/encoders/wrap_object_with_template_id.go.erb new file mode 100644 index 000000000000..97c9513d6ad8 --- /dev/null +++ b/mmv1/templates/terraform/encoders/wrap_object_with_template_id.go.erb @@ -0,0 +1,21 @@ +<%# The license inside this block applies to this file. + # Copyright 2023 Google Inc. + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. 
+ # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. +-%> + newObj := make(map[string]interface{}) + newObj["<%= object.name.camelize(:lower) -%>"] = obj + templateIdProp, ok := d.GetOk("template_id") + if ok && templateIdProp != nil { + newObj["templateId"] = templateIdProp + } + return newObj, nil diff --git a/mmv1/templates/terraform/examples/apigee_instance_service_attachment_basic_test.tf.erb b/mmv1/templates/terraform/examples/apigee_instance_service_attachment_basic_test.tf.erb index c6cda891c090..0b477416ced7 100644 --- a/mmv1/templates/terraform/examples/apigee_instance_service_attachment_basic_test.tf.erb +++ b/mmv1/templates/terraform/examples/apigee_instance_service_attachment_basic_test.tf.erb @@ -156,5 +156,5 @@ resource "google_apigee_instance" "<%= ctx[:primary_resource_id] %>" { name = "tf-test%{random_suffix}" location = "us-central1" org_id = google_apigee_organization.apigee_org.id - consumer_accept_list = [123456, google_project.project.number] + consumer_accept_list = [google_project.project.number] } diff --git a/mmv1/templates/terraform/examples/base_configs/iam_test_file.go.erb b/mmv1/templates/terraform/examples/base_configs/iam_test_file.go.erb index 9be5a7527160..6b13f3aa9e7e 100644 --- a/mmv1/templates/terraform/examples/base_configs/iam_test_file.go.erb +++ b/mmv1/templates/terraform/examples/base_configs/iam_test_file.go.erb @@ -1,3 +1,7 @@ +<% if hc_downstream -%> +<%= lines(hashicorp_copyright_header(:go, pwd)) -%> +<% end -%> + <%= lines(autogen_notice(:go, pwd)) -%> package google diff --git a/mmv1/templates/terraform/examples/base_configs/test_body.go.erb b/mmv1/templates/terraform/examples/base_configs/test_body.go.erb index 8243b3a0792c..9d2daab180cf 100644 --- a/mmv1/templates/terraform/examples/base_configs/test_body.go.erb +++ b/mmv1/templates/terraform/examples/base_configs/test_body.go.erb @@ -1,3 +1,3 @@ - return Nprintf(` + return tpgresource.Nprintf(` <%= ctx[:content] -%> `, context) diff --git a/mmv1/templates/terraform/examples/base_configs/test_file.go.erb b/mmv1/templates/terraform/examples/base_configs/test_file.go.erb index 3f90b0d3331c..caad397715dc 100644 --- a/mmv1/templates/terraform/examples/base_configs/test_file.go.erb +++ b/mmv1/templates/terraform/examples/base_configs/test_file.go.erb @@ -1,3 +1,7 @@ +<% if hc_downstream -%> +<%= lines(hashicorp_copyright_header(:go, pwd)) -%> +<% end -%> + <%= lines(autogen_notice(:go, pwd)) -%> package google diff --git a/mmv1/templates/terraform/examples/certificate_manager_certificate_issuance_config.tf.erb b/mmv1/templates/terraform/examples/certificate_manager_certificate_issuance_config.tf.erb new file mode 100644 index 000000000000..e80ed7edc104 --- /dev/null +++ b/mmv1/templates/terraform/examples/certificate_manager_certificate_issuance_config.tf.erb @@ -0,0 +1,60 @@ +resource "google_certificate_manager_certificate_issuance_config" "<%= ctx[:primary_resource_id] %>" { + name = "issuanceconfigtestterraform" + description = "sample description for the certificate issuanceConfigs" + certificate_authority_config { + certificate_authority_service_config { + ca_pool = google_privateca_ca_pool.pool.id + } + } + lifetime = 
"1814400s" + rotation_window_percentage = 34 + key_algorithm = "ECDSA_P256" + labels = { "name": "wrench", "count": "3" } + + depends_on=[google_privateca_certificate_authority.ca_authority] +} + +resource "google_privateca_ca_pool" "pool" { + name = "<%= ctx[:vars]["pool_name"] %>" + location = "us-central1" + tier = "ENTERPRISE" +} + +resource "google_privateca_certificate_authority" "ca_authority" { + location = "us-central1" + pool = google_privateca_ca_pool.pool.name + certificate_authority_id = "<%= ctx[:vars]["ca_name"] %>" + config { + subject_config { + subject { + organization = "HashiCorp" + common_name = "my-certificate-authority" + } + subject_alt_name { + dns_names = ["hashicorp.com"] + } + } + x509_config { + ca_options { + is_ca = true + } + key_usage { + base_key_usage { + cert_sign = true + crl_sign = true + } + extended_key_usage { + server_auth = true + } + } + } + } + key_spec { + algorithm = "RSA_PKCS1_4096_SHA256" + } + + // Disable CA deletion related safe checks for easier cleanup. + deletion_protection = false + skip_grace_period = true + ignore_active_certificates_on_deletion = true +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/certificate_manager_dns_authorization_basic.tf.erb b/mmv1/templates/terraform/examples/certificate_manager_dns_authorization_basic.tf.erb index d4605eae30f2..c5abd47bc47b 100644 --- a/mmv1/templates/terraform/examples/certificate_manager_dns_authorization_basic.tf.erb +++ b/mmv1/templates/terraform/examples/certificate_manager_dns_authorization_basic.tf.erb @@ -1,7 +1,7 @@ resource "google_certificate_manager_dns_authorization" "<%= ctx[:primary_resource_id] %>" { name = "<%= ctx[:vars]['dns_auth_name'] %>" description = "The default dnss" - domain = "%{random_suffix}.hashicorptest.com" + domain = "<%= ctx[:vars]['subdomain'] %>.hashicorptest.com" } output "record_name_to_insert" { diff --git a/mmv1/templates/terraform/examples/cloudbuild_bitbucket_server_config.tf.erb b/mmv1/templates/terraform/examples/cloudbuild_bitbucket_server_config.tf.erb index b58ceb96a0de..bb1a7c4262f7 100644 --- a/mmv1/templates/terraform/examples/cloudbuild_bitbucket_server_config.tf.erb +++ b/mmv1/templates/terraform/examples/cloudbuild_bitbucket_server_config.tf.erb @@ -1,5 +1,5 @@ resource "google_cloudbuild_bitbucket_server_config" "<%= ctx[:primary_resource_id] %>" { - config_id = "mybbsconfig" + config_id = "<%= ctx[:vars]['config_id'] %>" location = "us-central1" host_uri = "https://bbs.com" secrets { diff --git a/mmv1/templates/terraform/examples/cloudbuild_bitbucket_server_config_peered_network.tf.erb b/mmv1/templates/terraform/examples/cloudbuild_bitbucket_server_config_peered_network.tf.erb index ff6d169311e2..416b54983ddc 100644 --- a/mmv1/templates/terraform/examples/cloudbuild_bitbucket_server_config_peered_network.tf.erb +++ b/mmv1/templates/terraform/examples/cloudbuild_bitbucket_server_config_peered_network.tf.erb @@ -26,7 +26,7 @@ resource "google_service_networking_connection" "default" { } resource "google_cloudbuild_bitbucket_server_config" "<%= ctx[:primary_resource_id] %>" { - config_id = "mybbsconfig" + config_id = "<%= ctx[:vars]['config_id'] %>" location = "us-central1" host_uri = "https://bbs.com" secrets { diff --git a/mmv1/templates/terraform/examples/cloudbuild_bitbucket_server_config_repositories.tf.erb b/mmv1/templates/terraform/examples/cloudbuild_bitbucket_server_config_repositories.tf.erb index 7fe70071e67e..b51f6d459399 100644 --- 
a/mmv1/templates/terraform/examples/cloudbuild_bitbucket_server_config_repositories.tf.erb +++ b/mmv1/templates/terraform/examples/cloudbuild_bitbucket_server_config_repositories.tf.erb @@ -1,5 +1,5 @@ resource "google_cloudbuild_bitbucket_server_config" "<%= ctx[:primary_resource_id] %>" { - config_id = "mybbsconfig" + config_id = "<%= ctx[:vars]['config_id'] %>" location = "us-central1" host_uri = "https://bbs.com" secrets { diff --git a/mmv1/templates/terraform/examples/dataform_repository.tf.erb b/mmv1/templates/terraform/examples/dataform_repository.tf.erb index 9f1705dce9d2..9db54244e401 100644 --- a/mmv1/templates/terraform/examples/dataform_repository.tf.erb +++ b/mmv1/templates/terraform/examples/dataform_repository.tf.erb @@ -28,4 +28,10 @@ resource "google_dataform_repository" "<%= ctx[:primary_resource_id] %>" { default_branch = "main" authentication_token_secret_version = google_secret_manager_secret_version.secret_version.id } + + workspace_compilation_overrides { + default_database = "database" + schema_suffix = "_suffix" + table_prefix = "prefix_" + } } diff --git a/mmv1/templates/terraform/examples/dataplex_datascan_basic_profile.tf.erb b/mmv1/templates/terraform/examples/dataplex_datascan_basic_profile.tf.erb new file mode 100644 index 000000000000..17560e0dd2aa --- /dev/null +++ b/mmv1/templates/terraform/examples/dataplex_datascan_basic_profile.tf.erb @@ -0,0 +1,21 @@ +resource "google_dataplex_datascan" "<%= ctx[:primary_resource_id] %>" { + location = "us-central1" + data_scan_id = "tf-test-datascan%{random_suffix}" + + data { + resource = "//bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare" + } + + execution_spec { + trigger { + on_demand {} + } + } + + data_profile_spec { + } + + project = "<%= ctx[:test_env_vars]['project_name'] %>" +} + + diff --git a/mmv1/templates/terraform/examples/dataplex_datascan_basic_quality.tf.erb b/mmv1/templates/terraform/examples/dataplex_datascan_basic_quality.tf.erb new file mode 100644 index 000000000000..0b4712f81e73 --- /dev/null +++ b/mmv1/templates/terraform/examples/dataplex_datascan_basic_quality.tf.erb @@ -0,0 +1,27 @@ +resource "google_dataplex_datascan" "<%= ctx[:primary_resource_id] %>" { + location = "us-central1" + data_scan_id = "tf-test-datascan%{random_suffix}" + + data { + resource = "//bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare" + } + + execution_spec { + trigger { + on_demand {} + } + } + + data_quality_spec { + rules { + dimension = "VALIDITY" + table_condition_expectation { + sql_expression = "COUNT(*) > 0" + } + } + } + + project = "<%= ctx[:test_env_vars]['project_name'] %>" +} + + diff --git a/mmv1/templates/terraform/examples/dataplex_datascan_full_profile.tf.erb b/mmv1/templates/terraform/examples/dataplex_datascan_full_profile.tf.erb new file mode 100644 index 000000000000..a40044211210 --- /dev/null +++ b/mmv1/templates/terraform/examples/dataplex_datascan_full_profile.tf.erb @@ -0,0 +1,30 @@ +resource "google_dataplex_datascan" "<%= ctx[:primary_resource_id] %>" { + location = "us-central1" + display_name = "Full Datascan Profile" + data_scan_id = "tf-test-datascan%{random_suffix}" + description = "Example resource - Full Datascan Profile" + labels = { + author = "billing" + } + + data { + resource = "//bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare" + } + + execution_spec { + trigger { + schedule { + cron = "TZ=America/New_York 1 1 * * *" + } + } + } + + data_profile_spec { + 
sampling_percent = 80 + row_filter = "word_count > 10" + } + + project = "<%= ctx[:test_env_vars]['project_name'] %>" +} + + diff --git a/mmv1/templates/terraform/examples/dataplex_datascan_full_quality.tf.erb b/mmv1/templates/terraform/examples/dataplex_datascan_full_quality.tf.erb new file mode 100644 index 000000000000..54a3f7c1acf9 --- /dev/null +++ b/mmv1/templates/terraform/examples/dataplex_datascan_full_quality.tf.erb @@ -0,0 +1,101 @@ +resource "google_dataplex_datascan" "<%= ctx[:primary_resource_id] %>" { + location = "us-central1" + display_name = "Full Datascan Quality" + data_scan_id = "tf-test-datascan%{random_suffix}" + description = "Example resource - Full Datascan Quality" + labels = { + author = "billing" + } + + data { + resource = "//bigquery.googleapis.com/projects/bigquery-public-data/datasets/austin_bikeshare/tables/bikeshare_stations" + } + + execution_spec { + trigger { + schedule { + cron = "TZ=America/New_York 1 1 * * *" + } + } + field = "modified_date" + } + + data_quality_spec { + sampling_percent = 5 + row_filter = "station_id > 1000" + rules { + column = "address" + dimension = "VALIDITY" + threshold = 0.99 + non_null_expectation {} + } + + rules { + column = "council_district" + dimension = "VALIDITY" + ignore_null = true + threshold = 0.9 + range_expectation { + min_value = 1 + max_value = 10 + strict_min_enabled = true + strict_max_enabled = false + } + } + + rules { + column = "power_type" + dimension = "VALIDITY" + ignore_null = false + regex_expectation { + regex = ".*solar.*" + } + } + + rules { + column = "property_type" + dimension = "VALIDITY" + ignore_null = false + set_expectation { + values = ["sidewalk", "parkland"] + } + } + + + rules { + column = "address" + dimension = "UNIQUENESS" + uniqueness_expectation {} + } + + rules { + column = "number_of_docks" + dimension = "VALIDITY" + statistic_range_expectation { + statistic = "MEAN" + min_value = 5 + max_value = 15 + strict_min_enabled = true + strict_max_enabled = true + } + } + + rules { + column = "footprint_length" + dimension = "VALIDITY" + row_condition_expectation { + sql_expression = "footprint_length > 0 AND footprint_length <= 10" + } + } + + rules { + dimension = "VALIDITY" + table_condition_expectation { + sql_expression = "COUNT(*) > 0" + } + } + } + + + project = "<%= ctx[:test_env_vars]['project_name'] %>" +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/dlp_deidentify_template_with_template_id.tf.erb b/mmv1/templates/terraform/examples/dlp_deidentify_template_with_template_id.tf.erb new file mode 100644 index 000000000000..db69427c18fb --- /dev/null +++ b/mmv1/templates/terraform/examples/dlp_deidentify_template_with_template_id.tf.erb @@ -0,0 +1,25 @@ +resource "google_data_loss_prevention_deidentify_template" "<%= ctx[:primary_resource_id] %>" { + parent = "projects/<%= ctx[:test_env_vars]['project'] %>" + template_id = "<%= ctx[:vars]['name'] %>" + + deidentify_config { + info_type_transformations { + transformations { + info_types { + name = "PHONE_NUMBER" + } + info_types { + name = "AGE" + } + + primitive_transformation { + replace_config { + new_value { + integer_value = 9 + } + } + } + } + } + } +} diff --git a/mmv1/templates/terraform/examples/dlp_inspect_template_with_template_id.tf.erb b/mmv1/templates/terraform/examples/dlp_inspect_template_with_template_id.tf.erb new file mode 100644 index 000000000000..1aa226b7120c --- /dev/null +++ b/mmv1/templates/terraform/examples/dlp_inspect_template_with_template_id.tf.erb @@ -0,0 +1,47 @@ 
+resource "google_data_loss_prevention_inspect_template" "<%= ctx[:primary_resource_id] %>" { + parent = "projects/<%= ctx[:test_env_vars]['project'] %>" + template_id = "<%= ctx[:vars]['name'] %>" + + inspect_config { + info_types { + name = "EMAIL_ADDRESS" + } + info_types { + name = "PERSON_NAME" + } + + min_likelihood = "UNLIKELY" + rule_set { + info_types { + name = "EMAIL_ADDRESS" + } + rules { + exclusion_rule { + regex { + pattern = ".+@example.com" + } + matching_type = "MATCHING_TYPE_FULL_MATCH" + } + } + } + + rule_set { + info_types { + name = "PERSON_NAME" + } + rules { + hotword_rule { + hotword_regex { + pattern = "patient" + } + proximity { + window_before = 50 + } + likelihood_adjustment { + fixed_likelihood = "VERY_LIKELY" + } + } + } + } + } +} diff --git a/mmv1/templates/terraform/examples/dlp_stored_info_type_with_id.tf.erb b/mmv1/templates/terraform/examples/dlp_stored_info_type_with_id.tf.erb new file mode 100644 index 000000000000..799db328d3db --- /dev/null +++ b/mmv1/templates/terraform/examples/dlp_stored_info_type_with_id.tf.erb @@ -0,0 +1,11 @@ +resource "google_data_loss_prevention_stored_info_type" "<%= ctx[:primary_resource_id] %>" { + parent = "projects/<%= ctx[:test_env_vars]['project'] %>" + description = "Description" + display_name = "Displayname" + stored_info_type_id = "<%= ctx[:vars]['name'] %>" + + regex { + pattern = "patient" + group_indexes = [2] + } +} diff --git a/mmv1/templates/terraform/examples/forwarding_rule_vpc_psc.tf.erb b/mmv1/templates/terraform/examples/forwarding_rule_vpc_psc.tf.erb index 65b15d614f31..c6561ddbd4b0 100644 --- a/mmv1/templates/terraform/examples/forwarding_rule_vpc_psc.tf.erb +++ b/mmv1/templates/terraform/examples/forwarding_rule_vpc_psc.tf.erb @@ -1,6 +1,5 @@ // Forwarding rule for VPC private service connect resource "google_compute_forwarding_rule" "<%= ctx[:primary_resource_id] %>" { - provider = google-beta name = "<%= ctx[:vars]['forwarding_rule_name'] %>" region = "us-central1" load_balancing_scheme = "" @@ -13,13 +12,11 @@ resource "google_compute_forwarding_rule" "<%= ctx[:primary_resource_id] %>" { // Consumer service endpoint resource "google_compute_network" "consumer_net" { - provider = google-beta name = "<%= ctx[:vars]['consumer_network_name'] %>" auto_create_subnetworks = false } resource "google_compute_subnetwork" "consumer_subnet" { - provider = google-beta name = "<%= ctx[:vars]['consumer_network_name'] %>" ip_cidr_range = "10.0.0.0/16" region = "us-central1" @@ -27,7 +24,6 @@ resource "google_compute_subnetwork" "consumer_subnet" { } resource "google_compute_address" "consumer_address" { - provider = google-beta name = "<%= ctx[:vars]['ip_name'] %>-1" region = "us-central1" subnetwork = google_compute_subnetwork.consumer_subnet.id @@ -38,13 +34,11 @@ resource "google_compute_address" "consumer_address" { // Producer service attachment resource "google_compute_network" "producer_net" { - provider = google-beta name = "<%= ctx[:vars]['producer_network_name'] %>" auto_create_subnetworks = false } resource "google_compute_subnetwork" "producer_subnet" { - provider = google-beta name = "<%= ctx[:vars]['producer_network_name'] %>" ip_cidr_range = "10.0.0.0/16" region = "us-central1" @@ -52,7 +46,6 @@ resource "google_compute_subnetwork" "producer_subnet" { } resource "google_compute_subnetwork" "psc_producer_subnet" { - provider = google-beta name = "<%= ctx[:vars]['producer_psc_network_name'] %>" ip_cidr_range = "10.1.0.0/16" region = "us-central1" @@ -62,7 +55,6 @@ resource 
"google_compute_subnetwork" "psc_producer_subnet" { } resource "google_compute_service_attachment" "producer_service_attachment" { - provider = google-beta name = "<%= ctx[:vars]['service_attachment_name'] %>" region = "us-central1" description = "A service attachment configured with Terraform" @@ -74,7 +66,6 @@ resource "google_compute_service_attachment" "producer_service_attachment" { } resource "google_compute_forwarding_rule" "producer_target_service" { - provider = google-beta name = "<%= ctx[:vars]['producer_forwarding_rule_name'] %>" region = "us-central1" @@ -86,7 +77,6 @@ resource "google_compute_forwarding_rule" "producer_target_service" { } resource "google_compute_region_backend_service" "producer_service_backend" { - provider = google-beta name = "<%= ctx[:vars]['producer_backend_name'] %>" region = "us-central1" @@ -94,7 +84,6 @@ resource "google_compute_region_backend_service" "producer_service_backend" { } resource "google_compute_health_check" "producer_service_health_check" { - provider = google-beta name = "<%= ctx[:vars]['producer_healthcheck_name'] %>" check_interval_sec = 1 diff --git a/mmv1/templates/terraform/examples/network_services_gateway_multiple_swp_same_network.tf.erb b/mmv1/templates/terraform/examples/network_services_gateway_multiple_swp_same_network.tf.erb new file mode 100644 index 000000000000..8c86c2246a80 --- /dev/null +++ b/mmv1/templates/terraform/examples/network_services_gateway_multiple_swp_same_network.tf.erb @@ -0,0 +1,85 @@ +resource "google_certificate_manager_certificate" "default" { + provider = google-beta + name = "<%= ctx[:vars]['certificate_name'] %>" + location = "us-south1" + self_managed { + pem_certificate = file("test-fixtures/certificatemanager/cert.pem") + pem_private_key = file("test-fixtures/certificatemanager/private-key.pem") + } +} + +resource "google_compute_network" "default" { + provider = google-beta + name = "<%= ctx[:vars]['network_name'] %>" + routing_mode = "REGIONAL" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + provider = google-beta + name = "<%= ctx[:vars]['subnetwork_name'] %>" + purpose = "PRIVATE" + ip_cidr_range = "10.128.0.0/20" + region = "us-south1" + network = google_compute_network.default.id + role = "ACTIVE" +} + +resource "google_compute_subnetwork" "proxyonlysubnet" { + provider = google-beta + name = "<%= ctx[:vars]['proxy_only_subnetwork_name'] %>" + purpose = "REGIONAL_MANAGED_PROXY" + ip_cidr_range = "192.168.0.0/23" + region = "us-south1" + network = google_compute_network.default.id + role = "ACTIVE" +} + +resource "google_network_security_gateway_security_policy" "default" { + provider = google-beta + name = "<%= ctx[:vars]['policy_name'] %>" + location = "us-south1" +} + +resource "google_network_security_gateway_security_policy_rule" "default" { + provider = google-beta + name = "<%= ctx[:vars]['policy_rule_name'] %>" + location = "us-south1" + gateway_security_policy = google_network_security_gateway_security_policy.default.name + enabled = true + priority = 1 + session_matcher = "host() == 'example.com'" + basic_profile = "ALLOW" +} + +resource "google_network_services_gateway" "<%= ctx[:primary_resource_id] %>" { + provider = google-beta + name = "<%= ctx[:vars]['gateway_name_1'] %>" + location = "us-south1" + addresses = ["10.128.0.99"] + type = "SECURE_WEB_GATEWAY" + ports = [443] + scope = "<%= ctx[:vars]['scope_1'] %>" + certificate_urls = [google_certificate_manager_certificate.default.id] + gateway_security_policy = 
google_network_security_gateway_security_policy.default.id + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + delete_swg_autogen_router_on_destroy = true + depends_on = [google_compute_subnetwork.proxyonlysubnet] +} + +resource "google_network_services_gateway" "gateway2" { + provider = google-beta + name = "<%= ctx[:vars]['gateway_name_2'] %>" + location = "us-south1" + addresses = ["10.128.0.98"] + type = "SECURE_WEB_GATEWAY" + ports = [443] + scope = "<%= ctx[:vars]['scope_2'] %>" + certificate_urls = [google_certificate_manager_certificate.default.id] + gateway_security_policy = google_network_security_gateway_security_policy.default.id + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + delete_swg_autogen_router_on_destroy = true + depends_on = [google_compute_subnetwork.proxyonlysubnet] +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/network_services_gateway_secure_web_proxy.tf.erb b/mmv1/templates/terraform/examples/network_services_gateway_secure_web_proxy.tf.erb new file mode 100644 index 000000000000..9dfdd66e2486 --- /dev/null +++ b/mmv1/templates/terraform/examples/network_services_gateway_secure_web_proxy.tf.erb @@ -0,0 +1,69 @@ +resource "google_certificate_manager_certificate" "default" { + provider = google-beta + name = "<%= ctx[:vars]['certificate_name'] %>" + location = "us-central1" + self_managed { + pem_certificate = file("test-fixtures/certificatemanager/cert.pem") + pem_private_key = file("test-fixtures/certificatemanager/private-key.pem") + } +} + +resource "google_compute_network" "default" { + provider = google-beta + name = "<%= ctx[:vars]['network_name'] %>" + routing_mode = "REGIONAL" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + provider = google-beta + name = "<%= ctx[:vars]['subnetwork_name'] %>" + purpose = "PRIVATE" + ip_cidr_range = "10.128.0.0/20" + region = "us-central1" + network = google_compute_network.default.id + role = "ACTIVE" +} + +resource "google_compute_subnetwork" "proxyonlysubnet" { + provider = google-beta + name = "<%= ctx[:vars]['proxy_only_subnetwork_name'] %>" + purpose = "REGIONAL_MANAGED_PROXY" + ip_cidr_range = "192.168.0.0/23" + region = "us-central1" + network = google_compute_network.default.id + role = "ACTIVE" +} + +resource "google_network_security_gateway_security_policy" "default" { + provider = google-beta + name = "<%= ctx[:vars]['policy_name'] %>" + location = "us-central1" +} + +resource "google_network_security_gateway_security_policy_rule" "default" { + provider = google-beta + name = "<%= ctx[:vars]['policy_rule_name'] %>" + location = "us-central1" + gateway_security_policy = google_network_security_gateway_security_policy.default.name + enabled = true + priority = 1 + session_matcher = "host() == 'example.com'" + basic_profile = "ALLOW" +} + +resource "google_network_services_gateway" "<%= ctx[:primary_resource_id] %>" { + provider = google-beta + name = "<%= ctx[:vars]['gateway_name_1'] %>" + location = "us-central1" + addresses = ["10.128.0.99"] + type = "SECURE_WEB_GATEWAY" + ports = [443] + scope = "<%= ctx[:vars]['scope_1'] %>" + certificate_urls = [google_certificate_manager_certificate.default.id] + gateway_security_policy = google_network_security_gateway_security_policy.default.id + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + delete_swg_autogen_router_on_destroy = true + depends_on = 
[google_compute_subnetwork.proxyonlysubnet] +} diff --git a/mmv1/templates/terraform/examples/vertex_ai_index_endpoint.tf.erb b/mmv1/templates/terraform/examples/vertex_ai_index_endpoint.tf.erb new file mode 100644 index 000000000000..c15e65269e41 --- /dev/null +++ b/mmv1/templates/terraform/examples/vertex_ai_index_endpoint.tf.erb @@ -0,0 +1,32 @@ +resource "google_vertex_ai_index_endpoint" "<%= ctx[:primary_resource_id] %>" { + display_name = "sample-endpoint" + description = "A sample vertex endpoint" + region = "us-central1" + labels = { + label-one = "value-one" + } + network = "projects/${data.google_project.project.number}/global/networks/${data.google_compute_network.vertex_network.name}" + depends_on = [ + google_service_networking_connection.vertex_vpc_connection + ] +} + +resource "google_service_networking_connection" "vertex_vpc_connection" { + network = data.google_compute_network.vertex_network.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.vertex_range.name] +} + +resource "google_compute_global_address" "vertex_range" { + name = "<%= ctx[:vars]['address_name'] %>" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 24 + network = data.google_compute_network.vertex_network.id +} + +data "google_compute_network" "vertex_network" { + name = "<%= ctx[:vars]['network_name'] %>" +} + +data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/vmware_engine_network_legacy.tf.erb b/mmv1/templates/terraform/examples/vmware_engine_network_legacy.tf.erb new file mode 100644 index 000000000000..152363dbd104 --- /dev/null +++ b/mmv1/templates/terraform/examples/vmware_engine_network_legacy.tf.erb @@ -0,0 +1,33 @@ +resource "google_vmwareengine_network" "<%= ctx[:primary_resource_id] %>" { + provider = google-beta + project = google_project_service.acceptance.project + name = "<%= ctx[:test_env_vars]['location'] %>-default" #Legacy network IDs are in the format: {region-id}-default + location = "<%= ctx[:test_env_vars]['location'] %>" + type = "LEGACY" + description = "VMwareEngine legacy network sample" +} + +resource "google_project_service" "acceptance" { + project = google_project.acceptance.project_id + provider = google-beta + service = "vmwareengine.googleapis.com" + + # Needed for CI tests for permissions to propagate, should not be needed for actual usage + depends_on = [time_sleep.wait_60_seconds] +} + +# there can be only 1 Legacy network per region for a given project, +# so creating new project for isolation in CI. 
+resource "google_project" "acceptance" { + name = "<%= ctx[:vars]['proj_id'] %>" + provider = google-beta + project_id = "<%= ctx[:vars]['proj_id'] %>" + org_id = "<%= ctx[:test_env_vars]['org_id'] %>" + billing_account = "<%= ctx[:test_env_vars]['billing_account'] %>" +} + +resource "time_sleep" "wait_60_seconds" { + depends_on = [google_project.acceptance] + + create_duration = "60s" +} diff --git a/mmv1/templates/terraform/examples/workstation_config_persistent_directories.tf.erb b/mmv1/templates/terraform/examples/workstation_config_persistent_directories.tf.erb index 4ae975b9ea8f..21b8807c0412 100644 --- a/mmv1/templates/terraform/examples/workstation_config_persistent_directories.tf.erb +++ b/mmv1/templates/terraform/examples/workstation_config_persistent_directories.tf.erb @@ -50,6 +50,8 @@ resource "google_workstations_workstation_config" "<%= ctx[:primary_resource_id] mount_path = "/home" gce_pd { size_gb = 200 + fs_type = "ext4" + disk_type = "pd-standard" reclaim_policy = "DELETE" } } diff --git a/mmv1/templates/terraform/extra_schema_entry/vertex_ai_featurestore_entitytype_feature.go.erb b/mmv1/templates/terraform/extra_schema_entry/vertex_ai_featurestore_entitytype_feature.go.erb new file mode 100644 index 000000000000..9f78c3ba2dc5 --- /dev/null +++ b/mmv1/templates/terraform/extra_schema_entry/vertex_ai_featurestore_entitytype_feature.go.erb @@ -0,0 +1,5 @@ +"region": { + Type: schema.TypeString, + Computed: true, + Description: "The region of the feature", +}, diff --git a/mmv1/templates/terraform/flatten_property_method.erb b/mmv1/templates/terraform/flatten_property_method.erb index f4bd74eb7512..ed6eda44afcc 100644 --- a/mmv1/templates/terraform/flatten_property_method.erb +++ b/mmv1/templates/terraform/flatten_property_method.erb @@ -97,7 +97,7 @@ func flatten<%= prefix -%><%= titlelize_property(property) -%>(v interface{}, d <% elsif property.is_a?(Api::Type::Integer) -%> // Handles the string fixed64 format if strVal, ok := v.(string); ok { - if intVal, err := StringToFixed64(strVal); err == nil { + if intVal, err := tpgresource.StringToFixed64(strVal); err == nil { return intVal } } @@ -113,7 +113,7 @@ func flatten<%= prefix -%><%= titlelize_property(property) -%>(v interface{}, d if v == nil { return v } - return convertAndMapStringArr(v.([]interface{}), tpgresource.ConvertSelfLinkToV1) + return tpgresource.ConvertAndMapStringArr(v.([]interface{}), tpgresource.ConvertSelfLinkToV1) <% elsif property.is_a?(Api::Type::ResourceRef) -%> if v == nil { return v diff --git a/mmv1/templates/terraform/iam_policy.go.erb b/mmv1/templates/terraform/iam_policy.go.erb index ab096f5e66eb..d3e8c41bc9d9 100644 --- a/mmv1/templates/terraform/iam_policy.go.erb +++ b/mmv1/templates/terraform/iam_policy.go.erb @@ -12,9 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -%> +<% if hc_downstream -%> +<%= lines(hashicorp_copyright_header(:go, pwd)) -%> +<% end -%> + <%= lines(autogen_notice(:go, pwd)) -%> -package google +package <%= product_ns.downcase -%> import ( "fmt" @@ -48,7 +52,7 @@ var <%= resource_name -%>IamSchema = map[string]*schema.Schema{ <%# The last parameter can be used as a long name for IAM policies -%> <% if i == resource_params.size - 1 -%> <% if object.iam_policy.custom_diff_suppress.nil? 
-%> - DiffSuppressFunc: compareSelfLinkOrResourceName, + DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, <% else -%> DiffSuppressFunc: <%= resource_name -%>DiffSuppress, <% end -%> @@ -75,7 +79,7 @@ func <%= resource_name -%>IamUpdaterProducer(d tpgresource.TerraformResourceData <% resource_params.each do |param| -%> <% if provider_default_values.include?(param) -%> - <%= param -%>, _ := get<%= param.capitalize -%>(d, config) + <%= param -%>, _ := tpgresource.Get<%= param.capitalize -%>(d, config) if <%= param -%> != "" { if err := d.Set("<%= param -%>", <%= param -%>); err != nil { return nil, fmt.Errorf("Error setting <%= param -%>: %s", err) @@ -138,7 +142,7 @@ func <%= resource_name -%>IdParseFunc(d *schema.ResourceData, config *transport_ <% resource_params.each do |param| -%> <% if provider_default_values.include?(param) -%> - <%= param -%>, _ := get<%= param.capitalize -%>(d, config) + <%= param -%>, _ := tpgresource.Get<%= param.capitalize -%>(d, config) if <%= param -%> != "" { values["<%= param -%>"] = <%= param -%> } diff --git a/mmv1/templates/terraform/nested_query.go.erb b/mmv1/templates/terraform/nested_query.go.erb index 9704f8fb2f6f..09d92ec1b4df 100644 --- a/mmv1/templates/terraform/nested_query.go.erb +++ b/mmv1/templates/terraform/nested_query.go.erb @@ -197,7 +197,7 @@ func resource<%= resource_name -%>PatchDeleteEncoder(d *schema.ResourceData, met } if item == nil { // Spoof 404 error for proper handling by Delete (i.e. no-op) - return nil, fake404("nested", "<%= resource_name%>") + return nil, tpgresource.Fake404("nested", "<%= resource_name%>") } updatedItems := append(currItems[:idx], currItems[idx+1:]...) diff --git a/mmv1/templates/terraform/operation.go.erb b/mmv1/templates/terraform/operation.go.erb index 0c6f3326ccf0..04b36fb264af 100644 --- a/mmv1/templates/terraform/operation.go.erb +++ b/mmv1/templates/terraform/operation.go.erb @@ -3,10 +3,13 @@ has_project = object.base_url.include?("{{project}}") has_project = has_project || (object.async.is_a? Api::OpAsync and object.async.include_project) -%> +<% if hc_downstream -%> +<%= lines(hashicorp_copyright_header(:go, pwd)) -%> +<% end -%> <%= lines(autogen_notice(:go, pwd)) -%> -package google +package <%= product_ns.downcase -%> import ( <% if async.result.resource_inside_response -%> diff --git a/mmv1/templates/terraform/post_delete/network_services_gateway.go.erb b/mmv1/templates/terraform/post_delete/network_services_gateway.go.erb new file mode 100644 index 000000000000..90b06bc46791 --- /dev/null +++ b/mmv1/templates/terraform/post_delete/network_services_gateway.go.erb @@ -0,0 +1,15 @@ +if d.Get("delete_swg_autogen_router_on_destroy").(bool) { + log.Print("[DEBUG] The field delete_swg_autogen_router_on_destroy is true. 
Deleting swg_autogen_router.") + gateways, err := gatewaysSameLocation(d, config, billingProject, userAgent) + if err != nil { + return err + } + + network := d.Get("network").(string) + if isLastSWGGateway(gateways, network) { + err := deleteSWGAutoGenRouter(d, config, billingProject, userAgent) + if err != nil { + return err + } + } +} \ No newline at end of file diff --git a/mmv1/templates/terraform/pre_delete/detach_disk.erb b/mmv1/templates/terraform/pre_delete/detach_disk.erb index eec0d5196dcc..441ac7c99279 100644 --- a/mmv1/templates/terraform/pre_delete/detach_disk.erb +++ b/mmv1/templates/terraform/pre_delete/detach_disk.erb @@ -33,7 +33,7 @@ if v, ok := readRes["users"].([]interface{}); ok { return fmt.Errorf("Error retrieving instance %s: %s", instance, err.Error()) } for _, disk := range i.Disks { - if compareSelfLinkOrResourceName("", disk.Source, self, nil) { + if tpgresource.CompareSelfLinkOrResourceName("", disk.Source, self, nil) { detachCalls = append(detachCalls, detachArgs{ project: instanceProject, zone: tpgresource.GetResourceNameFromSelfLink(i.Zone), diff --git a/mmv1/templates/terraform/pre_update/bigtable_app_profile.go.erb b/mmv1/templates/terraform/pre_update/bigtable_app_profile.go.erb index 77fc6417da5e..a8da83e52f2e 100644 --- a/mmv1/templates/terraform/pre_update/bigtable_app_profile.go.erb +++ b/mmv1/templates/terraform/pre_update/bigtable_app_profile.go.erb @@ -13,7 +13,7 @@ # limitations under the License. -%> -if d.HasChange("multi_cluster_routing_cluster_ids") && !stringInSlice(updateMask, "multiClusterRoutingUseAny") { +if d.HasChange("multi_cluster_routing_cluster_ids") && !tpgresource.StringInSlice(updateMask, "multiClusterRoutingUseAny") { updateMask = append(updateMask, "multiClusterRoutingUseAny") } diff --git a/mmv1/templates/terraform/resource.erb b/mmv1/templates/terraform/resource.erb index 89d57f136ff8..e1d66b14b8e8 100644 --- a/mmv1/templates/terraform/resource.erb +++ b/mmv1/templates/terraform/resource.erb @@ -12,9 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -%> +<% if hc_downstream -%> +<%= lines(hashicorp_copyright_header(:go, pwd)) -%> +<% end -%> + <%= lines(autogen_notice(:go, pwd)) -%> -package google +package <%= product_ns.downcase -%> import ( @@ -369,7 +373,7 @@ func resource<%= resource_name -%>Create(d *schema.ResourceData, meta interface{ <% if object.async&.allow?('create') -%> <% if object.async.is_a? 
Provider::Terraform::PollAsync -%> - err = PollingWaitTime(resource<%= resource_name -%>PollRead(d, meta), <%= object.async.check_response_func_existence -%>, "Creating <%= object.name -%>", d.Timeout(schema.TimeoutCreate), <%= object.async.target_occurrences -%>) + err = transport_tpg.PollingWaitTime(resource<%= resource_name -%>PollRead(d, meta), <%= object.async.check_response_func_existence -%>, "Creating <%= object.name -%>", d.Timeout(schema.TimeoutCreate), <%= object.async.target_occurrences -%>) if err != nil { <% if object.async.suppress_error -%> log.Printf("[ERROR] Unable to confirm eventually consistent <%= object.name -%> %q finished updating: %q", d.Id(), err) @@ -449,7 +453,7 @@ func resource<%= resource_name -%>PollRead(d *schema.ResourceData, meta interfac } if res == nil { - return nil, fake404("nested", "<%= resource_name%>") + return nil, tpgresource.Fake404("nested", "<%= resource_name%>") } <% end -%> @@ -459,7 +463,7 @@ func resource<%= resource_name -%>PollRead(d *schema.ResourceData, meta interfac return nil, err } if res == nil { - return nil, fake404("decoded", "<%= resource_name%>") + return nil, tpgresource.Fake404("decoded", "<%= resource_name%>") } <% end -%> @@ -616,6 +620,9 @@ func resource<%= resource_name -%>Update(d *schema.ResourceData, meta interface{ var project string <% end -%> config := meta.(*transport_tpg.Config) +<% if object.custom_code.custom_update -%> + <%= lines(compile(pwd + '/' + object.custom_code.custom_update)) -%> +<% else -%> userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) if err != nil { return err @@ -732,7 +739,7 @@ if len(updateMask) > 0 { return err } <% elsif object.async.is_a? Provider::Terraform::PollAsync -%> - err = PollingWaitTime(resource<%= resource_name -%>PollRead(d, meta), <%= object.async.check_response_func_existence -%>, "Updating <%= object.name -%>", d.Timeout(schema.TimeoutUpdate), <%= object.async.target_occurrences -%>) + err = transport_tpg.PollingWaitTime(resource<%= resource_name -%>PollRead(d, meta), <%= object.async.check_response_func_existence -%>, "Updating <%= object.name -%>", d.Timeout(schema.TimeoutUpdate), <%= object.async.target_occurrences -%>) if err != nil { <% if object.async.suppress_error-%> log.Printf("[ERROR] Unable to confirm eventually consistent <%= object.name -%> %q finished updating: %q", d.Id(), err) @@ -885,7 +892,7 @@ if len(updateMask) > 0 { return err } <% elsif object.async.is_a? Provider::Terraform::PollAsync -%> - err = PollingWaitTime(resource<%= resource_name -%>PollRead(d, meta), <%= object.async.check_response_func_existence -%>, "Updating <%= object.name -%>", d.Timeout(schema.TimeoutUpdate), <%= object.async.target_occurrences -%>) + err = transport_tpg.PollingWaitTime(resource<%= resource_name -%>PollRead(d, meta), <%= object.async.check_response_func_existence -%>, "Updating <%= object.name -%>", d.Timeout(schema.TimeoutUpdate), <%= object.async.target_occurrences -%>) if err != nil { <% if object.async.suppress_error-%> log.Printf("[ERROR] Unable to confirm eventually consistent <%= object.name -%> %q finished updating: %q", d.Id(), err) @@ -903,6 +910,7 @@ if len(updateMask) > 0 { <%= lines(compile(pwd + '/' + object.custom_code.post_update)) if object.custom_code.post_update -%> return resource<%= resource_name -%>Read(d, meta) +<% end # if custom_update -%> } <% end # if updatable? 
-%> @@ -1000,7 +1008,7 @@ func resource<%= resource_name -%>Delete(d *schema.ResourceData, meta interface{ <% if object.async&.allow?('delete') -%> <% if object.async.is_a? Provider::Terraform::PollAsync -%> - err = PollingWaitTime(resource<%= resource_name -%>PollRead(d, meta), <%= object.async.check_response_func_absence -%>, "Deleting <%= object.name -%>", d.Timeout(schema.TimeoutCreate), <%= object.async.target_occurrences -%>) + err = transport_tpg.PollingWaitTime(resource<%= resource_name -%>PollRead(d, meta), <%= object.async.check_response_func_absence -%>, "Deleting <%= object.name -%>", d.Timeout(schema.TimeoutCreate), <%= object.async.target_occurrences -%>) if err != nil { <% if object.async.suppress_error -%> log.Printf("[ERROR] Unable to confirm eventually consistent <%= object.name -%> %q finished updating: %q", d.Id(), err) diff --git a/mmv1/templates/terraform/resource_definition/disk.erb b/mmv1/templates/terraform/resource_definition/disk.erb index 099aae46ef99..bc4e042af1e5 100644 --- a/mmv1/templates/terraform/resource_definition/disk.erb +++ b/mmv1/templates/terraform/resource_definition/disk.erb @@ -1,2 +1,4 @@ CustomizeDiff: customdiff.All( - customdiff.ForceNewIfChange("size", isDiskShrinkage)), + customdiff.ForceNewIfChange("size", IsDiskShrinkage), + hyperDiskIopsUpdateDiffSupress), + diff --git a/mmv1/templates/terraform/resource_definition/region_backend_service.go.erb b/mmv1/templates/terraform/resource_definition/region_backend_service.go.erb index 20e67f11296b..8b9865f79cf0 100644 --- a/mmv1/templates/terraform/resource_definition/region_backend_service.go.erb +++ b/mmv1/templates/terraform/resource_definition/region_backend_service.go.erb @@ -13,5 +13,5 @@ # limitations under the License. -%> SchemaVersion: 1, -MigrateState: migrateStateNoop, +MigrateState: tpgresource.MigrateStateNoop, CustomizeDiff: customDiffRegionBackendService, diff --git a/mmv1/templates/terraform/schema_property.erb b/mmv1/templates/terraform/schema_property.erb index ff1efe5b9035..887fcf10b41d 100644 --- a/mmv1/templates/terraform/schema_property.erb +++ b/mmv1/templates/terraform/schema_property.erb @@ -59,7 +59,7 @@ <% if !property.diff_suppress_func.nil? -%> DiffSuppressFunc: <%= property.diff_suppress_func %>, <% elsif property.is_a?(Api::Type::ResourceRef) -%> - DiffSuppressFunc: compareSelfLinkOrResourceName, + DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, <% end -%> <% unless property.state_func.nil? -%> StateFunc: <%= property.state_func %>, @@ -124,7 +124,7 @@ Elem: &schema.Schema{ Type: <%= tf_types[property.item_type.class] -%>, <% if property.item_type.is_a?(Api::Type::ResourceRef) -%> - DiffSuppressFunc: compareSelfLinkOrResourceName, + DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, <% end -%> }, <% end -%> diff --git a/mmv1/templates/terraform/state_migrations/workflows_workflow.go.erb b/mmv1/templates/terraform/state_migrations/workflows_workflow.go.erb index a061272fe6b4..89e14b615b3e 100644 --- a/mmv1/templates/terraform/state_migrations/workflows_workflow.go.erb +++ b/mmv1/templates/terraform/state_migrations/workflows_workflow.go.erb @@ -31,7 +31,7 @@ func resourceWorkflowsWorkflowResourceV0() *schema.Resource { Computed: true, Optional: true, ForceNew: true, - DiffSuppressFunc: compareSelfLinkOrResourceName, + DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, Description: `Name of the service account associated with the latest workflow version. 
This service account represents the identity of the workflow and determines what permissions the workflow has. diff --git a/mmv1/templates/terraform/sweeper_file.go.erb b/mmv1/templates/terraform/sweeper_file.go.erb index 9da4b503cd59..5f5e115229ec 100644 --- a/mmv1/templates/terraform/sweeper_file.go.erb +++ b/mmv1/templates/terraform/sweeper_file.go.erb @@ -1,3 +1,7 @@ +<% if hc_downstream -%> +<%= lines(hashicorp_copyright_header(:go, pwd)) -%> +<% end -%> + <%= lines(autogen_notice(:go, pwd)) -%> package google @@ -12,6 +16,7 @@ import ( "<%= import_path() -%>/acctest" "<%= import_path() -%>/tpgresource" + transport_tpg "<%= import_path() -%>/transport" ) <% diff --git a/mmv1/templates/terraform/update_encoder/dlp_stored_info_type.go.erb b/mmv1/templates/terraform/update_encoder/dlp_stored_info_type.go.erb new file mode 100644 index 000000000000..0e83e629b1b9 --- /dev/null +++ b/mmv1/templates/terraform/update_encoder/dlp_stored_info_type.go.erb @@ -0,0 +1,17 @@ +<%# The license inside this block applies to this file. + # Copyright 2023 Google Inc. + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. +-%> + newObj := make(map[string]interface{}) + newObj["config"] = obj + return newObj, nil diff --git a/mmv1/templates/terraform/update_encoder/hyper_disk.go.erb b/mmv1/templates/terraform/update_encoder/hyper_disk.go.erb new file mode 100644 index 000000000000..9c28eb1043a3 --- /dev/null +++ b/mmv1/templates/terraform/update_encoder/hyper_disk.go.erb @@ -0,0 +1,8 @@ + +if d.HasChange("provisioned_iops") && strings.Contains(d.Get("type").(string), "hyperdisk"){ + nameProp := d.Get("name") + if v, ok := d.GetOkExists("name"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, nameProp)) { + obj["name"] = nameProp + } +} +return obj, nil diff --git a/mmv1/templates/validator/resource_converter.go.erb b/mmv1/templates/validator/resource_converter.go.erb index 6c1c40091d2a..3123e3dece53 100644 --- a/mmv1/templates/validator/resource_converter.go.erb +++ b/mmv1/templates/validator/resource_converter.go.erb @@ -1,6 +1,6 @@ <%= lines(autogen_notice(:go, pwd)) -%> -package google +package <%= product_ns.downcase -%> import ( <%- # We list all the v2 imports here and unstable imports, because we run 'goimports' to guess the correct @@ -19,6 +19,7 @@ import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgresource" transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/verify" ) <% @@ -41,23 +42,23 @@ import ( const <%= resource_name -%>AssetType string = "<%= product_backend_name.downcase -%>.googleapis.com/<%= object.name -%>" -func resourceConverter<%= resource_name -%>() ResourceConverter { - return ResourceConverter{ +func ResourceConverter<%= resource_name -%>() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ 
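[Editor's note] The validator templates above now return exported tpgresource.ResourceConverter values whose Convert functions build CAI-style assets inside per-product packages. A rough, self-contained sketch of that conversion shape follows; the asset/assetResource structs, the converter constructor, and the asset name format used here are stand-ins for illustration, not the real tpgresource types.

```go
package main

import "fmt"

// asset is a stand-in for the CAI asset shape the converters above produce.
type asset struct {
	Name     string
	Type     string
	Resource *assetResource
}

type assetResource struct {
	Version              string
	DiscoveryDocumentURI string
	DiscoveryName        string
	Data                 map[string]interface{}
}

// converter pairs an asset type with a function that turns Terraform state
// into zero or more assets, mirroring ResourceConverter{AssetType, Convert}.
type converter struct {
	AssetType string
	Convert   func(state map[string]interface{}) ([]asset, error)
}

func computeDiskConverter() converter {
	const assetType = "compute.googleapis.com/Disk"
	return converter{
		AssetType: assetType,
		Convert: func(state map[string]interface{}) ([]asset, error) {
			// Hypothetical name template; the real templates interpolate
			// asset_name_template from the resource definition.
			name := fmt.Sprintf("//compute.googleapis.com/projects/%s/zones/%s/disks/%s",
				state["project"], state["zone"], state["name"])
			return []asset{{
				Name: name,
				Type: assetType,
				Resource: &assetResource{
					Version:              "v1",
					DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/compute/v1/rest",
					DiscoveryName:        "Disk",
					Data:                 state,
				},
			}}, nil
		},
	}
}

func main() {
	c := computeDiskConverter()
	assets, err := c.Convert(map[string]interface{}{"project": "my-proj", "zone": "us-central1-a", "name": "disk-1"})
	if err != nil {
		panic(err)
	}
	fmt.Printf("%s -> %s\n", c.AssetType, assets[0].Name)
}
```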
AssetType: <%= resource_name -%>AssetType, Convert: Get<%= resource_name -%>CaiObject, } } -func Get<%= resource_name -%>CaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - name, err := assetName(d, config, "<%= asset_name_template -%>") +func Get<%= resource_name -%>CaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + name, err := tpgresource.AssetName(d, config, "<%= asset_name_template -%>") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } if obj, err := Get<%= resource_name -%>ApiObject(d, config); err == nil { - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: <%= resource_name -%>AssetType, - Resource: &AssetResource{ + Resource: &tpgresource.AssetResource{ Version: "<%= api_version -%>", DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/<%= product_backend_name.downcase -%>/<%= api_version -%>/rest", DiscoveryName: "<%= object.name -%>", @@ -65,7 +66,7 @@ func Get<%= resource_name -%>CaiObject(d tpgresource.TerraformResourceData, conf }, }}, nil } else { - return []Asset{}, err + return []tpgresource.Asset{}, err } } diff --git a/mmv1/templates/validator/resource_converter_iam.go.erb b/mmv1/templates/validator/resource_converter_iam.go.erb index 668b7749f965..acd8977d42b2 100644 --- a/mmv1/templates/validator/resource_converter_iam.go.erb +++ b/mmv1/templates/validator/resource_converter_iam.go.erb @@ -32,11 +32,12 @@ end -%> -package google +package <%= product_ns.downcase -%> import ( "fmt" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgiamresource" "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgresource" transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" ) @@ -44,16 +45,16 @@ import ( // Provide a separate asset type constant so we don't have to worry about name conflicts between IAM and non-IAM converter files const <%= resource_name -%>IAMAssetType string = "<%= product_backend_name.downcase -%>.googleapis.com/<%= object.name -%>" -func resourceConverter<%= resource_name -%>IamPolicy() ResourceConverter { - return ResourceConverter{ +func ResourceConverter<%= resource_name -%>IamPolicy() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: <%= resource_name -%>IAMAssetType, Convert: Get<%= resource_name -%>IamPolicyCaiObject, MergeCreateUpdate: Merge<%= resource_name -%>IamPolicy, } } -func resourceConverter<%= resource_name -%>IamBinding() ResourceConverter { - return ResourceConverter{ +func ResourceConverter<%= resource_name -%>IamBinding() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: <%= resource_name -%>IAMAssetType, Convert: Get<%= resource_name -%>IamBindingCaiObject, FetchFullResource: Fetch<%= resource_name -%>IamPolicy, @@ -62,8 +63,8 @@ func resourceConverter<%= resource_name -%>IamBinding() ResourceConverter { } } -func resourceConverter<%= resource_name -%>IamMember() ResourceConverter { - return ResourceConverter{ +func ResourceConverter<%= resource_name -%>IamMember() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: <%= resource_name -%>IAMAssetType, Convert: Get<%= resource_name -%>IamMemberCaiObject, FetchFullResource: Fetch<%= resource_name -%>IamPolicy, @@ -72,73 +73,73 @@ func resourceConverter<%= 
resource_name -%>IamMember() ResourceConverter { } } -func Get<%= resource_name -%>IamPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return new<%= resource_name -%>IamAsset(d, config, expandIamPolicyBindings) +func Get<%= resource_name -%>IamPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return new<%= resource_name -%>IamAsset(d, config, tpgiamresource.ExpandIamPolicyBindings) } -func Get<%= resource_name -%>IamBindingCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return new<%= resource_name -%>IamAsset(d, config, expandIamRoleBindings) +func Get<%= resource_name -%>IamBindingCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return new<%= resource_name -%>IamAsset(d, config, tpgiamresource.ExpandIamRoleBindings) } -func Get<%= resource_name -%>IamMemberCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return new<%= resource_name -%>IamAsset(d, config, expandIamMemberBindings) +func Get<%= resource_name -%>IamMemberCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return new<%= resource_name -%>IamAsset(d, config, tpgiamresource.ExpandIamMemberBindings) } -func Merge<%= resource_name -%>IamPolicy(existing, incoming Asset) Asset { +func Merge<%= resource_name -%>IamPolicy(existing, incoming tpgresource.Asset) tpgresource.Asset { existing.IAMPolicy = incoming.IAMPolicy return existing } -func Merge<%= resource_name -%>IamBinding(existing, incoming Asset) Asset { - return mergeIamAssets(existing, incoming, mergeAuthoritativeBindings) +func Merge<%= resource_name -%>IamBinding(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeIamAssets(existing, incoming, tpgiamresource.MergeAuthoritativeBindings) } -func Merge<%= resource_name -%>IamBindingDelete(existing, incoming Asset) Asset { - return mergeDeleteIamAssets(existing, incoming, mergeDeleteAuthoritativeBindings) +func Merge<%= resource_name -%>IamBindingDelete(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeDeleteIamAssets(existing, incoming, tpgiamresource.MergeDeleteAuthoritativeBindings) } -func Merge<%= resource_name -%>IamMember(existing, incoming Asset) Asset { - return mergeIamAssets(existing, incoming, mergeAdditiveBindings) +func Merge<%= resource_name -%>IamMember(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeIamAssets(existing, incoming, tpgiamresource.MergeAdditiveBindings) } -func Merge<%= resource_name -%>IamMemberDelete(existing, incoming Asset) Asset { - return mergeDeleteIamAssets(existing, incoming, mergeDeleteAdditiveBindings) +func Merge<%= resource_name -%>IamMemberDelete(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeDeleteIamAssets(existing, incoming, tpgiamresource.MergeDeleteAdditiveBindings) } func new<%= resource_name -%>IamAsset( d tpgresource.TerraformResourceData, config *transport_tpg.Config, - expandBindings func(d tpgresource.TerraformResourceData) ([]IAMBinding, error), -) ([]Asset, error) { + expandBindings func(d tpgresource.TerraformResourceData) ([]tpgresource.IAMBinding, error), +) ([]tpgresource.Asset, error) { bindings, err := expandBindings(d) if err != nil { - return []Asset{}, fmt.Errorf("expanding bindings: 
%v", err) + return []tpgresource.Asset{}, fmt.Errorf("expanding bindings: %v", err) } - name, err := assetName(d, config, "<%= asset_name_template -%>") + name, err := tpgresource.AssetName(d, config, "<%= asset_name_template -%>") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: <%= resource_name -%>IAMAssetType, - IAMPolicy: &IAMPolicy{ + IAMPolicy: &tpgresource.IAMPolicy{ Bindings: bindings, }, }}, nil } -func Fetch<%= resource_name -%>IamPolicy(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (Asset, error) { +func Fetch<%= resource_name -%>IamPolicy(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (tpgresource.Asset, error) { // Check if the identity field returns a value <% resource_params.each do |param| -%> if _, ok := d.GetOk("<%= param.underscore -%>"); !ok { - return Asset{}, ErrEmptyIdentityField + return tpgresource.Asset{}, tpgresource.ErrEmptyIdentityField } <% end # resource_params.each -%> - return fetchIamPolicy( + return tpgiamresource.FetchIamPolicy( <%= resource_name -%>IamUpdaterProducer, d, config, diff --git a/mmv1/templates/validator/resource_converters.go.erb b/mmv1/templates/validator/resource_converters.go.erb index cb1fdcb6914e..4f93de5cb767 100644 --- a/mmv1/templates/validator/resource_converters.go.erb +++ b/mmv1/templates/validator/resource_converters.go.erb @@ -18,7 +18,8 @@ package google import ( "sort" - + "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/services/compute" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgresource" ) @@ -26,71 +27,71 @@ import ( // to a slice of ResourceConverters. // // Modelling of relationships: -// terraform resources to CAI assets as []ResourceConverter: +// terraform resources to CAI assets as []tpgresource.ResourceConverter: // 1:1 = [ResourceConverter{Convert: convertAbc}] (len=1) // 1:N = [ResourceConverter{Convert: convertAbc}, ...] 
(len=N) // N:1 = [ResourceConverter{Convert: convertAbc, merge: mergeAbc}] (len=1) -func ResourceConverters() map[string][]ResourceConverter { - return map[string][]ResourceConverter{ - "google_compute_address": {resourceConverterComputeAddress()}, - "google_compute_firewall": {resourceConverterComputeFirewall()}, - "google_compute_disk": {resourceConverterComputeDisk()}, - "google_compute_forwarding_rule": {resourceConverterComputeForwardingRule()}, - "google_compute_global_address": {resourceConverterComputeGlobalAddress()}, - "google_compute_global_forwarding_rule": {resourceConverterComputeGlobalForwardingRule()}, - "google_compute_instance": {resourceConverterComputeInstance()}, - "google_compute_network": {resourceConverterComputeNetwork()}, +func ResourceConverters() map[string][]tpgresource.ResourceConverter { + return map[string][]tpgresource.ResourceConverter{ + "google_compute_address": {compute.ResourceConverterComputeAddress()}, + "google_compute_firewall": {compute.ResourceConverterComputeFirewall()}, + "google_compute_disk": {compute.ResourceConverterComputeDisk()}, + "google_compute_forwarding_rule": {compute.ResourceConverterComputeForwardingRule()}, + "google_compute_global_address": {compute.ResourceConverterComputeGlobalAddress()}, + "google_compute_global_forwarding_rule": {compute.ResourceConverterComputeGlobalForwardingRule()}, + "google_compute_instance": {compute.ResourceConverterComputeInstance()}, + "google_compute_network": {compute.ResourceConverterComputeNetwork()}, "google_compute_security_policy": {resourceConverterComputeSecurityPolicy()}, - "google_compute_snapshot": {resourceConverterComputeSnapshot()}, - "google_compute_subnetwork": {resourceConverterComputeSubnetwork()}, - "google_compute_ssl_policy": {resourceConverterComputeSslPolicy()}, - "google_compute_target_https_proxy": {resourceConverterComputeTargetHttpsProxy()}, - "google_compute_target_ssl_proxy": {resourceConverterComputeTargetSslProxy()}, - "google_dns_managed_zone": {resourceConverterDNSManagedZone()}, - "google_dns_policy": {resourceConverterDNSPolicy()}, + "google_compute_snapshot": {compute.ResourceConverterComputeSnapshot()}, + "google_compute_subnetwork": {compute.ResourceConverterComputeSubnetwork()}, + "google_compute_ssl_policy": {compute.ResourceConverterComputeSslPolicy()}, + "google_compute_target_https_proxy": {compute.ResourceConverterComputeTargetHttpsProxy()}, + "google_compute_target_ssl_proxy": {compute.ResourceConverterComputeTargetSslProxy()}, + "google_dns_managed_zone": {dns.ResourceConverterDNSManagedZone()}, + "google_dns_policy": {dns.ResourceConverterDNSPolicy()}, "google_storage_bucket": {resourceConverterStorageBucket()}, "google_sql_database_instance": {resourceConverterSQLDatabaseInstance()}, - "google_sql_database": {resourceConverterSQLDatabase()}, + "google_sql_database": {sql.ResourceConverterSQLDatabase()}, "google_container_cluster": {resourceConverterContainerCluster()}, "google_container_node_pool": {resourceConverterContainerNodePool()}, - "google_bigquery_dataset": {resourceConverterBigQueryDataset()}, + "google_bigquery_dataset": {bigquery.ResourceConverterBigQueryDataset()}, "google_bigquery_dataset_iam_policy": {resourceConverterBigqueryDatasetIamPolicy()}, "google_bigquery_dataset_iam_binding": {resourceConverterBigqueryDatasetIamBinding()}, "google_bigquery_dataset_iam_member": {resourceConverterBigqueryDatasetIamMember()}, "google_bigquery_table": {resourceConverterBigQueryTable()}, - "google_org_policy_policy": 
{resourceConverterOrgPolicyPolicy()}, - "google_redis_instance": {resourceConverterRedisInstance()}, - "google_spanner_database": {resourceConverterSpannerDatabase()}, + "google_org_policy_policy": {resourceConverterOrgPolicyPolicy()}, + "google_redis_instance": {redis.ResourceConverterRedisInstance()}, + "google_spanner_database": {spanner.ResourceConverterSpannerDatabase()}, "google_spanner_database_iam_policy": {resourceConverterSpannerDatabaseIamPolicy()}, "google_spanner_database_iam_binding": {resourceConverterSpannerDatabaseIamBinding()}, "google_spanner_database_iam_member": {resourceConverterSpannerDatabaseIamMember()}, - "google_spanner_instance": {resourceConverterSpannerInstance()}, + "google_spanner_instance": {spanner.ResourceConverterSpannerInstance()}, "google_spanner_instance_iam_policy": {resourceConverterSpannerInstanceIamPolicy()}, "google_spanner_instance_iam_binding": {resourceConverterSpannerInstanceIamBinding()}, "google_spanner_instance_iam_member": {resourceConverterSpannerInstanceIamMember()}, "google_project_service": {resourceConverterServiceUsage()}, - "google_pubsub_lite_reservation": {resourceConverterPubsubLiteReservation()}, - "google_pubsub_lite_subscription": {resourceConverterPubsubLiteSubscription()}, - "google_pubsub_lite_topic": {resourceConverterPubsubLiteTopic()}, - "google_pubsub_schema": {resourceConverterPubsubSchema()}, - "google_pubsub_subscription": {resourceConverterPubsubSubscription()}, + "google_pubsub_lite_reservation": {pubsublite.ResourceConverterPubsubLiteReservation()}, + "google_pubsub_lite_subscription": {pubsublite.ResourceConverterPubsubLiteSubscription()}, + "google_pubsub_lite_topic": {pubsublite.ResourceConverterPubsubLiteTopic()}, + "google_pubsub_schema": {pubsub.ResourceConverterPubsubSchema()}, + "google_pubsub_subscription": {pubsub.ResourceConverterPubsubSubscription()}, "google_pubsub_subscription_iam_policy": {resourceConverterPubsubSubscriptionIamPolicy()}, "google_pubsub_subscription_iam_binding": {resourceConverterPubsubSubscriptionIamBinding()}, "google_pubsub_subscription_iam_member": {resourceConverterPubsubSubscriptionIamMember()}, "google_storage_bucket_iam_policy": {resourceConverterStorageBucketIamPolicy()}, "google_storage_bucket_iam_binding": {resourceConverterStorageBucketIamBinding()}, "google_storage_bucket_iam_member": {resourceConverterStorageBucketIamMember()}, - "google_pubsub_topic": {resourceConverterPubsubTopic()}, - "google_kms_crypto_key": {resourceConverterKMSCryptoKey()}, - "google_kms_key_ring": {resourceConverterKMSKeyRing()}, - "google_filestore_instance": {resourceConverterFilestoreInstance()}, - "google_access_context_manager_service_perimeter": {resourceConverterAccessContextManagerServicePerimeter()}, - "google_access_context_manager_access_policy": {resourceConverterAccessContextManagerAccessPolicy()}, - "google_cloud_run_service": {resourceConverterCloudRunService()}, - "google_cloud_run_domain_mapping": {resourceConverterCloudRunDomainMapping()}, + "google_pubsub_topic": {pubsub.ResourceConverterPubsubTopic()}, + "google_kms_crypto_key": {kms.ResourceConverterKMSCryptoKey()}, + "google_kms_key_ring": {kms.ResourceConverterKMSKeyRing()}, + "google_filestore_instance": {filestore.ResourceConverterFilestoreInstance()}, + "google_access_context_manager_service_perimeter": {accesscontextmanager.ResourceConverterAccessContextManagerServicePerimeter()}, + "google_access_context_manager_access_policy": {accesscontextmanager.ResourceConverterAccessContextManagerAccessPolicy()}, + 
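[Editor's note] ResourceConverters() above keeps its map-of-slices shape while moving constructors into per-service packages (compute., pubsub., spanner., and so on): one Terraform type can map to several converters (1:N), or several types can share a merging converter (N:1). A stripped-down sketch of such a registry, with placeholder converter values rather than the real constructors:

```go
package main

import "fmt"

type resourceConverter struct {
	AssetType string
}

// converters maps a Terraform resource type to the converters that produce
// CAI assets for it; the slice value allows 1:N mappings. Entries are placeholders.
func converters() map[string][]resourceConverter {
	return map[string][]resourceConverter{
		"google_compute_disk":     {{AssetType: "compute.googleapis.com/Disk"}},
		"google_pubsub_topic":     {{AssetType: "pubsub.googleapis.com/Topic"}},
		"google_spanner_instance": {{AssetType: "spanner.googleapis.com/Instance"}},
	}
}

func main() {
	for tfType, cs := range converters() {
		for _, c := range cs {
			fmt.Printf("%-25s -> %s\n", tfType, c.AssetType)
		}
	}
}
```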
"google_cloud_run_service": {cloudrun.ResourceConverterCloudRunService()}, + "google_cloud_run_domain_mapping": {cloudrun.ResourceConverterCloudRunDomainMapping()}, "google_cloudfunctions_function": {resourceConverterCloudFunctionsCloudFunction()}, - "google_monitoring_notification_channel": {resourceConverterMonitoringNotificationChannel()}, - "google_monitoring_alert_policy": {resourceConverterMonitoringAlertPolicy()}, + "google_monitoring_notification_channel": {monitoring.ResourceConverterMonitoringNotificationChannel()}, + "google_monitoring_alert_policy": {monitoring.ResourceConverterMonitoringAlertPolicy()}, <% products.each do |product| -%> <% (product[:definitions].objects || []).each do |object| -%> <% if object.min_version.name == "ga" -%> @@ -98,10 +99,11 @@ func ResourceConverters() map[string][]ResourceConverter { <% tf_product = (object.__product.legacy_name || object.__product.name).underscore -%> <% terraform_name = object.legacy_name || "google_#{tf_product}_#{object.name.underscore}" -%> <% cai_name = object.__product.name + object.name -%> + <% service = product[:definitions].name.downcase -%> <% if (!object.iam_policy.nil?) && (!object.iam_policy.exclude) && (!object.iam_policy.exclude_validator) -%> - "<%= terraform_name -%>_iam_policy": {resourceConverter<%= cai_name -%>IamPolicy()}, - "<%= terraform_name -%>_iam_binding": {resourceConverter<%= cai_name -%>IamBinding()}, - "<%= terraform_name -%>_iam_member": {resourceConverter<%= cai_name -%>IamMember()}, + "<%= terraform_name -%>_iam_policy": {<%= service -%>.ResourceConverter<%= cai_name -%>IamPolicy()}, + "<%= terraform_name -%>_iam_binding": {<%= service -%>.ResourceConverter<%= cai_name -%>IamBinding()}, + "<%= terraform_name -%>_iam_member": {<%= service -%>.ResourceConverter<%= cai_name -%>IamMember()}, <% end -%> <% end -%> <% end -%> @@ -136,8 +138,8 @@ func ResourceConverters() map[string][]ResourceConverter { "google_project_iam_member": {resourceConverterProjectIamMember()}, "google_project_iam_custom_role": {resourceConverterProjectIAMCustomRole()}, "google_organization_iam_custom_role": {resourceConverterOrganizationIAMCustomRole()}, - "google_vpc_access_connector": {resourceConverterVPCAccessConnector()}, - "google_logging_metric": {resourceConverterLoggingMetric()}, + "google_vpc_access_connector": {vpcaccess.ResourceConverterVPCAccessConnector()}, + "google_logging_metric": {logging.ResourceConverterLoggingMetric()}, "google_service_account": {resourceConverterServiceAccount()}, } } diff --git a/mmv1/third_party/terraform/.copywrite.hcl.erb b/mmv1/third_party/terraform/.copywrite.hcl.erb new file mode 100644 index 000000000000..ffe6a69fcda1 --- /dev/null +++ b/mmv1/third_party/terraform/.copywrite.hcl.erb @@ -0,0 +1,32 @@ +<% autogen_exception -%> +schema_version = 1 + +project { + license = "MPL-2.0" + copyright_year = 2017 + + # (OPTIONAL) A list of globs that should not have copyright/license headers. + # Supports doublestar glob patterns for more flexibility in defining which + # files or folders should be ignored + header_ignore = [ + # Some ignores here are not strictly needed, but protects us if we change the types of files we put in those folders + # See here for file extensions altered by copywrite CLI (all other extensions are ignored) + # https://github.com/hashicorp/copywrite/blob/4af928579f5aa8f1dece9de1bb3098218903053d/addlicense/main.go#L357-L394 + ".release/**", + ".changelog/**", + "examples/**", + "scripts/**", +<% if version.nil? 
|| version == 'ga' -%> + "google/test-fixtures/**", +<% else -%> + "google-<%= version -%>/test-fixtures/**", +<% end -%> + "META.d/*.yml", + ".golangci.yml", + ".goreleaser.yml", + ] + + # (OPTIONAL) Links to an upstream repo for determining repo relationships + # This is for special cases and should not normally be set. + upstream = "GoogleCloudPlatform/magic-modules" +} diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_billing_account.go b/mmv1/third_party/terraform/data_sources/data_source_google_billing_account.go index 893009717edd..6238e54b1db0 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_billing_account.go +++ b/mmv1/third_party/terraform/data_sources/data_source_google_billing_account.go @@ -42,6 +42,11 @@ func DataSourceGoogleBillingAccount() *schema.Resource { Type: schema.TypeString, }, }, + "lookup_projects": { + Type: schema.TypeBool, + Optional: true, + Default: true, + }, }, } } @@ -98,11 +103,17 @@ func dataSourceBillingAccountRead(d *schema.ResourceData, meta interface{}) erro return fmt.Errorf("one of billing_account or display_name must be set") } - resp, err := config.NewBillingClient(userAgent).BillingAccounts.Projects.List(billingAccount.Name).Do() - if err != nil { - return fmt.Errorf("Error reading billing account projects: %s", err) + if d.Get("lookup_projects").(bool) { + resp, err := config.NewBillingClient(userAgent).BillingAccounts.Projects.List(billingAccount.Name).Do() + if err != nil { + return fmt.Errorf("Error reading billing account projects: %s", err) + } + projectIds := flattenBillingProjects(resp.ProjectBillingInfo) + + if err := d.Set("project_ids", projectIds); err != nil { + return fmt.Errorf("Error setting project_ids: %s", err) + } } - projectIds := flattenBillingProjects(resp.ProjectBillingInfo) d.SetId(tpgresource.GetResourceNameFromSelfLink(billingAccount.Name)) if err := d.Set("name", billingAccount.Name); err != nil { @@ -114,9 +125,6 @@ func dataSourceBillingAccountRead(d *schema.ResourceData, meta interface{}) erro if err := d.Set("open", billingAccount.Open); err != nil { return fmt.Errorf("Error setting open: %s", err) } - if err := d.Set("project_ids", projectIds); err != nil { - return fmt.Errorf("Error setting project_ids: %s", err) - } return nil } diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_iam_policy.go.erb b/mmv1/third_party/terraform/data_sources/data_source_google_iam_policy.go.erb index f882741c4168..fe816d87bf2c 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_iam_policy.go.erb +++ b/mmv1/third_party/terraform/data_sources/data_source_google_iam_policy.go.erb @@ -126,7 +126,7 @@ func dataSourceGoogleIamPolicyRead(d *schema.ResourceData, meta interface{}) err bindingMap := map[string]*cloudresourcemanager.Binding{} for _, v := range bset.List() { binding := v.(map[string]interface{}) - members := convertStringSet(binding["members"].(*schema.Set)) + members := tpgresource.ConvertStringSet(binding["members"].(*schema.Set)) condition := expandIamCondition(binding["condition"]) // Map keys are used to identify binding{} blocks that are identical except for the member lists diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_iam_testable_permissions.go b/mmv1/third_party/terraform/data_sources/data_source_google_iam_testable_permissions.go index c9e7546958fb..b365893b3c99 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_iam_testable_permissions.go +++ 
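[Editor's note] The billing-account data source above gains an optional lookup_projects flag (defaulting to true) so the extra Projects.List call, and the project_ids attribute it populates, can be skipped. The following is a hedged sketch of that "guard an expensive lookup behind a boolean" pattern, using a fake client in place of the Cloud Billing API:

```go
package main

import "fmt"

type billingClient struct{}

// listProjects stands in for the BillingAccounts.Projects.List call that the
// data source only makes when lookup_projects is true.
func (billingClient) listProjects(account string) ([]string, error) {
	return []string{"proj-a", "proj-b"}, nil
}

type billingAccountData struct {
	Name       string
	ProjectIDs []string
}

func readBillingAccount(c billingClient, name string, lookupProjects bool) (billingAccountData, error) {
	d := billingAccountData{Name: name}
	if lookupProjects {
		ids, err := c.listProjects(name)
		if err != nil {
			return d, fmt.Errorf("reading billing account projects: %w", err)
		}
		d.ProjectIDs = ids
	}
	return d, nil
}

func main() {
	d, _ := readBillingAccount(billingClient{}, "billingAccounts/000000-AAAAAA-000000", false)
	fmt.Printf("%+v\n", d) // ProjectIDs stays empty when the lookup is skipped
}
```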
b/mmv1/third_party/terraform/data_sources/data_source_google_iam_testable_permissions.go @@ -133,7 +133,7 @@ func flattenTestablePermissionsList(v interface{}, custom_support_level string, } else { csl = p["customRolesSupportLevel"] == custom_support_level } - if csl && p["stage"] != nil && stringInSlice(stages, p["stage"].(string)) { + if csl && p["stage"] != nil && tpgresource.StringInSlice(stages, p["stage"].(string)) { permissions = append(permissions, map[string]interface{}{ "name": p["name"], "title": p["title"], diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_organization.go b/mmv1/third_party/terraform/data_sources/data_source_google_organization.go index d3cfc0524dc7..a01e8bed1beb 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_organization.go +++ b/mmv1/third_party/terraform/data_sources/data_source_google_organization.go @@ -61,12 +61,15 @@ func dataSourceOrganizationRead(d *schema.ResourceData, meta interface{}) error if v, ok := d.GetOk("domain"); ok { filter := fmt.Sprintf("domain=%s", v.(string)) var resp *cloudresourcemanager.SearchOrganizationsResponse - err := transport_tpg.RetryTimeDuration(func() (err error) { - resp, err = config.NewResourceManagerClient(userAgent).Organizations.Search(&cloudresourcemanager.SearchOrganizationsRequest{ - Filter: filter, - }).Do() - return err - }, d.Timeout(schema.TimeoutRead)) + err := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (err error) { + resp, err = config.NewResourceManagerClient(userAgent).Organizations.Search(&cloudresourcemanager.SearchOrganizationsRequest{ + Filter: filter, + }).Do() + return err + }, + Timeout: d.Timeout(schema.TimeoutRead), + }) if err != nil { return fmt.Errorf("Error reading organization: %s", err) } @@ -92,10 +95,13 @@ func dataSourceOrganizationRead(d *schema.ResourceData, meta interface{}) error } else if v, ok := d.GetOk("organization"); ok { var resp *cloudresourcemanager.Organization - err := transport_tpg.RetryTimeDuration(func() (err error) { - resp, err = config.NewResourceManagerClient(userAgent).Organizations.Get(canonicalOrganizationName(v.(string))).Do() - return err - }, d.Timeout(schema.TimeoutRead)) + err := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (err error) { + resp, err = config.NewResourceManagerClient(userAgent).Organizations.Get(canonicalOrganizationName(v.(string))).Do() + return err + }, + Timeout: d.Timeout(schema.TimeoutRead), + }) if err != nil { return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("Organization Not Found : %s", v)) } diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_service_account_access_token.go b/mmv1/third_party/terraform/data_sources/data_source_google_service_account_access_token.go index 39e09a2ac7f9..653db0431b71 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_service_account_access_token.go +++ b/mmv1/third_party/terraform/data_sources/data_source_google_service_account_access_token.go @@ -72,8 +72,8 @@ func dataSourceGoogleServiceAccountAccessTokenRead(d *schema.ResourceData, meta name := fmt.Sprintf("projects/-/serviceAccounts/%s", d.Get("target_service_account").(string)) tokenRequest := &iamcredentials.GenerateAccessTokenRequest{ Lifetime: d.Get("lifetime").(string), - Delegates: convertStringSet(d.Get("delegates").(*schema.Set)), - Scope: tpgresource.CanonicalizeServiceScopes(convertStringSet(d.Get("scopes").(*schema.Set))), + Delegates: 
tpgresource.ConvertStringSet(d.Get("delegates").(*schema.Set)), + Scope: tpgresource.CanonicalizeServiceScopes(tpgresource.ConvertStringSet(d.Get("scopes").(*schema.Set))), } at, err := service.Projects.ServiceAccounts.GenerateAccessToken(name, tokenRequest).Do() if err != nil { diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_service_account_id_token.go b/mmv1/third_party/terraform/data_sources/data_source_google_service_account_id_token.go index dfb9fdba9984..fe74df0253bf 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_service_account_id_token.go +++ b/mmv1/third_party/terraform/data_sources/data_source_google_service_account_id_token.go @@ -86,7 +86,7 @@ func dataSourceGoogleServiceAccountIdTokenRead(d *schema.ResourceData, meta inte tokenRequest := &iamcredentials.GenerateIdTokenRequest{ Audience: targetAudience, IncludeEmail: d.Get("include_email").(bool), - Delegates: convertStringSet(d.Get("delegates").(*schema.Set)), + Delegates: tpgresource.ConvertStringSet(d.Get("delegates").(*schema.Set)), } at, err := service.Projects.ServiceAccounts.GenerateIdToken(name, tokenRequest).Do() if err != nil { diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_service_account_jwt.go b/mmv1/third_party/terraform/data_sources/data_source_google_service_account_jwt.go index 19e9e5bd62c3..2c945e7b4860 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_service_account_jwt.go +++ b/mmv1/third_party/terraform/data_sources/data_source_google_service_account_jwt.go @@ -88,7 +88,7 @@ func dataSourceGoogleServiceAccountJwtRead(d *schema.ResourceData, meta interfac jwtRequest := &iamcredentials.SignJwtRequest{ Payload: payload, - Delegates: convertStringSet(d.Get("delegates").(*schema.Set)), + Delegates: tpgresource.ConvertStringSet(d.Get("delegates").(*schema.Set)), } service := config.NewIamCredentialsClient(userAgent) diff --git a/mmv1/third_party/terraform/data_sources/data_source_sql_database_instances.go b/mmv1/third_party/terraform/data_sources/data_source_sql_database_instances.go index 6b54a9d2a111..e3573ba205e4 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_sql_database_instances.go +++ b/mmv1/third_party/terraform/data_sources/data_source_sql_database_instances.go @@ -100,10 +100,14 @@ func dataSourceSqlDatabaseInstancesRead(d *schema.ResourceData, meta interface{} databaseInstances := make([]map[string]interface{}, 0) for { var instances *sqladmin.InstancesListResponse - err = transport_tpg.RetryTimeDuration(func() (rerr error) { - instances, rerr = config.NewSqlAdminClient(userAgent).Instances.List(project).Filter(filter).PageToken(pageToken).Do() - return rerr - }, d.Timeout(schema.TimeoutRead), transport_tpg.IsSqlOperationInProgressError) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (rerr error) { + instances, rerr = config.NewSqlAdminClient(userAgent).Instances.List(project).Filter(filter).PageToken(pageToken).Do() + return rerr + }, + Timeout: d.Timeout(schema.TimeoutRead), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsSqlOperationInProgressError}, + }) if err != nil { return err } diff --git a/mmv1/third_party/terraform/framework_utils/framework_transport.go b/mmv1/third_party/terraform/framework_utils/framework_transport.go index 6ab539eaf6c3..438bd754d541 100644 --- a/mmv1/third_party/terraform/framework_utils/framework_transport.go +++ b/mmv1/third_party/terraform/framework_utils/framework_transport.go @@ -40,8 +40,8 @@ 
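[Editor's note] The recurring change across the data sources and resources above is replacing positional RetryTimeDuration(f, timeout, predicates...) calls with transport_tpg.Retry(transport_tpg.RetryOptions{RetryFunc, Timeout, ErrorRetryPredicates, PollInterval}). Below is a self-contained sketch of an options-struct retry helper with the same general shape; the types and behavior here are illustrative, not the provider's.

```go
package main

import (
	"errors"
	"fmt"
	"time"
)

type retryErrorPredicate func(error) (retry bool, reason string)

type retryOptions struct {
	RetryFunc            func() error
	Timeout              time.Duration
	PollInterval         time.Duration
	ErrorRetryPredicates []retryErrorPredicate
}

// retry keeps calling RetryFunc until it succeeds, the timeout expires, or an
// error is not matched by any predicate. It mirrors the option-struct calling
// style used above without reproducing the provider's exact behavior.
func retry(opt retryOptions) error {
	interval := opt.PollInterval
	if interval == 0 {
		interval = time.Second
	}
	deadline := time.Now().Add(opt.Timeout)
	for {
		err := opt.RetryFunc()
		if err == nil {
			return nil
		}
		retryable := false
		for _, p := range opt.ErrorRetryPredicates {
			if ok, _ := p(err); ok {
				retryable = true
				break
			}
		}
		if !retryable || time.Now().After(deadline) {
			return err
		}
		time.Sleep(interval)
	}
}

var errBusy = errors.New("operation in progress")

func main() {
	attempts := 0
	err := retry(retryOptions{
		RetryFunc: func() error {
			attempts++
			if attempts < 3 {
				return errBusy
			}
			return nil
		},
		Timeout:      10 * time.Second,
		PollInterval: 100 * time.Millisecond,
		ErrorRetryPredicates: []retryErrorPredicate{
			func(err error) (bool, string) { return errors.Is(err, errBusy), "busy" },
		},
	})
	fmt.Println("attempts:", attempts, "err:", err)
}
```

One benefit of the options struct, visible in the hunks above, is that call sites can add a fixed PollInterval or predicate list without threading extra positional arguments through every caller.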
func sendFrameworkRequestWithTimeout(p *frameworkProvider, method, project, rawu } var res *http.Response - err := transport_tpg.RetryTimeDuration( - func() error { + err := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { var buf bytes.Buffer if body != nil { err := json.NewEncoder(&buf).Encode(body) @@ -72,9 +72,9 @@ func sendFrameworkRequestWithTimeout(p *frameworkProvider, method, project, rawu return nil }, - timeout, - errorRetryPredicates..., - ) + Timeout: timeout, + ErrorRetryPredicates: errorRetryPredicates, + }) if err != nil { diags.AddError("error sending request", err.Error()) return nil, diags diff --git a/mmv1/third_party/terraform/go.mod.erb b/mmv1/third_party/terraform/go.mod.erb index 51e3e28fb174..29e3f5508120 100644 --- a/mmv1/third_party/terraform/go.mod.erb +++ b/mmv1/third_party/terraform/go.mod.erb @@ -5,7 +5,7 @@ go 1.19 require ( cloud.google.com/go/bigtable v1.17.0 - github.com/GoogleCloudPlatform/declarative-resource-client-library v1.40.0 + github.com/GoogleCloudPlatform/declarative-resource-client-library v1.42.0 github.com/apparentlymart/go-cidr v1.1.0 github.com/davecgh/go-spew v1.1.1 github.com/dnaeon/go-vcr v1.0.1 diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index f045f8cfea51..68b1ba8850de 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ -47,10 +47,8 @@ cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9 dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.39.0 h1:js9wqpbWMsjRUWCezgiw+bmZ4Jke9nkcQJLjdnQZwpc= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.39.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.40.0 h1:ONa3+D4jOZd+B0JAFlAGf6kxY8ymjpX4L7UhyZ3Z1XA= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.40.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.42.0 h1:ClnwLCqnr8/exvPWhBLJOj16oa8bvw8Fhu45wCjvQbU= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.42.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= github.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA= github.com/Microsoft/go-winio v0.4.16 h1:FtSW/jqD+l4ba5iPBj9CODVtgfYAD8w2wS923g/cFDk= github.com/Microsoft/go-winio v0.4.16/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugXOPRXwdLnMv0= diff --git a/mmv1/third_party/terraform/resources/resource_apigee_env_keystore_alias_pkcs12.go b/mmv1/third_party/terraform/resources/resource_apigee_env_keystore_alias_pkcs12.go index 4899754653de..2920819443ce 100644 --- a/mmv1/third_party/terraform/resources/resource_apigee_env_keystore_alias_pkcs12.go +++ b/mmv1/third_party/terraform/resources/resource_apigee_env_keystore_alias_pkcs12.go @@ -387,7 +387,7 @@ func flattenApigeeKeystoreAliasesPkcsCertsInfoCertInfo(v interface{}, d *schema. 
func flattenApigeeKeystoreAliasesPkcsCertsInfoCertInfoVersion(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { // Handles the string fixed64 format if strVal, ok := v.(string); ok { - if intVal, err := StringToFixed64(strVal); err == nil { + if intVal, err := tpgresource.StringToFixed64(strVal); err == nil { return intVal } } diff --git a/mmv1/third_party/terraform/resources/resource_apigee_keystores_aliases_key_cert_file.go b/mmv1/third_party/terraform/resources/resource_apigee_keystores_aliases_key_cert_file.go index 654f56a109cf..4dd161a42db5 100644 --- a/mmv1/third_party/terraform/resources/resource_apigee_keystores_aliases_key_cert_file.go +++ b/mmv1/third_party/terraform/resources/resource_apigee_keystores_aliases_key_cert_file.go @@ -450,7 +450,7 @@ func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfo(v interface{}, d func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoVersion(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { // Handles the string fixed64 format if strVal, ok := v.(string); ok { - if intVal, err := StringToFixed64(strVal); err == nil { + if intVal, err := tpgresource.StringToFixed64(strVal); err == nil { return intVal } } diff --git a/mmv1/third_party/terraform/resources/resource_apigee_sharedflow.go b/mmv1/third_party/terraform/resources/resource_apigee_sharedflow.go index eb12e22828ea..68830cef16de 100644 --- a/mmv1/third_party/terraform/resources/resource_apigee_sharedflow.go +++ b/mmv1/third_party/terraform/resources/resource_apigee_sharedflow.go @@ -412,8 +412,8 @@ func sendRequestRawBodyWithTimeout(config *transport_tpg.Config, method, project log.Printf("[DEBUG] sendRequestRawBodyWithTimeout sending request") - err := transport_tpg.RetryTimeDuration( - func() error { + err := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { req, err := http.NewRequest(method, rawurl, body) if err != nil { return err @@ -432,9 +432,9 @@ func sendRequestRawBodyWithTimeout(config *transport_tpg.Config, method, project return nil }, - timeout, - errorRetryPredicates..., - ) + Timeout: timeout, + ErrorRetryPredicates: errorRetryPredicates, + }) if err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/resources/resource_apigee_sharedflow_deployment.go b/mmv1/third_party/terraform/resources/resource_apigee_sharedflow_deployment.go index 0f5be0082953..bddc4b79931e 100644 --- a/mmv1/third_party/terraform/resources/resource_apigee_sharedflow_deployment.go +++ b/mmv1/third_party/terraform/resources/resource_apigee_sharedflow_deployment.go @@ -14,6 +14,7 @@ func ResourceApigeeSharedFlowDeployment() *schema.Resource { return &schema.Resource{ Create: resourceApigeeSharedflowDeploymentCreate, Read: resourceApigeeSharedflowDeploymentRead, + Update: resourceApigeeSharedflowDeploymentUpdate, Delete: resourceApigeeSharedflowDeploymentDelete, Importer: &schema.ResourceImporter{ @@ -42,7 +43,6 @@ func ResourceApigeeSharedFlowDeployment() *schema.Resource { "revision": { Type: schema.TypeString, Required: true, - ForceNew: true, Description: `Revision of the Sharedflow to be deployed.`, }, "service_account": { @@ -142,6 +142,50 @@ func resourceApigeeSharedflowDeploymentRead(d *schema.ResourceData, meta interfa return nil } +func resourceApigeeSharedflowDeploymentUpdate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } 
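[Editor's note] The Apigee flatteners above route string-encoded fixed64 values through tpgresource.StringToFixed64: APIs that serialize 64-bit integers as JSON strings need a parse-or-pass-through fallback. A small sketch, with stringToFixed64 as an illustrative local helper rather than the provider's function:

```go
package main

import (
	"fmt"
	"strconv"
)

// stringToFixed64 parses an int64 that the API returned as a decimal string.
func stringToFixed64(v string) (int64, error) {
	return strconv.ParseInt(v, 10, 64)
}

// flattenVersion keeps the value as-is unless it is a string that parses as
// an integer, mirroring the "handle the string fixed64 format" comment above.
func flattenVersion(v interface{}) interface{} {
	if s, ok := v.(string); ok {
		if n, err := stringToFixed64(s); err == nil {
			return n
		}
	}
	return v
}

func main() {
	fmt.Println(flattenVersion("42"))      // parsed to int64(42)
	fmt.Println(flattenVersion("not-int")) // left unchanged
	fmt.Println(flattenVersion(7))         // non-strings pass through
}
```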
+ + url, err := tpgresource.ReplaceVars(d, config, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/sharedflows/{{sharedflow_id}}/revisions/{{revision}}/deployments?override=true&serviceAccount={{service_account}}") + if err != nil { + return err + } + + log.Printf("[DEBUG] Updating new SharedflowDeployment at %s", url) + billingProject := "" + + // err == nil indicates that the billing_project value was found + if bp, err := tpgresource.GetBillingProject(d, config); err == nil { + billingProject = bp + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: billingProject, + RawURL: url, + UserAgent: userAgent, + Timeout: d.Timeout(schema.TimeoutUpdate), + }) + if err != nil { + return fmt.Errorf("Error updating SharedflowDeployment: %s", err) + } + + // Store the ID now + id, err := tpgresource.ReplaceVars(d, config, "organizations/{{org_id}}/environments/{{environment}}/sharedflows/{{sharedflow_id}}/revisions/{{revision}}/deployments") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + log.Printf("[DEBUG] Finished updating SharedflowDeployment %q: %#v", d.Id(), res) + + return resourceApigeeSharedflowDeploymentRead(d, meta) +} + func resourceApigeeSharedflowDeploymentDelete(d *schema.ResourceData, meta interface{}) error { config := meta.(*transport_tpg.Config) userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) diff --git a/mmv1/third_party/terraform/resources/resource_bigtable_gc_policy.go b/mmv1/third_party/terraform/resources/resource_bigtable_gc_policy.go index fbf553f8368c..e9174ced4f12 100644 --- a/mmv1/third_party/terraform/resources/resource_bigtable_gc_policy.go +++ b/mmv1/third_party/terraform/resources/resource_bigtable_gc_policy.go @@ -215,9 +215,9 @@ func resourceBigtableGCPolicyUpsert(d *schema.ResourceData, meta interface{}) er tableName := d.Get("table").(string) columnFamily := d.Get("column_family").(string) - retryFunc := func() (interface{}, error) { + retryFunc := func() error { reqErr := c.SetGCPolicy(ctx, tableName, columnFamily, gcPolicy) - return "", reqErr + return reqErr } // The default create timeout is 20 minutes. timeout := d.Timeout(schema.TimeoutCreate) @@ -225,7 +225,12 @@ func resourceBigtableGCPolicyUpsert(d *schema.ResourceData, meta interface{}) er // Mutations to gc policies can only happen one-at-a-time and take some amount of time. // Use a fixed polling rate of 30s based on the RetryInfo returned by the server rather than // the standard up-to-10s exponential backoff for those operations. - _, err = transport_tpg.RetryWithPolling(retryFunc, timeout, pollInterval, transport_tpg.IsBigTableRetryableError) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: retryFunc, + Timeout: timeout, + PollInterval: pollInterval, + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsBigTableRetryableError}, + }) if err != nil { return err } @@ -407,14 +412,19 @@ func resourceBigtableGCPolicyDestroy(d *schema.ResourceData, meta interface{}) e defer c.Close() - retryFunc := func() (interface{}, error) { + retryFunc := func() error { reqErr := c.SetGCPolicy(ctx, d.Get("table").(string), d.Get("column_family").(string), bigtable.NoGcPolicy()) - return "", reqErr + return reqErr } // The default delete timeout is 20 minutes. 
timeout := d.Timeout(schema.TimeoutDelete) pollInterval := time.Duration(30) * time.Second - _, err = transport_tpg.RetryWithPolling(retryFunc, timeout, pollInterval, transport_tpg.IsBigTableRetryableError) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: retryFunc, + Timeout: timeout, + PollInterval: pollInterval, + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsBigTableRetryableError}, + }) if err != nil { return err } diff --git a/mmv1/third_party/terraform/resources/resource_cloudfunctions_function.go b/mmv1/third_party/terraform/resources/resource_cloudfunctions_function.go index 1e3a0b29a506..bcf2de24d93a 100644 --- a/mmv1/third_party/terraform/resources/resource_cloudfunctions_function.go +++ b/mmv1/third_party/terraform/resources/resource_cloudfunctions_function.go @@ -603,19 +603,23 @@ func resourceCloudFunctionsCreate(d *schema.ResourceData, meta interface{}) erro // We retry the whole create-and-wait because Cloud Functions // will sometimes fail a creation operation entirely if it fails to pull // source code and we need to try the whole creation again. - rerr := transport_tpg.RetryTimeDuration(func() error { - op, err := config.NewCloudFunctionsClient(userAgent).Projects.Locations.Functions.Create( - cloudFuncId.locationId(), function).Do() - if err != nil { - return err - } + rerr := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + op, err := config.NewCloudFunctionsClient(userAgent).Projects.Locations.Functions.Create( + cloudFuncId.locationId(), function).Do() + if err != nil { + return err + } - // Name of function should be unique - d.SetId(cloudFuncId.cloudFunctionId()) + // Name of function should be unique + d.SetId(cloudFuncId.cloudFunctionId()) - return cloudFunctionsOperationWait(config, op, "Creating CloudFunctions Function", userAgent, - d.Timeout(schema.TimeoutCreate)) - }, d.Timeout(schema.TimeoutCreate), IsCloudFunctionsSourceCodeError) + return cloudFunctionsOperationWait(config, op, "Creating CloudFunctions Function", userAgent, + d.Timeout(schema.TimeoutCreate)) + }, + Timeout: d.Timeout(schema.TimeoutCreate), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{IsCloudFunctionsSourceCodeError}, + }) if rerr != nil { return rerr } @@ -906,16 +910,19 @@ func resourceCloudFunctionsUpdate(d *schema.ResourceData, meta interface{}) erro if len(updateMaskArr) > 0 { log.Printf("[DEBUG] Send Patch CloudFunction Configuration request: %#v", function) updateMask := strings.Join(updateMaskArr, ",") - rerr := transport_tpg.RetryTimeDuration(func() error { - op, err := config.NewCloudFunctionsClient(userAgent).Projects.Locations.Functions.Patch(function.Name, function). - UpdateMask(updateMask).Do() - if err != nil { - return err - } - - return cloudFunctionsOperationWait(config, op, "Updating CloudFunctions Function", userAgent, - d.Timeout(schema.TimeoutUpdate)) - }, d.Timeout(schema.TimeoutUpdate)) + rerr := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + op, err := config.NewCloudFunctionsClient(userAgent).Projects.Locations.Functions.Patch(function.Name, function). 
+ UpdateMask(updateMask).Do() + if err != nil { + return err + } + + return cloudFunctionsOperationWait(config, op, "Updating CloudFunctions Function", userAgent, + d.Timeout(schema.TimeoutUpdate)) + }, + Timeout: d.Timeout(schema.TimeoutUpdate), + }) if rerr != nil { return fmt.Errorf("Error while updating cloudfunction configuration: %s", rerr) } diff --git a/mmv1/third_party/terraform/resources/resource_composer_environment.go.erb b/mmv1/third_party/terraform/resources/resource_composer_environment.go.erb index bc9a534361a0..811e4479c8df 100644 --- a/mmv1/third_party/terraform/resources/resource_composer_environment.go.erb +++ b/mmv1/third_party/terraform/resources/resource_composer_environment.go.erb @@ -2170,7 +2170,7 @@ func expandComposerEnvironmentConfigSoftwareConfig(v interface{}, d *schema.Reso func expandComposerEnvironmentConfigSoftwareConfigStringMap(softwareConfig map[string]interface{}, k string) map[string]string { v, ok := softwareConfig[k] if ok && v != nil { - return convertStringMap(v.(map[string]interface{})) + return tpgresource.ConvertStringMap(v.(map[string]interface{})) } return map[string]string{} } diff --git a/mmv1/third_party/terraform/resources/resource_compute_disk_async_replication.go.erb b/mmv1/third_party/terraform/resources/resource_compute_disk_async_replication.go.erb index b1e2b6f72652..9666c9d53828 100644 --- a/mmv1/third_party/terraform/resources/resource_compute_disk_async_replication.go.erb +++ b/mmv1/third_party/terraform/resources/resource_compute_disk_async_replication.go.erb @@ -41,7 +41,7 @@ func ResourceComputeDiskAsyncReplication() *schema.Resource { Required: true, ForceNew: true, Description: `Primary disk for asynchronous replication.`, - DiffSuppressFunc: compareSelfLinkOrResourceName, + DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, }, "secondary_disk": { Type: schema.TypeList, @@ -56,7 +56,7 @@ func ResourceComputeDiskAsyncReplication() *schema.Resource { Required: true, ForceNew: true, Description: `Secondary disk for asynchronous replication.`, - DiffSuppressFunc: compareSelfLinkOrResourceName, + DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, }, "state": { Type: schema.TypeString, diff --git a/mmv1/third_party/terraform/resources/resource_container_cluster.go.erb b/mmv1/third_party/terraform/resources/resource_container_cluster.go.erb index f278ca4d5b4e..241391b1049e 100644 --- a/mmv1/third_party/terraform/resources/resource_container_cluster.go.erb +++ b/mmv1/third_party/terraform/resources/resource_container_cluster.go.erb @@ -2107,7 +2107,7 @@ func resourceContainerClusterCreate(d *schema.ResourceData, meta interface{}) er if tpgresource.IsZone(location) { locationsSet.Add(location) } - cluster.Locations = convertStringSet(locationsSet) + cluster.Locations = tpgresource.ConvertStringSet(locationsSet) } if v, ok := d.GetOk("network"); ok { @@ -2221,13 +2221,15 @@ func resourceContainerClusterCreate(d *schema.ResourceData, meta interface{}) er parent := fmt.Sprintf("projects/%s/locations/%s", project, location) var op *container.Operation - err = transport_tpg.Retry(func() error { - clusterCreateCall := config.NewContainerClient(userAgent).Projects.Locations.Clusters.Create(parent, req) - if config.UserProjectOverride { - clusterCreateCall.Header().Add("X-Goog-User-Project", project) - } - op, err = clusterCreateCall.Do() - return err + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + clusterCreateCall := 
config.NewContainerClient(userAgent).Projects.Locations.Clusters.Create(parent, req) + if config.UserProjectOverride { + clusterCreateCall.Header().Add("X-Goog-User-Project", project) + } + op, err = clusterCreateCall.Do() + return err + }, }) if err != nil { return err @@ -2278,13 +2280,15 @@ func resourceContainerClusterCreate(d *schema.ResourceData, meta interface{}) er if d.Get("remove_default_node_pool").(bool) { parent := fmt.Sprintf("%s/nodePools/%s", containerClusterFullName(project, location, clusterName), "default-pool") - err = transport_tpg.Retry(func() error { - clusterNodePoolDeleteCall := config.NewContainerClient(userAgent).Projects.Locations.Clusters.NodePools.Delete(parent) - if config.UserProjectOverride { - clusterNodePoolDeleteCall.Header().Add("X-Goog-User-Project", project) - } - op, err = clusterNodePoolDeleteCall.Do() - return err + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + clusterNodePoolDeleteCall := config.NewContainerClient(userAgent).Projects.Locations.Clusters.NodePools.Delete(parent) + if config.UserProjectOverride { + clusterNodePoolDeleteCall.Header().Add("X-Goog-User-Project", project) + } + op, err = clusterNodePoolDeleteCall.Do() + return err + }, }) if err != nil { return errwrap.Wrapf("Error deleting default node pool: {{err}}", err) @@ -3052,7 +3056,7 @@ func resourceContainerClusterUpdate(d *schema.ResourceData, meta interface{}) er req := &container.UpdateClusterRequest{ Update: &container.ClusterUpdate{ - DesiredLocations: convertStringSet(azSet), + DesiredLocations: tpgresource.ConvertStringSet(azSet), }, } @@ -3068,7 +3072,7 @@ func resourceContainerClusterUpdate(d *schema.ResourceData, meta interface{}) er if !azSet.Equal(azSetNew) { req = &container.UpdateClusterRequest{ Update: &container.ClusterUpdate{ - DesiredLocations: convertStringSet(azSetNew), + DesiredLocations: tpgresource.ConvertStringSet(azSetNew), }, } @@ -3536,7 +3540,7 @@ func resourceContainerClusterUpdate(d *schema.ResourceData, meta interface{}) er resourceLabels := d.Get("resource_labels").(map[string]interface{}) labelFingerprint := d.Get("label_fingerprint").(string) req := &container.SetLabelsRequest{ - ResourceLabels: convertStringMap(resourceLabels), + ResourceLabels: tpgresource.ConvertStringMap(resourceLabels), LabelFingerprint: labelFingerprint, } updateF := func() error { @@ -5724,7 +5728,7 @@ func containerClusterAddedScopesSuppress(k, old, new string, d *schema.ResourceD } for _, i := range combined { - if stringInSlice(tpgresource.ConvertStringArr(o.([]interface{})), i) { + if tpgresource.StringInSlice(tpgresource.ConvertStringArr(o.([]interface{})), i) { continue } diff --git a/mmv1/third_party/terraform/resources/resource_container_node_pool.go.erb b/mmv1/third_party/terraform/resources/resource_container_node_pool.go.erb index 83f608507521..874ed54e4787 100644 --- a/mmv1/third_party/terraform/resources/resource_container_node_pool.go.erb +++ b/mmv1/third_party/terraform/resources/resource_container_node_pool.go.erb @@ -847,7 +847,7 @@ func expandNodePool(d *schema.ResourceData, prefix string) (*container.NodePool, var locations []string if v, ok := d.GetOk("node_locations"); ok && v.(*schema.Set).Len() > 0 { - locations = convertStringSet(v.(*schema.Set)) + locations = tpgresource.ConvertStringSet(v.(*schema.Set)) } np := &container.NodePool{ @@ -1305,7 +1305,7 @@ func nodePoolUpdate(d *schema.ResourceData, meta interface{}, nodePoolInfo *Node if v, ok := d.GetOk(prefix + "node_config.0.resource_labels"); ok { 
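[Editor's note] Many hunks above re-point convertStringSet/convertStringMap to their exported tpgresource equivalents. Both simply normalize loosely typed Terraform values (set elements arriving as []interface{}, maps as map[string]interface{}) into []string and map[string]string before they are sent to the API. A minimal local sketch of the same conversions, without the schema.Set dependency:

```go
package main

import (
	"fmt"
	"sort"
)

// convertStringList turns a []interface{} (how set/list elements arrive from
// Terraform state) into a sorted []string.
func convertStringList(vs []interface{}) []string {
	out := make([]string, 0, len(vs))
	for _, v := range vs {
		out = append(out, v.(string))
	}
	sort.Strings(out)
	return out
}

// convertStringMap turns a map[string]interface{} into map[string]string.
func convertStringMap(m map[string]interface{}) map[string]string {
	out := make(map[string]string, len(m))
	for k, v := range m {
		out[k] = v.(string)
	}
	return out
}

func main() {
	fmt.Println(convertStringList([]interface{}{"us-central1-b", "us-central1-a"}))
	fmt.Println(convertStringMap(map[string]interface{}{"env": "dev", "team": "infra"}))
}
```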
resourceLabels := v.(map[string]interface{}) req.ResourceLabels = &container.ResourceLabels{ - Labels: convertStringMap(resourceLabels), + Labels: tpgresource.ConvertStringMap(resourceLabels), } } @@ -1343,7 +1343,7 @@ func nodePoolUpdate(d *schema.ResourceData, meta interface{}, nodePoolInfo *Node if v, ok := d.GetOk(prefix + "node_config.0.labels"); ok { labels := v.(map[string]interface{}) req.Labels = &container.NodeLabels{ - Labels: convertStringMap(labels), + Labels: tpgresource.ConvertStringMap(labels), } } @@ -1599,7 +1599,7 @@ func nodePoolUpdate(d *schema.ResourceData, meta interface{}, nodePoolInfo *Node if d.HasChange(prefix + "node_locations") { req := &container.UpdateNodePoolRequest{ - Locations: convertStringSet(d.Get(prefix + "node_locations").(*schema.Set)), + Locations: tpgresource.ConvertStringSet(d.Get(prefix + "node_locations").(*schema.Set)), } updateF := func() error { clusterNodePoolsUpdateCall := config.NewContainerClient(userAgent).Projects.Locations.Clusters.NodePools.Update(nodePoolInfo.fullyQualifiedName(name),req) diff --git a/mmv1/third_party/terraform/resources/resource_dataflow_flex_template_job.go.erb b/mmv1/third_party/terraform/resources/resource_dataflow_flex_template_job.go.erb index a4731b61aafc..52fa7bab11b9 100644 --- a/mmv1/third_party/terraform/resources/resource_dataflow_flex_template_job.go.erb +++ b/mmv1/third_party/terraform/resources/resource_dataflow_flex_template_job.go.erb @@ -152,14 +152,14 @@ func ResourceDataflowFlexTemplateJob() *schema.Resource { "network": { Type: schema.TypeString, Optional: true, - DiffSuppressFunc: compareSelfLinkOrResourceName, + DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, Description: `The network to which VMs will be assigned. If it is not provided, "default" will be used.`, }, "subnetwork": { Type: schema.TypeString, Optional: true, - DiffSuppressFunc: compareSelfLinkOrResourceName, + DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, Description: `The subnetwork to which VMs will be assigned. 
Should be of the form "regions/REGION/subnetworks/SUBNETWORK".`, }, @@ -277,7 +277,7 @@ func resourceDataflowFlexTemplateJobCreate(d *schema.ResourceData, meta interfac func resourceDataflowFlexJobSetupEnv(d *schema.ResourceData, config *transport_tpg.Config) (dataflow.FlexTemplateRuntimeEnvironment, error) { - additionalExperiments := convertStringSet(d.Get("additional_experiments").(*schema.Set)) + additionalExperiments := tpgresource.ConvertStringSet(d.Get("additional_experiments").(*schema.Set)) env := dataflow.FlexTemplateRuntimeEnvironment{ AdditionalUserLabels: tpgresource.ExpandStringMap(d, "labels"), @@ -407,7 +407,7 @@ func waitForDataflowJobState(d *schema.ResourceData, config *transport_tpg.Confi job, err := resourceDataflowJobGetJob(config, project, region, userAgent, jobID) if err != nil { - if transport_tpg.IsRetryableError(err) { + if transport_tpg.IsRetryableError(err, nil, nil) { return resource.RetryableError(err) } return resource.NonRetryableError(err) diff --git a/mmv1/third_party/terraform/resources/resource_dataflow_job.go.erb b/mmv1/third_party/terraform/resources/resource_dataflow_job.go.erb index db89f27c358c..6337b931734e 100644 --- a/mmv1/third_party/terraform/resources/resource_dataflow_job.go.erb +++ b/mmv1/third_party/terraform/resources/resource_dataflow_job.go.erb @@ -421,10 +421,14 @@ func resourceDataflowJobUpdateByReplacement(d *schema.ResourceData, meta interfa } var response *dataflow.LaunchTemplateResponse - err = transport_tpg.RetryTimeDuration(func() (updateErr error) { - response, updateErr = resourceDataflowJobLaunchTemplate(config, project, region, userAgent, d.Get("template_gcs_path").(string), &request) - return updateErr - }, time.Minute*time.Duration(5), transport_tpg.IsDataflowJobUpdateRetryableError) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (updateErr error) { + response, updateErr = resourceDataflowJobLaunchTemplate(config, project, region, userAgent, d.Get("template_gcs_path").(string), &request) + return updateErr + }, + Timeout: time.Minute*time.Duration(5), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsDataflowJobUpdateRetryableError}, + }) if err != nil { return err } @@ -564,7 +568,7 @@ func resourceDataflowJobSetupEnv(d *schema.ResourceData, config *transport_tpg.C labels := tpgresource.ExpandStringMap(d, "labels") - additionalExperiments := convertStringSet(d.Get("additional_experiments").(*schema.Set)) + additionalExperiments := tpgresource.ConvertStringSet(d.Get("additional_experiments").(*schema.Set)) env := dataflow.RuntimeEnvironment{ MaxWorkers: int64(d.Get("max_workers").(int)), @@ -643,7 +647,7 @@ func waitForDataflowJobToBeUpdated(d *schema.ResourceData, config *transport_tpg replacementJob, err := resourceDataflowJobGetJob(config, project, region, userAgent, replacementJobID) if err != nil { - if transport_tpg.IsRetryableError(err) { + if transport_tpg.IsRetryableError(err, nil, nil) { return resource.RetryableError(err) } return resource.NonRetryableError(err) diff --git a/mmv1/third_party/terraform/resources/resource_dataproc_cluster.go.erb b/mmv1/third_party/terraform/resources/resource_dataproc_cluster.go.erb index 1edede58610e..24368119a677 100644 --- a/mmv1/third_party/terraform/resources/resource_dataproc_cluster.go.erb +++ b/mmv1/third_party/terraform/resources/resource_dataproc_cluster.go.erb @@ -1471,7 +1471,7 @@ func expandGkeNodePoolTarget(d *schema.ResourceData, v interface{}, clusterAddre data := v1.(map[string]interface{}) nodePool := 
dataproc.GkeNodePoolTarget{ NodePool: clusterAddress + "/nodePools/" + data["node_pool"].(string), - Roles: convertStringSet(data["roles"].(*schema.Set)), + Roles: tpgresource.ConvertStringSet(data["roles"].(*schema.Set)), } if v, ok := d.GetOk(fmt.Sprintf("virtual_cluster_config.0.kubernetes_cluster_config.0.gke_cluster_config.0.node_pool_target.%d.node_pool_config", i)); ok { @@ -1492,7 +1492,7 @@ func expandGkeNodePoolConfig(cfg map[string]interface{}) *dataproc.GkeNodePoolCo } if v, ok := cfg["locations"]; ok { - conf.Locations = convertStringSet(v.(*schema.Set)) + conf.Locations = tpgresource.ConvertStringSet(v.(*schema.Set)) } if autoscalingcfg, ok := cfg["autoscaling"]; ok { @@ -1649,7 +1649,7 @@ func expandGceClusterConfig(d *schema.ResourceData, config *transport_tpg.Config conf.SubnetworkUri = snf.RelativeLink() } if v, ok := cfg["tags"]; ok { - conf.Tags = convertStringSet(v.(*schema.Set)) + conf.Tags = tpgresource.ConvertStringSet(v.(*schema.Set)) } if v, ok := cfg["service_account"]; ok { conf.ServiceAccount = v.(string) @@ -1666,7 +1666,7 @@ func expandGceClusterConfig(d *schema.ResourceData, config *transport_tpg.Config conf.InternalIpOnly = v.(bool) } if v, ok := cfg["metadata"]; ok { - conf.Metadata = convertStringMap(v.(map[string]interface{})) + conf.Metadata = tpgresource.ConvertStringMap(v.(map[string]interface{})) } if v, ok := d.GetOk("cluster_config.0.gce_cluster_config.0.shielded_instance_config"); ok { cfgSic := v.([]interface{})[0].(map[string]interface{}) @@ -1691,7 +1691,7 @@ func expandGceClusterConfig(d *schema.ResourceData, config *transport_tpg.Config conf.ReservationAffinity.Key = v.(string) } if v, ok := cfgRa["values"]; ok { - conf.ReservationAffinity.Values = convertStringSet(v.(*schema.Set)) + conf.ReservationAffinity.Values = tpgresource.ConvertStringSet(v.(*schema.Set)) } } if v, ok := d.GetOk("cluster_config.0.gce_cluster_config.0.node_group_affinity"); ok { @@ -1830,7 +1830,7 @@ func expandDataprocMetricConfig(cfg map[string]interface{}) *dataproc.DataprocMe data := raw.(map[string]interface{}) metric := dataproc.Metric{ MetricSource: data["metric_source"].(string), - MetricOverrides: convertStringSet(data["metric_overrides"].(*schema.Set)), + MetricOverrides: tpgresource.ConvertStringSet(data["metric_overrides"].(*schema.Set)), } metricsSet = append(metricsSet, &metric) } @@ -2088,32 +2088,33 @@ func resourceDataprocClusterRead(d *schema.ResourceData, meta interface{}) error } var cfg []map[string]interface{} + cfg, err = flattenClusterConfig(d, cluster.Config) - if cluster.Config != nil { - cfg, err = flattenClusterConfig(d, cluster.Config) - - if err != nil { - return err - } - - err = d.Set("cluster_config", cfg) - } else { - cfg, err = flattenVirtualClusterConfig(d, cluster.VirtualClusterConfig) + if err != nil { + return err + } - if err != nil { - return err - } + err = d.Set("cluster_config", cfg) + virtualCfg, err := flattenVirtualClusterConfig(d, cluster.VirtualClusterConfig) - err = d.Set("virtual_cluster_config", cfg) + if err != nil { + return err } + err = d.Set("virtual_cluster_config", virtualCfg) + if err != nil { return err } + return nil } func flattenVirtualClusterConfig(d *schema.ResourceData, cfg *dataproc.VirtualClusterConfig) ([]map[string]interface{}, error) { + if cfg == nil { + return []map[string]interface{}{}, nil + } + data := map[string]interface{}{ "staging_bucket": d.Get("virtual_cluster_config.0.staging_bucket"), "auxiliary_services_config": flattenAuxiliaryServicesConfig(d, cfg.AuxiliaryServicesConfig), @@ -2230,6 
+2231,9 @@ func flattenKubernetesSoftwareConfig(d *schema.ResourceData, cfg *dataproc.Kuber } func flattenClusterConfig(d *schema.ResourceData, cfg *dataproc.ClusterConfig) ([]map[string]interface{}, error) { + if cfg == nil { + return []map[string]interface{}{}, nil + } data := map[string]interface{}{ "staging_bucket": d.Get("cluster_config.0.staging_bucket").(string), @@ -2422,6 +2426,9 @@ func flattenInitializationActions(nia []*dataproc.NodeInitializationAction) ([]m } func flattenGceClusterConfig(d *schema.ResourceData, gcc *dataproc.GceClusterConfig) []map[string]interface{} { + if gcc == nil { + return []map[string]interface{}{} + } gceConfig := map[string]interface{}{ "tags": schema.NewSet(schema.HashString, tpgresource.ConvertStringArrToInterface(gcc.Tags)), diff --git a/mmv1/third_party/terraform/resources/resource_dataproc_job.go.erb b/mmv1/third_party/terraform/resources/resource_dataproc_job.go.erb index 06cb45657621..e451078fa637 100644 --- a/mmv1/third_party/terraform/resources/resource_dataproc_job.go.erb +++ b/mmv1/third_party/terraform/resources/resource_dataproc_job.go.erb @@ -554,7 +554,7 @@ func expandPySparkJob(config map[string]interface{}) *dataproc.PySparkJob { job.ArchiveUris = tpgresource.ConvertStringArr(v.([]interface{})) } if v, ok := config["properties"]; ok { - job.Properties = convertStringMap(v.(map[string]interface{})) + job.Properties = tpgresource.ConvertStringMap(v.(map[string]interface{})) } if v, ok := config["logging_config"]; ok { config := extractFirstMapConfig(v.([]interface{})) @@ -686,7 +686,7 @@ func expandSparkJob(config map[string]interface{}) *dataproc.SparkJob { job.ArchiveUris = tpgresource.ConvertStringArr(v.([]interface{})) } if v, ok := config["properties"]; ok { - job.Properties = convertStringMap(v.(map[string]interface{})) + job.Properties = tpgresource.ConvertStringMap(v.(map[string]interface{})) } if v, ok := config["logging_config"]; ok { config := extractFirstMapConfig(v.([]interface{})) @@ -807,7 +807,7 @@ func expandHadoopJob(config map[string]interface{}) *dataproc.HadoopJob { job.ArchiveUris = tpgresource.ConvertStringArr(v.([]interface{})) } if v, ok := config["properties"]; ok { - job.Properties = convertStringMap(v.(map[string]interface{})) + job.Properties = tpgresource.ConvertStringMap(v.(map[string]interface{})) } if v, ok := config["logging_config"]; ok { config := extractFirstMapConfig(v.([]interface{})) @@ -912,13 +912,13 @@ func expandHiveJob(config map[string]interface{}) *dataproc.HiveJob { job.ContinueOnFailure = v.(bool) } if v, ok := config["script_variables"]; ok { - job.ScriptVariables = convertStringMap(v.(map[string]interface{})) + job.ScriptVariables = tpgresource.ConvertStringMap(v.(map[string]interface{})) } if v, ok := config["jar_file_uris"]; ok { job.JarFileUris = tpgresource.ConvertStringArr(v.([]interface{})) } if v, ok := config["properties"]; ok { - job.Properties = convertStringMap(v.(map[string]interface{})) + job.Properties = tpgresource.ConvertStringMap(v.(map[string]interface{})) } return job @@ -1020,13 +1020,13 @@ func expandPigJob(config map[string]interface{}) *dataproc.PigJob { job.ContinueOnFailure = v.(bool) } if v, ok := config["script_variables"]; ok { - job.ScriptVariables = convertStringMap(v.(map[string]interface{})) + job.ScriptVariables = tpgresource.ConvertStringMap(v.(map[string]interface{})) } if v, ok := config["jar_file_uris"]; ok { job.JarFileUris = tpgresource.ConvertStringArr(v.([]interface{})) } if v, ok := config["properties"]; ok { - job.Properties = 
convertStringMap(v.(map[string]interface{})) + job.Properties = tpgresource.ConvertStringMap(v.(map[string]interface{})) } return job @@ -1118,13 +1118,13 @@ func expandSparkSqlJob(config map[string]interface{}) *dataproc.SparkSqlJob { } } if v, ok := config["script_variables"]; ok { - job.ScriptVariables = convertStringMap(v.(map[string]interface{})) + job.ScriptVariables = tpgresource.ConvertStringMap(v.(map[string]interface{})) } if v, ok := config["jar_file_uris"]; ok { job.JarFileUris = tpgresource.ConvertStringArr(v.([]interface{})) } if v, ok := config["properties"]; ok { - job.Properties = convertStringMap(v.(map[string]interface{})) + job.Properties = tpgresource.ConvertStringMap(v.(map[string]interface{})) } return job @@ -1226,7 +1226,7 @@ func expandPrestoJob(config map[string]interface{}) *dataproc.PrestoJob { } } if v, ok := config["properties"]; ok { - job.Properties = convertStringMap(v.(map[string]interface{})) + job.Properties = tpgresource.ConvertStringMap(v.(map[string]interface{})) } if v, ok := config["output_format"]; ok { job.OutputFormat = v.(string) @@ -1241,7 +1241,7 @@ func expandPrestoJob(config map[string]interface{}) *dataproc.PrestoJob { func expandLoggingConfig(config map[string]interface{}) *dataproc.LoggingConfig { conf := &dataproc.LoggingConfig{} if v, ok := config["driver_log_levels"]; ok { - conf.DriverLogLevels = convertStringMap(v.(map[string]interface{})) + conf.DriverLogLevels = tpgresource.ConvertStringMap(v.(map[string]interface{})) } return conf } diff --git a/mmv1/third_party/terraform/resources/resource_google_folder.go b/mmv1/third_party/terraform/resources/resource_google_folder.go index f0c61c2d8293..11bda4e1dd2d 100644 --- a/mmv1/third_party/terraform/resources/resource_google_folder.go +++ b/mmv1/third_party/terraform/resources/resource_google_folder.go @@ -81,14 +81,17 @@ func resourceGoogleFolderCreate(d *schema.ResourceData, meta interface{}) error parent := d.Get("parent").(string) var op *resourceManagerV3.Operation - err = transport_tpg.RetryTimeDuration(func() error { - var reqErr error - op, reqErr = config.NewResourceManagerV3Client(userAgent).Folders.Create(&resourceManagerV3.Folder{ - DisplayName: displayName, - Parent: parent, - }).Do() - return reqErr - }, d.Timeout(schema.TimeoutCreate)) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + var reqErr error + op, reqErr = config.NewResourceManagerV3Client(userAgent).Folders.Create(&resourceManagerV3.Folder{ + DisplayName: displayName, + Parent: parent, + }).Do() + return reqErr + }, + Timeout: d.Timeout(schema.TimeoutCreate), + }) if err != nil { return fmt.Errorf("Error creating folder '%s' in '%s': %s", displayName, parent, err) } @@ -167,11 +170,13 @@ func resourceGoogleFolderUpdate(d *schema.ResourceData, meta interface{}) error d.Partial(true) if d.HasChange("display_name") { - err := transport_tpg.Retry(func() error { - _, reqErr := config.NewResourceManagerV3Client(userAgent).Folders.Patch(d.Id(), &resourceManagerV3.Folder{ - DisplayName: displayName, - }).Do() - return reqErr + err := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + _, reqErr := config.NewResourceManagerV3Client(userAgent).Folders.Patch(d.Id(), &resourceManagerV3.Folder{ + DisplayName: displayName, + }).Do() + return reqErr + }, }) if err != nil { return fmt.Errorf("Error updating display_name to '%s': %s", displayName, err) @@ -182,12 +187,14 @@ func resourceGoogleFolderUpdate(d *schema.ResourceData, meta interface{}) error newParent := 
d.Get("parent").(string) var op *resourceManagerV3.Operation - err := transport_tpg.Retry(func() error { - var reqErr error - op, reqErr = config.NewResourceManagerV3Client(userAgent).Folders.Move(d.Id(), &resourceManagerV3.MoveFolderRequest{ - DestinationParent: newParent, - }).Do() - return reqErr + err := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + var reqErr error + op, reqErr = config.NewResourceManagerV3Client(userAgent).Folders.Move(d.Id(), &resourceManagerV3.MoveFolderRequest{ + DestinationParent: newParent, + }).Do() + return reqErr + }, }) if err != nil { return fmt.Errorf("Error moving folder '%s' to '%s': %s", displayName, newParent, err) @@ -218,11 +225,14 @@ func resourceGoogleFolderDelete(d *schema.ResourceData, meta interface{}) error displayName := d.Get("display_name").(string) var op *resourceManagerV3.Operation - err = transport_tpg.RetryTimeDuration(func() error { - var reqErr error - op, reqErr = config.NewResourceManagerV3Client(userAgent).Folders.Delete(d.Id()).Do() - return reqErr - }, d.Timeout(schema.TimeoutDelete)) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + var reqErr error + op, reqErr = config.NewResourceManagerV3Client(userAgent).Folders.Delete(d.Id()).Do() + return reqErr + }, + Timeout: d.Timeout(schema.TimeoutDelete), + }) if err != nil { return fmt.Errorf("Error deleting folder '%s': %s", displayName, err) } @@ -256,11 +266,14 @@ func resourceGoogleFolderImportState(d *schema.ResourceData, m interface{}) ([]* // ResourceData resource. func getGoogleFolder(folderName, userAgent string, d *schema.ResourceData, config *transport_tpg.Config) (*resourceManagerV3.Folder, error) { var folder *resourceManagerV3.Folder - err := transport_tpg.RetryTimeDuration(func() error { - var reqErr error - folder, reqErr = config.NewResourceManagerV3Client(userAgent).Folders.Get(folderName).Do() - return reqErr - }, d.Timeout(schema.TimeoutRead)) + err := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + var reqErr error + folder, reqErr = config.NewResourceManagerV3Client(userAgent).Folders.Get(folderName).Do() + return reqErr + }, + Timeout: d.Timeout(schema.TimeoutRead), + }) if err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/resources/resource_google_folder_organization_policy.go b/mmv1/third_party/terraform/resources/resource_google_folder_organization_policy.go index 2aef3b353847..cde66a0c00c0 100644 --- a/mmv1/third_party/terraform/resources/resource_google_folder_organization_policy.go +++ b/mmv1/third_party/terraform/resources/resource_google_folder_organization_policy.go @@ -88,12 +88,15 @@ func resourceGoogleFolderOrganizationPolicyRead(d *schema.ResourceData, meta int folder := canonicalFolderId(d.Get("folder").(string)) var policy *cloudresourcemanager.OrgPolicy - err = transport_tpg.RetryTimeDuration(func() (getErr error) { - policy, getErr = config.NewResourceManagerClient(userAgent).Folders.GetOrgPolicy(folder, &cloudresourcemanager.GetOrgPolicyRequest{ - Constraint: canonicalOrgPolicyConstraint(d.Get("constraint").(string)), - }).Do() - return getErr - }, d.Timeout(schema.TimeoutRead)) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (getErr error) { + policy, getErr = config.NewResourceManagerClient(userAgent).Folders.GetOrgPolicy(folder, &cloudresourcemanager.GetOrgPolicyRequest{ + Constraint: canonicalOrgPolicyConstraint(d.Get("constraint").(string)), + }).Do() + return getErr + }, + Timeout: 
d.Timeout(schema.TimeoutRead), + }) if err != nil { return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("Organization policy for %s", folder)) } @@ -143,12 +146,15 @@ func resourceGoogleFolderOrganizationPolicyDelete(d *schema.ResourceData, meta i } folder := canonicalFolderId(d.Get("folder").(string)) - return transport_tpg.RetryTimeDuration(func() (delErr error) { - _, delErr = config.NewResourceManagerClient(userAgent).Folders.ClearOrgPolicy(folder, &cloudresourcemanager.ClearOrgPolicyRequest{ - Constraint: canonicalOrgPolicyConstraint(d.Get("constraint").(string)), - }).Do() - return delErr - }, d.Timeout(schema.TimeoutDelete)) + return transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (delErr error) { + _, delErr = config.NewResourceManagerClient(userAgent).Folders.ClearOrgPolicy(folder, &cloudresourcemanager.ClearOrgPolicyRequest{ + Constraint: canonicalOrgPolicyConstraint(d.Get("constraint").(string)), + }).Do() + return delErr + }, + Timeout: d.Timeout(schema.TimeoutDelete), + }) } func setFolderOrganizationPolicy(d *schema.ResourceData, meta interface{}) error { @@ -170,17 +176,20 @@ func setFolderOrganizationPolicy(d *schema.ResourceData, meta interface{}) error return err } - return transport_tpg.RetryTimeDuration(func() (setErr error) { - _, setErr = config.NewResourceManagerClient(userAgent).Folders.SetOrgPolicy(folder, &cloudresourcemanager.SetOrgPolicyRequest{ - Policy: &cloudresourcemanager.OrgPolicy{ - Constraint: canonicalOrgPolicyConstraint(d.Get("constraint").(string)), - BooleanPolicy: expandBooleanOrganizationPolicy(d.Get("boolean_policy").([]interface{})), - ListPolicy: listPolicy, - RestoreDefault: restoreDefault, - Version: int64(d.Get("version").(int)), - Etag: d.Get("etag").(string), - }, - }).Do() - return setErr - }, d.Timeout(schema.TimeoutCreate)) + return transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (setErr error) { + _, setErr = config.NewResourceManagerClient(userAgent).Folders.SetOrgPolicy(folder, &cloudresourcemanager.SetOrgPolicyRequest{ + Policy: &cloudresourcemanager.OrgPolicy{ + Constraint: canonicalOrgPolicyConstraint(d.Get("constraint").(string)), + BooleanPolicy: expandBooleanOrganizationPolicy(d.Get("boolean_policy").([]interface{})), + ListPolicy: listPolicy, + RestoreDefault: restoreDefault, + Version: int64(d.Get("version").(int)), + Etag: d.Get("etag").(string), + }, + }).Do() + return setErr + }, + Timeout: d.Timeout(schema.TimeoutCreate), + }) } diff --git a/mmv1/third_party/terraform/resources/resource_google_organization_iam_custom_role.go b/mmv1/third_party/terraform/resources/resource_google_organization_iam_custom_role.go index 5bc22d0fc562..2e3430a679b4 100644 --- a/mmv1/third_party/terraform/resources/resource_google_organization_iam_custom_role.go +++ b/mmv1/third_party/terraform/resources/resource_google_organization_iam_custom_role.go @@ -112,7 +112,7 @@ func resourceGoogleOrganizationIamCustomRoleCreate(d *schema.ResourceData, meta Title: d.Get("title").(string), Description: d.Get("description").(string), Stage: d.Get("stage").(string), - IncludedPermissions: convertStringSet(d.Get("permissions").(*schema.Set)), + IncludedPermissions: tpgresource.ConvertStringSet(d.Get("permissions").(*schema.Set)), }, }).Do() if err != nil { @@ -200,7 +200,7 @@ func resourceGoogleOrganizationIamCustomRoleUpdate(d *schema.ResourceData, meta Title: d.Get("title").(string), Description: d.Get("description").(string), Stage: d.Get("stage").(string), - IncludedPermissions: 
convertStringSet(d.Get("permissions").(*schema.Set)), + IncludedPermissions: tpgresource.ConvertStringSet(d.Get("permissions").(*schema.Set)), }).Do() if err != nil { diff --git a/mmv1/third_party/terraform/resources/resource_google_organization_policy.go b/mmv1/third_party/terraform/resources/resource_google_organization_policy.go index 7d9c270be341..f02b4e9a173f 100644 --- a/mmv1/third_party/terraform/resources/resource_google_organization_policy.go +++ b/mmv1/third_party/terraform/resources/resource_google_organization_policy.go @@ -197,12 +197,15 @@ func resourceGoogleOrganizationPolicyRead(d *schema.ResourceData, meta interface org := "organizations/" + d.Get("org_id").(string) var policy *cloudresourcemanager.OrgPolicy - err = transport_tpg.RetryTimeDuration(func() (readErr error) { - policy, readErr = config.NewResourceManagerClient(userAgent).Organizations.GetOrgPolicy(org, &cloudresourcemanager.GetOrgPolicyRequest{ - Constraint: canonicalOrgPolicyConstraint(d.Get("constraint").(string)), - }).Do() - return readErr - }, d.Timeout(schema.TimeoutRead)) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (readErr error) { + policy, readErr = config.NewResourceManagerClient(userAgent).Organizations.GetOrgPolicy(org, &cloudresourcemanager.GetOrgPolicyRequest{ + Constraint: canonicalOrgPolicyConstraint(d.Get("constraint").(string)), + }).Do() + return readErr + }, + Timeout: d.Timeout(schema.TimeoutRead), + }) if err != nil { return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("Organization policy for %s", org)) } @@ -252,12 +255,15 @@ func resourceGoogleOrganizationPolicyDelete(d *schema.ResourceData, meta interfa } org := "organizations/" + d.Get("org_id").(string) - err = transport_tpg.RetryTimeDuration(func() error { - _, dErr := config.NewResourceManagerClient(userAgent).Organizations.ClearOrgPolicy(org, &cloudresourcemanager.ClearOrgPolicyRequest{ - Constraint: canonicalOrgPolicyConstraint(d.Get("constraint").(string)), - }).Do() - return dErr - }, d.Timeout(schema.TimeoutDelete)) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + _, dErr := config.NewResourceManagerClient(userAgent).Organizations.ClearOrgPolicy(org, &cloudresourcemanager.ClearOrgPolicyRequest{ + Constraint: canonicalOrgPolicyConstraint(d.Get("constraint").(string)), + }).Do() + return dErr + }, + Timeout: d.Timeout(schema.TimeoutDelete), + }) if err != nil { return err } @@ -313,19 +319,22 @@ func setOrganizationPolicy(d *schema.ResourceData, meta interface{}) error { return err } - err = transport_tpg.RetryTimeDuration(func() (setErr error) { - _, setErr = config.NewResourceManagerClient(userAgent).Organizations.SetOrgPolicy(org, &cloudresourcemanager.SetOrgPolicyRequest{ - Policy: &cloudresourcemanager.OrgPolicy{ - Constraint: canonicalOrgPolicyConstraint(d.Get("constraint").(string)), - BooleanPolicy: expandBooleanOrganizationPolicy(d.Get("boolean_policy").([]interface{})), - ListPolicy: listPolicy, - RestoreDefault: restoreDefault, - Version: int64(d.Get("version").(int)), - Etag: d.Get("etag").(string), - }, - }).Do() - return setErr - }, d.Timeout(schema.TimeoutCreate)) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (setErr error) { + _, setErr = config.NewResourceManagerClient(userAgent).Organizations.SetOrgPolicy(org, &cloudresourcemanager.SetOrgPolicyRequest{ + Policy: &cloudresourcemanager.OrgPolicy{ + Constraint: canonicalOrgPolicyConstraint(d.Get("constraint").(string)), + BooleanPolicy: 
expandBooleanOrganizationPolicy(d.Get("boolean_policy").([]interface{})), + ListPolicy: listPolicy, + RestoreDefault: restoreDefault, + Version: int64(d.Get("version").(int)), + Etag: d.Get("etag").(string), + }, + }).Do() + return setErr + }, + Timeout: d.Timeout(schema.TimeoutCreate), + }) return err } diff --git a/mmv1/third_party/terraform/resources/resource_google_project.go b/mmv1/third_party/terraform/resources/resource_google_project.go index 89427a514e20..1fff48ed847b 100644 --- a/mmv1/third_party/terraform/resources/resource_google_project.go +++ b/mmv1/third_party/terraform/resources/resource_google_project.go @@ -12,6 +12,8 @@ import ( "github.com/hashicorp/errwrap" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + + tpgserviceusage "github.com/hashicorp/terraform-provider-google/google/services/serviceusage" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" "github.com/hashicorp/terraform-provider-google/google/verify" @@ -139,10 +141,13 @@ func resourceGoogleProjectCreate(d *schema.ResourceData, meta interface{}) error } var op *cloudresourcemanager.Operation - err = transport_tpg.RetryTimeDuration(func() (reqErr error) { - op, reqErr = config.NewResourceManagerClient(userAgent).Projects.Create(project).Do() - return reqErr - }, d.Timeout(schema.TimeoutCreate)) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (reqErr error) { + op, reqErr = config.NewResourceManagerClient(userAgent).Projects.Create(project).Do() + return reqErr + }, + Timeout: d.Timeout(schema.TimeoutCreate), + }) if err != nil { return fmt.Errorf("error creating project %s (%s): %s. "+ "If you received a 403 error, make sure you have the"+ @@ -224,7 +229,7 @@ func resourceGoogleProjectCheckPreRequisites(config *transport_tpg.Config, d *sc if err != nil { return fmt.Errorf("failed to check permissions on billing account %q: %v", ba, err) } - if !stringInSlice(resp.Permissions, perm) { + if !tpgresource.StringInSlice(resp.Permissions, perm) { return fmt.Errorf("missing permission on %q: %v", ba, perm) } if !d.Get("auto_create_network").(bool) { @@ -304,10 +309,13 @@ func resourceGoogleProjectRead(d *schema.ResourceData, meta interface{}) error { } var ba *cloudbilling.ProjectBillingInfo - err = transport_tpg.RetryTimeDuration(func() (reqErr error) { - ba, reqErr = config.NewBillingClient(userAgent).Projects.GetBillingInfo(PrefixedProject(pid)).Do() - return reqErr - }, d.Timeout(schema.TimeoutRead)) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (reqErr error) { + ba, reqErr = config.NewBillingClient(userAgent).Projects.GetBillingInfo(PrefixedProject(pid)).Do() + return reqErr + }, + Timeout: d.Timeout(schema.TimeoutRead), + }) // Read the billing account if err != nil && !transport_tpg.IsApiNotEnabledError(err) { return fmt.Errorf("Error reading billing account for project %q: %v", PrefixedProject(pid), err) @@ -437,10 +445,13 @@ func resourceGoogleProjectUpdate(d *schema.ResourceData, meta interface{}) error func updateProject(config *transport_tpg.Config, d *schema.ResourceData, projectName, userAgent string, desiredProject *cloudresourcemanager.Project) (*cloudresourcemanager.Project, error) { var newProj *cloudresourcemanager.Project - if err := transport_tpg.RetryTimeDuration(func() (updateErr error) { - newProj, updateErr = config.NewResourceManagerClient(userAgent).Projects.Update(desiredProject.ProjectId, desiredProject).Do() - return 
updateErr - }, d.Timeout(schema.TimeoutUpdate)); err != nil { + if err := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (updateErr error) { + newProj, updateErr = config.NewResourceManagerClient(userAgent).Projects.Update(desiredProject.ProjectId, desiredProject).Do() + return updateErr + }, + Timeout: d.Timeout(schema.TimeoutUpdate), + }); err != nil { return nil, fmt.Errorf("Error updating project %q: %s", projectName, err) } return newProj, nil @@ -456,10 +467,13 @@ func resourceGoogleProjectDelete(d *schema.ResourceData, meta interface{}) error if !d.Get("skip_delete").(bool) { parts := strings.Split(d.Id(), "/") pid := parts[len(parts)-1] - if err := transport_tpg.RetryTimeDuration(func() error { - _, delErr := config.NewResourceManagerClient(userAgent).Projects.Delete(pid).Do() - return delErr - }, d.Timeout(schema.TimeoutDelete)); err != nil { + if err := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + _, delErr := config.NewResourceManagerClient(userAgent).Projects.Delete(pid).Do() + return delErr + }, + Timeout: d.Timeout(schema.TimeoutDelete), + }); err != nil { return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("Project %s", pid)) } } @@ -546,7 +560,10 @@ func updateProjectBillingAccount(d *schema.ResourceData, config *transport_tpg.C _, err := config.NewBillingClient(userAgent).Projects.UpdateBillingInfo(PrefixedProject(pid), ba).Do() return err } - err := transport_tpg.RetryTimeDuration(updateBillingInfoFunc, d.Timeout(schema.TimeoutUpdate)) + err := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: updateBillingInfoFunc, + Timeout: d.Timeout(schema.TimeoutUpdate), + }) if err != nil { if err := d.Set("billing_account", ""); err != nil { return fmt.Errorf("Error setting billing_account: %s", err) @@ -558,10 +575,13 @@ func updateProjectBillingAccount(d *schema.ResourceData, config *transport_tpg.C } for retries := 0; retries < 3; retries++ { var ba *cloudbilling.ProjectBillingInfo - err = transport_tpg.RetryTimeDuration(func() (reqErr error) { - ba, reqErr = config.NewBillingClient(userAgent).Projects.GetBillingInfo(PrefixedProject(pid)).Do() - return reqErr - }, d.Timeout(schema.TimeoutRead)) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (reqErr error) { + ba, reqErr = config.NewBillingClient(userAgent).Projects.GetBillingInfo(PrefixedProject(pid)).Do() + return reqErr + }, + Timeout: d.Timeout(schema.TimeoutRead), + }) if err != nil { return fmt.Errorf("Error getting billing info for project %q: %v", PrefixedProject(pid), err) } @@ -594,10 +614,13 @@ func readGoogleProject(d *schema.ResourceData, config *transport_tpg.Config, use // Read the project parts := strings.Split(d.Id(), "/") pid := parts[len(parts)-1] - err := transport_tpg.RetryTimeDuration(func() (reqErr error) { - p, reqErr = config.NewResourceManagerClient(userAgent).Projects.Get(pid).Do() - return reqErr - }, d.Timeout(schema.TimeoutRead)) + err := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (reqErr error) { + p, reqErr = config.NewResourceManagerClient(userAgent).Projects.Get(pid).Do() + return reqErr + }, + Timeout: d.Timeout(schema.TimeoutRead), + }) return p, err } @@ -629,41 +652,81 @@ func EnableServiceUsageProjectServices(services []string, project, billingProjec func doEnableServicesRequest(services []string, project, billingProject, userAgent string, config *transport_tpg.Config, timeout time.Duration) error { var op *serviceusage.Operation - var call ServicesCall - err := 
transport_tpg.RetryTimeDuration(func() error { - var rerr error - if len(services) == 1 { - // BatchEnable returns an error for a single item, so just enable - // using service endpoint. - name := fmt.Sprintf("projects/%s/services/%s", project, services[0]) - req := &serviceusage.EnableServiceRequest{} - call = config.NewServiceUsageClient(userAgent).Services.Enable(name, req) - } else { - // Batch enable for multiple services. - name := fmt.Sprintf("projects/%s", project) - req := &serviceusage.BatchEnableServicesRequest{ServiceIds: services} - call = config.NewServiceUsageClient(userAgent).Services.BatchEnable(name, req) - } - if config.UserProjectOverride && billingProject != "" { - call.Header().Add("X-Goog-User-Project", billingProject) - } - op, rerr = call.Do() - return handleServiceUsageRetryableError(rerr) - }, - timeout, - transport_tpg.ServiceUsageServiceBeingActivated, - ) - if err != nil { - return errwrap.Wrapf("failed to send enable services request: {{err}}", err) - } - // Poll for the API to return - waitErr := serviceUsageOperationWait(config, op, billingProject, fmt.Sprintf("Enable Project %q Services: %+v", project, services), userAgent, timeout) - if waitErr != nil { - return waitErr + + // errors can come up at multiple points, so there are a few levels of + // retrying here. + // logicalErr / waitErr: overall error on the logical operation (enabling services) + // but possibly also errors when retrieving the LRO (these are rare) + // err / reqErr: precondition errors when sending the request received instead of an LRO + logicalErr := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + err := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + var reqErr error + var call ServicesCall + if len(services) == 1 { + // BatchEnable returns an error for a single item, so enable with single endpoint + name := fmt.Sprintf("projects/%s/services/%s", project, services[0]) + req := &serviceusage.EnableServiceRequest{} + call = config.NewServiceUsageClient(userAgent).Services.Enable(name, req) + } else { + // Batch enable for multiple services. + name := fmt.Sprintf("projects/%s", project) + req := &serviceusage.BatchEnableServicesRequest{ServiceIds: services} + call = config.NewServiceUsageClient(userAgent).Services.BatchEnable(name, req) + } + + if config.UserProjectOverride && billingProject != "" { + call.Header().Add("X-Goog-User-Project", billingProject) + } + + op, reqErr = call.Do() + return handleServiceUsageRetryablePreconditionError(reqErr) + }, + Timeout: timeout, + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.ServiceUsageServiceBeingActivated}, + }) + if err != nil { + return errwrap.Wrapf("failed on request preconditions: {{err}}", err) + } + + waitErr := tpgserviceusage.ServiceUsageOperationWait(config, op, billingProject, fmt.Sprintf("Enable Project %q Services: %+v", project, services), userAgent, timeout) + if waitErr != nil { + return waitErr + } + + return nil + }, + Timeout: timeout, + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.ServiceUsageInternalError160009}, + }) + + if logicalErr != nil { + return errwrap.Wrapf("failed to enable services: {{err}}", logicalErr) } + return nil } +// Handle errors that are retryable at call time for serviceusage +// Specifically, errors in https://cloud.google.com/service-usage/docs/reference/rest/v1/services/batchEnable#response-body +// Errors in operations are handled separately. 
+// NOTE(rileykarson): This should probably be turned into a retry predicate +func handleServiceUsageRetryablePreconditionError(err error) error { + if err == nil { + return nil + } + if gerr, ok := err.(*googleapi.Error); ok { + if (gerr.Code == 400 || gerr.Code == 412) && gerr.Message == "Precondition check failed." { + return &googleapi.Error{ + Code: 503, + Message: "api returned \"precondition failed\" while enabling service", + } + } + } + return err +} + // Retrieve a project's services from the API // if a service has been renamed, this function will list both the old and new // forms of the service. LIST responses are expected to return only the old or @@ -671,33 +734,36 @@ func doEnableServicesRequest(services []string, project, billingProject, userAge func ListCurrentlyEnabledServices(project, billingProject, userAgent string, config *transport_tpg.Config, timeout time.Duration) (map[string]struct{}, error) { log.Printf("[DEBUG] Listing enabled services for project %s", project) apiServices := make(map[string]struct{}) - err := transport_tpg.RetryTimeDuration(func() error { - ctx := context.Background() - call := config.NewServiceUsageClient(userAgent).Services.List(fmt.Sprintf("projects/%s", project)) - if config.UserProjectOverride && billingProject != "" { - call.Header().Add("X-Goog-User-Project", billingProject) - } - return call.Fields("services/name,nextPageToken").Filter("state:ENABLED"). - Pages(ctx, func(r *serviceusage.ListServicesResponse) error { - for _, v := range r.Services { - // services are returned as "projects/{{project}}/services/{{name}}" - name := tpgresource.GetResourceNameFromSelfLink(v.Name) - - // if name not in ignoredProjectServicesSet - if _, ok := ignoredProjectServicesSet[name]; !ok { - apiServices[name] = struct{}{} - - // if a service has been renamed, set both. We'll deal - // with setting the right values later. - if v, ok := renamedServicesByOldAndNewServiceNames[name]; ok { - log.Printf("[DEBUG] Adding service alias for %s to enabled services: %s", name, v) - apiServices[v] = struct{}{} + err := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + ctx := context.Background() + call := config.NewServiceUsageClient(userAgent).Services.List(fmt.Sprintf("projects/%s", project)) + if config.UserProjectOverride && billingProject != "" { + call.Header().Add("X-Goog-User-Project", billingProject) + } + return call.Fields("services/name,nextPageToken").Filter("state:ENABLED"). + Pages(ctx, func(r *serviceusage.ListServicesResponse) error { + for _, v := range r.Services { + // services are returned as "projects/{{project}}/services/{{name}}" + name := tpgresource.GetResourceNameFromSelfLink(v.Name) + + // if name not in ignoredProjectServicesSet + if _, ok := ignoredProjectServicesSet[name]; !ok { + apiServices[name] = struct{}{} + + // if a service has been renamed, set both. We'll deal + // with setting the right values later. 
+ if v, ok := renamedServicesByOldAndNewServiceNames[name]; ok { + log.Printf("[DEBUG] Adding service alias for %s to enabled services: %s", name, v) + apiServices[v] = struct{}{} + } } } - } - return nil - }) - }, timeout) + return nil + }) + }, + Timeout: timeout, + }) if err != nil { return nil, errwrap.Wrapf(fmt.Sprintf("Failed to list enabled services for project %s: {{err}}", project), err) } @@ -711,33 +777,36 @@ func waitForServiceUsageEnabledServices(services []string, project, billingProje missing := make([]string, 0, len(services)) delay := time.Duration(0) interval := time.Second - err := transport_tpg.RetryTimeDuration(func() error { - // Get the list of services that are enabled on the project - enabledServices, err := ListCurrentlyEnabledServices(project, billingProject, userAgent, config, timeout) - if err != nil { - return err - } - - missing := make([]string, 0, len(services)) - for _, s := range services { - if _, ok := enabledServices[s]; !ok { - missing = append(missing, s) + err := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + // Get the list of services that are enabled on the project + enabledServices, err := ListCurrentlyEnabledServices(project, billingProject, userAgent, config, timeout) + if err != nil { + return err } - } - if len(missing) > 0 { - log.Printf("[DEBUG] waiting %v before reading project %s services...", delay, project) - time.Sleep(delay) - delay += interval - interval += delay - // Spoof a googleapi Error so retryTime will try again - return &googleapi.Error{ - Code: 503, - Message: fmt.Sprintf("The service(s) %q are still being enabled for project %s. This isn't a real API error, this is just eventual consistency.", missing, project), + missing := make([]string, 0, len(services)) + for _, s := range services { + if _, ok := enabledServices[s]; !ok { + missing = append(missing, s) + } } - } - return nil - }, timeout) + if len(missing) > 0 { + log.Printf("[DEBUG] waiting %v before reading project %s services...", delay, project) + time.Sleep(delay) + delay += interval + interval += delay + + // Spoof a googleapi Error so retryTime will try again + return &googleapi.Error{ + Code: 503, + Message: fmt.Sprintf("The service(s) %q are still being enabled for project %s. 
This isn't a real API error, this is just eventual consistency.", missing, project), + } + } + return nil + }, + Timeout: timeout, + }) if err != nil { return errwrap.Wrap(err, fmt.Errorf("failed to enable some service(s) %q for project %s", missing, project)) } diff --git a/mmv1/third_party/terraform/resources/resource_google_project_iam_custom_role.go b/mmv1/third_party/terraform/resources/resource_google_project_iam_custom_role.go index e1c193539f1c..5f16bbaa9768 100644 --- a/mmv1/third_party/terraform/resources/resource_google_project_iam_custom_role.go +++ b/mmv1/third_party/terraform/resources/resource_google_project_iam_custom_role.go @@ -113,7 +113,7 @@ func resourceGoogleProjectIamCustomRoleCreate(d *schema.ResourceData, meta inter Title: d.Get("title").(string), Description: d.Get("description").(string), Stage: d.Get("stage").(string), - IncludedPermissions: convertStringSet(d.Get("permissions").(*schema.Set)), + IncludedPermissions: tpgresource.ConvertStringSet(d.Get("permissions").(*schema.Set)), }, }).Do() if err != nil { @@ -203,7 +203,7 @@ func resourceGoogleProjectIamCustomRoleUpdate(d *schema.ResourceData, meta inter Title: d.Get("title").(string), Description: d.Get("description").(string), Stage: d.Get("stage").(string), - IncludedPermissions: convertStringSet(d.Get("permissions").(*schema.Set)), + IncludedPermissions: tpgresource.ConvertStringSet(d.Get("permissions").(*schema.Set)), }).Do() if err != nil { diff --git a/mmv1/third_party/terraform/resources/resource_google_project_organization_policy.go b/mmv1/third_party/terraform/resources/resource_google_project_organization_policy.go index bbe886d0bb40..aa8720e8aa74 100644 --- a/mmv1/third_party/terraform/resources/resource_google_project_organization_policy.go +++ b/mmv1/third_party/terraform/resources/resource_google_project_organization_policy.go @@ -85,12 +85,15 @@ func resourceGoogleProjectOrganizationPolicyRead(d *schema.ResourceData, meta in project := PrefixedProject(d.Get("project").(string)) var policy *cloudresourcemanager.OrgPolicy - err = transport_tpg.RetryTimeDuration(func() (readErr error) { - policy, readErr = config.NewResourceManagerClient(userAgent).Projects.GetOrgPolicy(project, &cloudresourcemanager.GetOrgPolicyRequest{ - Constraint: canonicalOrgPolicyConstraint(d.Get("constraint").(string)), - }).Do() - return readErr - }, d.Timeout(schema.TimeoutRead)) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (readErr error) { + policy, readErr = config.NewResourceManagerClient(userAgent).Projects.GetOrgPolicy(project, &cloudresourcemanager.GetOrgPolicyRequest{ + Constraint: canonicalOrgPolicyConstraint(d.Get("constraint").(string)), + }).Do() + return readErr + }, + Timeout: d.Timeout(schema.TimeoutRead), + }) if err != nil { return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("Organization policy for %s", project)) } @@ -140,12 +143,15 @@ func resourceGoogleProjectOrganizationPolicyDelete(d *schema.ResourceData, meta } project := PrefixedProject(d.Get("project").(string)) - return transport_tpg.RetryTimeDuration(func() error { - _, err := config.NewResourceManagerClient(userAgent).Projects.ClearOrgPolicy(project, &cloudresourcemanager.ClearOrgPolicyRequest{ - Constraint: canonicalOrgPolicyConstraint(d.Get("constraint").(string)), - }).Do() - return err - }, d.Timeout(schema.TimeoutDelete)) + return transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + _, err := config.NewResourceManagerClient(userAgent).Projects.ClearOrgPolicy(project, 
&cloudresourcemanager.ClearOrgPolicyRequest{ + Constraint: canonicalOrgPolicyConstraint(d.Get("constraint").(string)), + }).Do() + return err + }, + Timeout: d.Timeout(schema.TimeoutDelete), + }) } func setProjectOrganizationPolicy(d *schema.ResourceData, meta interface{}) error { @@ -167,17 +173,20 @@ func setProjectOrganizationPolicy(d *schema.ResourceData, meta interface{}) erro return err } - return transport_tpg.RetryTimeDuration(func() error { - _, err := config.NewResourceManagerClient(userAgent).Projects.SetOrgPolicy(project, &cloudresourcemanager.SetOrgPolicyRequest{ - Policy: &cloudresourcemanager.OrgPolicy{ - Constraint: canonicalOrgPolicyConstraint(d.Get("constraint").(string)), - BooleanPolicy: expandBooleanOrganizationPolicy(d.Get("boolean_policy").([]interface{})), - ListPolicy: listPolicy, - RestoreDefault: restore_default, - Version: int64(d.Get("version").(int)), - Etag: d.Get("etag").(string), - }, - }).Do() - return err - }, d.Timeout(schema.TimeoutCreate)) + return transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + _, err := config.NewResourceManagerClient(userAgent).Projects.SetOrgPolicy(project, &cloudresourcemanager.SetOrgPolicyRequest{ + Policy: &cloudresourcemanager.OrgPolicy{ + Constraint: canonicalOrgPolicyConstraint(d.Get("constraint").(string)), + BooleanPolicy: expandBooleanOrganizationPolicy(d.Get("boolean_policy").([]interface{})), + ListPolicy: listPolicy, + RestoreDefault: restore_default, + Version: int64(d.Get("version").(int)), + Etag: d.Get("etag").(string), + }, + }).Do() + return err + }, + Timeout: d.Timeout(schema.TimeoutCreate), + }) } diff --git a/mmv1/third_party/terraform/resources/resource_google_project_service.go b/mmv1/third_party/terraform/resources/resource_google_project_service.go index ee8cb57717d9..b17cb59461c0 100644 --- a/mmv1/third_party/terraform/resources/resource_google_project_service.go +++ b/mmv1/third_party/terraform/resources/resource_google_project_service.go @@ -8,6 +8,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + tpgserviceusage "github.com/hashicorp/terraform-provider-google/google/services/serviceusage" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" "github.com/hashicorp/terraform-provider-google/google/verify" @@ -266,34 +267,38 @@ func resourceGoogleProjectServiceUpdate(d *schema.ResourceData, meta interface{} // Disables a project service. 
func disableServiceUsageProjectService(service, project string, d *schema.ResourceData, config *transport_tpg.Config, disableDependentServices bool) error { - err := transport_tpg.RetryTimeDuration(func() error { - billingProject := project - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - name := fmt.Sprintf("projects/%s/services/%s", project, service) - servicesDisableCall := config.NewServiceUsageClient(userAgent).Services.Disable(name, &serviceusage.DisableServiceRequest{ - DisableDependentServices: disableDependentServices, - }) - if config.UserProjectOverride { - // err == nil indicates that the billing_project value was found - if bp, err := tpgresource.GetBillingProject(d, config); err == nil { - billingProject = bp + err := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + billingProject := project + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err } - servicesDisableCall.Header().Add("X-Goog-User-Project", billingProject) - } - sop, err := servicesDisableCall.Do() - if err != nil { - return err - } - // Wait for the operation to complete - waitErr := serviceUsageOperationWait(config, sop, billingProject, "api to disable", userAgent, d.Timeout(schema.TimeoutDelete)) - if waitErr != nil { - return waitErr - } - return nil - }, d.Timeout(schema.TimeoutDelete), transport_tpg.ServiceUsageServiceBeingActivated) + name := fmt.Sprintf("projects/%s/services/%s", project, service) + servicesDisableCall := config.NewServiceUsageClient(userAgent).Services.Disable(name, &serviceusage.DisableServiceRequest{ + DisableDependentServices: disableDependentServices, + }) + if config.UserProjectOverride { + // err == nil indicates that the billing_project value was found + if bp, err := tpgresource.GetBillingProject(d, config); err == nil { + billingProject = bp + } + servicesDisableCall.Header().Add("X-Goog-User-Project", billingProject) + } + sop, err := servicesDisableCall.Do() + if err != nil { + return err + } + // Wait for the operation to complete + waitErr := tpgserviceusage.ServiceUsageOperationWait(config, sop, billingProject, "api to disable", userAgent, d.Timeout(schema.TimeoutDelete)) + if waitErr != nil { + return waitErr + } + return nil + }, + Timeout: d.Timeout(schema.TimeoutDelete), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.ServiceUsageServiceBeingActivated}, + }) if err != nil { return fmt.Errorf("Error disabling service %q for project %q: %v", service, project, err) } diff --git a/mmv1/third_party/terraform/resources/resource_google_service_account.go b/mmv1/third_party/terraform/resources/resource_google_service_account.go index 82e11aa5b28b..d36df116d48a 100644 --- a/mmv1/third_party/terraform/resources/resource_google_service_account.go +++ b/mmv1/third_party/terraform/resources/resource_google_service_account.go @@ -115,10 +115,14 @@ func resourceGoogleServiceAccountCreate(d *schema.ResourceData, meta interface{} d.SetId(sa.Name) - err = transport_tpg.RetryTimeDuration(func() (operr error) { - _, saerr := config.NewIamClient(userAgent).Projects.ServiceAccounts.Get(d.Id()).Do() - return saerr - }, d.Timeout(schema.TimeoutCreate), transport_tpg.IsNotFoundRetryableError("service account creation")) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (operr error) { + _, saerr := config.NewIamClient(userAgent).Projects.ServiceAccounts.Get(d.Id()).Do() + return saerr + }, + 
Timeout: d.Timeout(schema.TimeoutCreate), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsNotFoundRetryableError("service account creation")}, + }) if err != nil { return fmt.Errorf("Error reading service account after creation: %s", err) @@ -126,7 +130,7 @@ func resourceGoogleServiceAccountCreate(d *schema.ResourceData, meta interface{} // We poll until the resource is found due to eventual consistency issue // on part of the api https://cloud.google.com/iam/docs/overview#consistency - err = PollingWaitTime(resourceServiceAccountPollRead(d, meta), PollCheckForExistence, "Creating Service Account", d.Timeout(schema.TimeoutCreate), 1) + err = transport_tpg.PollingWaitTime(resourceServiceAccountPollRead(d, meta), transport_tpg.PollCheckForExistence, "Creating Service Account", d.Timeout(schema.TimeoutCreate), 1) if err != nil { return err diff --git a/mmv1/third_party/terraform/resources/resource_google_service_networking_peered_dns_domain.go b/mmv1/third_party/terraform/resources/resource_google_service_networking_peered_dns_domain.go index 1b382a174ca8..0272ea9dc6d6 100644 --- a/mmv1/third_party/terraform/resources/resource_google_service_networking_peered_dns_domain.go +++ b/mmv1/third_party/terraform/resources/resource_google_service_networking_peered_dns_domain.go @@ -221,10 +221,13 @@ func resourceGoogleServiceNetworkingPeeredDNSDomainDelete(d *schema.ResourceData apiService := config.NewServiceNetworkingClient(userAgent) peeredDnsDomainsService := servicenetworking.NewServicesProjectsGlobalNetworksPeeredDnsDomainsService(apiService) - if err := transport_tpg.RetryTimeDuration(func() error { - _, delErr := peeredDnsDomainsService.Delete(d.Id()).Do() - return delErr - }, d.Timeout(schema.TimeoutDelete)); err != nil { + if err := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + _, delErr := peeredDnsDomainsService.Delete(d.Id()).Do() + return delErr + }, + Timeout: d.Timeout(schema.TimeoutDelete), + }); err != nil { return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("Peered DNS domain %s", name)) } diff --git a/mmv1/third_party/terraform/resources/resource_project_service_identity.go.erb b/mmv1/third_party/terraform/resources/resource_project_service_identity.go.erb index 86c0c4268552..cd626899aa06 100644 --- a/mmv1/third_party/terraform/resources/resource_project_service_identity.go.erb +++ b/mmv1/third_party/terraform/resources/resource_project_service_identity.go.erb @@ -7,6 +7,7 @@ import ( "log" "time" + "github.com/hashicorp/terraform-provider-google/google/services/serviceusage" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" @@ -83,7 +84,7 @@ func resourceProjectServiceIdentityCreate(d *schema.ResourceData, meta interface } var opRes map[string]interface{} - err = ServiceUsageOperationWaitTimeWithResponse( + err = serviceusage.ServiceUsageOperationWaitTimeWithResponse( config, res, &opRes, billingProject, "Creating Service Identity", userAgent, d.Timeout(schema.TimeoutCreate)) if err != nil { diff --git a/mmv1/third_party/terraform/resources/resource_sql_database_instance.go.erb b/mmv1/third_party/terraform/resources/resource_sql_database_instance.go.erb index 9b56106f2eb4..d015a68439e6 100644 --- a/mmv1/third_party/terraform/resources/resource_sql_database_instance.go.erb +++ b/mmv1/third_party/terraform/resources/resource_sql_database_instance.go.erb @@ -15,6 +15,7 @@ import ( 
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" + "github.com/hashicorp/terraform-provider-google/google/services/compute" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" "github.com/hashicorp/terraform-provider-google/google/verify" @@ -115,7 +116,7 @@ func ResourceSqlDatabaseInstance() *schema.Resource { }, CustomizeDiff: customdiff.All( - customdiff.ForceNewIfChange("settings.0.disk_size", isDiskShrinkage), + customdiff.ForceNewIfChange("settings.0.disk_size", compute.IsDiskShrinkage), customdiff.ForceNewIfChange("master_instance_name", isMasterInstanceNameSet), customdiff.IfValueChange("instance_type", isReplicaPromoteRequested, checkPromoteConfigurationsAndUpdateDiff), privateNetworkCustomizeDiff, @@ -1038,16 +1039,20 @@ func resourceSqlDatabaseInstanceCreate(d *schema.ResourceData, meta interface{}) } var op *sqladmin.Operation - err = transport_tpg.RetryTimeDuration(func() (operr error) { - if cloneContext != nil { - cloneContext.DestinationInstanceName = name - clodeReq := sqladmin.InstancesCloneRequest{CloneContext: cloneContext} - op, operr = config.NewSqlAdminClient(userAgent).Instances.Clone(project, cloneSource, &clodeReq).Do() - } else { - op, operr = config.NewSqlAdminClient(userAgent).Instances.Insert(project, instance).Do() - } - return operr - }, d.Timeout(schema.TimeoutCreate), transport_tpg.IsSqlOperationInProgressError) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (operr error) { + if cloneContext != nil { + cloneContext.DestinationInstanceName = name + clodeReq := sqladmin.InstancesCloneRequest{CloneContext: cloneContext} + op, operr = config.NewSqlAdminClient(userAgent).Instances.Clone(project, cloneSource, &clodeReq).Do() + } else { + op, operr = config.NewSqlAdminClient(userAgent).Instances.Insert(project, instance).Do() + } + return operr + }, + Timeout: d.Timeout(schema.TimeoutCreate), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsSqlOperationInProgressError}, + }) if err != nil { return fmt.Errorf("Error, failed to create instance %s: %s", instance.Name, err) } @@ -1069,35 +1074,47 @@ func resourceSqlDatabaseInstanceCreate(d *schema.ResourceData, meta interface{}) // Users in a replica instance are inherited from the master instance and should be left alone. // This deletion is done immediately after the instance is created, in order to minimize the // risk of it being left on the instance, which would present a security concern. 
- if sqlDatabaseIsMaster(d) && strings.Contains(strings.ToUpper(databaseVersion), "MYSQL") { - var user *sqladmin.User - err = transport_tpg.RetryTimeDuration(func() error { - user, err = config.NewSqlAdminClient(userAgent).Users.Get(project, instance.Name, "root").Host("%").Do() - return err - }, d.Timeout(schema.TimeoutRead), transport_tpg.IsSqlOperationInProgressError) + if sqlDatabaseIsMaster(d) { + var users *sqladmin.UsersListResponse + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + users, err = config.NewSqlAdminClient(userAgent).Users.List(project, instance.Name).Do() + return err + }, + Timeout: d.Timeout(schema.TimeoutRead), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsSqlOperationInProgressError}, + }) if err != nil { - return fmt.Errorf("Error, attempting to fetch root user associated with instance %s: %s", instance.Name, err) + return fmt.Errorf("Error, attempting to list users associated with instance %s: %s", instance.Name, err) } - if user != nil { - err = transport_tpg.Retry(func() error { - op, err = config.NewSqlAdminClient(userAgent).Users.Delete(project, instance.Name).Host(user.Host).Name(user.Name).Do() - if err == nil { - err = SqlAdminOperationWaitTime(config, op, project, "Delete default root User", userAgent, d.Timeout(schema.TimeoutCreate)) + for _, u := range users.Items { + if u.Name == "root" && u.Host == "%" { + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + op, err = config.NewSqlAdminClient(userAgent).Users.Delete(project, instance.Name).Host(u.Host).Name(u.Name).Do() + if err == nil { + err = SqlAdminOperationWaitTime(config, op, project, "Delete default root User", userAgent, d.Timeout(schema.TimeoutCreate)) + } + return err + }, + }) + if err != nil { + return fmt.Errorf("Error, failed to delete default 'root'@'*' user, but the database was created successfully: %s", err) } - return err - }) - if err != nil { - return fmt.Errorf("Error, failed to delete default 'root'@'*' user, but the database was created successfully: %s", err) } } } // patch any fields that need to be sent postcreation if patchData != nil { - err = transport_tpg.RetryTimeDuration(func() (rerr error) { - op, rerr = config.NewSqlAdminClient(userAgent).Instances.Patch(project, instance.Name, patchData).Do() - return rerr - }, d.Timeout(schema.TimeoutUpdate), transport_tpg.IsSqlOperationInProgressError) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (rerr error) { + op, rerr = config.NewSqlAdminClient(userAgent).Instances.Patch(project, instance.Name, patchData).Do() + return rerr + }, + Timeout: d.Timeout(schema.TimeoutUpdate), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsSqlOperationInProgressError}, + }) if err != nil { return fmt.Errorf("Error, failed to update instance settings for %s: %s", instance.Name, err) } @@ -1122,10 +1139,14 @@ func resourceSqlDatabaseInstanceCreate(d *schema.ResourceData, meta interface{}) _settings := s.([]interface{})[0].(map[string]interface{}) instanceUpdate.Settings.SettingsVersion = int64(_settings["version"].(int)) var op *sqladmin.Operation - err = transport_tpg.RetryTimeDuration(func() (rerr error) { - op, rerr = config.NewSqlAdminClient(userAgent).Instances.Update(project, name, instanceUpdate).Do() - return rerr - }, d.Timeout(schema.TimeoutUpdate), transport_tpg.IsSqlOperationInProgressError) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (rerr
error) { + op, rerr = config.NewSqlAdminClient(userAgent).Instances.Update(project, name, instanceUpdate).Do() + return rerr + }, + Timeout: d.Timeout(schema.TimeoutUpdate), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsSqlOperationInProgressError}, + }) if err != nil { return fmt.Errorf("Error, failed to update instance settings for %s: %s", instance.Name, err) } @@ -1178,7 +1199,7 @@ func expandSqlDatabaseInstanceSettings(configured []interface{}, databaseVersion DataDiskType: _settings["disk_type"].(string), PricingPlan: _settings["pricing_plan"].(string), DeletionProtectionEnabled: _settings["deletion_protection_enabled"].(bool), - UserLabels: convertStringMap(_settings["user_labels"].(map[string]interface{})), + UserLabels: tpgresource.ConvertStringMap(_settings["user_labels"].(map[string]interface{})), BackupConfiguration: expandBackupConfiguration(_settings["backup_configuration"].([]interface{})), DatabaseFlags: expandDatabaseFlags(_settings["database_flags"].([]interface{})), IpConfiguration: expandIpConfiguration(_settings["ip_configuration"].([]interface{}), databaseVersion), @@ -1454,10 +1475,14 @@ func resourceSqlDatabaseInstanceRead(d *schema.ResourceData, meta interface{}) e } var instance *sqladmin.DatabaseInstance - err = transport_tpg.RetryTimeDuration(func() (rerr error) { - instance, rerr = config.NewSqlAdminClient(userAgent).Instances.Get(project, d.Get("name").(string)).Do() - return rerr - }, d.Timeout(schema.TimeoutRead), transport_tpg.IsSqlOperationInProgressError) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (rerr error) { + instance, rerr = config.NewSqlAdminClient(userAgent).Instances.Get(project, d.Get("name").(string)).Do() + return rerr + }, + Timeout: d.Timeout(schema.TimeoutRead), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsSqlOperationInProgressError}, + }) if err != nil { return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("SQL Database Instance %q", d.Get("name").(string))) } @@ -1586,10 +1611,14 @@ func resourceSqlDatabaseInstanceUpdate(d *schema.ResourceData, meta interface{}) // Check if the activation policy is being updated. If it is being changed to ALWAYS this should be done first. if d.HasChange("settings.0.activation_policy") && d.Get("settings.0.activation_policy").(string) == "ALWAYS" { instance = &sqladmin.DatabaseInstance{Settings: &sqladmin.Settings{ActivationPolicy: "ALWAYS"}} - err = transport_tpg.RetryTimeDuration(func() (rerr error) { - op, rerr = config.NewSqlAdminClient(userAgent).Instances.Patch(project, d.Get("name").(string), instance).Do() - return rerr - }, d.Timeout(schema.TimeoutUpdate), transport_tpg.IsSqlOperationInProgressError) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (rerr error) { + op, rerr = config.NewSqlAdminClient(userAgent).Instances.Patch(project, d.Get("name").(string), instance).Do() + return rerr + }, + Timeout: d.Timeout(schema.TimeoutUpdate), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsSqlOperationInProgressError}, + }) if err != nil { return fmt.Errorf("Error, failed to patch instance settings for %s: %s", instance.Name, err) } @@ -1607,10 +1636,14 @@ func resourceSqlDatabaseInstanceUpdate(d *schema.ResourceData, meta interface{}) // performed with other fields, we first patch database version before updating the rest of the fields. 
if d.HasChange("database_version") { instance = &sqladmin.DatabaseInstance{DatabaseVersion: databaseVersion} - err = transport_tpg.RetryTimeDuration(func() (rerr error) { - op, rerr = config.NewSqlAdminClient(userAgent).Instances.Patch(project, d.Get("name").(string), instance).Do() - return rerr - }, d.Timeout(schema.TimeoutUpdate), transport_tpg.IsSqlOperationInProgressError) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (rerr error) { + op, rerr = config.NewSqlAdminClient(userAgent).Instances.Patch(project, d.Get("name").(string), instance).Do() + return rerr + }, + Timeout: d.Timeout(schema.TimeoutUpdate), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsSqlOperationInProgressError}, + }) if err != nil { return fmt.Errorf("Error, failed to patch instance settings for %s: %s", instance.Name, err) } @@ -1666,7 +1699,10 @@ func resourceSqlDatabaseInstanceUpdate(d *schema.ResourceData, meta interface{}) op, err = config.NewSqlAdminClient(userAgent).Users.Update(project, instance, user).Host(host).Name(name).Do() return err } - err = transport_tpg.RetryTimeDuration(updateFunc, d.Timeout(schema.TimeoutUpdate)) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: updateFunc, + Timeout: d.Timeout(schema.TimeoutUpdate), + }) if err != nil { if err := d.Set("root_password", oldPwd.(string)); err != nil { @@ -1689,10 +1725,14 @@ func resourceSqlDatabaseInstanceUpdate(d *schema.ResourceData, meta interface{}) // performed with other fields, we first patch maintenance version before updating the rest of the fields. if d.HasChange("maintenance_version") { instance = &sqladmin.DatabaseInstance{MaintenanceVersion: maintenance_version} - err = transport_tpg.RetryTimeDuration(func() (rerr error) { - op, rerr = config.NewSqlAdminClient(userAgent).Instances.Patch(project, d.Get("name").(string), instance).Do() - return rerr - }, d.Timeout(schema.TimeoutUpdate), transport_tpg.IsSqlOperationInProgressError) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (rerr error) { + op, rerr = config.NewSqlAdminClient(userAgent).Instances.Patch(project, d.Get("name").(string), instance).Do() + return rerr + }, + Timeout: d.Timeout(schema.TimeoutUpdate), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsSqlOperationInProgressError}, + }) if err != nil { return fmt.Errorf("Error, failed to patch instance settings for %s: %s", instance.Name, err) } @@ -1707,10 +1747,14 @@ func resourceSqlDatabaseInstanceUpdate(d *schema.ResourceData, meta interface{}) } if promoteReadReplicaRequired { - err = transport_tpg.RetryTimeDuration(func() (rerr error) { - op, rerr = config.NewSqlAdminClient(userAgent).Instances.PromoteReplica(project, d.Get("name").(string)).Do() - return rerr - }, d.Timeout(schema.TimeoutUpdate), transport_tpg.IsSqlOperationInProgressError) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (rerr error) { + op, rerr = config.NewSqlAdminClient(userAgent).Instances.PromoteReplica(project, d.Get("name").(string)).Do() + return rerr + }, + Timeout: d.Timeout(schema.TimeoutUpdate), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsSqlOperationInProgressError}, + }) if err != nil { return fmt.Errorf("Error, failed to promote read replica instance as primary stand-alone %s: %s", instance.Name, err) } @@ -1747,10 +1791,14 @@ func resourceSqlDatabaseInstanceUpdate(d *schema.ResourceData, meta interface{}) instance.InstanceType = 
d.Get("instance_type").(string) } - err = transport_tpg.RetryTimeDuration(func() (rerr error) { - op, rerr = config.NewSqlAdminClient(userAgent).Instances.Update(project, d.Get("name").(string), instance).Do() - return rerr - }, d.Timeout(schema.TimeoutUpdate), transport_tpg.IsSqlOperationInProgressError) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (rerr error) { + op, rerr = config.NewSqlAdminClient(userAgent).Instances.Update(project, d.Get("name").(string), instance).Do() + return rerr + }, + Timeout: d.Timeout(schema.TimeoutUpdate), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsSqlOperationInProgressError}, + }) if err != nil { return fmt.Errorf("Error, failed to update instance settings for %s: %s", instance.Name, err) } @@ -1809,17 +1857,21 @@ func resourceSqlDatabaseInstanceDelete(d *schema.ResourceData, meta interface{}) } var op *sqladmin.Operation - err = transport_tpg.RetryTimeDuration(func() (rerr error) { - op, rerr = config.NewSqlAdminClient(userAgent).Instances.Delete(project, d.Get("name").(string)).Do() - if rerr != nil { - return rerr - } - err = SqlAdminOperationWaitTime(config, op, project, "Delete Instance", userAgent, d.Timeout(schema.TimeoutDelete)) - if err != nil { - return err - } - return nil - }, d.Timeout(schema.TimeoutDelete), transport_tpg.IsSqlOperationInProgressError, IsSqlInternalError) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (rerr error) { + op, rerr = config.NewSqlAdminClient(userAgent).Instances.Delete(project, d.Get("name").(string)).Do() + if rerr != nil { + return rerr + } + err = SqlAdminOperationWaitTime(config, op, project, "Delete Instance", userAgent, d.Timeout(schema.TimeoutDelete)) + if err != nil { + return err + } + return nil + }, + Timeout: d.Timeout(schema.TimeoutDelete), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsSqlOperationInProgressError, IsSqlInternalError}, + }) if err != nil { return fmt.Errorf("Error, failed to delete instance %s: %s", d.Get("name").(string), err) } @@ -2216,10 +2268,14 @@ func sqlDatabaseInstanceRestoreFromBackup(d *schema.ResourceData, config *transp } var op *sqladmin.Operation - err := transport_tpg.RetryTimeDuration(func() (operr error) { - op, operr = config.NewSqlAdminClient(userAgent).Instances.RestoreBackup(project, instanceId, backupRequest).Do() - return operr - }, d.Timeout(schema.TimeoutUpdate), transport_tpg.IsSqlOperationInProgressError) + err := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (operr error) { + op, operr = config.NewSqlAdminClient(userAgent).Instances.RestoreBackup(project, instanceId, backupRequest).Do() + return operr + }, + Timeout: d.Timeout(schema.TimeoutUpdate), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsSqlOperationInProgressError}, + }) if err != nil { return fmt.Errorf("Error, failed to restore instance from backup %s: %s", instanceId, err) } diff --git a/mmv1/third_party/terraform/resources/resource_sql_user.go b/mmv1/third_party/terraform/resources/resource_sql_user.go index a698c24ba4df..b32d5f719d9c 100644 --- a/mmv1/third_party/terraform/resources/resource_sql_user.go +++ b/mmv1/third_party/terraform/resources/resource_sql_user.go @@ -123,7 +123,6 @@ func ResourceSqlUser() *schema.Resource { "password_policy": { Type: schema.TypeList, Optional: true, - Computed: true, MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ @@ -207,6 +206,7 @@ func 
expandPasswordPolicy(cfg interface{}) *sqladmin.UserPasswordValidationPolic raw := cfg.([]interface{})[0].(map[string]interface{}) upvp := &sqladmin.UserPasswordValidationPolicy{} + if v, ok := raw["allowed_failed_attempts"]; ok { upvp.AllowedFailedAttempts = int64(v.(int)) } @@ -260,10 +260,14 @@ func resourceSqlUserCreate(d *schema.ResourceData, meta interface{}) error { if v, ok := d.GetOk("host"); ok { if v.(string) != "" { var fetchedInstance *sqladmin.DatabaseInstance - err = transport_tpg.RetryTimeDuration(func() (rerr error) { - fetchedInstance, rerr = config.NewSqlAdminClient(userAgent).Instances.Get(project, instance).Do() - return rerr - }, d.Timeout(schema.TimeoutRead), transport_tpg.IsSqlOperationInProgressError) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (rerr error) { + fetchedInstance, rerr = config.NewSqlAdminClient(userAgent).Instances.Get(project, instance).Do() + return rerr + }, + Timeout: d.Timeout(schema.TimeoutRead), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsSqlOperationInProgressError}, + }) if err != nil { return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("SQL Database Instance %q", d.Get("instance").(string))) } @@ -279,7 +283,10 @@ func resourceSqlUserCreate(d *schema.ResourceData, meta interface{}) error { user).Do() return err } - err = transport_tpg.RetryTimeDuration(insertFunc, d.Timeout(schema.TimeoutCreate)) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: insertFunc, + Timeout: d.Timeout(schema.TimeoutCreate), + }) if err != nil { return fmt.Errorf("Error, failed to insert "+ @@ -316,17 +323,41 @@ func resourceSqlUserRead(d *schema.ResourceData, meta interface{}) error { name := d.Get("name").(string) host := d.Get("host").(string) - var user *sqladmin.User + var users *sqladmin.UsersListResponse err = nil - err = transport_tpg.RetryTime(func() error { - user, err = config.NewSqlAdminClient(userAgent).Users.Get(project, instance, name).Host(host).Do() - return err - }, 5) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + users, err = config.NewSqlAdminClient(userAgent).Users.List(project, instance).Do() + return err + }, + Timeout: 5 * time.Minute, + }) if err != nil { // move away from transport_tpg.HandleNotFoundError() as we need to handle both 404 and 403 return handleUserNotFoundError(err, d, fmt.Sprintf("SQL User %q in instance %q", name, instance)) } + var user *sqladmin.User + databaseInstance, err := config.NewSqlAdminClient(userAgent).Instances.Get(project, instance).Do() + if err != nil { + return err + } + + for _, currentUser := range users.Items { + if !strings.Contains(databaseInstance.DatabaseVersion, "POSTGRES") { + name = strings.Split(name, "@")[0] + } + + if currentUser.Name == name { + // Host can only be empty for postgres instances, + // so don't compare the host if the API host is empty. 
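+ // When no host is configured on the resource, a name match alone is accepted;
+ // otherwise the host reported by the API must equal the configured host exactly.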
+ if host == "" || currentUser.Host == host { + user = currentUser + break + } + } + } + if user == nil { log.Printf("[WARN] Removing SQL User %q because it's gone", d.Get("name").(string)) d.SetId("") @@ -435,7 +466,10 @@ func resourceSqlUserUpdate(d *schema.ResourceData, meta interface{}) error { op, err = config.NewSqlAdminClient(userAgent).Users.Update(project, instance, user).Host(host).Name(name).Do() return err } - err = transport_tpg.RetryTimeDuration(updateFunc, d.Timeout(schema.TimeoutUpdate)) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: updateFunc, + Timeout: d.Timeout(schema.TimeoutUpdate), + }) if err != nil { return fmt.Errorf("Error, failed to update"+ @@ -482,17 +516,21 @@ func resourceSqlUserDelete(d *schema.ResourceData, meta interface{}) error { defer transport_tpg.MutexStore.Unlock(instanceMutexKey(project, instance)) var op *sqladmin.Operation - err = transport_tpg.RetryTimeDuration(func() error { - op, err = config.NewSqlAdminClient(userAgent).Users.Delete(project, instance).Host(host).Name(name).Do() - if err != nil { - return err - } + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + op, err = config.NewSqlAdminClient(userAgent).Users.Delete(project, instance).Host(host).Name(name).Do() + if err != nil { + return err + } - if err := SqlAdminOperationWaitTime(config, op, project, "Delete User", userAgent, d.Timeout(schema.TimeoutDelete)); err != nil { - return err - } - return nil - }, d.Timeout(schema.TimeoutDelete), transport_tpg.IsSqlOperationInProgressError, IsSqlInternalError) + if err := SqlAdminOperationWaitTime(config, op, project, "Delete User", userAgent, d.Timeout(schema.TimeoutDelete)); err != nil { + return err + } + return nil + }, + Timeout: d.Timeout(schema.TimeoutDelete), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsSqlOperationInProgressError, IsSqlInternalError}, + }) if err != nil { return fmt.Errorf("Error, failed to delete"+ diff --git a/mmv1/third_party/terraform/resources/resource_storage_bucket.go.erb b/mmv1/third_party/terraform/resources/resource_storage_bucket.go.erb index dea6ece86fd7..e22075e6ee63 100644 --- a/mmv1/third_party/terraform/resources/resource_storage_bucket.go.erb +++ b/mmv1/third_party/terraform/resources/resource_storage_bucket.go.erb @@ -549,9 +549,11 @@ func resourceStorageBucketCreate(d *schema.ResourceData, meta interface{}) error var res *storage.Bucket - err = transport_tpg.Retry(func() error { - res, err = config.NewStorageClient(userAgent).Buckets.Insert(project, sb).Do() - return err + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + res, err = config.NewStorageClient(userAgent).Buckets.Insert(project, sb).Do() + return err + }, }) if err != nil { @@ -564,10 +566,14 @@ func resourceStorageBucketCreate(d *schema.ResourceData, meta interface{}) error // There seems to be some eventual consistency errors in some cases, so we want to check a few times // to make sure it exists before moving on - err = transport_tpg.RetryTimeDuration(func() (operr error) { - _, retryErr := config.NewStorageClient(userAgent).Buckets.Get(res.Name).Do() - return retryErr - }, d.Timeout(schema.TimeoutCreate), transport_tpg.IsNotFoundRetryableError("bucket creation")) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (operr error) { + _, retryErr := config.NewStorageClient(userAgent).Buckets.Get(res.Name).Do() + return retryErr + }, + Timeout: d.Timeout(schema.TimeoutCreate), + 
ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsNotFoundRetryableError("bucket creation")}, + }) if err != nil { return fmt.Errorf("Error reading bucket after creation: %s", err) @@ -712,10 +718,14 @@ func resourceStorageBucketUpdate(d *schema.ResourceData, meta interface{}) error // There seems to be some eventual consistency errors in some cases, so we want to check a few times // to make sure it exists before moving on - err = transport_tpg.RetryTimeDuration(func() (operr error) { - _, retryErr := config.NewStorageClient(userAgent).Buckets.Get(res.Name).Do() - return retryErr - }, d.Timeout(schema.TimeoutUpdate), transport_tpg.IsNotFoundRetryableError("bucket update")) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (operr error) { + _, retryErr := config.NewStorageClient(userAgent).Buckets.Get(res.Name).Do() + return retryErr + }, + Timeout: d.Timeout(schema.TimeoutUpdate), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsNotFoundRetryableError("bucket update")}, + }) if err != nil { return fmt.Errorf("Error reading bucket after update: %s", err) @@ -758,11 +768,15 @@ func resourceStorageBucketRead(d *schema.ResourceData, meta interface{}) error { var res *storage.Bucket // There seems to be some eventual consistency errors in some cases, so we want to check a few times // to make sure it exists before moving on - err = transport_tpg.RetryTimeDuration(func() (operr error) { - var retryErr error - res, retryErr = config.NewStorageClient(userAgent).Buckets.Get(bucket).Do() - return retryErr - }, d.Timeout(schema.TimeoutRead), transport_tpg.IsNotFoundRetryableError("bucket read")) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (operr error) { + var retryErr error + res, retryErr = config.NewStorageClient(userAgent).Buckets.Get(bucket).Do() + return retryErr + }, + Timeout: d.Timeout(schema.TimeoutRead), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsNotFoundRetryableError("bucket read")}, + }) if err != nil { return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("Storage Bucket %q", d.Get("name").(string))) diff --git a/mmv1/third_party/terraform/resources/resource_storage_bucket_object.go b/mmv1/third_party/terraform/resources/resource_storage_bucket_object.go index b83a47a4379a..a35413c7e634 100644 --- a/mmv1/third_party/terraform/resources/resource_storage_bucket_object.go +++ b/mmv1/third_party/terraform/resources/resource_storage_bucket_object.go @@ -6,7 +6,6 @@ import ( "io" "log" "os" - "strings" "time" "github.com/hashicorp/terraform-provider-google/google/tpgresource" @@ -171,7 +170,7 @@ func ResourceStorageBucketObject() *schema.Resource { ForceNew: true, Computed: true, ConflictsWith: []string{"customer_encryption"}, - DiffSuppressFunc: compareCryptoKeyVersions, + DiffSuppressFunc: tpgresource.CompareCryptoKeyVersions, Description: `Resource name of the Cloud KMS key that will be used to encrypt the object. Overrides the object metadata's kmsKeyName value, if any.`, }, @@ -256,18 +255,6 @@ func objectGetID(object *storage.Object) string { return object.Bucket + "-" + object.Name } -func compareCryptoKeyVersions(_, old, new string, _ *schema.ResourceData) bool { - // The API can return cryptoKeyVersions even though it wasn't specified. 
- // format: projects//locations//keyRings//cryptoKeys//cryptoKeyVersions/1 - - kmsKeyWithoutVersions := strings.Split(old, "/cryptoKeyVersions")[0] - if kmsKeyWithoutVersions == new { - return true - } - - return false -} - func resourceStorageBucketObjectCreate(d *schema.ResourceData, meta interface{}) error { config := meta.(*transport_tpg.Config) userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) @@ -315,7 +302,7 @@ func resourceStorageBucketObjectCreate(d *schema.ResourceData, meta interface{}) } if v, ok := d.GetOk("metadata"); ok { - object.Metadata = convertStringMap(v.(map[string]interface{})) + object.Metadata = tpgresource.ConvertStringMap(v.(map[string]interface{})) } if v, ok := d.GetOk("storage_class"); ok { diff --git a/mmv1/third_party/terraform/resources/resource_storage_notification.go b/mmv1/third_party/terraform/resources/resource_storage_notification.go index b20ed7a7eba6..5700480efbf8 100644 --- a/mmv1/third_party/terraform/resources/resource_storage_notification.go +++ b/mmv1/third_party/terraform/resources/resource_storage_notification.go @@ -6,6 +6,8 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" + + "github.com/hashicorp/terraform-provider-google/google/services/pubsub" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" "google.golang.org/api/storage/v1" @@ -100,18 +102,18 @@ func resourceStorageNotificationCreate(d *schema.ResourceData, meta interface{}) bucket := d.Get("bucket").(string) topicName := d.Get("topic").(string) - computedTopicName := getComputedTopicName("", topicName) + computedTopicName := pubsub.GetComputedTopicName("", topicName) if computedTopicName != topicName { project, err := tpgresource.GetProject(d, config) if err != nil { return err } - computedTopicName = getComputedTopicName(project, topicName) + computedTopicName = pubsub.GetComputedTopicName(project, topicName) } storageNotification := &storage.Notification{ CustomAttributes: tpgresource.ExpandStringMap(d, "custom_attributes"), - EventTypes: convertStringSet(d.Get("event_types").(*schema.Set)), + EventTypes: tpgresource.ConvertStringSet(d.Get("event_types").(*schema.Set)), ObjectNamePrefix: d.Get("object_name_prefix").(string), PayloadFormat: d.Get("payload_format").(string), Topic: computedTopicName, diff --git a/mmv1/third_party/terraform/resources/resource_storage_object_acl.go b/mmv1/third_party/terraform/resources/resource_storage_object_acl.go index 83c06e4bdb6c..fd462d1cc460 100644 --- a/mmv1/third_party/terraform/resources/resource_storage_object_acl.go +++ b/mmv1/third_party/terraform/resources/resource_storage_object_acl.go @@ -161,7 +161,7 @@ func resourceStorageObjectAclCreate(d *schema.ResourceData, meta interface{}) er return fmt.Errorf("Error reading object %s in %s: %v", object, bucket, err) } - create, update, remove, err := getRoleEntityChange(roleEntitiesUpstream, convertStringSet(reMap), objectOwner) + create, update, remove, err := getRoleEntityChange(roleEntitiesUpstream, tpgresource.ConvertStringSet(reMap), objectOwner) if err != nil { return fmt.Errorf("Error reading object %s in %s. 
Invalid schema: %v", object, bucket, err) } @@ -243,8 +243,8 @@ func resourceStorageObjectAclUpdate(d *schema.ResourceData, meta interface{}) er o, n := d.GetChange("role_entity") create, update, remove, err := getRoleEntityChange( - convertStringSet(o.(*schema.Set)), - convertStringSet(n.(*schema.Set)), + tpgresource.ConvertStringSet(o.(*schema.Set)), + tpgresource.ConvertStringSet(n.(*schema.Set)), objectOwner) if err != nil { return fmt.Errorf("Error reading object %s in %s. Invalid schema: %v", object, bucket, err) diff --git a/mmv1/third_party/terraform/resources/resource_storage_transfer_job.go b/mmv1/third_party/terraform/resources/resource_storage_transfer_job.go index 9bdc48a46da7..a01cdeb8b97d 100644 --- a/mmv1/third_party/terraform/resources/resource_storage_transfer_job.go +++ b/mmv1/third_party/terraform/resources/resource_storage_transfer_job.go @@ -571,9 +571,11 @@ func resourceStorageTransferJobCreate(d *schema.ResourceData, meta interface{}) var res *storagetransfer.TransferJob - err = transport_tpg.Retry(func() error { - res, err = config.NewStorageTransferClient(userAgent).TransferJobs.Create(transferJob).Do() - return err + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + res, err = config.NewStorageTransferClient(userAgent).TransferJobs.Create(transferJob).Do() + return err + }, }) if err != nil { diff --git a/mmv1/third_party/terraform/services/appengine/appengine_operation.go b/mmv1/third_party/terraform/services/appengine/appengine_operation.go new file mode 100644 index 000000000000..6efb479f39c3 --- /dev/null +++ b/mmv1/third_party/terraform/services/appengine/appengine_operation.go @@ -0,0 +1,74 @@ +package appengine + +import ( + "encoding/json" + "fmt" + "regexp" + "time" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "github.com/hashicorp/terraform-provider-google/google/verify" + + "google.golang.org/api/appengine/v1" +) + +var ( + appEngineOperationIdRegexp = regexp.MustCompile(fmt.Sprintf("apps/%s/operations/(.*)", verify.ProjectRegex)) +) + +type AppEngineOperationWaiter struct { + Service *appengine.APIService + AppId string + tpgresource.CommonOperationWaiter +} + +func (w *AppEngineOperationWaiter) QueryOp() (interface{}, error) { + if w == nil { + return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") + } + matches := appEngineOperationIdRegexp.FindStringSubmatch(w.Op.Name) + if len(matches) != 2 { + return nil, fmt.Errorf("Expected %d results of parsing operation name, got %d from %s", 2, len(matches), w.Op.Name) + } + return w.Service.Apps.Operations.Get(w.AppId, matches[1]).Do() +} + +func AppEngineOperationWaitTimeWithResponse(config *transport_tpg.Config, res interface{}, response *map[string]interface{}, appId, activity, userAgent string, timeout time.Duration) error { + op := &appengine.Operation{} + err := tpgresource.Convert(res, op) + if err != nil { + return err + } + + w := &AppEngineOperationWaiter{ + Service: config.NewAppEngineClient(userAgent), + AppId: appId, + } + + if err := w.SetOp(op); err != nil { + return err + } + if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { + return err + } + return json.Unmarshal([]byte(w.CommonOperationWaiter.Op.Response), response) +} + +func AppEngineOperationWaitTime(config *transport_tpg.Config, res interface{}, appId, activity, userAgent string, timeout time.Duration) error { + op := 
&appengine.Operation{} + err := tpgresource.Convert(res, op) + if err != nil { + return err + } + + w := &AppEngineOperationWaiter{ + Service: config.NewAppEngineClient(userAgent), + AppId: appId, + } + + if err := w.SetOp(op); err != nil { + return err + } + return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) +} diff --git a/mmv1/third_party/terraform/data_sources/data_source_artifact_registry_repository.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_repository.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_artifact_registry_repository.go rename to mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_repository.go index 1a27ad583370..09c832b4aff9 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_artifact_registry_repository.go +++ b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_repository.go @@ -1,4 +1,4 @@ -package google +package artifactregistry import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_beyondcorp_app_connection.go b/mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_app_connection.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_google_beyondcorp_app_connection.go rename to mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_app_connection.go index 3614b39db46a..fef0cf119493 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_beyondcorp_app_connection.go +++ b/mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_app_connection.go @@ -1,4 +1,4 @@ -package google +package beyondcorp import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_beyondcorp_app_connector.go b/mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_app_connector.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_google_beyondcorp_app_connector.go rename to mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_app_connector.go index f526953951ec..202941b1448c 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_beyondcorp_app_connector.go +++ b/mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_app_connector.go @@ -1,4 +1,4 @@ -package google +package beyondcorp import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_beyondcorp_app_gateway.go b/mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_app_gateway.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_google_beyondcorp_app_gateway.go rename to mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_app_gateway.go index 6bd313f1def8..a713c74e6a25 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_beyondcorp_app_gateway.go +++ b/mmv1/third_party/terraform/services/beyondcorp/data_source_google_beyondcorp_app_gateway.go @@ -1,4 +1,4 @@ -package google +package beyondcorp import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_cloudbuild_trigger.go b/mmv1/third_party/terraform/services/cloudbuild/data_source_google_cloudbuild_trigger.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_google_cloudbuild_trigger.go rename to 
mmv1/third_party/terraform/services/cloudbuild/data_source_google_cloudbuild_trigger.go index 02fe54b1b460..499f4930dcb4 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_cloudbuild_trigger.go +++ b/mmv1/third_party/terraform/services/cloudbuild/data_source_google_cloudbuild_trigger.go @@ -1,4 +1,4 @@ -package google +package cloudbuild import ( "fmt" diff --git a/mmv1/third_party/terraform/services/cloudfunctions/cloudfunctions_operation.go b/mmv1/third_party/terraform/services/cloudfunctions/cloudfunctions_operation.go new file mode 100644 index 000000000000..869e5317126c --- /dev/null +++ b/mmv1/third_party/terraform/services/cloudfunctions/cloudfunctions_operation.go @@ -0,0 +1,41 @@ +package cloudfunctions + +import ( + "fmt" + "time" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "google.golang.org/api/cloudfunctions/v1" +) + +type CloudFunctionsOperationWaiter struct { + Service *cloudfunctions.Service + tpgresource.CommonOperationWaiter +} + +func (w *CloudFunctionsOperationWaiter) QueryOp() (interface{}, error) { + if w == nil { + return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") + } + return w.Service.Operations.Get(w.Op.Name).Do() +} + +func CloudFunctionsOperationWait(config *transport_tpg.Config, op *cloudfunctions.Operation, activity, userAgent string, timeout time.Duration) error { + w := &CloudFunctionsOperationWaiter{ + Service: config.NewCloudFunctionsClient(userAgent), + } + if err := w.SetOp(op); err != nil { + return err + } + return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) +} + +func IsCloudFunctionsSourceCodeError(err error) (bool, string) { + if operr, ok := err.(*tpgresource.CommonOpError); ok { + if operr.Code == 3 && operr.Message == "Failed to retrieve function source code" { + return true, fmt.Sprintf("Retry on Function failing to pull code from GCS") + } + } + return false, "" +} diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_cloudfunctions2_function.go b/mmv1/third_party/terraform/services/cloudfunctions2/data_source_google_cloudfunctions2_function.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_google_cloudfunctions2_function.go rename to mmv1/third_party/terraform/services/cloudfunctions2/data_source_google_cloudfunctions2_function.go index a2c6c4a69f1c..f340b7608cc6 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_cloudfunctions2_function.go +++ b/mmv1/third_party/terraform/services/cloudfunctions2/data_source_google_cloudfunctions2_function.go @@ -1,4 +1,4 @@ -package google +package cloudfunctions2 import ( "fmt" diff --git a/mmv1/third_party/terraform/utils/cloud_identity_group_membership_utils.go b/mmv1/third_party/terraform/services/cloudidentity/cloud_identity_group_membership_utils.go similarity index 97% rename from mmv1/third_party/terraform/utils/cloud_identity_group_membership_utils.go rename to mmv1/third_party/terraform/services/cloudidentity/cloud_identity_group_membership_utils.go index 3fcbdba08f77..5be759238564 100644 --- a/mmv1/third_party/terraform/utils/cloud_identity_group_membership_utils.go +++ b/mmv1/third_party/terraform/services/cloudidentity/cloud_identity_group_membership_utils.go @@ -1,4 +1,4 @@ -package google +package cloudidentity import ( "log" diff --git a/mmv1/third_party/terraform/data_sources/data_source_cloud_identity_group_memberships.go.erb 
b/mmv1/third_party/terraform/services/cloudidentity/data_source_cloud_identity_group_memberships.go.erb similarity index 99% rename from mmv1/third_party/terraform/data_sources/data_source_cloud_identity_group_memberships.go.erb rename to mmv1/third_party/terraform/services/cloudidentity/data_source_cloud_identity_group_memberships.go.erb index 63f1839640fa..14eb5dceb0d8 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_cloud_identity_group_memberships.go.erb +++ b/mmv1/third_party/terraform/services/cloudidentity/data_source_cloud_identity_group_memberships.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package cloudidentity import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_cloud_identity_groups.go.erb b/mmv1/third_party/terraform/services/cloudidentity/data_source_cloud_identity_groups.go.erb similarity index 99% rename from mmv1/third_party/terraform/data_sources/data_source_cloud_identity_groups.go.erb rename to mmv1/third_party/terraform/services/cloudidentity/data_source_cloud_identity_groups.go.erb index 346c3fdd2c13..848366d3e4eb 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_cloud_identity_groups.go.erb +++ b/mmv1/third_party/terraform/services/cloudidentity/data_source_cloud_identity_groups.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package cloudidentity import ( "fmt" diff --git a/mmv1/third_party/terraform/services/cloudrun/cloudrun_polling.go b/mmv1/third_party/terraform/services/cloudrun/cloudrun_polling.go new file mode 100644 index 000000000000..67addac5d56b --- /dev/null +++ b/mmv1/third_party/terraform/services/cloudrun/cloudrun_polling.go @@ -0,0 +1,90 @@ +package cloudrun + +import ( + "fmt" + "log" + + "github.com/hashicorp/errwrap" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +const readyStatusType string = "Ready" +const pendingCertificateReason string = "CertificatePending" + +type Condition struct { + Type string + Status string + Reason string + Message string +} + +// KnativeStatus is a struct that can contain a Knative style resource's Status block. It is not +// intended to be used for anything other than polling for the success of the given resource. 
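+// PollCheckKnativeStatusFunc (below) maps each polled GET response to a transport_tpg.PollResult:
+// an error result when the request itself fails, a pending result until Status.ObservedGeneration has
+// caught up with the original resource's metadata.generation and the "Ready" condition reports "True",
+// and a success result once it has (or when a DomainMapping is only waiting on certificate/DNS checks).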
+type KnativeStatus struct { + Metadata struct { + Name string + Namespace string + SelfLink string + } + Status struct { + Conditions []Condition + ObservedGeneration float64 + } +} + +func getGeneration(res map[string]interface{}) (int, error) { + metadata, ok := res["metadata"] + if !ok { + return 0, fmt.Errorf("Unable to find knative metadata") + } + m, ok := metadata.(map[string]interface{}) + if !ok { + return 0, fmt.Errorf("Unable to find generation in knative metadata") + } + gen, ok := m["generation"] + if !ok { + return 0, fmt.Errorf("Unable to find generation in knative metadata") + } + return int(gen.(float64)), nil +} + +func PollCheckKnativeStatusFunc(knativeRestResponse map[string]interface{}) func(resp map[string]interface{}, respErr error) transport_tpg.PollResult { + return func(resp map[string]interface{}, respErr error) transport_tpg.PollResult { + if respErr != nil { + return transport_tpg.ErrorPollResult(respErr) + } + s := KnativeStatus{} + if err := tpgresource.Convert(resp, &s); err != nil { + return transport_tpg.ErrorPollResult(errwrap.Wrapf("unable to get KnativeStatus: {{err}}", err)) + } + + gen, err := getGeneration(knativeRestResponse) + if err != nil { + return transport_tpg.ErrorPollResult(errwrap.Wrapf("unable to find Knative generation: {{err}}", err)) + } + if int(s.Status.ObservedGeneration) < gen { + return transport_tpg.PendingStatusPollResult("waiting for observed generation to match") + } + for _, condition := range s.Status.Conditions { + if condition.Type == readyStatusType { + log.Printf("[DEBUG] checking KnativeStatus Ready condition %s: %s", condition.Status, condition.Message) + switch condition.Status { + case "True": + // Resource is ready + return transport_tpg.SuccessPollResult() + case "Unknown": + // DomainMapping can enter a 'terminal' state where "Ready" status is "Unknown" + // but the resource is waiting for external verification of DNS records. 
+ if condition.Reason == pendingCertificateReason { + return transport_tpg.SuccessPollResult() + } + return transport_tpg.PendingStatusPollResult(fmt.Sprintf("%s:%s", condition.Status, condition.Message)) + case "False": + return transport_tpg.ErrorPollResult(fmt.Errorf(`resource is in failed state "Ready:False", message: %s`, condition.Message)) + } + } + } + return transport_tpg.PendingStatusPollResult("no status yet") + } +} diff --git a/mmv1/third_party/terraform/data_sources/data_source_cloud_run_service.go b/mmv1/third_party/terraform/services/cloudrun/data_source_cloud_run_service.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_cloud_run_service.go rename to mmv1/third_party/terraform/services/cloudrun/data_source_cloud_run_service.go index dc2480c652a7..f689e97107e4 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_cloud_run_service.go +++ b/mmv1/third_party/terraform/services/cloudrun/data_source_cloud_run_service.go @@ -1,4 +1,4 @@ -package google +package cloudrun import ( "fmt" diff --git a/mmv1/third_party/terraform/services/cloudrun/resource_cloud_run_service_internal_test.go b/mmv1/third_party/terraform/services/cloudrun/resource_cloud_run_service_internal_test.go new file mode 100644 index 000000000000..3a429448609c --- /dev/null +++ b/mmv1/third_party/terraform/services/cloudrun/resource_cloud_run_service_internal_test.go @@ -0,0 +1,38 @@ +package cloudrun + +import ( + "testing" +) + +func TestCloudrunAnnotationDiffSuppress(t *testing.T) { + cases := map[string]struct { + K, Old, New string + ExpectDiffSuppress bool + }{ + "missing run.googleapis.com/operation-id": { + K: "metadata.0.annotations.run.googleapis.com/operation-id", + Old: "12345abc", + New: "", + ExpectDiffSuppress: true, + }, + "missing run.googleapis.com/ingress": { + K: "metadata.0.annotations.run.googleapis.com/ingress", + Old: "all", + New: "", + ExpectDiffSuppress: true, + }, + "explicit run.googleapis.com/ingress": { + K: "metadata.0.annotations.run.googleapis.com/ingress", + Old: "all", + New: "internal", + ExpectDiffSuppress: false, + }, + } + for tn, tc := range cases { + t.Run(tn, func(t *testing.T) { + if got := cloudrunAnnotationDiffSuppress(tc.K, tc.Old, tc.New, nil); got != tc.ExpectDiffSuppress { + t.Errorf("got %t; want %t", got, tc.ExpectDiffSuppress) + } + }) + } +} diff --git a/mmv1/third_party/terraform/services/cloudrunv2/runadminv3_operation.go b/mmv1/third_party/terraform/services/cloudrunv2/runadminv3_operation.go new file mode 100644 index 000000000000..88c15ea639d5 --- /dev/null +++ b/mmv1/third_party/terraform/services/cloudrunv2/runadminv3_operation.go @@ -0,0 +1,68 @@ +package cloudrunv2 + +import ( + "encoding/json" + "fmt" + "time" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + + "google.golang.org/api/run/v2" +) + +type RunAdminV2OperationWaiter struct { + Config *transport_tpg.Config + UserAgent string + Project string + tpgresource.CommonOperationWaiter +} + +func (w *RunAdminV2OperationWaiter) QueryOp() (interface{}, error) { + if w == nil { + return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") + } + url := fmt.Sprintf("%s%s", w.Config.CloudRunV2BasePath, w.CommonOperationWaiter.Op.Name) + + return transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: w.Config, + Method: "GET", + Project: w.Project, + RawURL: url, + UserAgent: w.UserAgent, + }) +} + +func 
createRunAdminV2Waiter(config *transport_tpg.Config, op *run.GoogleLongrunningOperation, project, activity, userAgent string) (*RunAdminV2OperationWaiter, error) { + w := &RunAdminV2OperationWaiter{ + Config: config, + UserAgent: userAgent, + Project: project, + } + if err := w.CommonOperationWaiter.SetOp(op); err != nil { + return nil, err + } + return w, nil +} + +func RunAdminV2OperationWaitTimeWithResponse(config *transport_tpg.Config, op *run.GoogleLongrunningOperation, response *map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { + w, err := createRunAdminV2Waiter(config, op, project, activity, userAgent) + if err != nil { + return err + } + if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { + return err + } + return json.Unmarshal([]byte(w.CommonOperationWaiter.Op.Response), response) +} + +func RunAdminV2OperationWaitTime(config *transport_tpg.Config, op *run.GoogleLongrunningOperation, project, activity, userAgent string, timeout time.Duration) error { + if op.Done { + return nil + } + w, err := createRunAdminV2Waiter(config, op, project, activity, userAgent) + if err != nil { + return err + } + return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) +} diff --git a/mmv1/third_party/terraform/services/cloudscheduler/resource_cloud_scheduler_job_internal_test.go b/mmv1/third_party/terraform/services/cloudscheduler/resource_cloud_scheduler_job_internal_test.go new file mode 100644 index 000000000000..b6da157ce108 --- /dev/null +++ b/mmv1/third_party/terraform/services/cloudscheduler/resource_cloud_scheduler_job_internal_test.go @@ -0,0 +1,96 @@ +package cloudscheduler + +import ( + "reflect" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func TestCloudScheduler_FlattenHttpHeaders(t *testing.T) { + + cases := []struct { + Input map[string]interface{} + Output map[string]interface{} + }{ + // simple, no headers included + { + Input: map[string]interface{}{ + "My-Header": "my-header-value", + }, + Output: map[string]interface{}{ + "My-Header": "my-header-value", + }, + }, + + // include the User-Agent header value Google-Cloud-Scheduler + // Tests Removing User-Agent header + { + Input: map[string]interface{}{ + "User-Agent": "Google-Cloud-Scheduler", + "My-Header": "my-header-value", + }, + Output: map[string]interface{}{ + "My-Header": "my-header-value", + }, + }, + + // include the User-Agent header + // Tests removing value AppEngine-Google; (+http://code.google.com/appengine) + { + Input: map[string]interface{}{ + "User-Agent": "My-User-Agent AppEngine-Google; (+http://code.google.com/appengine)", + "My-Header": "my-header-value", + }, + Output: map[string]interface{}{ + "User-Agent": "My-User-Agent", + "My-Header": "my-header-value", + }, + }, + + // include the Content-Type header value application/octet-stream. 
+ // Tests Removing Content-Type header + { + Input: map[string]interface{}{ + "Content-Type": "application/octet-stream", + "My-Header": "my-header-value", + }, + Output: map[string]interface{}{ + "My-Header": "my-header-value", + }, + }, + + // include the Content-Length header + // Tests Removing Content-Length header + { + Input: map[string]interface{}{ + "Content-Length": 7, + "My-Header": "my-header-value", + }, + Output: map[string]interface{}{ + "My-Header": "my-header-value", + }, + }, + + // include the X-Google- header + // Tests Removing X-Google- header + { + Input: map[string]interface{}{ + "X-Google-My-Header": "x-google-my-header-value", + "My-Header": "my-header-value", + }, + Output: map[string]interface{}{ + "My-Header": "my-header-value", + }, + }, + } + + for _, c := range cases { + d := &schema.ResourceData{} + output := flattenCloudSchedulerJobAppEngineHttpTargetHeaders(c.Input, d, &transport_tpg.Config{}) + if !reflect.DeepEqual(output, c.Output) { + t.Fatalf("Error matching output and expected: %#v vs %#v", output, c.Output) + } + } +} diff --git a/mmv1/third_party/terraform/services/composer/composer_operation.go.erb b/mmv1/third_party/terraform/services/composer/composer_operation.go.erb new file mode 100644 index 000000000000..b8a5914b56f6 --- /dev/null +++ b/mmv1/third_party/terraform/services/composer/composer_operation.go.erb @@ -0,0 +1,38 @@ +<% autogen_exception -%> +package composer + +import ( + "fmt" + "time" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + +<% if version == "ga" -%> + "google.golang.org/api/composer/v1" +<% else -%> + "google.golang.org/api/composer/v1beta1" +<% end -%> +) + +type ComposerOperationWaiter struct { + Service *composer.ProjectsLocationsService + tpgresource.CommonOperationWaiter +} + +func (w *ComposerOperationWaiter) QueryOp() (interface{}, error) { + if w == nil { + return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") + } + return w.Service.Operations.Get(w.Op.Name).Do() +} + +func ComposerOperationWaitTime(config *transport_tpg.Config, op *composer.Operation, project, activity, userAgent string, timeout time.Duration) error { + w := &ComposerOperationWaiter{ + Service: config.NewComposerClient(userAgent).Projects.Locations, + } + if err := w.SetOp(op); err != nil { + return err + } + return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) +} diff --git a/mmv1/third_party/terraform/utils/compute_backend_service_helpers.go.erb b/mmv1/third_party/terraform/services/compute/compute_backend_service_helpers.go.erb similarity index 97% rename from mmv1/third_party/terraform/utils/compute_backend_service_helpers.go.erb rename to mmv1/third_party/terraform/services/compute/compute_backend_service_helpers.go.erb index 923daf4f7f8d..5be38c1a8310 100644 --- a/mmv1/third_party/terraform/utils/compute_backend_service_helpers.go.erb +++ b/mmv1/third_party/terraform/services/compute/compute_backend_service_helpers.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package compute import ( <% if version == "ga" -%> diff --git a/mmv1/third_party/terraform/utils/compute_instance_helpers.go.erb b/mmv1/third_party/terraform/services/compute/compute_instance_helpers.go.erb similarity index 98% rename from mmv1/third_party/terraform/utils/compute_instance_helpers.go.erb rename to mmv1/third_party/terraform/services/compute/compute_instance_helpers.go.erb index ef2850d73150..9e13f868a745 
100644 --- a/mmv1/third_party/terraform/utils/compute_instance_helpers.go.erb +++ b/mmv1/third_party/terraform/services/compute/compute_instance_helpers.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package compute import ( "fmt" @@ -396,7 +396,7 @@ func expandServiceAccounts(configs []interface{}) []*compute.ServiceAccount { accounts[i] = &compute.ServiceAccount{ Email: data["email"].(string), - Scopes: tpgresource.CanonicalizeServiceScopes(convertStringSet(data["scopes"].(*schema.Set))), + Scopes: tpgresource.CanonicalizeServiceScopes(tpgresource.ConvertStringSet(data["scopes"].(*schema.Set))), } if accounts[i].Email == "" { @@ -591,8 +591,8 @@ func hasNodeAffinitiesChanged(oScheduling, newScheduling map[string]interface{}) return true } - // convertStringSet will sort the set into a slice, allowing DeepEqual - if !reflect.DeepEqual(convertStringSet(oldNodeAffinity["values"].(*schema.Set)), convertStringSet(newNodeAffinity["values"].(*schema.Set))) { + // ConvertStringSet will sort the set into a slice, allowing DeepEqual + if !reflect.DeepEqual(tpgresource.ConvertStringSet(oldNodeAffinity["values"].(*schema.Set)), tpgresource.ConvertStringSet(newNodeAffinity["values"].(*schema.Set))) { return true } } diff --git a/mmv1/third_party/terraform/utils/compute_instance_network_interface_helpers.go.erb b/mmv1/third_party/terraform/services/compute/compute_instance_network_interface_helpers.go.erb similarity index 99% rename from mmv1/third_party/terraform/utils/compute_instance_network_interface_helpers.go.erb rename to mmv1/third_party/terraform/services/compute/compute_instance_network_interface_helpers.go.erb index 37ef212f65c6..b80b47ee7818 100644 --- a/mmv1/third_party/terraform/utils/compute_instance_network_interface_helpers.go.erb +++ b/mmv1/third_party/terraform/services/compute/compute_instance_network_interface_helpers.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package compute import ( "fmt" diff --git a/mmv1/third_party/terraform/services/compute/compute_operation.go.erb b/mmv1/third_party/terraform/services/compute/compute_operation.go.erb new file mode 100644 index 000000000000..faeb0c5ef0b7 --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/compute_operation.go.erb @@ -0,0 +1,214 @@ +<% autogen_exception -%> +package compute + +import ( + "bytes" + "context" +<% unless version == 'ga' -%> + "encoding/json" +<% end -%> + "errors" + "fmt" + "io" + "log" + "time" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + +<% if version == "ga" -%> + "google.golang.org/api/compute/v1" +<% else -%> + compute "google.golang.org/api/compute/v0.beta" +<% end -%> +) + +type ComputeOperationWaiter struct { + Service *compute.Service + Op *compute.Operation + Context context.Context + Project string +<% unless version == 'ga' -%> + Parent string +<% end -%> +} + +func (w *ComputeOperationWaiter) State() string { + if w == nil || w.Op == nil { + return "" + } + + return w.Op.Status +} + +func (w *ComputeOperationWaiter) Error() error { + if w != nil && w.Op != nil && w.Op.Error != nil { + return ComputeOperationError(*w.Op.Error) + } + return nil +} + +func (w *ComputeOperationWaiter) IsRetryable(err error) bool { + if oe, ok := err.(ComputeOperationError); ok { + for _, e := range oe.Errors { + if e.Code == "RESOURCE_NOT_READY" { + return true + } + } + } + return false +} + +func (w *ComputeOperationWaiter) SetOp(op interface{}) error { + 
var ok bool + w.Op, ok = op.(*compute.Operation) + if !ok { + return fmt.Errorf("Unable to set operation. Bad type!") + } + return nil +} + +func (w *ComputeOperationWaiter) QueryOp() (interface{}, error) { + if w == nil || w.Op == nil { + return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") + } + if w.Context != nil { + select { + case <-w.Context.Done(): + log.Println("[WARN] request has been cancelled early") + return w.Op, errors.New("unable to finish polling, context has been cancelled") + default: + // default must be here to keep the previous case from blocking + } + } + if w.Op.Zone != "" { + zone := tpgresource.GetResourceNameFromSelfLink(w.Op.Zone) + return w.Service.ZoneOperations.Get(w.Project, zone, w.Op.Name).Do() + } else if w.Op.Region != "" { + region := tpgresource.GetResourceNameFromSelfLink(w.Op.Region) + return w.Service.RegionOperations.Get(w.Project, region, w.Op.Name).Do() +<% unless version == 'ga' -%> + } else if w.Parent != "" { + return w.Service.GlobalOrganizationOperations.Get(w.Op.Name).ParentId(w.Parent).Do() +<% end -%> + } + return w.Service.GlobalOperations.Get(w.Project, w.Op.Name).Do() +} + +func (w *ComputeOperationWaiter) OpName() string { + if w == nil || w.Op == nil { + return " Compute Op" + } + + return w.Op.Name +} + +func (w *ComputeOperationWaiter) PendingStates() []string { + return []string{"PENDING", "RUNNING"} +} + +func (w *ComputeOperationWaiter) TargetStates() []string { + return []string{"DONE"} +} + +func ComputeOperationWaitTime(config *transport_tpg.Config, res interface{}, project, activity, userAgent string, timeout time.Duration) error { + op := &compute.Operation{} + err := tpgresource.Convert(res, op) + if err != nil { + return err + } + + w := &ComputeOperationWaiter{ + Service: config.NewComputeClient(userAgent), + Context: config.Context, + Op: op, + Project: project, + } + + if err := w.SetOp(op); err != nil { + return err + } + return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) +} + +<% unless version == 'ga' -%> +func ComputeOrgOperationWaitTimeWithResponse(config *transport_tpg.Config, res interface{}, response *map[string]interface{}, parent, activity, userAgent string, timeout time.Duration) error { + op := &compute.Operation{} + err := tpgresource.Convert(res, op) + if err != nil { + return err + } + + w := &ComputeOperationWaiter{ + Service: config.NewComputeClient(userAgent), + Op: op, + Parent: parent, + } + + if err := w.SetOp(op); err != nil { + return err + } + if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { + return err + } + e, err := json.Marshal(w.Op) + if err != nil { + return err + } + return json.Unmarshal(e, response) +} + +<% end -%> + +// ComputeOperationError wraps compute.OperationError and implements the +// error interface so it can be returned. 
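+// Error() concatenates the message of every entry in Errors; for each entry it also appends the first
+// LocalizedMessage and the first help link found in its ErrorDetails, separated by blank lines.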
+type ComputeOperationError compute.OperationError + +func (e ComputeOperationError) Error() string { + buf := bytes.NewBuffer(nil) + for _, err := range e.Errors { + writeOperationError(buf, err) + } + + return buf.String() +} + +const errMsgSep = "\n\n" + +func writeOperationError(w io.StringWriter, opError *compute.OperationErrorErrors) { + w.WriteString(opError.Message + "\n") + + var lm *compute.LocalizedMessage + var link *compute.HelpLink + + for _, ed := range opError.ErrorDetails { + if lm == nil && ed.LocalizedMessage != nil { + lm = ed.LocalizedMessage + } + + if link == nil && ed.Help != nil && len(ed.Help.Links) > 0 { + link = ed.Help.Links[0] + } + + if lm != nil && link != nil { + break + } + } + + if lm != nil && lm.Message != "" { + w.WriteString(errMsgSep) + w.WriteString(lm.Message + "\n") + } + + if link != nil { + w.WriteString(errMsgSep) + + if link.Description != "" { + w.WriteString(link.Description + "\n") + } + + if link.Url != "" { + w.WriteString(link.Url + "\n") + } + } +} diff --git a/mmv1/third_party/terraform/utils/compute_operation_test.go.erb b/mmv1/third_party/terraform/services/compute/compute_operation_test.go.erb similarity index 99% rename from mmv1/third_party/terraform/utils/compute_operation_test.go.erb rename to mmv1/third_party/terraform/services/compute/compute_operation_test.go.erb index f99e4ea1d876..14a9eb76fe13 100644 --- a/mmv1/third_party/terraform/utils/compute_operation_test.go.erb +++ b/mmv1/third_party/terraform/services/compute/compute_operation_test.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package compute import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_compute_health_check.go b/mmv1/third_party/terraform/services/compute/data_source_compute_health_check.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_compute_health_check.go rename to mmv1/third_party/terraform/services/compute/data_source_compute_health_check.go index defc9d164ef2..e47066ea9b26 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_compute_health_check.go +++ b/mmv1/third_party/terraform/services/compute/data_source_compute_health_check.go @@ -1,4 +1,4 @@ -package google +package compute import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" diff --git a/mmv1/third_party/terraform/data_sources/data_source_compute_network_endpoint_group.go b/mmv1/third_party/terraform/services/compute/data_source_compute_network_endpoint_group.go similarity index 99% rename from mmv1/third_party/terraform/data_sources/data_source_compute_network_endpoint_group.go rename to mmv1/third_party/terraform/services/compute/data_source_compute_network_endpoint_group.go index 05d0cac3f28c..e12c2958b4d5 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_compute_network_endpoint_group.go +++ b/mmv1/third_party/terraform/services/compute/data_source_compute_network_endpoint_group.go @@ -1,4 +1,4 @@ -package google +package compute import ( "errors" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_compute_backend_bucket.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_backend_bucket.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_google_compute_backend_bucket.go rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_backend_bucket.go index c682ded7821d..b56c80f7c5bc 100644 --- 
a/mmv1/third_party/terraform/data_sources/data_source_google_compute_backend_bucket.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_backend_bucket.go @@ -1,4 +1,4 @@ -package google +package compute import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_compute_backend_service.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_backend_service.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_google_compute_backend_service.go rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_backend_service.go index ca08717c3b84..eac09d83ae6a 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_compute_backend_service.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_backend_service.go @@ -1,4 +1,4 @@ -package google +package compute import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_compute_disk.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_disk.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_google_compute_disk.go rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_disk.go index dec5d2e68a17..9b660c8989e5 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_compute_disk.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_disk.go @@ -1,4 +1,4 @@ -package google +package compute import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_compute_forwarding_rule.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_forwarding_rule.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_google_compute_forwarding_rule.go rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_forwarding_rule.go index 90cd81bab249..9dc06385eb78 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_compute_forwarding_rule.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_forwarding_rule.go @@ -1,4 +1,4 @@ -package google +package compute import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_compute_ha_vpn_gateway.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_ha_vpn_gateway.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_google_compute_ha_vpn_gateway.go rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_ha_vpn_gateway.go index d4a0feb10046..a5408b608dea 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_compute_ha_vpn_gateway.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_ha_vpn_gateway.go @@ -1,4 +1,4 @@ -package google +package compute import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_compute_instance.go.erb b/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance.go.erb similarity index 99% rename from mmv1/third_party/terraform/data_sources/data_source_google_compute_instance.go.erb rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_instance.go.erb index ef4ee005e798..19a49a5b4410 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_compute_instance.go.erb +++ 
b/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package compute import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_compute_instance_group.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_group.go similarity index 99% rename from mmv1/third_party/terraform/data_sources/data_source_google_compute_instance_group.go rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_group.go index d0ffaea73003..7be1e9c072fe 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_compute_instance_group.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_group.go @@ -1,4 +1,4 @@ -package google +package compute import ( "errors" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_compute_instance_group_manager.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_group_manager.go similarity index 99% rename from mmv1/third_party/terraform/data_sources/data_source_google_compute_instance_group_manager.go rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_group_manager.go index 30249c6ff7f5..3f311cf4af05 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_compute_instance_group_manager.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_group_manager.go @@ -1,4 +1,4 @@ -package google +package compute import ( "errors" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_compute_instance_template.go.erb b/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_template.go.erb similarity index 99% rename from mmv1/third_party/terraform/data_sources/data_source_google_compute_instance_template.go.erb rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_template.go.erb index 420c2ee02dc2..ee82da732187 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_compute_instance_template.go.erb +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_instance_template.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package compute import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_compute_region_instance_group.go.erb b/mmv1/third_party/terraform/services/compute/data_source_google_compute_region_instance_group.go.erb similarity index 99% rename from mmv1/third_party/terraform/data_sources/data_source_google_compute_region_instance_group.go.erb rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_region_instance_group.go.erb index 65492d364ccf..43c8499ce26b 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_compute_region_instance_group.go.erb +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_region_instance_group.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package compute import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_compute_region_instance_template.go.erb b/mmv1/third_party/terraform/services/compute/data_source_google_compute_region_instance_template.go.erb similarity index 99% rename from mmv1/third_party/terraform/data_sources/data_source_google_compute_region_instance_template.go.erb rename to 
mmv1/third_party/terraform/services/compute/data_source_google_compute_region_instance_template.go.erb index 98f924b8a95c..55bba9c66a69 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_compute_region_instance_template.go.erb +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_region_instance_template.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package compute <% unless version == 'ga' -%> diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_compute_region_network_endpoint_group.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_region_network_endpoint_group.go similarity index 99% rename from mmv1/third_party/terraform/data_sources/data_source_google_compute_region_network_endpoint_group.go rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_region_network_endpoint_group.go index 6b744aee615e..0147cd280d3d 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_compute_region_network_endpoint_group.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_region_network_endpoint_group.go @@ -1,4 +1,4 @@ -package google +package compute import ( "errors" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_compute_region_ssl_certificate.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_region_ssl_certificate.go similarity index 90% rename from mmv1/third_party/terraform/data_sources/data_source_google_compute_region_ssl_certificate.go rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_region_ssl_certificate.go index 881813fd5fb0..a42e9bc85491 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_compute_region_ssl_certificate.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_region_ssl_certificate.go @@ -1,4 +1,4 @@ -package google +package compute import ( "fmt" @@ -28,7 +28,7 @@ func DataSourceGoogleRegionComputeSslCertificate() *schema.Resource { func dataSourceComputeRegionSslCertificateRead(d *schema.ResourceData, meta interface{}) error { config := meta.(*transport_tpg.Config) - project, region, name, err := GetRegionalResourcePropertiesFromSelfLinkOrSchema(d, config) + project, region, name, err := tpgresource.GetRegionalResourcePropertiesFromSelfLinkOrSchema(d, config) if err != nil { return err } diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_compute_resource_policy.go.erb b/mmv1/third_party/terraform/services/compute/data_source_google_compute_resource_policy.go.erb similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_google_compute_resource_policy.go.erb rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_resource_policy.go.erb index b7f359e929c1..3485a7051fe3 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_compute_resource_policy.go.erb +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_resource_policy.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package compute import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_compute_router.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_router.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_google_compute_router.go rename to 
mmv1/third_party/terraform/services/compute/data_source_google_compute_router.go index fb097f3033ce..01f1ab0218e6 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_compute_router.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_router.go @@ -1,4 +1,4 @@ -package google +package compute import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_compute_router_nat.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_router_nat.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_google_compute_router_nat.go rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_router_nat.go index a5b04d7fac4c..b9f8fd1dae48 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_compute_router_nat.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_router_nat.go @@ -1,4 +1,4 @@ -package google +package compute import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_compute_router_status.go.erb b/mmv1/third_party/terraform/services/compute/data_source_google_compute_router_status.go.erb similarity index 99% rename from mmv1/third_party/terraform/data_sources/data_source_google_compute_router_status.go.erb rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_router_status.go.erb index cb92e562e548..19b725cbd6a6 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_compute_router_status.go.erb +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_router_status.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package compute import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_compute_snapshot.go.erb b/mmv1/third_party/terraform/services/compute/data_source_google_compute_snapshot.go.erb similarity index 99% rename from mmv1/third_party/terraform/data_sources/data_source_google_compute_snapshot.go.erb rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_snapshot.go.erb index 0e1f590f6cf0..ea1d893715c2 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_compute_snapshot.go.erb +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_snapshot.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package compute import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_compute_ssl_certificate.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_ssl_certificate.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_google_compute_ssl_certificate.go rename to mmv1/third_party/terraform/services/compute/data_source_google_compute_ssl_certificate.go index aa780e7737c2..20e9bf9d4110 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_compute_ssl_certificate.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_ssl_certificate.go @@ -1,4 +1,4 @@ -package google +package compute import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_compute_ssl_policy.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_ssl_policy.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_google_compute_ssl_policy.go rename to 
mmv1/third_party/terraform/services/compute/data_source_google_compute_ssl_policy.go index 95913582d185..8710928be8e4 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_compute_ssl_policy.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_ssl_policy.go @@ -1,4 +1,4 @@ -package google +package compute import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_global_compute_forwarding_rule.go b/mmv1/third_party/terraform/services/compute/data_source_google_global_compute_forwarding_rule.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_google_global_compute_forwarding_rule.go rename to mmv1/third_party/terraform/services/compute/data_source_google_global_compute_forwarding_rule.go index 377384d15079..1b25c01babef 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_global_compute_forwarding_rule.go +++ b/mmv1/third_party/terraform/services/compute/data_source_google_global_compute_forwarding_rule.go @@ -1,4 +1,4 @@ -package google +package compute import ( "fmt" diff --git a/mmv1/third_party/terraform/utils/disk_type.go b/mmv1/third_party/terraform/services/compute/disk_type.go similarity index 97% rename from mmv1/third_party/terraform/utils/disk_type.go rename to mmv1/third_party/terraform/services/compute/disk_type.go index a8bbcf8d66ab..d9008b2e86c2 100644 --- a/mmv1/third_party/terraform/utils/disk_type.go +++ b/mmv1/third_party/terraform/services/compute/disk_type.go @@ -1,4 +1,4 @@ -package google +package compute import ( "github.com/hashicorp/terraform-provider-google/google/tpgresource" diff --git a/mmv1/third_party/terraform/services/compute/image.go b/mmv1/third_party/terraform/services/compute/image.go new file mode 100644 index 000000000000..063c7e4cd167 --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/image.go @@ -0,0 +1,247 @@ +package compute + +import ( + "fmt" + "regexp" + "strings" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "github.com/hashicorp/terraform-provider-google/google/verify" + + "google.golang.org/api/googleapi" +) + +const ( + resolveImageFamilyRegex = "[-_a-zA-Z0-9]*" + resolveImageImageRegex = "[-_a-zA-Z0-9]*" +) + +var ( + resolveImageProjectImage = regexp.MustCompile(fmt.Sprintf("projects/(%s)/global/images/(%s)$", verify.ProjectRegex, resolveImageImageRegex)) + resolveImageProjectFamily = regexp.MustCompile(fmt.Sprintf("projects/(%s)/global/images/family/(%s)$", verify.ProjectRegex, resolveImageFamilyRegex)) + resolveImageGlobalImage = regexp.MustCompile(fmt.Sprintf("^global/images/(%s)$", resolveImageImageRegex)) + resolveImageGlobalFamily = regexp.MustCompile(fmt.Sprintf("^global/images/family/(%s)$", resolveImageFamilyRegex)) + resolveImageFamilyFamily = regexp.MustCompile(fmt.Sprintf("^family/(%s)$", resolveImageFamilyRegex)) + resolveImageProjectImageShorthand = regexp.MustCompile(fmt.Sprintf("^(%s)/(%s)$", verify.ProjectRegex, resolveImageImageRegex)) + resolveImageProjectFamilyShorthand = regexp.MustCompile(fmt.Sprintf("^(%s)/(%s)$", verify.ProjectRegex, resolveImageFamilyRegex)) + resolveImageFamily = regexp.MustCompile(fmt.Sprintf("^(%s)$", resolveImageFamilyRegex)) + resolveImageImage = regexp.MustCompile(fmt.Sprintf("^(%s)$", resolveImageImageRegex)) + resolveImageLink = regexp.MustCompile(fmt.Sprintf("^https://www.googleapis.com/compute/[a-z0-9]+/projects/(%s)/global/images/(%s)", 
verify.ProjectRegex, resolveImageImageRegex)) + + windowsSqlImage = regexp.MustCompile("^sql-(?:server-)?([0-9]{4})-([a-z]+)-windows-(?:server-)?([0-9]{4})(?:-r([0-9]+))?-dc-v[0-9]+$") + canonicalUbuntuLtsImage = regexp.MustCompile("^ubuntu-(minimal-)?([0-9]+)(?:.*(arm64))?.*$") + cosLtsImage = regexp.MustCompile("^cos-([0-9]+)-") +) + +// built-in projects to look for images/families containing the string +// on the left in +var ImageMap = map[string]string{ + "centos": "centos-cloud", + "coreos": "coreos-cloud", + "debian": "debian-cloud", + "opensuse": "opensuse-cloud", + "rhel": "rhel-cloud", + "rocky-linux": "rocky-linux-cloud", + "sles": "suse-cloud", + "ubuntu": "ubuntu-os-cloud", + "windows": "windows-cloud", + "windows-sql": "windows-sql-cloud", +} + +func resolveImageImageExists(c *transport_tpg.Config, project, name, userAgent string) (bool, error) { + if _, err := c.NewComputeClient(userAgent).Images.Get(project, name).Do(); err == nil { + return true, nil + } else if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 404 { + return false, nil + } else { + return false, fmt.Errorf("Error checking if image %s exists: %s", name, err) + } +} + +func resolveImageFamilyExists(c *transport_tpg.Config, project, name, userAgent string) (bool, error) { + if _, err := c.NewComputeClient(userAgent).Images.GetFromFamily(project, name).Do(); err == nil { + return true, nil + } else if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 404 { + return false, nil + } else { + return false, fmt.Errorf("Error checking if family %s exists: %s", name, err) + } +} + +func sanityTestRegexMatches(expected int, got []string, regexType, name string) error { + if len(got)-1 != expected { // subtract one, index zero is the entire matched expression + return fmt.Errorf("Expected %d %s regex matches, got %d for %s", expected, regexType, len(got)-1, name) + } + return nil +} + +// If the given name is a URL, return it. +// If it's in the form projects/{project}/global/images/{image}, return it +// If it's in the form projects/{project}/global/images/family/{family}, return it +// If it's in the form global/images/{image}, return it +// If it's in the form global/images/family/{family}, return it +// If it's in the form family/{family}, check if it's a family in the current project. If it is, return it as global/images/family/{family}. +// +// If not, check if it could be a GCP-provided family, and if it exists. If it does, return it as projects/{project}/global/images/family/{family}. +// +// If it's in the form {project}/{family-or-image}, check if it's an image in the named project. If it is, return it as projects/{project}/global/images/{image}. +// +// If not, check if it's a family in the named project. If it is, return it as projects/{project}/global/images/family/{family}. +// +// If it's in the form {family-or-image}, check if it's an image in the current project. If it is, return it as global/images/{image}. +// +// If not, check if it could be a GCP-provided image, and if it exists. If it does, return it as projects/{project}/global/images/{image}. +// If not, check if it's a family in the current project. If it is, return it as global/images/family/{family}. +// If not, check if it could be a GCP-provided family, and if it exists. 
If it does, return it as projects/{project}/global/images/family/{family} +func ResolveImage(c *transport_tpg.Config, project, name, userAgent string) (string, error) { + var builtInProject string + for k, v := range ImageMap { + if strings.Contains(name, k) { + builtInProject = v + break + } + } + switch { + case resolveImageLink.MatchString(name): // https://www.googleapis.com/compute/v1/projects/xyz/global/images/xyz + return name, nil + case resolveImageProjectImage.MatchString(name): // projects/xyz/global/images/xyz + res := resolveImageProjectImage.FindStringSubmatch(name) + if err := sanityTestRegexMatches(2, res, "project image", name); err != nil { + return "", err + } + return fmt.Sprintf("projects/%s/global/images/%s", res[1], res[2]), nil + case resolveImageProjectFamily.MatchString(name): // projects/xyz/global/images/family/xyz + res := resolveImageProjectFamily.FindStringSubmatch(name) + if err := sanityTestRegexMatches(2, res, "project family", name); err != nil { + return "", err + } + return fmt.Sprintf("projects/%s/global/images/family/%s", res[1], res[2]), nil + case resolveImageGlobalImage.MatchString(name): // global/images/xyz + res := resolveImageGlobalImage.FindStringSubmatch(name) + if err := sanityTestRegexMatches(1, res, "global image", name); err != nil { + return "", err + } + return fmt.Sprintf("global/images/%s", res[1]), nil + case resolveImageGlobalFamily.MatchString(name): // global/images/family/xyz + res := resolveImageGlobalFamily.FindStringSubmatch(name) + if err := sanityTestRegexMatches(1, res, "global family", name); err != nil { + return "", err + } + return fmt.Sprintf("global/images/family/%s", res[1]), nil + case resolveImageFamilyFamily.MatchString(name): // family/xyz + res := resolveImageFamilyFamily.FindStringSubmatch(name) + if err := sanityTestRegexMatches(1, res, "family family", name); err != nil { + return "", err + } + if ok, err := resolveImageFamilyExists(c, project, res[1], userAgent); err != nil { + return "", err + } else if ok { + return fmt.Sprintf("global/images/family/%s", res[1]), nil + } + if builtInProject != "" { + if ok, err := resolveImageFamilyExists(c, builtInProject, res[1], userAgent); err != nil { + return "", err + } else if ok { + return fmt.Sprintf("projects/%s/global/images/family/%s", builtInProject, res[1]), nil + } + } + case resolveImageProjectImageShorthand.MatchString(name): // xyz/xyz + res := resolveImageProjectImageShorthand.FindStringSubmatch(name) + if err := sanityTestRegexMatches(2, res, "project image shorthand", name); err != nil { + return "", err + } + if ok, err := resolveImageImageExists(c, res[1], res[2], userAgent); err != nil { + return "", err + } else if ok { + return fmt.Sprintf("projects/%s/global/images/%s", res[1], res[2]), nil + } + fallthrough // check if it's a family + case resolveImageProjectFamilyShorthand.MatchString(name): // xyz/xyz + res := resolveImageProjectFamilyShorthand.FindStringSubmatch(name) + if err := sanityTestRegexMatches(2, res, "project family shorthand", name); err != nil { + return "", err + } + if ok, err := resolveImageFamilyExists(c, res[1], res[2], userAgent); err != nil { + return "", err + } else if ok { + return fmt.Sprintf("projects/%s/global/images/family/%s", res[1], res[2]), nil + } + case resolveImageImage.MatchString(name): // xyz + res := resolveImageImage.FindStringSubmatch(name) + if err := sanityTestRegexMatches(1, res, "image", name); err != nil { + return "", err + } + if ok, err := resolveImageImageExists(c, project, res[1], userAgent); 
err != nil { + return "", err + } else if ok { + return fmt.Sprintf("global/images/%s", res[1]), nil + } + if builtInProject != "" { + // check the images GCP provides + if ok, err := resolveImageImageExists(c, builtInProject, res[1], userAgent); err != nil { + return "", err + } else if ok { + return fmt.Sprintf("projects/%s/global/images/%s", builtInProject, res[1]), nil + } + } + fallthrough // check if the name is a family, instead of an image + case resolveImageFamily.MatchString(name): // xyz + res := resolveImageFamily.FindStringSubmatch(name) + if err := sanityTestRegexMatches(1, res, "family", name); err != nil { + return "", err + } + if ok, err := resolveImageFamilyExists(c, c.Project, res[1], userAgent); err != nil { + return "", err + } else if ok { + return fmt.Sprintf("global/images/family/%s", res[1]), nil + } + if builtInProject != "" { + // check the families GCP provides + if ok, err := resolveImageFamilyExists(c, builtInProject, res[1], userAgent); err != nil { + return "", err + } else if ok { + return fmt.Sprintf("projects/%s/global/images/family/%s", builtInProject, res[1]), nil + } + } + } + return "", fmt.Errorf("Could not find image or family %s", name) +} + +// resolveImageRefToRelativeURI takes the output of ResolveImage and coerces it +// into a relative URI. In the event that a global/images/IMAGE or +// global/images/family/FAMILY reference is returned from ResolveImage, +// providerProject will be used as the project for the self_link. +func resolveImageRefToRelativeURI(providerProject, name string) (string, error) { + switch { + case resolveImageLink.MatchString(name): // https://www.googleapis.com/compute/v1/projects/xyz/global/images/xyz + namePath, err := tpgresource.GetRelativePath(name) + if err != nil { + return "", err + } + + return namePath, nil + case resolveImageProjectImage.MatchString(name): // projects/xyz/global/images/xyz + return name, nil + case resolveImageProjectFamily.MatchString(name): // projects/xyz/global/images/family/xyz + res := resolveImageProjectFamily.FindStringSubmatch(name) + if err := sanityTestRegexMatches(2, res, "project family", name); err != nil { + return "", err + } + return fmt.Sprintf("projects/%s/global/images/family/%s", res[1], res[2]), nil + case resolveImageGlobalImage.MatchString(name): // global/images/xyz + res := resolveImageGlobalImage.FindStringSubmatch(name) + if err := sanityTestRegexMatches(1, res, "global image", name); err != nil { + return "", err + } + return fmt.Sprintf("projects/%s/global/images/%s", providerProject, res[1]), nil + case resolveImageGlobalFamily.MatchString(name): // global/images/family/xyz + res := resolveImageGlobalFamily.FindStringSubmatch(name) + if err := sanityTestRegexMatches(1, res, "global family", name); err != nil { + return "", err + } + return fmt.Sprintf("projects/%s/global/images/family/%s", providerProject, res[1]), nil + } + return "", fmt.Errorf("Could not expand image or family %q into a relative URI", name) + +} diff --git a/mmv1/third_party/terraform/services/compute/metadata.go.erb b/mmv1/third_party/terraform/services/compute/metadata.go.erb new file mode 100644 index 000000000000..c6cbffee3045 --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/metadata.go.erb @@ -0,0 +1,163 @@ +<% autogen_exception -%> +package compute + +import ( + "errors" + "sort" + +<% if version == "ga" -%> + "google.golang.org/api/compute/v1" +<% else -%> + compute "google.golang.org/api/compute/v0.beta" +<% end -%> + + 
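// Editor's illustration (not part of this change): a sketch of the resolution order that
// ResolveImage (defined in image.go above) applies. The project, image, and family names
// are invented, and each call assumes the corresponding image or family exists so the
// lookup succeeds on the branch described; errors are ignored for brevity.
package compute

import (
	"fmt"

	transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport"
)

func exampleResolveImage(c *transport_tpg.Config, userAgent string) {
	// Fully qualified references are validated and returned as-is.
	out, _ := ResolveImage(c, "my-project", "projects/my-project/global/images/my-image", userAgent)
	fmt.Println(out) // projects/my-project/global/images/my-image

	// "family/..." is resolved against the current project first.
	out, _ = ResolveImage(c, "my-project", "family/my-family", userAgent)
	fmt.Println(out) // global/images/family/my-family

	// A bare name falls through: image in the current project, then a GCP-provided image,
	// then a family, using ImageMap to pick the built-in project ("debian" -> "debian-cloud").
	out, _ = ResolveImage(c, "my-project", "debian-11", userAgent)
	fmt.Println(out) // projects/debian-cloud/global/images/family/debian-11, if only the family exists
}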
"github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +// Since the google compute API uses optimistic locking, there is a chance +// we need to resubmit our updated metadata. To do this, you need to provide +// an update function that attempts to submit your metadata +func MetadataRetryWrapper(update func() error) error { + return transport_tpg.MetadataRetryWrapper(update) +} + +// Update the metadata (serverMD) according to the provided diff (oldMDMap v +// newMDMap). +func MetadataUpdate(oldMDMap map[string]interface{}, newMDMap map[string]interface{}, serverMD *compute.Metadata) { + curMDMap := make(map[string]string) + // Load metadata on server into map + for _, kv := range serverMD.Items { + // If the server state has a key that we had in our old + // state, but not in our new state, we should delete it + _, okOld := oldMDMap[kv.Key] + _, okNew := newMDMap[kv.Key] + if okOld && !okNew { + continue + } else { + curMDMap[kv.Key] = *kv.Value + } + } + + // Insert new metadata into existing metadata (overwriting when needed) + for key, val := range newMDMap { + curMDMap[key] = val.(string) + } + + // Reformat old metadata into a list + serverMD.Items = nil + for key, val := range curMDMap { + v := val + serverMD.Items = append(serverMD.Items, &compute.MetadataItems{ + Key: key, + Value: &v, + }) + } +} + +// Update the beta metadata (serverMD) according to the provided diff (oldMDMap v +// newMDMap). +func BetaMetadataUpdate(oldMDMap map[string]interface{}, newMDMap map[string]interface{}, serverMD *compute.Metadata) { + curMDMap := make(map[string]string) + // Load metadata on server into map + for _, kv := range serverMD.Items { + // If the server state has a key that we had in our old + // state, but not in our new state, we should delete it + _, okOld := oldMDMap[kv.Key] + _, okNew := newMDMap[kv.Key] + if okOld && !okNew { + continue + } else { + curMDMap[kv.Key] = *kv.Value + } + } + + // Insert new metadata into existing metadata (overwriting when needed) + for key, val := range newMDMap { + curMDMap[key] = val.(string) + } + + // Reformat old metadata into a list + serverMD.Items = nil + for key, val := range curMDMap { + v := val + serverMD.Items = append(serverMD.Items, &compute.MetadataItems{ + Key: key, + Value: &v, + }) + } +} + +func expandComputeMetadata(m map[string]interface{}) []*compute.MetadataItems { + metadata := make([]*compute.MetadataItems, len(m)) + var keys []string + for key := range m { + keys = append(keys, key) + } + sort.Strings(keys) + // Append new metadata to existing metadata + for _, key := range keys { + v := m[key].(string) + metadata = append(metadata, &compute.MetadataItems{ + Key: key, + Value: &v, + }) + } + + return metadata +} + +func flattenMetadataBeta(metadata *compute.Metadata) map[string]string { + metadataMap := make(map[string]string) + for _, item := range metadata.Items { + metadataMap[item.Key] = *item.Value + } + return metadataMap +} + +// This function differs from flattenMetadataBeta only in that it takes +// compute.metadata rather than compute.metadata as an argument. It should +// be removed in favour of flattenMetadataBeta if/when all resources using it get +// beta support. 
+func FlattenMetadata(metadata *compute.Metadata) map[string]interface{} { + metadataMap := make(map[string]interface{}) + for _, item := range metadata.Items { + metadataMap[item.Key] = *item.Value + } + return metadataMap +} + +func resourceInstanceMetadata(d tpgresource.TerraformResourceData) (*compute.Metadata, error) { + m := &compute.Metadata{} + mdMap := d.Get("metadata").(map[string]interface{}) + if v, ok := d.GetOk("metadata_startup_script"); ok && v.(string) != "" { + if w, ok := mdMap["startup-script"]; ok { + // metadata.startup-script could be from metadata_startup_script in the first place + if v != w { + return nil, errors.New("Cannot provide both metadata_startup_script and metadata.startup-script.") + } + } + mdMap["startup-script"] = v + } + if len(mdMap) > 0 { + m.Items = make([]*compute.MetadataItems, 0, len(mdMap)) + var keys []string + for k := range mdMap { + keys = append(keys, k) + } + sort.Strings(keys) + for _, k := range keys { + v := mdMap[k].(string) + m.Items = append(m.Items, &compute.MetadataItems{ + Key: k, + Value: &v, + }) + } + + // Set the fingerprint. If the metadata has never been set before + // then this will just be blank. + m.Fingerprint = d.Get("metadata_fingerprint").(string) + } + + return m, nil +} diff --git a/mmv1/third_party/terraform/resources/resource_compute_firewall_migrate.go b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_migrate.go similarity index 99% rename from mmv1/third_party/terraform/resources/resource_compute_firewall_migrate.go rename to mmv1/third_party/terraform/services/compute/resource_compute_firewall_migrate.go index d22881332495..4eecabb92056 100644 --- a/mmv1/third_party/terraform/resources/resource_compute_firewall_migrate.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_migrate.go @@ -1,4 +1,4 @@ -package google +package compute import ( "fmt" diff --git a/mmv1/third_party/terraform/tests/resource_compute_firewall_migrate_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_migrate_internal_test.go similarity index 99% rename from mmv1/third_party/terraform/tests/resource_compute_firewall_migrate_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_firewall_migrate_internal_test.go index a99f35a03443..9339315e981a 100644 --- a/mmv1/third_party/terraform/tests/resource_compute_firewall_migrate_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_firewall_migrate_internal_test.go @@ -1,4 +1,4 @@ -package google +package compute import ( "testing" diff --git a/mmv1/third_party/terraform/resources/resource_compute_instance.go.erb b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.erb similarity index 99% rename from mmv1/third_party/terraform/resources/resource_compute_instance.go.erb rename to mmv1/third_party/terraform/services/compute/resource_compute_instance.go.erb index 81072195234d..9d5c88786b4b 100644 --- a/mmv1/third_party/terraform/resources/resource_compute_instance.go.erb +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package compute import ( "context" @@ -112,7 +112,7 @@ func ResourceComputeInstance() *schema.Resource { }, SchemaVersion: 6, - MigrateState: resourceComputeInstanceMigrateState, + MigrateState: ResourceComputeInstanceMigrateState, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(20 * time.Minute), @@ -627,7 +627,7 @@ func 
ResourceComputeInstance() *schema.Resource { DiffSuppressFunc: tpgresource.EmptyOrDefaultStringSuppress(""), Description: `Specifies node affinities or anti-affinities to determine which sole-tenant nodes your instances and managed instance groups will use as host systems.`, }, - + "min_node_cpus": { Type: schema.TypeInt, Optional: true, @@ -684,7 +684,7 @@ be from 0 to 999,999,999 inclusive.`, Description: `Specifies the frequency of planned maintenance events. The accepted values are: PERIODIC`, }, <% end -%> - + }, }, }, @@ -1522,8 +1522,8 @@ func resourceComputeInstanceUpdate(d *schema.ResourceData, meta interface{}) err } // We're retrying for an error 412 where the metadata fingerprint is out of date - err = transport_tpg.Retry( - func() error { + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { // retrieve up-to-date metadata from the API in case several updates hit simultaneously. instances // sometimes but not always share metadata fingerprints. instance, err := config.NewComputeClient(userAgent).Instances.Get(project, zone, instance.Name).Do() @@ -1545,7 +1545,7 @@ func resourceComputeInstanceUpdate(d *schema.ResourceData, meta interface{}) err return nil }, - ) + }) if err != nil { return err @@ -1905,8 +1905,8 @@ func resourceComputeInstanceUpdate(d *schema.ResourceData, meta interface{}) err } if d.HasChange("can_ip_forward") { - err = transport_tpg.Retry( - func() error { + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { instance, err := config.NewComputeClient(userAgent).Instances.Get(project, zone, instance.Name).Do() if err != nil { return fmt.Errorf("Error retrieving instance: %s", err) @@ -1926,7 +1926,7 @@ func resourceComputeInstanceUpdate(d *schema.ResourceData, meta interface{}) err return nil }, - ) + }) if err != nil { return err @@ -2031,7 +2031,7 @@ func resourceComputeInstanceUpdate(d *schema.ResourceData, meta interface{}) err if len(sa) > 0 && sa[0] != nil { saMap := sa[0].(map[string]interface{}) req.Email = saMap["email"].(string) - req.Scopes = tpgresource.CanonicalizeServiceScopes(convertStringSet(saMap["scopes"].(*schema.Set))) + req.Scopes = tpgresource.CanonicalizeServiceScopes(tpgresource.ConvertStringSet(saMap["scopes"].(*schema.Set))) } op, err := config.NewComputeClient(userAgent).Instances.SetServiceAccount(project, zone, instance.Name, req).Do() if err != nil { @@ -2094,8 +2094,8 @@ func resourceComputeInstanceUpdate(d *schema.ResourceData, meta interface{}) err } if d.HasChange("advanced_machine_features") { - err = transport_tpg.Retry( - func() error { + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { // retrieve up-to-date instance from the API in case several updates hit simultaneously. instances // sometimes but not always share metadata fingerprints. 
instance, err := config.NewComputeClient(userAgent).Instances.Get(project, zone, instance.Name).Do() @@ -2117,7 +2117,7 @@ func resourceComputeInstanceUpdate(d *schema.ResourceData, meta interface{}) err return nil }, - ) + }) if err != nil { return err @@ -2473,7 +2473,7 @@ func expandBootDisk(d *schema.ResourceData, config *transport_tpg.Config, projec if v, ok := d.GetOk("boot_disk.0.initialize_params.0.image"); ok { imageName := v.(string) - imageUrl, err := resolveImage(config, project, imageName, userAgent) + imageUrl, err := ResolveImage(config, project, imageName, userAgent) if err != nil { return nil, fmt.Errorf("Error resolving image name '%s': %s", imageName, err) } diff --git a/mmv1/third_party/terraform/resources/resource_compute_instance_from_machine_image.go.erb b/mmv1/third_party/terraform/services/compute/resource_compute_instance_from_machine_image.go.erb similarity index 99% rename from mmv1/third_party/terraform/resources/resource_compute_instance_from_machine_image.go.erb rename to mmv1/third_party/terraform/services/compute/resource_compute_instance_from_machine_image.go.erb index 81140648e6a7..0461a98a51f6 100644 --- a/mmv1/third_party/terraform/resources/resource_compute_instance_from_machine_image.go.erb +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_from_machine_image.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package compute <% unless version == 'ga' -%> import ( diff --git a/mmv1/third_party/terraform/resources/resource_compute_instance_from_template.go.erb b/mmv1/third_party/terraform/services/compute/resource_compute_instance_from_template.go.erb similarity index 99% rename from mmv1/third_party/terraform/resources/resource_compute_instance_from_template.go.erb rename to mmv1/third_party/terraform/services/compute/resource_compute_instance_from_template.go.erb index d722971f3fe1..5d624b4b797e 100644 --- a/mmv1/third_party/terraform/resources/resource_compute_instance_from_template.go.erb +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_from_template.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package compute import ( "encoding/json" diff --git a/mmv1/third_party/terraform/resources/resource_compute_instance_group.go.erb b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group.go.erb similarity index 99% rename from mmv1/third_party/terraform/resources/resource_compute_instance_group.go.erb rename to mmv1/third_party/terraform/services/compute/resource_compute_instance_group.go.erb index e369fbc76eb2..856c24d3b6aa 100644 --- a/mmv1/third_party/terraform/resources/resource_compute_instance_group.go.erb +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package compute import ( "fmt" diff --git a/mmv1/third_party/terraform/resources/resource_compute_instance_group_manager.go.erb b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager.go.erb similarity index 99% rename from mmv1/third_party/terraform/resources/resource_compute_instance_group_manager.go.erb rename to mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager.go.erb index c5a581a0ff3c..28c1a0eb6b4f 100644 --- a/mmv1/third_party/terraform/resources/resource_compute_instance_group_manager.go.erb +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> 
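// Editor's illustration (not part of this change): the updated calling convention used in
// the hunks above, where transport_tpg.Retry now takes a transport_tpg.RetryOptions value
// with the closure in RetryFunc rather than a bare function argument. The instance
// read-back mirrors the fingerprint-retry pattern shown above; identifiers are invented
// and only the RetryFunc field is populated here.
package compute

import (
	"fmt"

	transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport"
)

func exampleRetryOptions(config *transport_tpg.Config, userAgent, project, zone, name string) error {
	return transport_tpg.Retry(transport_tpg.RetryOptions{
		RetryFunc: func() error {
			// Re-read the instance so a concurrent update (HTTP 412, stale fingerprint)
			// is picked up by the next attempt.
			instance, err := config.NewComputeClient(userAgent).Instances.Get(project, zone, name).Do()
			if err != nil {
				return fmt.Errorf("error retrieving instance: %s", err)
			}
			_ = instance // ...modify and resubmit the fingerprinted field here
			return nil
		},
	})
}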
-package google +package compute import ( "fmt" @@ -577,7 +577,7 @@ func resourceComputeInstanceGroupManagerCreate(d *schema.ResourceData, meta inte TargetSize: int64(d.Get("target_size").(int)), ListManagedInstancesResults: d.Get("list_managed_instances_results").(string), NamedPorts: getNamedPortsBeta(d.Get("named_port").(*schema.Set).List()), - TargetPools: convertStringSet(d.Get("target_pools").(*schema.Set)), + TargetPools: tpgresource.ConvertStringSet(d.Get("target_pools").(*schema.Set)), AutoHealingPolicies: expandAutoHealingPolicies(d.Get("auto_healing_policies").([]interface{})), Versions: expandVersions(d.Get("version").([]interface{})), UpdatePolicy: expandUpdatePolicy(d.Get("update_policy").([]interface{})), @@ -770,7 +770,7 @@ func resourceComputeInstanceGroupManagerRead(d *schema.ResourceData, meta interf if err := d.Set("list_managed_instances_results", manager.ListManagedInstancesResults); err != nil { return fmt.Errorf("Error setting list_managed_instances_results: %s", err) } - if err = d.Set("target_pools", mapStringArr(manager.TargetPools, tpgresource.ConvertSelfLinkToV1)); err != nil { + if err = d.Set("target_pools", tpgresource.MapStringArr(manager.TargetPools, tpgresource.ConvertSelfLinkToV1)); err != nil { return fmt.Errorf("Error setting target_pools in state: %s", err.Error()) } if err = d.Set("named_port", flattenNamedPortsBeta(manager.NamedPorts)); err != nil { @@ -862,7 +862,7 @@ func resourceComputeInstanceGroupManagerUpdate(d *schema.ResourceData, meta inte } if d.HasChange("target_pools") { - updatedManager.TargetPools = convertStringSet(d.Get("target_pools").(*schema.Set)) + updatedManager.TargetPools = tpgresource.ConvertStringSet(d.Get("target_pools").(*schema.Set)) updatedManager.ForceSendFields = append(updatedManager.ForceSendFields, "TargetPools") change = true } @@ -1399,11 +1399,11 @@ func expandAllInstancesConfig(old []interface{}, new []interface{}) *compute.Ins for _, raw := range new { properties = &compute.InstancePropertiesPatch{} data := raw.(map[string]interface{}) - properties.Metadata = convertStringMap(data["metadata"].(map[string]interface{})) + properties.Metadata = tpgresource.ConvertStringMap(data["metadata"].(map[string]interface{})) if len(properties.Metadata) == 0 { properties.NullFields = append(properties.NullFields, "Metadata") } - properties.Labels = convertStringMap(data["labels"].(map[string]interface{})) + properties.Labels = tpgresource.ConvertStringMap(data["labels"].(map[string]interface{})) if len(properties.Labels) == 0 { properties.NullFields = append(properties.NullFields, "Labels") } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager_internal_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager_internal_test.go new file mode 100644 index 000000000000..315462e277fb --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager_internal_test.go @@ -0,0 +1,78 @@ +package compute + +import ( + "testing" +) + +func TestInstanceGroupManager_parseUniqueId(t *testing.T) { + expectations := map[string][]string{ + "projects/imre-test/global/instanceTemplates/example-template-custom?uniqueId=123": {"projects/imre-test/global/instanceTemplates/example-template-custom", "123"}, + "https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom?uniqueId=123": 
{"https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom", "123"}, + "projects/imre-test/global/instanceTemplates/example-template-custom": {"projects/imre-test/global/instanceTemplates/example-template-custom", ""}, + "https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom": {"https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom", ""}, + "example-template-custom?uniqueId=123": {"example-template-custom", "123"}, + + // this test demonstrates that uniqueIds can't override eachother + "projects/imre-test/global/instanceTemplates/example?uniqueId=123?uniqueId=456": {"projects/imre-test/global/instanceTemplates/example", "123?uniqueId=456"}, + } + + for k, v := range expectations { + aName, aUniqueId := parseUniqueId(k) + if v[0] != aName { + t.Errorf("parseUniqueId failed; name of %v should be %v, not %v", k, v[0], aName) + } + if v[1] != aUniqueId { + t.Errorf("parseUniqueId failed; uniqueId of %v should be %v, not %v", k, v[1], aUniqueId) + } + } +} + +func TestInstanceGroupManager_compareInstanceTemplate(t *testing.T) { + shouldAllMatch := []string{ + // uniqueId not present + "https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom", + "projects/imre-test/global/instanceTemplates/example-template-custom", + // uniqueId present + "https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom?uniqueId=123", + "projects/imre-test/global/instanceTemplates/example-template-custom?uniqueId=123", + } + shouldNotMatch := map[string]string{ + // mismatching name + "https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom": "projects/imre-test/global/instanceTemplates/example-template-custom2", + "projects/imre-test/global/instanceTemplates/example-template-custom": "https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom2", + // matching name, but mismatching uniqueId + "https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom?uniqueId=123": "projects/imre-test/global/instanceTemplates/example-template-custom?uniqueId=1234", + "projects/imre-test/global/instanceTemplates/example-template-custom?uniqueId=123": "https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom?uniqueId=1234", + } + for _, v1 := range shouldAllMatch { + for _, v2 := range shouldAllMatch { + if !compareSelfLinkRelativePathsIgnoreParams("", v1, v2, nil) { + t.Fatalf("compareSelfLinkRelativePathsIgnoreParams did not match (and should have) %v and %v", v1, v2) + } + } + } + + for v1, v2 := range shouldNotMatch { + if compareSelfLinkRelativePathsIgnoreParams("", v1, v2, nil) { + t.Fatalf("compareSelfLinkRelativePathsIgnoreParams did match (and shouldn't) %v and %v", v1, v2) + } + } +} + +func TestInstanceGroupManager_convertUniqueId(t *testing.T) { + matches := map[string]string{ + // uniqueId not present (should return the same) + "https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom": "https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom", + "projects/imre-test/global/instanceTemplates/example-template-custom": "projects/imre-test/global/instanceTemplates/example-template-custom", + // 
uniqueId present (should return the last component replaced) + "https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom?uniqueId=123": "https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/123", + "projects/imre-test/global/instanceTemplates/example-template-custom?uniqueId=123": "projects/imre-test/global/instanceTemplates/123", + "tf-test-igm-8amncgtq22?uniqueId=8361222501423044003": "8361222501423044003", + } + for input, expected := range matches { + actual := ConvertToUniqueIdWhenPresent(input) + if actual != expected { + t.Fatalf("invalid return value by ConvertToUniqueIdWhenPresent for input %v; expected: %v, actual: %v", input, expected, actual) + } + } +} diff --git a/mmv1/third_party/terraform/resources/resource_compute_instance_group_migrate.go b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_migrate.go similarity index 99% rename from mmv1/third_party/terraform/resources/resource_compute_instance_group_migrate.go rename to mmv1/third_party/terraform/services/compute/resource_compute_instance_group_migrate.go index 4c7159735406..04fcdbe45dc0 100644 --- a/mmv1/third_party/terraform/resources/resource_compute_instance_group_migrate.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_migrate.go @@ -1,4 +1,4 @@ -package google +package compute import ( "fmt" diff --git a/mmv1/third_party/terraform/tests/resource_compute_instance_group_migrate_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_migrate_internal_test.go similarity index 99% rename from mmv1/third_party/terraform/tests/resource_compute_instance_group_migrate_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_instance_group_migrate_internal_test.go index be77ec6e3b0e..b5de2ac06024 100644 --- a/mmv1/third_party/terraform/tests/resource_compute_instance_group_migrate_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_migrate_internal_test.go @@ -1,4 +1,4 @@ -package google +package compute import ( "testing" diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_internal_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_instance_internal_test.go new file mode 100644 index 000000000000..7a47c61ed476 --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_internal_test.go @@ -0,0 +1,135 @@ +package compute + +import ( + "testing" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" +) + +func TestComputeInstance_networkIPCustomizedDiff(t *testing.T) { + t.Parallel() + + d := &tpgresource.ResourceDiffMock{ + Before: map[string]interface{}{ + "network_interface.#": 0, + }, + After: map[string]interface{}{ + "network_interface.#": 1, + }, + } + + err := forceNewIfNetworkIPNotUpdatableFunc(d) + if err != nil { + t.Error(err) + } + + if d.IsForceNew { + t.Errorf("Expected not force new if network_interface array size changes") + } + + type NetworkInterface struct { + Network string + Subnetwork string + SubnetworkProject string + NetworkIP string + } + NIBefore := NetworkInterface{ + Network: "a", + Subnetwork: "a", + SubnetworkProject: "a", + NetworkIP: "a", + } + + cases := map[string]struct { + ExpectedForceNew bool + Before NetworkInterface + After NetworkInterface + }{ + "NetworkIP only change": { + ExpectedForceNew: true, + Before: NIBefore, + After: NetworkInterface{ + Network: "a", + 
Subnetwork: "a", + SubnetworkProject: "a", + NetworkIP: "b", + }, + }, + "NetworkIP and Network change": { + ExpectedForceNew: false, + Before: NIBefore, + After: NetworkInterface{ + Network: "b", + Subnetwork: "a", + SubnetworkProject: "a", + NetworkIP: "b", + }, + }, + "NetworkIP and Subnetwork change": { + ExpectedForceNew: false, + Before: NIBefore, + After: NetworkInterface{ + Network: "a", + Subnetwork: "b", + SubnetworkProject: "a", + NetworkIP: "b", + }, + }, + "NetworkIP and SubnetworkProject change": { + ExpectedForceNew: false, + Before: NIBefore, + After: NetworkInterface{ + Network: "a", + Subnetwork: "a", + SubnetworkProject: "b", + NetworkIP: "b", + }, + }, + "All change": { + ExpectedForceNew: false, + Before: NIBefore, + After: NetworkInterface{ + Network: "b", + Subnetwork: "b", + SubnetworkProject: "b", + NetworkIP: "b", + }, + }, + "No change": { + ExpectedForceNew: false, + Before: NIBefore, + After: NetworkInterface{ + Network: "a", + Subnetwork: "a", + SubnetworkProject: "a", + NetworkIP: "a", + }, + }, + } + + for tn, tc := range cases { + d := &tpgresource.ResourceDiffMock{ + Before: map[string]interface{}{ + "network_interface.#": 1, + "network_interface.0.network": tc.Before.Network, + "network_interface.0.subnetwork": tc.Before.Subnetwork, + "network_interface.0.subnetwork_project": tc.Before.SubnetworkProject, + "network_interface.0.network_ip": tc.Before.NetworkIP, + }, + After: map[string]interface{}{ + "network_interface.#": 1, + "network_interface.0.network": tc.After.Network, + "network_interface.0.subnetwork": tc.After.Subnetwork, + "network_interface.0.subnetwork_project": tc.After.SubnetworkProject, + "network_interface.0.network_ip": tc.After.NetworkIP, + }, + } + err := forceNewIfNetworkIPNotUpdatableFunc(d) + if err != nil { + t.Error(err) + } + if tc.ExpectedForceNew != d.IsForceNew { + t.Errorf("%v: expected d.IsForceNew to be %v, but was %v", tn, tc.ExpectedForceNew, d.IsForceNew) + } + } +} diff --git a/mmv1/third_party/terraform/resources/resource_compute_instance_migrate.go.erb b/mmv1/third_party/terraform/services/compute/resource_compute_instance_migrate.go.erb similarity index 99% rename from mmv1/third_party/terraform/resources/resource_compute_instance_migrate.go.erb rename to mmv1/third_party/terraform/services/compute/resource_compute_instance_migrate.go.erb index 6d0104d20bf6..d0e4e1e08add 100644 --- a/mmv1/third_party/terraform/resources/resource_compute_instance_migrate.go.erb +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_migrate.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package compute import ( "fmt" @@ -19,7 +19,7 @@ import ( <% end -%> ) -func resourceComputeInstanceMigrateState( +func ResourceComputeInstanceMigrateState( v int, is *terraform.InstanceState, meta interface{}) (*terraform.InstanceState, error) { if is.Empty() { log.Println("[DEBUG] Empty InstanceState; nothing to migrate.") @@ -456,7 +456,7 @@ func getDiskFromEncryptionKey(instance *compute.Instance, encryptionKey string) } func getDiskFromAutoDeleteAndImage(config *transport_tpg.Config, instance *compute.Instance, allDisks map[string]*compute.Disk, autoDelete bool, image, project, zone string) (*compute.AttachedDisk, error) { - img, err := resolveImage(config, project, image, config.UserAgent) + img, err := ResolveImage(config, project, image, config.UserAgent) if err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/resources/resource_compute_instance_template.go.erb 
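// Editor's note (illustrative summary, not part of this change): the instance group manager
// tests earlier in this change encode how instance template references carrying a
// "?uniqueId=" suffix are handled:
//
//   parseUniqueId("projects/p/global/instanceTemplates/tmpl?uniqueId=123")
//       -> ("projects/p/global/instanceTemplates/tmpl", "123")
//   compareSelfLinkRelativePathsIgnoreParams treats the same template with and without the
//       suffix as equal, but differing uniqueIds as different templates.
//   ConvertToUniqueIdWhenPresent("projects/p/global/instanceTemplates/tmpl?uniqueId=123")
//       -> "projects/p/global/instanceTemplates/123"
//
// ("p" and "tmpl" are placeholders; the tests above use longer example names.)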
b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.erb similarity index 99% rename from mmv1/third_party/terraform/resources/resource_compute_instance_template.go.erb rename to mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.erb index 877fdb4fb483..0544a1151b51 100644 --- a/mmv1/third_party/terraform/resources/resource_compute_instance_template.go.erb +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.erb @@ -1,6 +1,6 @@ <% autogen_exception -%> -package google +package compute import ( "context" @@ -936,7 +936,7 @@ func resourceComputeInstanceTemplateSourceImageCustomizeDiff(_ context.Context, if err != nil { return err } - oldResolved, err := resolveImage(config, project, old.(string), config.UserAgent) + oldResolved, err := ResolveImage(config, project, old.(string), config.UserAgent) if err != nil { return err } @@ -944,7 +944,7 @@ func resourceComputeInstanceTemplateSourceImageCustomizeDiff(_ context.Context, if err != nil { return err } - newResolved, err := resolveImage(config, project, new.(string), config.UserAgent) + newResolved, err := ResolveImage(config, project, new.(string), config.UserAgent) if err != nil { return err } @@ -1077,7 +1077,7 @@ func buildDisks(d *schema.ResourceData, config *transport_tpg.Config) ([]*comput if v, ok := d.GetOk(prefix + ".source_image"); ok { imageName := v.(string) - imageUrl, err := resolveImage(config, project, imageName, userAgent) + imageUrl, err := ResolveImage(config, project, imageName, userAgent) if err != nil { return nil, fmt.Errorf( "Error resolving image name '%s': %s", @@ -1168,7 +1168,7 @@ func expandInstanceTemplateGuestAccelerators(d tpgresource.TerraformResourceData } func expandInstanceTemplateResourcePolicies(d tpgresource.TerraformResourceData, dataKey string) []string { - return convertAndMapStringArr(d.Get(dataKey).([]interface{}), tpgresource.GetResourceNameFromSelfLink) + return tpgresource.ConvertAndMapStringArr(d.Get(dataKey).([]interface{}), tpgresource.GetResourceNameFromSelfLink) } func resourceComputeInstanceTemplateCreate(d *schema.ResourceData, meta interface{}) error { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_internal_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_internal_test.go new file mode 100644 index 000000000000..1ff79ef03e2a --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_internal_test.go @@ -0,0 +1,219 @@ +package compute + +import ( + "reflect" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" +) + +func TestComputeInstanceTemplate_reorderDisks(t *testing.T) { + t.Parallel() + + cBoot := map[string]interface{}{ + "source": "boot-source", + } + cFallThrough := map[string]interface{}{ + "auto_delete": true, + } + cDeviceName := map[string]interface{}{ + "device_name": "disk-1", + } + cScratch := map[string]interface{}{ + "type": "SCRATCH", + } + cSource := map[string]interface{}{ + "source": "disk-source", + } + cScratchNvme := map[string]interface{}{ + "type": "SCRATCH", + "interface": "NVME", + } + + aBoot := map[string]interface{}{ + "source": "boot-source", + "boot": true, + } + aScratchNvme := map[string]interface{}{ + "device_name": "scratch-1", + "type": "SCRATCH", + "interface": "NVME", + } + aSource := map[string]interface{}{ + "device_name": "disk-2", + "source": "disk-source", + } + aScratchScsi := 
map[string]interface{}{ + "device_name": "scratch-2", + "type": "SCRATCH", + "interface": "SCSI", + } + aFallThrough := map[string]interface{}{ + "device_name": "disk-3", + "auto_delete": true, + "source": "fake-source", + } + aFallThrough2 := map[string]interface{}{ + "device_name": "disk-4", + "auto_delete": true, + "source": "fake-source", + } + aDeviceName := map[string]interface{}{ + "device_name": "disk-1", + "auto_delete": true, + "source": "fake-source-2", + } + aNoMatch := map[string]interface{}{ + "device_name": "disk-2", + "source": "disk-source-doesn't-match", + } + + cases := map[string]struct { + ConfigDisks []interface{} + ApiDisks []map[string]interface{} + ExpectedResult []map[string]interface{} + }{ + "all disks represented": { + ApiDisks: []map[string]interface{}{ + aBoot, aScratchNvme, aSource, aScratchScsi, aFallThrough, aDeviceName, + }, + ConfigDisks: []interface{}{ + cBoot, cFallThrough, cDeviceName, cScratch, cSource, cScratchNvme, + }, + ExpectedResult: []map[string]interface{}{ + aBoot, aFallThrough, aDeviceName, aScratchScsi, aSource, aScratchNvme, + }, + }, + "one non-match": { + ApiDisks: []map[string]interface{}{ + aBoot, aNoMatch, aScratchNvme, aScratchScsi, aFallThrough, aDeviceName, + }, + ConfigDisks: []interface{}{ + cBoot, cFallThrough, cDeviceName, cScratch, cSource, cScratchNvme, + }, + ExpectedResult: []map[string]interface{}{ + aBoot, aFallThrough, aDeviceName, aScratchScsi, aScratchNvme, aNoMatch, + }, + }, + "two fallthroughs": { + ApiDisks: []map[string]interface{}{ + aBoot, aScratchNvme, aFallThrough, aSource, aScratchScsi, aFallThrough2, aDeviceName, + }, + ConfigDisks: []interface{}{ + cBoot, cFallThrough, cDeviceName, cScratch, cFallThrough, cSource, cScratchNvme, + }, + ExpectedResult: []map[string]interface{}{ + aBoot, aFallThrough, aDeviceName, aScratchScsi, aFallThrough2, aSource, aScratchNvme, + }, + }, + } + + for tn, tc := range cases { + t.Run(tn, func(t *testing.T) { + // Disks read using d.Get will always have values for all keys, so set those values + for _, disk := range tc.ConfigDisks { + d := disk.(map[string]interface{}) + for _, k := range []string{"auto_delete", "boot"} { + if _, ok := d[k]; !ok { + d[k] = false + } + } + for _, k := range []string{"device_name", "disk_name", "interface", "mode", "source", "type"} { + if _, ok := d[k]; !ok { + d[k] = "" + } + } + } + + // flattened disks always set auto_delete, boot, device_name, interface, mode, source, and type + for _, d := range tc.ApiDisks { + for _, k := range []string{"auto_delete", "boot"} { + if _, ok := d[k]; !ok { + d[k] = false + } + } + + for _, k := range []string{"device_name", "interface", "mode", "source"} { + if _, ok := d[k]; !ok { + d[k] = "" + } + } + if _, ok := d["type"]; !ok { + d["type"] = "PERSISTENT" + } + } + + result := reorderDisks(tc.ConfigDisks, tc.ApiDisks) + if !reflect.DeepEqual(tc.ExpectedResult, result) { + t.Errorf("reordering did not match\nExpected: %+v\nActual: %+v", tc.ExpectedResult, result) + } + }) + } +} + +func TestComputeInstanceTemplate_scratchDiskSizeCustomizeDiff(t *testing.T) { + t.Parallel() + + cases := map[string]struct { + Typee string // misspelled on purpose, type is a special symbol + DiskType string + DiskSize int + Interfacee string + ExpectError bool + }{ + "scratch disk correct size 1": { + Typee: "SCRATCH", + DiskType: "local-ssd", + DiskSize: 375, + Interfacee: "NVME", + ExpectError: false, + }, + "scratch disk correct size 2": { + Typee: "SCRATCH", + DiskType: "local-ssd", + DiskSize: 3000, + Interfacee: 
"NVME", + ExpectError: false, + }, + "scratch disk incorrect size": { + Typee: "SCRATCH", + DiskType: "local-ssd", + DiskSize: 300, + Interfacee: "NVME", + ExpectError: true, + }, + "scratch disk incorrect interface": { + Typee: "SCRATCH", + DiskType: "local-ssd", + DiskSize: 3000, + Interfacee: "SCSI", + ExpectError: true, + }, + "non-scratch disk": { + Typee: "PERSISTENT", + DiskType: "", + DiskSize: 300, + Interfacee: "NVME", + ExpectError: false, + }, + } + + for tn, tc := range cases { + d := &tpgresource.ResourceDiffMock{ + After: map[string]interface{}{ + "disk.#": 1, + "disk.0.type": tc.Typee, + "disk.0.disk_type": tc.DiskType, + "disk.0.disk_size_gb": tc.DiskSize, + "disk.0.interface": tc.Interfacee, + }, + } + err := resourceComputeInstanceTemplateScratchDiskCustomizeDiffFunc(d) + if tc.ExpectError && err == nil { + t.Errorf("%s failed, expected error but was none", tn) + } + if !tc.ExpectError && err != nil { + t.Errorf("%s failed, found unexpected error: %s", tn, err) + } + } +} diff --git a/mmv1/third_party/terraform/resources/resource_compute_instance_template_migrate.go b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_migrate.go similarity index 99% rename from mmv1/third_party/terraform/resources/resource_compute_instance_template_migrate.go rename to mmv1/third_party/terraform/services/compute/resource_compute_instance_template_migrate.go index df648d62dbcd..64c7fa5530ef 100644 --- a/mmv1/third_party/terraform/resources/resource_compute_instance_template_migrate.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_migrate.go @@ -1,4 +1,4 @@ -package google +package compute import ( "fmt" diff --git a/mmv1/third_party/terraform/tests/resource_compute_instance_template_migrate_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_migrate_internal_test.go similarity index 99% rename from mmv1/third_party/terraform/tests/resource_compute_instance_template_migrate_test.go rename to mmv1/third_party/terraform/services/compute/resource_compute_instance_template_migrate_internal_test.go index 3b3922d157d9..c9daec0ba4f7 100644 --- a/mmv1/third_party/terraform/tests/resource_compute_instance_template_migrate_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_migrate_internal_test.go @@ -1,4 +1,4 @@ -package google +package compute import ( "testing" diff --git a/mmv1/third_party/terraform/resources/resource_compute_project_metadata.go.erb b/mmv1/third_party/terraform/services/compute/resource_compute_project_metadata.go.erb similarity index 98% rename from mmv1/third_party/terraform/resources/resource_compute_project_metadata.go.erb rename to mmv1/third_party/terraform/services/compute/resource_compute_project_metadata.go.erb index 55e6bc4eabde..8186762eb938 100644 --- a/mmv1/third_party/terraform/resources/resource_compute_project_metadata.go.erb +++ b/mmv1/third_party/terraform/services/compute/resource_compute_project_metadata.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package compute import ( "fmt" @@ -104,7 +104,7 @@ func resourceComputeProjectMetadataRead(d *schema.ResourceData, meta interface{} return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("Project metadata for project %q", projectId)) } - err = d.Set("metadata", flattenMetadata(project.CommonInstanceMetadata)) + err = d.Set("metadata", FlattenMetadata(project.CommonInstanceMetadata)) if err != nil { return fmt.Errorf("Error setting metadata: %s", 
err) } diff --git a/mmv1/third_party/terraform/resources/resource_compute_project_metadata_item.go.erb b/mmv1/third_party/terraform/services/compute/resource_compute_project_metadata_item.go.erb similarity index 98% rename from mmv1/third_party/terraform/resources/resource_compute_project_metadata_item.go.erb rename to mmv1/third_party/terraform/services/compute/resource_compute_project_metadata_item.go.erb index e0ef3d717439..5cf62dc29590 100644 --- a/mmv1/third_party/terraform/resources/resource_compute_project_metadata_item.go.erb +++ b/mmv1/third_party/terraform/services/compute/resource_compute_project_metadata_item.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package compute import ( "fmt" @@ -108,7 +108,7 @@ func resourceComputeProjectMetadataItemRead(d *schema.ResourceData, meta interfa return fmt.Errorf("Error loading project '%s': %s", projectID, err) } - md := flattenMetadata(project.CommonInstanceMetadata) + md := FlattenMetadata(project.CommonInstanceMetadata) val, ok := md[d.Id()] if !ok { // Resource no longer exists @@ -189,7 +189,7 @@ func updateComputeCommonInstanceMetadata(config *transport_tpg.Config, projectID return fmt.Errorf("Error loading project '%s': %s", projectID, err) } - md := flattenMetadata(project.CommonInstanceMetadata) + md := FlattenMetadata(project.CommonInstanceMetadata) val, ok := md[key] diff --git a/mmv1/third_party/terraform/resources/resource_compute_region_instance_group_manager.go.erb b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_group_manager.go.erb similarity index 99% rename from mmv1/third_party/terraform/resources/resource_compute_region_instance_group_manager.go.erb rename to mmv1/third_party/terraform/services/compute/resource_compute_region_instance_group_manager.go.erb index 3c05319b9162..89f5b93082ef 100644 --- a/mmv1/third_party/terraform/resources/resource_compute_region_instance_group_manager.go.erb +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_group_manager.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package compute import ( "fmt" @@ -551,7 +551,7 @@ func resourceComputeRegionInstanceGroupManagerCreate(d *schema.ResourceData, met TargetSize: int64(d.Get("target_size").(int)), ListManagedInstancesResults: d.Get("list_managed_instances_results").(string), NamedPorts: getNamedPortsBeta(d.Get("named_port").(*schema.Set).List()), - TargetPools: convertStringSet(d.Get("target_pools").(*schema.Set)), + TargetPools: tpgresource.ConvertStringSet(d.Get("target_pools").(*schema.Set)), AutoHealingPolicies: expandAutoHealingPolicies(d.Get("auto_healing_policies").([]interface{})), Versions: expandVersions(d.Get("version").([]interface{})), UpdatePolicy: expandRegionUpdatePolicy(d.Get("update_policy").([]interface{})), @@ -722,7 +722,7 @@ func resourceComputeRegionInstanceGroupManagerRead(d *schema.ResourceData, meta if err := d.Set("list_managed_instances_results", manager.ListManagedInstancesResults); err != nil { return fmt.Errorf("Error setting list_managed_instances_results: %s", err) } - if err := d.Set("target_pools", mapStringArr(manager.TargetPools, tpgresource.ConvertSelfLinkToV1)); err != nil { + if err := d.Set("target_pools", tpgresource.MapStringArr(manager.TargetPools, tpgresource.ConvertSelfLinkToV1)); err != nil { return fmt.Errorf("Error setting target_pools in state: %s", err.Error()) } if err := d.Set("named_port", flattenNamedPortsBeta(manager.NamedPorts)); err != nil { @@ -813,7 +813,7 @@ func 
resourceComputeRegionInstanceGroupManagerUpdate(d *schema.ResourceData, met var change bool if d.HasChange("target_pools") { - updatedManager.TargetPools = convertStringSet(d.Get("target_pools").(*schema.Set)) + updatedManager.TargetPools = tpgresource.ConvertStringSet(d.Get("target_pools").(*schema.Set)) updatedManager.ForceSendFields = append(updatedManager.ForceSendFields, "TargetPools") change = true } diff --git a/mmv1/third_party/terraform/resources/resource_compute_region_instance_template.go.erb b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template.go.erb similarity index 99% rename from mmv1/third_party/terraform/resources/resource_compute_region_instance_template.go.erb rename to mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template.go.erb index 60e32e9f191c..be490665958a 100644 --- a/mmv1/third_party/terraform/resources/resource_compute_region_instance_template.go.erb +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template.go.erb @@ -1,6 +1,6 @@ <% autogen_exception -%> -package google +package compute <% unless version == 'ga' -%> diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_internal_test.go.erb b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_internal_test.go.erb new file mode 100644 index 000000000000..ef36cc13625a --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_internal_test.go.erb @@ -0,0 +1,224 @@ +<% autogen_exception -%> + +package compute + +<% unless version == 'ga' -%> + +import ( + "reflect" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" +) + +func TestComputeRegionInstanceTemplate_reorderDisks(t *testing.T) { + t.Parallel() + + cBoot := map[string]interface{}{ + "source": "boot-source", + } + cFallThrough := map[string]interface{}{ + "auto_delete": true, + } + cDeviceName := map[string]interface{}{ + "device_name": "disk-1", + } + cScratch := map[string]interface{}{ + "type": "SCRATCH", + } + cSource := map[string]interface{}{ + "source": "disk-source", + } + cScratchNvme := map[string]interface{}{ + "type": "SCRATCH", + "interface": "NVME", + } + + aBoot := map[string]interface{}{ + "source": "boot-source", + "boot": true, + } + aScratchNvme := map[string]interface{}{ + "device_name": "scratch-1", + "type": "SCRATCH", + "interface": "NVME", + } + aSource := map[string]interface{}{ + "device_name": "disk-2", + "source": "disk-source", + } + aScratchScsi := map[string]interface{}{ + "device_name": "scratch-2", + "type": "SCRATCH", + "interface": "SCSI", + } + aFallThrough := map[string]interface{}{ + "device_name": "disk-3", + "auto_delete": true, + "source": "fake-source", + } + aFallThrough2 := map[string]interface{}{ + "device_name": "disk-4", + "auto_delete": true, + "source": "fake-source", + } + aDeviceName := map[string]interface{}{ + "device_name": "disk-1", + "auto_delete": true, + "source": "fake-source-2", + } + aNoMatch := map[string]interface{}{ + "device_name": "disk-2", + "source": "disk-source-doesn't-match", + } + + cases := map[string]struct { + ConfigDisks []interface{} + ApiDisks []map[string]interface{} + ExpectedResult []map[string]interface{} + }{ + "all disks represented": { + ApiDisks: []map[string]interface{}{ + aBoot, aScratchNvme, aSource, aScratchScsi, aFallThrough, aDeviceName, + }, + ConfigDisks: []interface{}{ + cBoot, cFallThrough, cDeviceName, 
cScratch, cSource, cScratchNvme, + }, + ExpectedResult: []map[string]interface{}{ + aBoot, aFallThrough, aDeviceName, aScratchScsi, aSource, aScratchNvme, + }, + }, + "one non-match": { + ApiDisks: []map[string]interface{}{ + aBoot, aNoMatch, aScratchNvme, aScratchScsi, aFallThrough, aDeviceName, + }, + ConfigDisks: []interface{}{ + cBoot, cFallThrough, cDeviceName, cScratch, cSource, cScratchNvme, + }, + ExpectedResult: []map[string]interface{}{ + aBoot, aFallThrough, aDeviceName, aScratchScsi, aScratchNvme, aNoMatch, + }, + }, + "two fallthroughs": { + ApiDisks: []map[string]interface{}{ + aBoot, aScratchNvme, aFallThrough, aSource, aScratchScsi, aFallThrough2, aDeviceName, + }, + ConfigDisks: []interface{}{ + cBoot, cFallThrough, cDeviceName, cScratch, cFallThrough, cSource, cScratchNvme, + }, + ExpectedResult: []map[string]interface{}{ + aBoot, aFallThrough, aDeviceName, aScratchScsi, aFallThrough2, aSource, aScratchNvme, + }, + }, + } + + for tn, tc := range cases { + t.Run(tn, func(t *testing.T) { + // Disks read using d.Get will always have values for all keys, so set those values + for _, disk := range tc.ConfigDisks { + d := disk.(map[string]interface{}) + for _, k := range []string{"auto_delete", "boot"} { + if _, ok := d[k]; !ok { + d[k] = false + } + } + for _, k := range []string{"device_name", "disk_name", "interface", "mode", "source", "type"} { + if _, ok := d[k]; !ok { + d[k] = "" + } + } + } + + // flattened disks always set auto_delete, boot, device_name, interface, mode, source, and type + for _, d := range tc.ApiDisks { + for _, k := range []string{"auto_delete", "boot"} { + if _, ok := d[k]; !ok { + d[k] = false + } + } + + for _, k := range []string{"device_name", "interface", "mode", "source"} { + if _, ok := d[k]; !ok { + d[k] = "" + } + } + if _, ok := d["type"]; !ok { + d["type"] = "PERSISTENT" + } + } + + result := reorderDisks(tc.ConfigDisks, tc.ApiDisks) + if !reflect.DeepEqual(tc.ExpectedResult, result) { + t.Errorf("reordering did not match\nExpected: %+v\nActual: %+v", tc.ExpectedResult, result) + } + }) + } +} + +func TestComputeRegionInstanceTemplate_scratchDiskSizeCustomizeDiff(t *testing.T) { + t.Parallel() + + cases := map[string]struct { + Typee string // misspelled on purpose, type is a special symbol + DiskType string + DiskSize int + Interfacee string + ExpectError bool + }{ + "scratch disk correct size 1": { + Typee: "SCRATCH", + DiskType: "local-ssd", + DiskSize: 375, + Interfacee: "NVME", + ExpectError: false, + }, + "scratch disk correct size 2": { + Typee: "SCRATCH", + DiskType: "local-ssd", + DiskSize: 3000, + Interfacee: "NVME", + ExpectError: false, + }, + "scratch disk incorrect size": { + Typee: "SCRATCH", + DiskType: "local-ssd", + DiskSize: 300, + Interfacee: "NVME", + ExpectError: true, + }, + "scratch disk incorrect interface": { + Typee: "SCRATCH", + DiskType: "local-ssd", + DiskSize: 3000, + Interfacee: "SCSI", + ExpectError: true, + }, + "non-scratch disk": { + Typee: "PERSISTENT", + DiskType: "", + DiskSize: 300, + Interfacee: "NVME", + ExpectError: false, + }, + } + + for tn, tc := range cases { + d := &tpgresource.ResourceDiffMock{ + After: map[string]interface{}{ + "disk.#": 1, + "disk.0.type": tc.Typee, + "disk.0.disk_type": tc.DiskType, + "disk.0.disk_size_gb": tc.DiskSize, + "disk.0.interface": tc.Interfacee, + }, + } + err := resourceComputeInstanceTemplateScratchDiskCustomizeDiffFunc(d) + if tc.ExpectError && err == nil { + t.Errorf("%s failed, expected error but was none", tn) + } + if !tc.ExpectError && err != nil { + 
t.Errorf("%s failed, found unexpected error: %s", tn, err) + } + } +} +<% end -%> \ No newline at end of file diff --git a/mmv1/third_party/terraform/resources/resource_compute_router_interface.go.erb b/mmv1/third_party/terraform/services/compute/resource_compute_router_interface.go.erb similarity index 98% rename from mmv1/third_party/terraform/resources/resource_compute_router_interface.go.erb rename to mmv1/third_party/terraform/services/compute/resource_compute_router_interface.go.erb index d5c9a4b9d5b2..b8f48d98cffc 100644 --- a/mmv1/third_party/terraform/resources/resource_compute_router_interface.go.erb +++ b/mmv1/third_party/terraform/services/compute/resource_compute_router_interface.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package compute import ( "fmt" @@ -185,7 +185,7 @@ func resourceComputeRouterInterfaceCreate(d *schema.ResourceData, meta interface } if icVal, ok := d.GetOk("interconnect_attachment"); ok { - interconnectAttachment, err := getInterconnectAttachmentLink(config, project, region, icVal.(string), userAgent) + interconnectAttachment, err := tpgresource.GetInterconnectAttachmentLink(config, project, region, icVal.(string), userAgent) if err != nil { return err } diff --git a/mmv1/third_party/terraform/utils/security_policy_association_utils.go.erb b/mmv1/third_party/terraform/services/compute/security_policy_association_utils.go.erb similarity index 98% rename from mmv1/third_party/terraform/utils/security_policy_association_utils.go.erb rename to mmv1/third_party/terraform/services/compute/security_policy_association_utils.go.erb index 356ffd8feaf1..ac9c7cb0ca67 100644 --- a/mmv1/third_party/terraform/utils/security_policy_association_utils.go.erb +++ b/mmv1/third_party/terraform/services/compute/security_policy_association_utils.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package compute <% unless version == 'ga' -%> import ( diff --git a/mmv1/third_party/terraform/services/compute/stateful_mig_polling.go b/mmv1/third_party/terraform/services/compute/stateful_mig_polling.go new file mode 100644 index 000000000000..57f96ff9fd62 --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/stateful_mig_polling.go @@ -0,0 +1,169 @@ +package compute + +import ( + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +// PerInstanceConfig needs both regular operation polling AND custom polling for deletion which is why this is not generated +func resourceComputePerInstanceConfigPollRead(d *schema.ResourceData, meta interface{}) transport_tpg.PollReadFunc { + return func() (map[string]interface{}, error) { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return nil, err + } + + url, err := tpgresource.ReplaceVars(d, config, "{{ComputeBasePath}}projects/{{project}}/zones/{{zone}}/instanceGroupManagers/{{instance_group_manager}}/listPerInstanceConfigs") + if err != nil { + return nil, err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return nil, err + } + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: project, + RawURL: url, + UserAgent: userAgent, + }) + if err != nil { + return res, err + } + res, err = flattenNestedComputePerInstanceConfig(d, meta, 
res) + if err != nil { + return nil, err + } + + // Returns nil res if nested object is not found + return res, nil + } +} + +// RegionPerInstanceConfig needs both regular operation polling AND custom polling for deletion which is why this is not generated +func resourceComputeRegionPerInstanceConfigPollRead(d *schema.ResourceData, meta interface{}) transport_tpg.PollReadFunc { + return func() (map[string]interface{}, error) { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return nil, err + } + + url, err := tpgresource.ReplaceVars(d, config, "{{ComputeBasePath}}projects/{{project}}/regions/{{region}}/instanceGroupManagers/{{region_instance_group_manager}}/listPerInstanceConfigs") + if err != nil { + return nil, err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return nil, err + } + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: project, + RawURL: url, + UserAgent: userAgent, + }) + if err != nil { + return res, err + } + res, err = flattenNestedComputeRegionPerInstanceConfig(d, meta, res) + if err != nil { + return nil, err + } + + // Returns nil res if nested object is not found + return res, nil + } +} + +// Returns an instance name in the form zones/{zone}/instances/{instance} for the managed +// instance matching the name of a PerInstanceConfig +func findInstanceName(d *schema.ResourceData, config *transport_tpg.Config) (string, error) { + url, err := tpgresource.ReplaceVars(d, config, "{{ComputeBasePath}}projects/{{project}}/regions/{{region}}/instanceGroupManagers/{{region_instance_group_manager}}/listManagedInstances") + if err != nil { + return "", err + } + + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return "", err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return "", err + } + instanceNameToFind := fmt.Sprintf("/%s", d.Get("name").(string)) + + token := "" + for paginate := true; paginate; { + urlWithToken := "" + if token != "" { + urlWithToken = fmt.Sprintf("%s?maxResults=1&pageToken=%s", url, token) + } else { + urlWithToken = fmt.Sprintf("%s?maxResults=1", url) + } + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: project, + RawURL: urlWithToken, + UserAgent: userAgent, + }) + if err != nil { + return "", err + } + + managedInstances, ok := res["managedInstances"] + if !ok { + return "", fmt.Errorf("Failed to parse response for listManagedInstances for %s", d.Id()) + } + + managedInstancesArr := managedInstances.([]interface{}) + for _, managedInstanceRaw := range managedInstancesArr { + instance := managedInstanceRaw.(map[string]interface{}) + name, ok := instance["instance"] + if !ok { + return "", fmt.Errorf("Failed to read instance name for managed instance: %#v", instance) + } + if strings.HasSuffix(name.(string), instanceNameToFind) { + return name.(string), nil + } + } + + tokenRaw, paginate := res["nextPageToken"] + if paginate { + token = tokenRaw.(string) + } + } + + return "", fmt.Errorf("Failed to find managed instance with name: %s", instanceNameToFind) +} + +func PollCheckInstanceConfigDeleted(resp map[string]interface{}, respErr error) transport_tpg.PollResult { + if respErr != nil { + return transport_tpg.ErrorPollResult(respErr) + } + + // Nested object 404 appears as nil response + if resp == nil { + // Config 
no longer exists + return transport_tpg.SuccessPollResult() + } + + // Read status + status := resp["status"].(string) + if status == "DELETING" { + return transport_tpg.PendingStatusPollResult("Still deleting") + } + return transport_tpg.ErrorPollResult(fmt.Errorf("Expected PerInstanceConfig to be deleting but status is: %s", status)) +} diff --git a/mmv1/third_party/terraform/services/container/container_operation.go.erb b/mmv1/third_party/terraform/services/container/container_operation.go.erb new file mode 100644 index 000000000000..3f8836330216 --- /dev/null +++ b/mmv1/third_party/terraform/services/container/container_operation.go.erb @@ -0,0 +1,131 @@ +<% autogen_exception -%> +package container + +import ( + "context" + "errors" + "fmt" + "log" + "time" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + +<% if version == "ga" -%> + "google.golang.org/api/container/v1" +<% else -%> + container "google.golang.org/api/container/v1beta1" +<% end -%> +) + +type ContainerOperationWaiter struct { + Service *container.Service + Context context.Context + Op *container.Operation + Project string + Location string + UserProjectOverride bool +} + +func (w *ContainerOperationWaiter) State() string { + if w == nil || w.Op == nil { + return "" + } + return w.Op.Status +} + +func (w *ContainerOperationWaiter) Error() error { + if w == nil || w.Op == nil { + return nil + } + + // Error gets called during operation polling to see if there is an error. + // Since container's operation doesn't have an "error" field, we must wait + // until it's done and check the status message + for _, pending := range w.PendingStates() { + if w.Op.Status == pending { + return nil + } + } + + if w.Op.StatusMessage != "" { + return fmt.Errorf(w.Op.StatusMessage) + } + + return nil +} + +func (w *ContainerOperationWaiter) IsRetryable(error) bool { + return false +} + +func (w *ContainerOperationWaiter) SetOp(op interface{}) error { + var ok bool + w.Op, ok = op.(*container.Operation) + if !ok { + return fmt.Errorf("Unable to set operation. 
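The stateful_mig_polling.go helpers above split the work into a PollReadFunc (resourceComputePerInstanceConfigPollRead) and a response check (PollCheckInstanceConfigDeleted), but the call site that combines them is not part of this diff. A minimal sketch of how a delete path might drive them, assuming a generic polling helper with the shape transport_tpg.PollingWaitTime(pollFunc, checkFunc, activity, timeout, targetOccurrences); the helper name, its signature, and the target-occurrence count are assumptions, not shown here:

// Sketch only: assumes the same package and imports as stateful_mig_polling.go above.
func waitForPerInstanceConfigDeletion(d *schema.ResourceData, meta interface{}) error {
	err := transport_tpg.PollingWaitTime( // assumed helper, not defined in this diff
		resourceComputePerInstanceConfigPollRead(d, meta), // polls listPerInstanceConfigs and flattens the nested object
		PollCheckInstanceConfigDeleted,                     // nil response means deleted; "DELETING" keeps polling
		"Deleting PerInstanceConfig",
		d.Timeout(schema.TimeoutDelete),
		1, // consecutive successful checks required (assumed)
	)
	if err != nil {
		return fmt.Errorf("error waiting for PerInstanceConfig %q to be deleted: %s", d.Id(), err)
	}
	return nil
}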
Bad type!") + } + return nil +} + +func (w *ContainerOperationWaiter) QueryOp() (interface{}, error) { + if w == nil || w.Op == nil { + return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") + } + name := fmt.Sprintf("projects/%s/locations/%s/operations/%s", + w.Project, w.Location, w.Op.Name) + + var op *container.Operation + select { + case <-w.Context.Done(): + log.Println("[WARN] request has been cancelled early") + return op, errors.New("unable to finish polling, context has been cancelled") + default: + // default must be here to keep the previous case from blocking + } + err := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (opErr error) { + opGetCall := w.Service.Projects.Locations.Operations.Get(name) + if w.UserProjectOverride { + opGetCall.Header().Add("X-Goog-User-Project", w.Project) + } + op, opErr = opGetCall.Do() + return opErr + }, + Timeout: transport_tpg.DefaultRequestTimeout, + }) + + return op, err +} + +func (w *ContainerOperationWaiter) OpName() string { + if w == nil || w.Op == nil { + return "" + } + return w.Op.Name +} + +func (w *ContainerOperationWaiter) PendingStates() []string { + return []string{"PENDING", "RUNNING"} +} + +func (w *ContainerOperationWaiter) TargetStates() []string { + return []string{"DONE"} +} + +func ContainerOperationWait(config *transport_tpg.Config, op *container.Operation, project, location, activity, userAgent string, timeout time.Duration) error { + w := &ContainerOperationWaiter{ + Service: config.NewContainerClient(userAgent), + Context: config.Context, + Op: op, + Project: project, + Location: location, + UserProjectOverride: config.UserProjectOverride, + } + + if err := w.SetOp(op); err != nil { + return err + } + + return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) +} diff --git a/mmv1/third_party/terraform/services/containerattached/container_attached_operation.go b/mmv1/third_party/terraform/services/containerattached/container_attached_operation.go new file mode 100644 index 000000000000..1e2600f267cf --- /dev/null +++ b/mmv1/third_party/terraform/services/containerattached/container_attached_operation.go @@ -0,0 +1,73 @@ +package containerattached + +import ( + "encoding/json" + "fmt" + "time" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +type ContainerAttachedOperationWaiter struct { + Config *transport_tpg.Config + UserAgent string + Project string + tpgresource.CommonOperationWaiter +} + +func (w *ContainerAttachedOperationWaiter) QueryOp() (interface{}, error) { + if w == nil { + return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") + } + + region := tpgresource.GetRegionFromRegionalSelfLink(w.CommonOperationWaiter.Op.Name) + + // Returns the proper get. 
+ url := fmt.Sprintf("https://%s-gkemulticloud.googleapis.com/v1/%s", region, w.CommonOperationWaiter.Op.Name) + + return transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: w.Config, + Method: "GET", + Project: w.Project, + RawURL: url, + UserAgent: w.UserAgent, + }) +} + +func createContainerAttachedWaiter(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string) (*ContainerAttachedOperationWaiter, error) { + w := &ContainerAttachedOperationWaiter{ + Config: config, + UserAgent: userAgent, + Project: project, + } + if err := w.CommonOperationWaiter.SetOp(op); err != nil { + return nil, err + } + return w, nil +} + +// nolint: deadcode,unused +func ContainerAttachedOperationWaitTimeWithResponse(config *transport_tpg.Config, op map[string]interface{}, response *map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { + w, err := createContainerAttachedWaiter(config, op, project, activity, userAgent) + if err != nil { + return err + } + if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { + return err + } + return json.Unmarshal([]byte(w.CommonOperationWaiter.Op.Response), response) +} + +func ContainerAttachedOperationWaitTime(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { + if val, ok := op["name"]; !ok || val == "" { + // This was a synchronous call - there is no operation to wait for. + return nil + } + w, err := createContainerAttachedWaiter(config, op, project, activity, userAgent) + if err != nil { + // If w is nil, the op was synchronous. + return err + } + return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) +} diff --git a/mmv1/third_party/terraform/services/datalossprevention/resource_data_loss_prevention_stored_info_type_internal_test.go b/mmv1/third_party/terraform/services/datalossprevention/resource_data_loss_prevention_stored_info_type_internal_test.go new file mode 100644 index 000000000000..812b7973f0d3 --- /dev/null +++ b/mmv1/third_party/terraform/services/datalossprevention/resource_data_loss_prevention_stored_info_type_internal_test.go @@ -0,0 +1,210 @@ +package datalossprevention + +import ( + "testing" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" +) + +func TestAccDataLossPreventionStoredInfoType_dlpStoredInfoTypeCustomDiffFuncForceNew(t *testing.T) { + t.Parallel() + + cases := map[string]struct { + before map[string]interface{} + after map[string]interface{} + forcenew bool + }{ + "updating_dictionary": { + before: map[string]interface{}{ + "dictionary": map[string]interface{}{ + "word_list": map[string]interface{}{ + "word": []string{"word", "word2"}, + }, + }, + }, + after: map[string]interface{}{ + "dictionary": map[string]interface{}{ + "word_list": map[string]interface{}{ + "word": []string{"wordnew", "word2"}, + }, + }, + }, + forcenew: false, + }, + "updating_large_custom_dictionary": { + before: map[string]interface{}{ + "large_custom_dictionary": map[string]interface{}{ + "output_path": map[string]interface{}{ + "path": "gs://sample-dlp-bucket/something.json", + }, + }, + }, + after: map[string]interface{}{ + "large_custom_dictionary": map[string]interface{}{ + "output_path": map[string]interface{}{ + "path": "gs://sample-dlp-bucket/somethingnew.json", + }, + }, + }, + forcenew: false, + }, + "updating_regex": { + before: map[string]interface{}{ + "regex": map[string]interface{}{ + "pattern": "patient", 
+ }, + }, + after: map[string]interface{}{ + "regex": map[string]interface{}{ + "pattern": "newpatient", + }, + }, + forcenew: false, + }, + "changing_from_dictionary_to_large_custom_dictionary": { + before: map[string]interface{}{ + "dictionary": map[string]interface{}{ + "word_list": map[string]interface{}{ + "word": []string{"word", "word2"}, + }, + }, + }, + after: map[string]interface{}{ + "large_custom_dictionary": map[string]interface{}{ + "output_path": map[string]interface{}{ + "path": "gs://sample-dlp-bucket/something.json", + }, + }, + }, + forcenew: true, + }, + "changing_from_dictionary_to_regex": { + before: map[string]interface{}{ + "dictionary": map[string]interface{}{ + "word_list": map[string]interface{}{ + "word": []string{"word", "word2"}, + }, + }, + }, + after: map[string]interface{}{ + "regex": map[string]interface{}{ + "pattern": "patient", + }, + }, + forcenew: true, + }, + "changing_from_large_custom_dictionary_to_regex": { + before: map[string]interface{}{ + "large_custom_dictionary": map[string]interface{}{ + "output_path": map[string]interface{}{ + "path": "gs://sample-dlp-bucket/something.json", + }, + }, + }, + after: map[string]interface{}{ + "regex": map[string]interface{}{ + "pattern": "patient", + }, + }, + forcenew: true, + }, + "changing_from_large_custom_dictionary_to_dictionary": { + before: map[string]interface{}{ + "large_custom_dictionary": map[string]interface{}{ + "output_path": map[string]interface{}{ + "path": "gs://sample-dlp-bucket/something.json", + }, + }, + }, + after: map[string]interface{}{ + "dictionary": map[string]interface{}{ + "word_list": map[string]interface{}{ + "word": []string{"word", "word2"}, + }, + }, + }, + forcenew: true, + }, + "changing_from_regex_to_dictionary": { + before: map[string]interface{}{ + "regex": map[string]interface{}{ + "pattern": "patient", + }, + }, + after: map[string]interface{}{ + "dictionary": map[string]interface{}{ + "word_list": map[string]interface{}{ + "word": []string{"word", "word2"}, + }, + }, + }, + forcenew: true, + }, + "changing_from_regex_to_large_custom_dictionary": { + before: map[string]interface{}{ + "regex": map[string]interface{}{ + "pattern": "patient", + }, + }, + after: map[string]interface{}{ + "large_custom_dictionary": map[string]interface{}{ + "output_path": map[string]interface{}{ + "path": "gs://sample-dlp-bucket/something.json", + }, + }, + }, + forcenew: true, + }, + } + + for tn, tc := range cases { + + fieldBefore := "" + fieldAfter := "" + switch tn { + case "updating_dictionary": + fieldBefore = "dictionary" + fieldAfter = fieldBefore + case "updating_large_custom_dictionary": + fieldBefore = "large_custom_dictionary" + fieldAfter = fieldBefore + case "updating_regex": + fieldBefore = "regex" + fieldAfter = fieldBefore + case "changing_from_dictionary_to_large_custom_dictionary": + fieldBefore = "dictionary" + fieldAfter = "large_custom_dictionary" + case "changing_from_dictionary_to_regex": + fieldBefore = "dictionary" + fieldAfter = "regex" + case "changing_from_large_custom_dictionary_to_regex": + fieldBefore = "large_custom_dictionary" + fieldAfter = "regex" + case "changing_from_large_custom_dictionary_to_dictionary": + fieldBefore = "large_custom_dictionary" + fieldAfter = "dictionary" + case "changing_from_regex_to_dictionary": + fieldBefore = "regex" + fieldAfter = "dictionary" + case "changing_from_regex_to_large_custom_dictionary": + fieldBefore = "regex" + fieldAfter = "large_custom_dictionary" + } + + d := &tpgresource.ResourceDiffMock{ + Before: 
map[string]interface{}{ + fieldBefore: tc.before[fieldBefore], + }, + After: map[string]interface{}{ + fieldAfter: tc.after[fieldAfter], + }, + } + err := storedInfoTypeCustomizeDiffFunc(d) + if err != nil { + t.Errorf("failed, expected no error but received - %s for the condition %s", err, tn) + } + if d.IsForceNew != tc.forcenew { + t.Errorf("ForceNew not setup correctly for the condition-'%s', expected:%v; actual:%v", tn, tc.forcenew, d.IsForceNew) + } + } +} diff --git a/mmv1/third_party/terraform/utils/dataproc_cluster_operation.go.erb b/mmv1/third_party/terraform/services/dataproc/dataproc_cluster_operation.go similarity index 89% rename from mmv1/third_party/terraform/utils/dataproc_cluster_operation.go.erb rename to mmv1/third_party/terraform/services/dataproc/dataproc_cluster_operation.go index 708112e4e5c1..f34234bf8a86 100644 --- a/mmv1/third_party/terraform/utils/dataproc_cluster_operation.go.erb +++ b/mmv1/third_party/terraform/services/dataproc/dataproc_cluster_operation.go @@ -1,5 +1,4 @@ -<% autogen_exception -%> -package google +package dataproc import ( "fmt" @@ -23,7 +22,7 @@ func (w *DataprocClusterOperationWaiter) QueryOp() (interface{}, error) { return w.Service.Projects.Regions.Operations.Get(w.Op.Name).Do() } -func dataprocClusterOperationWait(config *transport_tpg.Config, op *dataproc.Operation, activity, userAgent string, timeout time.Duration) error { +func DataprocClusterOperationWait(config *transport_tpg.Config, op *dataproc.Operation, activity, userAgent string, timeout time.Duration) error { w := &DataprocClusterOperationWaiter{ Service: config.NewDataprocClient(userAgent), } diff --git a/mmv1/third_party/terraform/utils/dataproc_job_operation.go.erb b/mmv1/third_party/terraform/services/dataproc/dataproc_job_operation.go similarity index 94% rename from mmv1/third_party/terraform/utils/dataproc_job_operation.go.erb rename to mmv1/third_party/terraform/services/dataproc/dataproc_job_operation.go index 99f43c61723c..f558842b82c7 100644 --- a/mmv1/third_party/terraform/utils/dataproc_job_operation.go.erb +++ b/mmv1/third_party/terraform/services/dataproc/dataproc_job_operation.go @@ -1,5 +1,4 @@ -<% autogen_exception -%> -package google +package dataproc import ( "fmt" @@ -70,7 +69,7 @@ func (w *DataprocJobOperationWaiter) TargetStates() []string { return []string{"CANCELLED", "DONE", "ATTEMPT_FAILURE", "ERROR", "RUNNING"} } -func dataprocJobOperationWait(config *transport_tpg.Config, region, projectId, jobId, activity, userAgent string, timeout time.Duration) error { +func DataprocJobOperationWait(config *transport_tpg.Config, region, projectId, jobId, activity, userAgent string, timeout time.Duration) error { w := &DataprocJobOperationWaiter{ Service: config.NewDataprocClient(userAgent), Region: region, @@ -108,7 +107,7 @@ func (w *DataprocDeleteJobOperationWaiter) QueryOp() (interface{}, error) { return job, err } -func dataprocDeleteOperationWait(config *transport_tpg.Config, region, projectId, jobId, activity, userAgent string, timeout time.Duration) error { +func DataprocDeleteOperationWait(config *transport_tpg.Config, region, projectId, jobId, activity, userAgent string, timeout time.Duration) error { w := &DataprocDeleteJobOperationWaiter{ DataprocJobOperationWaiter{ Service: config.NewDataprocClient(userAgent), diff --git a/mmv1/third_party/terraform/data_sources/data_source_dataproc_metastore_service.go.erb b/mmv1/third_party/terraform/services/dataprocmetastore/data_source_dataproc_metastore_service.go.erb similarity index 97% rename from 
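The storedInfoTypeCustomizeDiffFunc exercised by the test above is not included in this diff. A minimal sketch of the force-new rule the test cases imply, written against a locally defined interface so it stays self-contained; the real function presumably works against the provider's resource-diff type, and the field names follow the test fixtures:

// Sketch only: force replacement when the stored info type switches between
// dictionary, large_custom_dictionary, and regex; in-place edits inside the
// same block stay updatable, matching the expectations asserted above.
type storedInfoTypeDiff interface {
	GetChange(key string) (oldVal, newVal interface{})
	ForceNew(key string) error
}

func storedInfoTypeForceNewSketch(diff storedInfoTypeDiff) error {
	for _, field := range []string{"dictionary", "large_custom_dictionary", "regex"} {
		oldVal, newVal := diff.GetChange(field)
		// A block that was set before but is now absent means the config moved
		// to one of the other blocks, so the resource must be recreated.
		if oldVal != nil && newVal == nil {
			return diff.ForceNew(field)
		}
	}
	return nil
}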
mmv1/third_party/terraform/data_sources/data_source_dataproc_metastore_service.go.erb rename to mmv1/third_party/terraform/services/dataprocmetastore/data_source_dataproc_metastore_service.go.erb index fe30ec6fd773..6a351893759d 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_dataproc_metastore_service.go.erb +++ b/mmv1/third_party/terraform/services/dataprocmetastore/data_source_dataproc_metastore_service.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package dataprocmetastore import ( "fmt" diff --git a/mmv1/third_party/terraform/utils/dataproc_metastore_service_diff_supress.go.erb b/mmv1/third_party/terraform/services/dataprocmetastore/dataproc_metastore_service_diff_supress.go.erb similarity index 95% rename from mmv1/third_party/terraform/utils/dataproc_metastore_service_diff_supress.go.erb rename to mmv1/third_party/terraform/services/dataprocmetastore/dataproc_metastore_service_diff_supress.go.erb index 7e14be9c527a..b651ac96d016 100644 --- a/mmv1/third_party/terraform/utils/dataproc_metastore_service_diff_supress.go.erb +++ b/mmv1/third_party/terraform/services/dataprocmetastore/dataproc_metastore_service_diff_supress.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package dataprocmetastore import ( "strings" diff --git a/mmv1/third_party/terraform/services/datastream/datastream_operation.go b/mmv1/third_party/terraform/services/datastream/datastream_operation.go new file mode 100644 index 000000000000..1a5a1933e6ad --- /dev/null +++ b/mmv1/third_party/terraform/services/datastream/datastream_operation.go @@ -0,0 +1,139 @@ +package datastream + +import ( + "bytes" + "encoding/json" + "fmt" + "time" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + + datastream "google.golang.org/api/datastream/v1" +) + +type DatastreamOperationWaiter struct { + Config *transport_tpg.Config + UserAgent string + Project string + Op datastream.Operation + tpgresource.CommonOperationWaiter +} + +func (w *DatastreamOperationWaiter) QueryOp() (interface{}, error) { + if w == nil { + return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") + } + // Returns the proper get. 
+ url := fmt.Sprintf("%s%s", w.Config.DatastreamBasePath, w.Op.Name) + + return transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: w.Config, + Method: "GET", + Project: w.Project, + RawURL: url, + UserAgent: w.UserAgent, + }) +} + +func (w *DatastreamOperationWaiter) Error() error { + if w != nil && w.Op.Error != nil { + return &DatastreamOperationError{Op: w.Op} + } + return nil +} + +func (w *DatastreamOperationWaiter) SetOp(op interface{}) error { + w.CommonOperationWaiter.SetOp(op) + if err := tpgresource.Convert(op, &w.Op); err != nil { + return err + } + return nil +} + +func createDatastreamWaiter(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string) (*DatastreamOperationWaiter, error) { + w := &DatastreamOperationWaiter{ + Config: config, + UserAgent: userAgent, + Project: project, + } + if err := w.SetOp(op); err != nil { + return nil, err + } + return w, nil +} + +// nolint: deadcode,unused +func DatastreamOperationWaitTimeWithResponse(config *transport_tpg.Config, op map[string]interface{}, response *map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { + w, err := createDatastreamWaiter(config, op, project, activity, userAgent) + if err != nil { + return err + } + if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { + return err + } + return json.Unmarshal([]byte(w.Op.Response), response) +} + +func DatastreamOperationWaitTime(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { + if val, ok := op["name"]; !ok || val == "" { + // This was a synchronous call - there is no operation to wait for. + return nil + } + w, err := createDatastreamWaiter(config, op, project, activity, userAgent) + if err != nil { + // If w is nil, the op was synchronous. + return err + } + return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) +} + +// DatastreamOperationError wraps datastream.Status and implements the +// error interface so it can be returned. +type DatastreamOperationError struct { + Op datastream.Operation +} + +func (e DatastreamOperationError) Error() string { + var buf bytes.Buffer + + for _, err := range e.Op.Error.Details { + buf.Write(err) + buf.WriteString("\n") + } + if validations := e.extractFailedValidationResult(); validations != nil { + buf.Write(validations) + buf.WriteString("\n") + } + + return buf.String() +} + +// extractFailedValidationResult extracts the internal failed validations +// if there are any. +func (e DatastreamOperationError) extractFailedValidationResult() []byte { + var metadata datastream.OperationMetadata + data, err := e.Op.Metadata.MarshalJSON() + if err != nil { + return nil + } + err = json.Unmarshal(data, &metadata) + if err != nil { + return nil + } + if metadata.ValidationResult == nil { + return nil + } + var res []byte + for _, v := range metadata.ValidationResult.Validations { + if v.State == "FAILED" { + data, err := v.MarshalJSON() + if err != nil { + return nil + } + res = append(res, data...) + res = append(res, []byte("\n")...) 
+ } + } + return res +} diff --git a/mmv1/third_party/terraform/services/datastream/resource_datastream_stream_internal_test.go b/mmv1/third_party/terraform/services/datastream/resource_datastream_stream_internal_test.go new file mode 100644 index 000000000000..4a525485f834 --- /dev/null +++ b/mmv1/third_party/terraform/services/datastream/resource_datastream_stream_internal_test.go @@ -0,0 +1,134 @@ +package datastream + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" +) + +func TestDatastreamStreamCustomDiff(t *testing.T) { + t.Parallel() + + cases := []struct { + isNew bool + old string + new string + wantError bool + }{ + { + isNew: true, + new: "NOT_STARTED", + wantError: false, + }, + { + isNew: true, + new: "RUNNING", + wantError: false, + }, + { + isNew: true, + new: "PAUSED", + wantError: true, + }, + { + isNew: true, + new: "MAINTENANCE", + wantError: true, + }, + { + // Normally this transition is okay, but if the resource is "new" + // (for example being recreated) it's not. + isNew: true, + old: "RUNNING", + new: "PAUSED", + wantError: true, + }, + { + old: "NOT_STARTED", + new: "RUNNING", + wantError: false, + }, + { + old: "NOT_STARTED", + new: "MAINTENANCE", + wantError: true, + }, + { + old: "NOT_STARTED", + new: "PAUSED", + wantError: true, + }, + { + old: "NOT_STARTED", + new: "NOT_STARTED", + wantError: false, + }, + { + old: "RUNNING", + new: "PAUSED", + wantError: false, + }, + { + old: "RUNNING", + new: "NOT_STARTED", + wantError: true, + }, + { + old: "RUNNING", + new: "RUNNING", + wantError: false, + }, + { + old: "RUNNING", + new: "MAINTENANCE", + wantError: true, + }, + { + old: "PAUSED", + new: "PAUSED", + wantError: false, + }, + { + old: "PAUSED", + new: "NOT_STARTED", + wantError: true, + }, + { + old: "PAUSED", + new: "RUNNING", + wantError: false, + }, + { + old: "PAUSED", + new: "MAINTENANCE", + wantError: true, + }, + } + for _, tc := range cases { + name := "whatever" + tn := fmt.Sprintf("%s => %s", tc.old, tc.new) + if tc.isNew { + name = "" + tn = fmt.Sprintf("(new) %s => %s", tc.old, tc.new) + } + t.Run(tn, func(t *testing.T) { + diff := &tpgresource.ResourceDiffMock{ + Before: map[string]interface{}{ + "desired_state": tc.old, + }, + After: map[string]interface{}{ + "name": name, + "desired_state": tc.new, + }, + } + err := resourceDatastreamStreamCustomDiffFunc(diff) + if tc.wantError && err == nil { + t.Fatalf("want error, got nil") + } + if !tc.wantError && err != nil { + t.Fatalf("got unexpected error: %v", err) + } + }) + } +} diff --git a/mmv1/third_party/terraform/services/deploymentmanager/deployment_manager_operation.go.erb b/mmv1/third_party/terraform/services/deploymentmanager/deployment_manager_operation.go.erb new file mode 100644 index 000000000000..b6f1330ec557 --- /dev/null +++ b/mmv1/third_party/terraform/services/deploymentmanager/deployment_manager_operation.go.erb @@ -0,0 +1,105 @@ +<% autogen_exception -%> +package deploymentmanager + +import ( + "bytes" + "fmt" + "time" + + tpgcompute "github.com/hashicorp/terraform-provider-google/google/services/compute" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + +<% if version == "ga" -%> + "google.golang.org/api/compute/v1" +<% else -%> + compute "google.golang.org/api/compute/v0.beta" +<% end -%> +) + +type DeploymentManagerOperationWaiter struct { + Config *transport_tpg.Config + UserAgent string + Project string + OperationUrl 
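datastream_operation.go above pairs a plain wait with a WithResponse variant that also decodes the finished operation's response, and its error type folds FAILED validation results into the message. A minimal sketch of a create path using the WithResponse variant; the endpoint, body handling, and the Body field on SendRequestOptions are assumptions, since only part of that options struct appears in this diff:

// Sketch only: assumes the same package and imports as datastream_operation.go above.
func createStreamAndWait(config *transport_tpg.Config, project, userAgent, url string, body map[string]interface{}, timeout time.Duration) (map[string]interface{}, error) {
	op, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{
		Config:    config,
		Method:    "POST",
		Project:   project,
		RawURL:    url,
		UserAgent: userAgent,
		Body:      body, // field name assumed, not shown in this diff
	})
	if err != nil {
		return nil, err
	}
	var resp map[string]interface{}
	// Blocks until the LRO finishes; a failure surfaces as DatastreamOperationError,
	// including any FAILED validations extracted from the operation metadata.
	if err := DatastreamOperationWaitTimeWithResponse(config, op, &resp, project, "Creating Stream", userAgent, timeout); err != nil {
		return nil, err
	}
	return resp, nil
}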
string + tpgcompute.ComputeOperationWaiter +} + +func (w *DeploymentManagerOperationWaiter) IsRetryable(error) bool { + return false +} + +func (w *DeploymentManagerOperationWaiter) QueryOp() (interface{}, error) { + if w == nil || w.Op == nil || w.Op.SelfLink == "" { + return nil, fmt.Errorf("cannot query unset/nil operation") + } + + resp, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: w.Config, + Method: "GET", + Project: w.Project, + RawURL: w.Op.SelfLink, + UserAgent: w.UserAgent, + }) + if err != nil { + return nil, err + } + op := &compute.Operation{} + if err := tpgresource.Convert(resp, op); err != nil { + return nil, fmt.Errorf("could not convert response to operation: %v", err) + } + return op, nil +} + + +func DeploymentManagerOperationWaitTime(config *transport_tpg.Config, resp interface{}, project, activity, userAgent string, timeout time.Duration) error { + op := &compute.Operation{} + err := tpgresource.Convert(resp, op) + if err != nil { + return err + } + + w := &DeploymentManagerOperationWaiter{ + Config: config, + UserAgent: userAgent, + OperationUrl: op.SelfLink, + ComputeOperationWaiter: tpgcompute.ComputeOperationWaiter{ + Project: project, + }, + } + if err := w.SetOp(op); err != nil { + return err + } + + return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) +} + +func (w *DeploymentManagerOperationWaiter) Error() error { + if w != nil && w.Op != nil && w.Op.Error != nil { + return DeploymentManagerOperationError{ + HTTPStatusCode: w.Op.HttpErrorStatusCode, + HTTPMessage: w.Op.HttpErrorMessage, + OperationError: *w.Op.Error, + } + } + return nil +} + +// DeploymentManagerOperationError wraps information from the compute.Operation +// in an implementation of Error. +type DeploymentManagerOperationError struct { + HTTPStatusCode int64 + HTTPMessage string + compute.OperationError +} + +func (e DeploymentManagerOperationError) Error() string { + var buf bytes.Buffer + buf.WriteString("Deployment Manager returned errors for this operation, likely due to invalid configuration.") + buf.WriteString(fmt.Sprintf("Operation failed with HTTP error %d: %s.", e.HTTPStatusCode, e.HTTPMessage)) + buf.WriteString("Errors returned: \n") + for _, err := range e.Errors { + buf.WriteString(err.Message + "\n") + } + return buf.String() +} diff --git a/mmv1/third_party/terraform/services/dialogflowcx/dialogflow_cx_operation.go b/mmv1/third_party/terraform/services/dialogflowcx/dialogflow_cx_operation.go new file mode 100644 index 000000000000..2198891aef13 --- /dev/null +++ b/mmv1/third_party/terraform/services/dialogflowcx/dialogflow_cx_operation.go @@ -0,0 +1,69 @@ +package dialogflowcx + +import ( + "encoding/json" + "fmt" + "time" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +type DialogflowCXOperationWaiter struct { + Config *transport_tpg.Config + UserAgent string + tpgresource.CommonOperationWaiter + Location string +} + +func (w *DialogflowCXOperationWaiter) QueryOp() (interface{}, error) { + if w == nil { + return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") + } + // Returns the proper get. 
+ url := fmt.Sprintf("https://%s-dialogflow.googleapis.com/v3/%s", w.Location, w.CommonOperationWaiter.Op.Name) + + return transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: w.Config, + Method: "GET", + RawURL: url, + UserAgent: w.UserAgent, + }) +} + +func createDialogflowCXWaiter(config *transport_tpg.Config, op map[string]interface{}, activity, userAgent, location string) (*DialogflowCXOperationWaiter, error) { + w := &DialogflowCXOperationWaiter{ + Config: config, + UserAgent: userAgent, + Location: location, + } + if err := w.CommonOperationWaiter.SetOp(op); err != nil { + return nil, err + } + return w, nil +} + +// nolint: deadcode,unused +func DialogflowCXOperationWaitTimeWithResponse(config *transport_tpg.Config, op map[string]interface{}, response *map[string]interface{}, activity, userAgent, location string, timeout time.Duration) error { + w, err := createDialogflowCXWaiter(config, op, activity, userAgent, location) + if err != nil { + return err + } + if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { + return err + } + return json.Unmarshal([]byte(w.CommonOperationWaiter.Op.Response), response) +} + +func DialogflowCXOperationWaitTime(config *transport_tpg.Config, op map[string]interface{}, activity, userAgent, location string, timeout time.Duration) error { + if val, ok := op["name"]; !ok || val == "" { + // This was a synchronous call - there is no operation to wait for. + return nil + } + w, err := createDialogflowCXWaiter(config, op, activity, userAgent, location) + if err != nil { + // If w is nil, the op was synchronous. + return err + } + return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) +} diff --git a/mmv1/third_party/terraform/utils/dns_change.go b/mmv1/third_party/terraform/services/dns/dns_change.go similarity index 98% rename from mmv1/third_party/terraform/utils/dns_change.go rename to mmv1/third_party/terraform/services/dns/dns_change.go index 9afcf21bc4e3..5dff00b3a8b4 100644 --- a/mmv1/third_party/terraform/utils/dns_change.go +++ b/mmv1/third_party/terraform/services/dns/dns_change.go @@ -1,4 +1,4 @@ -package google +package dns import ( "time" diff --git a/mmv1/third_party/terraform/resources/resource_dns_record_set.go b/mmv1/third_party/terraform/services/dns/resource_dns_record_set.go similarity index 99% rename from mmv1/third_party/terraform/resources/resource_dns_record_set.go rename to mmv1/third_party/terraform/services/dns/resource_dns_record_set.go index 630625cb85a9..5ce73f120c69 100644 --- a/mmv1/third_party/terraform/resources/resource_dns_record_set.go +++ b/mmv1/third_party/terraform/services/dns/resource_dns_record_set.go @@ -1,4 +1,4 @@ -package google +package dns import ( "fmt" @@ -398,11 +398,13 @@ func resourceDnsRecordSetRead(d *schema.ResourceData, meta interface{}) error { dnsType := d.Get("type").(string) var resp *dns.ResourceRecordSetsListResponse - err = transport_tpg.Retry(func() error { - var reqErr error - resp, reqErr = config.NewDnsClient(userAgent).ResourceRecordSets.List( - project, zone).Name(name).Type(dnsType).Do() - return reqErr + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + var reqErr error + resp, reqErr = config.NewDnsClient(userAgent).ResourceRecordSets.List( + project, zone).Name(name).Type(dnsType).Do() + return reqErr + }, }) if err != nil { return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("DNS Record Set %q", d.Get("name").(string))) diff --git 
a/mmv1/third_party/terraform/services/dns/resource_dns_record_set_internal_test.go b/mmv1/third_party/terraform/services/dns/resource_dns_record_set_internal_test.go new file mode 100644 index 000000000000..de77b5c42636 --- /dev/null +++ b/mmv1/third_party/terraform/services/dns/resource_dns_record_set_internal_test.go @@ -0,0 +1,23 @@ +package dns + +import ( + "testing" + + "github.com/hashicorp/terraform-provider-google/google/verify" +) + +func TestValidateRecordNameTrailingDot(t *testing.T) { + cases := []verify.StringValidationTestCase{ + // No errors + {TestName: "trailing dot", Value: "test-record.hashicorptest.com."}, + + // With errors + {TestName: "empty string", Value: "", ExpectError: true}, + {TestName: "no trailing dot", Value: "test-record.hashicorptest.com", ExpectError: true}, + } + + es := verify.TestStringValidationCases(cases, validateRecordNameTrailingDot) + if len(es) > 0 { + t.Errorf("Failed to validate DNS Record name with value: %v", es) + } +} diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_firebase_android_app.go.erb b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_android_app.go.erb similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_google_firebase_android_app.go.erb rename to mmv1/third_party/terraform/services/firebase/data_source_google_firebase_android_app.go.erb index 46911e7c9f46..e9167e5d8d3e 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_firebase_android_app.go.erb +++ b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_android_app.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package firebase <% unless version == 'ga' -%> import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_firebase_apple_app.go.erb b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_apple_app.go.erb similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_google_firebase_apple_app.go.erb rename to mmv1/third_party/terraform/services/firebase/data_source_google_firebase_apple_app.go.erb index b5e5b025ed29..9d2351c22011 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_firebase_apple_app.go.erb +++ b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_apple_app.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package firebase <% unless version == 'ga' -%> import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_firebase_web_app.go.erb b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_web_app.go.erb similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_google_firebase_web_app.go.erb rename to mmv1/third_party/terraform/services/firebase/data_source_google_firebase_web_app.go.erb index c2808adf7a02..d6da3622e83c 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_firebase_web_app.go.erb +++ b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_web_app.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package firebase <% unless version == 'ga' -%> import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_firebase_hosting_channel.go.erb b/mmv1/third_party/terraform/services/firebasehosting/data_source_google_firebase_hosting_channel.go.erb similarity index 97% rename from 
mmv1/third_party/terraform/data_sources/data_source_google_firebase_hosting_channel.go.erb rename to mmv1/third_party/terraform/services/firebasehosting/data_source_google_firebase_hosting_channel.go.erb index 3d3402c0bca8..c8cd9e60a3a1 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_firebase_hosting_channel.go.erb +++ b/mmv1/third_party/terraform/services/firebasehosting/data_source_google_firebase_hosting_channel.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package firebasehosting <% unless version == 'ga' -%> import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_game_services_game_server_deployment_rollout.go b/mmv1/third_party/terraform/services/gameservices/data_source_google_game_services_game_server_deployment_rollout.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_google_game_services_game_server_deployment_rollout.go rename to mmv1/third_party/terraform/services/gameservices/data_source_google_game_services_game_server_deployment_rollout.go index 5c17315b22d0..ab8d406cafdb 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_game_services_game_server_deployment_rollout.go +++ b/mmv1/third_party/terraform/services/gameservices/data_source_google_game_services_game_server_deployment_rollout.go @@ -1,4 +1,4 @@ -package google +package gameservices import ( "fmt" diff --git a/mmv1/third_party/terraform/services/gkeonprem/gkeonprem_operation.go.erb b/mmv1/third_party/terraform/services/gkeonprem/gkeonprem_operation.go.erb new file mode 100644 index 000000000000..6f5bae0643af --- /dev/null +++ b/mmv1/third_party/terraform/services/gkeonprem/gkeonprem_operation.go.erb @@ -0,0 +1,147 @@ +<% autogen_exception -%> +package gkeonprem +<% unless version == 'ga' -%> +import ( + "encoding/json" + "fmt" + "time" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + + cloudresourcemanager "google.golang.org/api/cloudresourcemanager/v1" +) + +type gkeonpremOpError struct { + *cloudresourcemanager.Status +} + +func (e gkeonpremOpError) Error() string { + var validationCheck map[string]interface{} + + for _, msg := range e.Details { + detail := make(map[string]interface{}) + if err := json.Unmarshal(msg, &detail); err != nil { + continue + } + + if _, ok := detail["validationCheck"]; ok { + delete(detail, "@type") + validationCheck = detail + } + } + + if validationCheck != nil { + bytes, err := json.MarshalIndent(validationCheck, "", " ") + if err != nil { + return fmt.Sprintf("Error code %v message: %s validation check: %s", e.Code, e.Message, validationCheck) + } + + return fmt.Sprintf("Error code %v message: %s\n %s", e.Code, e.Message, bytes) + } + + return fmt.Sprintf("Error code %v, message: %s", e.Code, e.Message) +} + +type gkeonpremOperationWaiter struct { + Config *transport_tpg.Config + UserAgent string + Project string + Op tpgresource.CommonOperation +} + +func (w *gkeonpremOperationWaiter) State() string { + if w == nil { + return fmt.Sprintf("Operation is nil!") + } + + return fmt.Sprintf("done: %v", w.Op.Done) +} + +func (w *gkeonpremOperationWaiter) Error() error { + if w != nil && w.Op.Error != nil { + return &gkeonpremOpError{w.Op.Error} + } + return nil +} + +func (w *gkeonpremOperationWaiter) IsRetryable(error) bool { + return false +} + +func (w *gkeonpremOperationWaiter) SetOp(op interface{}) error { + if err := tpgresource.Convert(op, 
&w.Op); err != nil { + return err + } + return nil +} + +func (w *gkeonpremOperationWaiter) OpName() string { + if w == nil { + return "" + } + + return w.Op.Name +} + +func (w *gkeonpremOperationWaiter) PendingStates() []string { + return []string{"done: false"} +} + +func (w *gkeonpremOperationWaiter) TargetStates() []string { + return []string{"done: true"} +} + +func (w *gkeonpremOperationWaiter) QueryOp() (interface{}, error) { + if w == nil { + return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") + } + // Returns the proper get. + url := fmt.Sprintf("%s%s", w.Config.GkeonpremBasePath, w.Op.Name) + + return transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: w.Config, + Method: "GET", + Project: w.Project, + RawURL: url, + UserAgent: w.UserAgent, + }) +} + +func creategkeonpremWaiter(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string) (*gkeonpremOperationWaiter, error) { + w := &gkeonpremOperationWaiter{ + Config: config, + UserAgent: userAgent, + Project: project, + } + if err := w.SetOp(op); err != nil { + return nil, err + } + return w, nil +} + +// nolint: deadcode,unused +func GkeonpremOperationWaitTimeWithResponse(config *transport_tpg.Config, op map[string]interface{}, response *map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { + w, err := creategkeonpremWaiter(config, op, project, activity, userAgent) + if err != nil { + return err + } + if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { + return err + } + return json.Unmarshal([]byte(w.Op.Response), response) +} + +func GkeonpremOperationWaitTime(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { + if val, ok := op["name"]; !ok || val == "" { + // This was a synchronous call - there is no operation to wait for. + return nil + } + w, err := creategkeonpremWaiter(config, op, project, activity, userAgent) + if err != nil { + // If w is nil, the op was synchronous. 
+ return err + } + return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) +} +<% end -%> diff --git a/mmv1/third_party/terraform/utils/healthcare_utils.go b/mmv1/third_party/terraform/services/healthcare/healthcare_utils.go similarity index 99% rename from mmv1/third_party/terraform/utils/healthcare_utils.go rename to mmv1/third_party/terraform/services/healthcare/healthcare_utils.go index 59bc4280c9bb..d6db4f186574 100644 --- a/mmv1/third_party/terraform/utils/healthcare_utils.go +++ b/mmv1/third_party/terraform/services/healthcare/healthcare_utils.go @@ -1,4 +1,4 @@ -package google +package healthcare import ( "fmt" diff --git a/mmv1/third_party/terraform/utils/iam_healthcare_dataset.go b/mmv1/third_party/terraform/services/healthcare/iam_healthcare_dataset.go similarity index 99% rename from mmv1/third_party/terraform/utils/iam_healthcare_dataset.go rename to mmv1/third_party/terraform/services/healthcare/iam_healthcare_dataset.go index 07c9901a823b..370cf88f1066 100644 --- a/mmv1/third_party/terraform/utils/iam_healthcare_dataset.go +++ b/mmv1/third_party/terraform/services/healthcare/iam_healthcare_dataset.go @@ -1,4 +1,4 @@ -package google +package healthcare import ( "fmt" diff --git a/mmv1/third_party/terraform/utils/iam_healthcare_dicom_store.go b/mmv1/third_party/terraform/services/healthcare/iam_healthcare_dicom_store.go similarity index 99% rename from mmv1/third_party/terraform/utils/iam_healthcare_dicom_store.go rename to mmv1/third_party/terraform/services/healthcare/iam_healthcare_dicom_store.go index 4d0d104c8768..1d104eb5c57e 100644 --- a/mmv1/third_party/terraform/utils/iam_healthcare_dicom_store.go +++ b/mmv1/third_party/terraform/services/healthcare/iam_healthcare_dicom_store.go @@ -1,4 +1,4 @@ -package google +package healthcare import ( "fmt" diff --git a/mmv1/third_party/terraform/utils/iam_healthcare_fhir_store.go b/mmv1/third_party/terraform/services/healthcare/iam_healthcare_fhir_store.go similarity index 99% rename from mmv1/third_party/terraform/utils/iam_healthcare_fhir_store.go rename to mmv1/third_party/terraform/services/healthcare/iam_healthcare_fhir_store.go index 4f9275c0fc47..5f474afe7c00 100644 --- a/mmv1/third_party/terraform/utils/iam_healthcare_fhir_store.go +++ b/mmv1/third_party/terraform/services/healthcare/iam_healthcare_fhir_store.go @@ -1,4 +1,4 @@ -package google +package healthcare import ( "fmt" diff --git a/mmv1/third_party/terraform/utils/iam_healthcare_hl7_v2_store.go b/mmv1/third_party/terraform/services/healthcare/iam_healthcare_hl7_v2_store.go similarity index 99% rename from mmv1/third_party/terraform/utils/iam_healthcare_hl7_v2_store.go rename to mmv1/third_party/terraform/services/healthcare/iam_healthcare_hl7_v2_store.go index b6aa0c17f95e..8de1fb7c3ffa 100644 --- a/mmv1/third_party/terraform/utils/iam_healthcare_hl7_v2_store.go +++ b/mmv1/third_party/terraform/services/healthcare/iam_healthcare_hl7_v2_store.go @@ -1,4 +1,4 @@ -package google +package healthcare import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_iam_beta_workload_identity_pool.go.erb b/mmv1/third_party/terraform/services/iambeta/data_source_iam_beta_workload_identity_pool.go.erb similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_iam_beta_workload_identity_pool.go.erb rename to mmv1/third_party/terraform/services/iambeta/data_source_iam_beta_workload_identity_pool.go.erb index 80f0ea51bfa0..88588781b10d 100644 --- 
a/mmv1/third_party/terraform/data_sources/data_source_iam_beta_workload_identity_pool.go.erb +++ b/mmv1/third_party/terraform/services/iambeta/data_source_iam_beta_workload_identity_pool.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package iambeta <% unless version == 'ga' -%> import ( diff --git a/mmv1/third_party/terraform/data_sources/data_source_iam_beta_workload_identity_pool_provider.go.erb b/mmv1/third_party/terraform/services/iambeta/data_source_iam_beta_workload_identity_pool_provider.go.erb similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_iam_beta_workload_identity_pool_provider.go.erb rename to mmv1/third_party/terraform/services/iambeta/data_source_iam_beta_workload_identity_pool_provider.go.erb index 2b8fef09bc39..0aef39e9cfbc 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_iam_beta_workload_identity_pool_provider.go.erb +++ b/mmv1/third_party/terraform/services/iambeta/data_source_iam_beta_workload_identity_pool_provider.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package iambeta <% unless version == 'ga' -%> import ( diff --git a/mmv1/third_party/terraform/data_sources/data_source_iap_client.go b/mmv1/third_party/terraform/services/iap/data_source_iap_client.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_iap_client.go rename to mmv1/third_party/terraform/services/iap/data_source_iap_client.go index bbf44b48a390..6cd3a863188d 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_iap_client.go +++ b/mmv1/third_party/terraform/services/iap/data_source_iap_client.go @@ -1,4 +1,4 @@ -package google +package iap import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_kms_crypto_key.go b/mmv1/third_party/terraform/services/kms/data_source_google_kms_crypto_key.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_google_kms_crypto_key.go rename to mmv1/third_party/terraform/services/kms/data_source_google_kms_crypto_key.go index 1ca584aa9025..84c124eba250 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_kms_crypto_key.go +++ b/mmv1/third_party/terraform/services/kms/data_source_google_kms_crypto_key.go @@ -1,4 +1,4 @@ -package google +package kms import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_kms_crypto_key_version.go b/mmv1/third_party/terraform/services/kms/data_source_google_kms_crypto_key_version.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_google_kms_crypto_key_version.go rename to mmv1/third_party/terraform/services/kms/data_source_google_kms_crypto_key_version.go index 2f4776502c0a..eb7253f2c674 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_kms_crypto_key_version.go +++ b/mmv1/third_party/terraform/services/kms/data_source_google_kms_crypto_key_version.go @@ -1,4 +1,4 @@ -package google +package kms import ( "fmt" @@ -158,7 +158,7 @@ func flattenKmsCryptoKeyVersionVersion(v interface{}, d *schema.ResourceData) in parts := strings.Split(v.(string), "/") version := parts[len(parts)-1] // Handles the string fixed64 format - if intVal, err := StringToFixed64(version); err == nil { + if intVal, err := tpgresource.StringToFixed64(version); err == nil { return intVal } // let terraform core handle it if we can't convert the string to an int. 
return v diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_kms_key_ring.go b/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_ring.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_google_kms_key_ring.go rename to mmv1/third_party/terraform/services/kms/data_source_google_kms_key_ring.go index 041de5f5e53c..ae1bc4f73199 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_kms_key_ring.go +++ b/mmv1/third_party/terraform/services/kms/data_source_google_kms_key_ring.go @@ -1,4 +1,4 @@ -package google +package kms import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_kms_secret.go b/mmv1/third_party/terraform/services/kms/data_source_google_kms_secret.go similarity index 99% rename from mmv1/third_party/terraform/data_sources/data_source_google_kms_secret.go rename to mmv1/third_party/terraform/services/kms/data_source_google_kms_secret.go index 7cef5162f4a4..91f0eaea82e5 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_kms_secret.go +++ b/mmv1/third_party/terraform/services/kms/data_source_google_kms_secret.go @@ -1,4 +1,4 @@ -package google +package kms import ( "google.golang.org/api/cloudkms/v1" diff --git a/mmv1/third_party/terraform/data_sources/data_source_google_kms_secret_ciphertext.go b/mmv1/third_party/terraform/services/kms/data_source_google_kms_secret_ciphertext.go similarity index 99% rename from mmv1/third_party/terraform/data_sources/data_source_google_kms_secret_ciphertext.go rename to mmv1/third_party/terraform/services/kms/data_source_google_kms_secret_ciphertext.go index 62cf8092cbd0..1fa80acf1dc5 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_google_kms_secret_ciphertext.go +++ b/mmv1/third_party/terraform/services/kms/data_source_google_kms_secret_ciphertext.go @@ -1,4 +1,4 @@ -package google +package kms import ( "google.golang.org/api/cloudkms/v1" diff --git a/mmv1/third_party/terraform/utils/iam_kms_crypto_key.go.erb b/mmv1/third_party/terraform/services/kms/iam_kms_crypto_key.go.erb similarity index 99% rename from mmv1/third_party/terraform/utils/iam_kms_crypto_key.go.erb rename to mmv1/third_party/terraform/services/kms/iam_kms_crypto_key.go.erb index 3b1f15edcb00..e558699b4676 100644 --- a/mmv1/third_party/terraform/utils/iam_kms_crypto_key.go.erb +++ b/mmv1/third_party/terraform/services/kms/iam_kms_crypto_key.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package kms import ( "fmt" diff --git a/mmv1/third_party/terraform/utils/iam_kms_key_ring.go.erb b/mmv1/third_party/terraform/services/kms/iam_kms_key_ring.go.erb similarity index 99% rename from mmv1/third_party/terraform/utils/iam_kms_key_ring.go.erb rename to mmv1/third_party/terraform/services/kms/iam_kms_key_ring.go.erb index 1a27043b8548..3c40406945cc 100644 --- a/mmv1/third_party/terraform/utils/iam_kms_key_ring.go.erb +++ b/mmv1/third_party/terraform/services/kms/iam_kms_key_ring.go.erb @@ -1,5 +1,5 @@ <% autogen_exception -%> -package google +package kms import ( "fmt" diff --git a/mmv1/third_party/terraform/utils/kms_utils.go b/mmv1/third_party/terraform/services/kms/kms_utils.go similarity index 99% rename from mmv1/third_party/terraform/utils/kms_utils.go rename to mmv1/third_party/terraform/services/kms/kms_utils.go index f73434898ba1..23eb12e90102 100644 --- a/mmv1/third_party/terraform/utils/kms_utils.go +++ 
b/mmv1/third_party/terraform/services/kms/kms_utils.go @@ -1,4 +1,4 @@ -package google +package kms import ( "fmt" diff --git a/mmv1/third_party/terraform/services/kms/resource_kms_crypto_key_internal_test.go b/mmv1/third_party/terraform/services/kms/resource_kms_crypto_key_internal_test.go new file mode 100644 index 000000000000..ff1d7cbeea50 --- /dev/null +++ b/mmv1/third_party/terraform/services/kms/resource_kms_crypto_key_internal_test.go @@ -0,0 +1,41 @@ +package kms + +import ( + "testing" + "time" +) + +func TestCryptoKeyNextRotationCalculation(t *testing.T) { + t.Parallel() + + now := time.Now().UTC() + period, _ := time.ParseDuration("1000000s") + + expected := now.Add(period).Format(time.RFC3339Nano) + + timestamp, err := kmsCryptoKeyNextRotation(now, "1000000s") + + if err != nil { + t.Fatalf("unexpected failure parsing time %s and duration 1000s: %s", now, err.Error()) + } + + if expected != timestamp { + t.Fatalf("expected %s to equal %s", timestamp, expected) + } +} + +func TestCryptoKeyNextRotationCalculation_validation(t *testing.T) { + t.Parallel() + + _, errs := validateKmsCryptoKeyRotationPeriod("86399s", "rotation_period") + + if len(errs) == 0 { + t.Fatalf("Periods of less than a day should be invalid") + } + + _, errs = validateKmsCryptoKeyRotationPeriod("100000.0000000001s", "rotation_period") + + if len(errs) == 0 { + t.Fatalf("Numbers with more than 9 fractional digits are invalid") + } +} diff --git a/mmv1/third_party/terraform/services/kms/resource_kms_key_ring_internal_test.go b/mmv1/third_party/terraform/services/kms/resource_kms_key_ring_internal_test.go new file mode 100644 index 000000000000..8c6fa183eff1 --- /dev/null +++ b/mmv1/third_party/terraform/services/kms/resource_kms_key_ring_internal_test.go @@ -0,0 +1,69 @@ +package kms + +import ( + "testing" + + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func TestKeyRingIdParsing(t *testing.T) { + cases := map[string]struct { + ImportId string + ExpectedError bool + ExpectedTerraformId string + ExpectedKeyRingId string + Config *transport_tpg.Config + }{ + "id is in project/location/keyRingName format": { + ImportId: "test-project/us-central1/test-key-ring", + ExpectedError: false, + ExpectedTerraformId: "test-project/us-central1/test-key-ring", + ExpectedKeyRingId: "projects/test-project/locations/us-central1/keyRings/test-key-ring", + }, + "id is in domain:project/location/keyRingName format": { + ImportId: "example.com:test-project/us-central1/test-key-ring", + ExpectedError: false, + ExpectedTerraformId: "example.com:test-project/us-central1/test-key-ring", + ExpectedKeyRingId: "projects/example.com:test-project/locations/us-central1/keyRings/test-key-ring", + }, + "id contains name that is longer than 63 characters": { + ImportId: "test-project/us-central1/can-you-believe-that-this-key-ring-name-is-exactly-64-characters", + ExpectedError: true, + }, + "id is in location/keyRingName format": { + ImportId: "us-central1/test-key-ring", + ExpectedError: false, + ExpectedTerraformId: "test-project/us-central1/test-key-ring", + ExpectedKeyRingId: "projects/test-project/locations/us-central1/keyRings/test-key-ring", + Config: &transport_tpg.Config{Project: "test-project"}, + }, + "id is in location/keyRingName format without project in config": { + ImportId: "us-central1/test-key-ring", + ExpectedError: true, + Config: &transport_tpg.Config{Project: ""}, + }, + } + + for tn, tc := range cases { + keyRingId, err := parseKmsKeyRingId(tc.ImportId, tc.Config) + + if 
tc.ExpectedError && err == nil { + t.Fatalf("bad: %s, expected an error", tn) + } + + if err != nil { + if tc.ExpectedError { + continue + } + t.Fatalf("bad: %s, err: %#v", tn, err) + } + + if keyRingId.TerraformId() != tc.ExpectedTerraformId { + t.Fatalf("bad: %s, expected Terraform ID to be `%s` but is `%s`", tn, tc.ExpectedTerraformId, keyRingId.TerraformId()) + } + + if keyRingId.KeyRingId() != tc.ExpectedKeyRingId { + t.Fatalf("bad: %s, expected KeyRing ID to be `%s` but is `%s`", tn, tc.ExpectedKeyRingId, keyRingId.KeyRingId()) + } + } +} diff --git a/mmv1/third_party/terraform/services/logging/extract.go b/mmv1/third_party/terraform/services/logging/extract.go new file mode 100644 index 000000000000..5ac2bd1f8fc1 --- /dev/null +++ b/mmv1/third_party/terraform/services/logging/extract.go @@ -0,0 +1,35 @@ +package logging + +import ( + "fmt" + "regexp" +) + +// ExtractFieldByPattern returns the value of a field extracted from a parent field according to the given regular expression pattern. +// An error is returned if the field already has a value different than the value extracted. +func ExtractFieldByPattern(fieldName, fieldValue, parentFieldValue, pattern string) (string, error) { + var extractedValue string + // Fetch value from container if the container exists. + if parentFieldValue != "" { + r := regexp.MustCompile(pattern) + m := r.FindStringSubmatch(parentFieldValue) + if m != nil && len(m) >= 2 { + extractedValue = m[1] + } else if fieldValue == "" { + // The pattern didn't match and the value doesn't exist. + return "", fmt.Errorf("parent of %q has no matching values from pattern %q in value %q", fieldName, pattern, parentFieldValue) + } + } + + // If both values exist and are different, error + if fieldValue != "" && extractedValue != "" && fieldValue != extractedValue { + return "", fmt.Errorf("%q has conflicting values of %q (from parent) and %q (from self)", fieldName, extractedValue, fieldValue) + } + + // If value does not exist, use the value in container. 
+ if fieldValue == "" { + return extractedValue, nil + } + + return fieldValue, nil +} diff --git a/mmv1/third_party/terraform/utils/extract_test.go b/mmv1/third_party/terraform/services/logging/extract_test.go similarity index 98% rename from mmv1/third_party/terraform/utils/extract_test.go rename to mmv1/third_party/terraform/services/logging/extract_test.go index c5b3de3be757..e0e91aefc8dd 100644 --- a/mmv1/third_party/terraform/utils/extract_test.go +++ b/mmv1/third_party/terraform/services/logging/extract_test.go @@ -1,4 +1,4 @@ -package google +package logging import "testing" diff --git a/mmv1/third_party/terraform/data_sources/data_source_monitoring_istio_canonical_service.go b/mmv1/third_party/terraform/services/monitoring/data_source_monitoring_istio_canonical_service.go similarity index 99% rename from mmv1/third_party/terraform/data_sources/data_source_monitoring_istio_canonical_service.go rename to mmv1/third_party/terraform/services/monitoring/data_source_monitoring_istio_canonical_service.go index 82c22000c301..08cbdbeb1602 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_monitoring_istio_canonical_service.go +++ b/mmv1/third_party/terraform/services/monitoring/data_source_monitoring_istio_canonical_service.go @@ -1,4 +1,4 @@ -package google +package monitoring import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" diff --git a/mmv1/third_party/terraform/data_sources/data_source_monitoring_notification_channel.go b/mmv1/third_party/terraform/services/monitoring/data_source_monitoring_notification_channel.go similarity index 99% rename from mmv1/third_party/terraform/data_sources/data_source_monitoring_notification_channel.go rename to mmv1/third_party/terraform/services/monitoring/data_source_monitoring_notification_channel.go index 4e178fea312b..e745e982d3e9 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_monitoring_notification_channel.go +++ b/mmv1/third_party/terraform/services/monitoring/data_source_monitoring_notification_channel.go @@ -1,4 +1,4 @@ -package google +package monitoring import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_monitoring_service.go b/mmv1/third_party/terraform/services/monitoring/data_source_monitoring_service.go similarity index 99% rename from mmv1/third_party/terraform/data_sources/data_source_monitoring_service.go rename to mmv1/third_party/terraform/services/monitoring/data_source_monitoring_service.go index 392f5649feaa..0133a31743b5 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_monitoring_service.go +++ b/mmv1/third_party/terraform/services/monitoring/data_source_monitoring_service.go @@ -1,4 +1,4 @@ -package google +package monitoring import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_monitoring_service_app_engine.go b/mmv1/third_party/terraform/services/monitoring/data_source_monitoring_service_app_engine.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_monitoring_service_app_engine.go rename to mmv1/third_party/terraform/services/monitoring/data_source_monitoring_service_app_engine.go index 09db702b6f68..5ac55bc4be93 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_monitoring_service_app_engine.go +++ b/mmv1/third_party/terraform/services/monitoring/data_source_monitoring_service_app_engine.go @@ -1,4 +1,4 @@ -package google +package monitoring import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" diff --git 
a/mmv1/third_party/terraform/data_sources/data_source_monitoring_service_cluster_istio.go b/mmv1/third_party/terraform/services/monitoring/data_source_monitoring_service_cluster_istio.go similarity index 99% rename from mmv1/third_party/terraform/data_sources/data_source_monitoring_service_cluster_istio.go rename to mmv1/third_party/terraform/services/monitoring/data_source_monitoring_service_cluster_istio.go index b7f0186272ba..aba78a255db6 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_monitoring_service_cluster_istio.go +++ b/mmv1/third_party/terraform/services/monitoring/data_source_monitoring_service_cluster_istio.go @@ -1,4 +1,4 @@ -package google +package monitoring import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" diff --git a/mmv1/third_party/terraform/data_sources/data_source_monitoring_service_mesh_istio.go b/mmv1/third_party/terraform/services/monitoring/data_source_monitoring_service_mesh_istio.go similarity index 99% rename from mmv1/third_party/terraform/data_sources/data_source_monitoring_service_mesh_istio.go rename to mmv1/third_party/terraform/services/monitoring/data_source_monitoring_service_mesh_istio.go index cdbe5542f947..83619eda2660 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_monitoring_service_mesh_istio.go +++ b/mmv1/third_party/terraform/services/monitoring/data_source_monitoring_service_mesh_istio.go @@ -1,4 +1,4 @@ -package google +package monitoring import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" diff --git a/mmv1/third_party/terraform/services/networksecurity/network_security_address_group_operation.go.erb b/mmv1/third_party/terraform/services/networksecurity/network_security_address_group_operation.go.erb new file mode 100644 index 000000000000..3c9a27568db8 --- /dev/null +++ b/mmv1/third_party/terraform/services/networksecurity/network_security_address_group_operation.go.erb @@ -0,0 +1,16 @@ +<% autogen_exception -%> +package networksecurity +<% unless version == 'ga' -%> + +import ( + "time" + + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +// NetworkSecurityAddressGroupOperationWaitTime is specific for address group resource because the only difference is that it does not need project param. +func NetworkSecurityAddressGroupOperationWaitTime(config *transport_tpg.Config, op map[string]interface{}, activity, userAgent string, timeout time.Duration) error { + // project is not necessary for this operation. 
+ return NetworkSecurityOperationWaitTime(config, op, "", activity, userAgent, timeout) +} +<% end -%> diff --git a/mmv1/third_party/terraform/data_sources/data_source_certificate_authority.go b/mmv1/third_party/terraform/services/privateca/data_source_certificate_authority.go similarity index 99% rename from mmv1/third_party/terraform/data_sources/data_source_certificate_authority.go rename to mmv1/third_party/terraform/services/privateca/data_source_certificate_authority.go index 7fbf61bf2ad5..1c72eda30b86 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_certificate_authority.go +++ b/mmv1/third_party/terraform/services/privateca/data_source_certificate_authority.go @@ -1,4 +1,4 @@ -package google +package privateca import ( "fmt" diff --git a/mmv1/third_party/terraform/utils/privateca_ca_utils.go b/mmv1/third_party/terraform/services/privateca/privateca_ca_utils.go similarity index 95% rename from mmv1/third_party/terraform/utils/privateca_ca_utils.go rename to mmv1/third_party/terraform/services/privateca/privateca_ca_utils.go index d87d0957534f..e4422659e165 100644 --- a/mmv1/third_party/terraform/utils/privateca_ca_utils.go +++ b/mmv1/third_party/terraform/services/privateca/privateca_ca_utils.go @@ -1,4 +1,4 @@ -package google +package privateca import ( "fmt" @@ -262,3 +262,20 @@ func activateSubCAWithFirstPartyIssuer(config *transport_tpg.Config, d *schema.R } return nil } + +// These setters are used for tests +func (u *PrivatecaCaPoolIamUpdater) SetProject(project string) { + u.project = project +} + +func (u *PrivatecaCaPoolIamUpdater) SetLocation(location string) { + u.location = location +} + +func (u *PrivatecaCaPoolIamUpdater) SetCaPool(caPool string) { + u.caPool = caPool +} + +func (u *PrivatecaCaPoolIamUpdater) SetResourceData(d tpgresource.TerraformResourceData) { + u.d = d +} diff --git a/mmv1/third_party/terraform/utils/privateca_utils.go b/mmv1/third_party/terraform/services/privateca/privateca_utils.go similarity index 99% rename from mmv1/third_party/terraform/utils/privateca_utils.go rename to mmv1/third_party/terraform/services/privateca/privateca_utils.go index 7d30d5a76c5f..51211be49454 100644 --- a/mmv1/third_party/terraform/utils/privateca_utils.go +++ b/mmv1/third_party/terraform/services/privateca/privateca_utils.go @@ -1,4 +1,4 @@ -package google +package privateca import ( "fmt" @@ -368,7 +368,7 @@ func flattenPrivatecaCertificateConfigX509ConfigCaOptionsIsCa(v interface{}, d * func flattenPrivatecaCertificateConfigX509ConfigCaOptionsMaxIssuerPathLength(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { // Handles the string fixed64 format if strVal, ok := v.(string); ok { - if intVal, err := StringToFixed64(strVal); err == nil { + if intVal, err := tpgresource.StringToFixed64(strVal); err == nil { return intVal } } diff --git a/mmv1/third_party/terraform/data_sources/data_source_pubsub_subscription.go b/mmv1/third_party/terraform/services/pubsub/data_source_pubsub_subscription.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_pubsub_subscription.go rename to mmv1/third_party/terraform/services/pubsub/data_source_pubsub_subscription.go index 522279d85dcc..745315ad39fd 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_pubsub_subscription.go +++ b/mmv1/third_party/terraform/services/pubsub/data_source_pubsub_subscription.go @@ -1,4 +1,4 @@ -package google +package pubsub import ( "fmt" diff --git 
a/mmv1/third_party/terraform/data_sources/data_source_pubsub_topic.go b/mmv1/third_party/terraform/services/pubsub/data_source_pubsub_topic.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_pubsub_topic.go rename to mmv1/third_party/terraform/services/pubsub/data_source_pubsub_topic.go index 926672914008..9e0fbb909b11 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_pubsub_topic.go +++ b/mmv1/third_party/terraform/services/pubsub/data_source_pubsub_topic.go @@ -1,4 +1,4 @@ -package google +package pubsub import ( "fmt" diff --git a/mmv1/third_party/terraform/services/pubsub/pubsub_utils.go b/mmv1/third_party/terraform/services/pubsub/pubsub_utils.go new file mode 100644 index 000000000000..9ce2a91a1d4b --- /dev/null +++ b/mmv1/third_party/terraform/services/pubsub/pubsub_utils.go @@ -0,0 +1,24 @@ +package pubsub + +import ( + "fmt" + "regexp" +) + +const PubsubTopicRegex = "projects\\/.*\\/topics\\/.*" + +func GetComputedSubscriptionName(project, subscription string) string { + match, _ := regexp.MatchString("projects\\/.*\\/subscriptions\\/.*", subscription) + if match { + return subscription + } + return fmt.Sprintf("projects/%s/subscriptions/%s", project, subscription) +} + +func GetComputedTopicName(project, topic string) string { + match, _ := regexp.MatchString(PubsubTopicRegex, topic) + if match { + return topic + } + return fmt.Sprintf("projects/%s/topics/%s", project, topic) +} diff --git a/mmv1/third_party/terraform/data_sources/data_source_redis_instance.go b/mmv1/third_party/terraform/services/redis/data_source_redis_instance.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_redis_instance.go rename to mmv1/third_party/terraform/services/redis/data_source_redis_instance.go index d6000df5e4af..b3da20ecba44 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_redis_instance.go +++ b/mmv1/third_party/terraform/services/redis/data_source_redis_instance.go @@ -1,4 +1,4 @@ -package google +package redis import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" diff --git a/mmv1/third_party/terraform/services/redis/resource_redis_instance_internal_test.go b/mmv1/third_party/terraform/services/redis/resource_redis_instance_internal_test.go new file mode 100644 index 000000000000..131ad772f4ee --- /dev/null +++ b/mmv1/third_party/terraform/services/redis/resource_redis_instance_internal_test.go @@ -0,0 +1,93 @@ +package redis + +import ( + "testing" +) + +func TestSecondaryIpDiffSuppress(t *testing.T) { + cases := map[string]struct { + Old, New string + ExpectDiffSuppress bool + }{ + "empty strings": { + Old: "", + New: "", + ExpectDiffSuppress: true, + }, + "auto range": { + Old: "", + New: "auto", + ExpectDiffSuppress: false, + }, + "auto on already applied range": { + Old: "10.0.0.0/28", + New: "auto", + ExpectDiffSuppress: true, + }, + "same ranges": { + Old: "10.0.0.0/28", + New: "10.0.0.0/28", + ExpectDiffSuppress: true, + }, + "different ranges": { + Old: "10.0.0.0/28", + New: "10.1.2.3/28", + ExpectDiffSuppress: false, + }, + } + + for tn, tc := range cases { + if secondaryIpDiffSuppress("whatever", tc.Old, tc.New, nil) != tc.ExpectDiffSuppress { + t.Fatalf("bad: %s, '%s' => '%s' expect %t", tn, tc.Old, tc.New, tc.ExpectDiffSuppress) + } + } +} + +func TestUnitRedisInstance_redisVersionIsDecreasing(t *testing.T) { + t.Parallel() + type testcase struct { + name string + old interface{} + new interface{} + decreasing bool + } + tcs := []testcase{ + { + name: "stays 
the same", + old: "REDIS_4_0", + new: "REDIS_4_0", + decreasing: false, + }, + { + name: "increases", + old: "REDIS_4_0", + new: "REDIS_5_0", + decreasing: false, + }, + { + name: "nil vals", + old: nil, + new: "REDIS_4_0", + decreasing: false, + }, + { + name: "corrupted", + old: "REDIS_4_0", + new: "REDIS_banana", + decreasing: false, + }, + { + name: "decreases", + old: "REDIS_6_0", + new: "REDIS_4_0", + decreasing: true, + }, + } + + for _, tc := range tcs { + decreasing := isRedisVersionDecreasingFunc(tc.old, tc.new) + if decreasing != tc.decreasing { + t.Errorf("%s: expected decreasing to be %v, but was %v", tc.name, tc.decreasing, decreasing) + } + } +} diff --git a/mmv1/third_party/terraform/data_sources/data_source_secret_manager_secret.go b/mmv1/third_party/terraform/services/secretmanager/data_source_secret_manager_secret.go similarity index 97% rename from mmv1/third_party/terraform/data_sources/data_source_secret_manager_secret.go rename to mmv1/third_party/terraform/services/secretmanager/data_source_secret_manager_secret.go index 8b86fa7736dd..6f424b7cd691 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_secret_manager_secret.go +++ b/mmv1/third_party/terraform/services/secretmanager/data_source_secret_manager_secret.go @@ -1,4 +1,4 @@ -package google +package secretmanager import ( "fmt" diff --git a/mmv1/third_party/terraform/services/servicemanagement/serviceman_operation.go b/mmv1/third_party/terraform/services/servicemanagement/serviceman_operation.go new file mode 100644 index 000000000000..5f7a93a735c1 --- /dev/null +++ b/mmv1/third_party/terraform/services/servicemanagement/serviceman_operation.go @@ -0,0 +1,38 @@ +package servicemanagement + +import ( + "fmt" + "time" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "google.golang.org/api/googleapi" + "google.golang.org/api/servicemanagement/v1" +) + +type ServiceManagementOperationWaiter struct { + Service *servicemanagement.APIService + tpgresource.CommonOperationWaiter +} + +func (w *ServiceManagementOperationWaiter) QueryOp() (interface{}, error) { + if w == nil { + return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") + } + return w.Service.Operations.Get(w.Op.Name).Do() +} + +func ServiceManagementOperationWaitTime(config *transport_tpg.Config, op *servicemanagement.Operation, activity, userAgent string, timeout time.Duration) (googleapi.RawMessage, error) { + w := &ServiceManagementOperationWaiter{ + Service: config.NewServiceManClient(userAgent), + } + + if err := w.SetOp(op); err != nil { + return nil, err + } + + if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { + return nil, err + } + return w.Op.Response, nil +} diff --git a/mmv1/third_party/terraform/services/servicenetworking/service_networking_operation.go b/mmv1/third_party/terraform/services/servicenetworking/service_networking_operation.go new file mode 100644 index 000000000000..516b60a79d50 --- /dev/null +++ b/mmv1/third_party/terraform/services/servicenetworking/service_networking_operation.go @@ -0,0 +1,37 @@ +package servicenetworking + +import ( + "time" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "google.golang.org/api/servicenetworking/v1" +) + +type ServiceNetworkingOperationWaiter struct { + Service *servicenetworking.APIService + Project string + 
UserProjectOverride bool + tpgresource.CommonOperationWaiter +} + +func (w *ServiceNetworkingOperationWaiter) QueryOp() (interface{}, error) { + opGetCall := w.Service.Operations.Get(w.Op.Name) + if w.UserProjectOverride { + opGetCall.Header().Add("X-Goog-User-Project", w.Project) + } + return opGetCall.Do() +} + +func ServiceNetworkingOperationWaitTime(config *transport_tpg.Config, op *servicenetworking.Operation, activity, userAgent, project string, timeout time.Duration) error { + w := &ServiceNetworkingOperationWaiter{ + Service: config.NewServiceNetworkingClient(userAgent), + Project: project, + UserProjectOverride: config.UserProjectOverride, + } + + if err := w.SetOp(op); err != nil { + return err + } + return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) +} diff --git a/mmv1/third_party/terraform/utils/serviceusage_operation.go b/mmv1/third_party/terraform/services/serviceusage/serviceusage_operation.go similarity index 54% rename from mmv1/third_party/terraform/utils/serviceusage_operation.go rename to mmv1/third_party/terraform/services/serviceusage/serviceusage_operation.go index d795239cc397..c569c03fe982 100644 --- a/mmv1/third_party/terraform/utils/serviceusage_operation.go +++ b/mmv1/third_party/terraform/services/serviceusage/serviceusage_operation.go @@ -1,15 +1,14 @@ -package google +package serviceusage import ( "encoding/json" "time" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" - "google.golang.org/api/googleapi" "google.golang.org/api/serviceusage/v1" ) -func serviceUsageOperationWait(config *transport_tpg.Config, op *serviceusage.Operation, project, activity, userAgent string, timeout time.Duration) error { +func ServiceUsageOperationWait(config *transport_tpg.Config, op *serviceusage.Operation, project, activity, userAgent string, timeout time.Duration) error { // maintained for compatibility with old code that was written before the // autogenerated waiters. b, err := op.MarshalJSON() @@ -22,18 +21,3 @@ func serviceUsageOperationWait(config *transport_tpg.Config, op *serviceusage.Op } return ServiceUsageOperationWaitTime(config, m, project, activity, userAgent, timeout) } - -func handleServiceUsageRetryableError(err error) error { - if err == nil { - return nil - } - if gerr, ok := err.(*googleapi.Error); ok { - if (gerr.Code == 400 || gerr.Code == 412) && gerr.Message == "Precondition check failed." 
{ - return &googleapi.Error{ - Code: 503, - Message: "api returned \"precondition failed\" while enabling service", - } - } - } - return err -} diff --git a/mmv1/third_party/terraform/data_sources/data_source_sourcerepo_repository.go b/mmv1/third_party/terraform/services/sourcerepo/data_source_sourcerepo_repository.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_sourcerepo_repository.go rename to mmv1/third_party/terraform/services/sourcerepo/data_source_sourcerepo_repository.go index 64bbea41fd1c..5fa2a433c7bf 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_sourcerepo_repository.go +++ b/mmv1/third_party/terraform/services/sourcerepo/data_source_sourcerepo_repository.go @@ -1,4 +1,4 @@ -package google +package sourcerepo import ( "fmt" diff --git a/mmv1/third_party/terraform/utils/source_repo_utils.go b/mmv1/third_party/terraform/services/sourcerepo/source_repo_utils.go similarity index 71% rename from mmv1/third_party/terraform/utils/source_repo_utils.go rename to mmv1/third_party/terraform/services/sourcerepo/source_repo_utils.go index ecb2c7680930..7d779b882125 100644 --- a/mmv1/third_party/terraform/utils/source_repo_utils.go +++ b/mmv1/third_party/terraform/services/sourcerepo/source_repo_utils.go @@ -1,15 +1,16 @@ -package google +package sourcerepo import ( "regexp" + "github.com/hashicorp/terraform-provider-google/google/services/pubsub" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) func expandSourceRepoRepositoryPubsubConfigsTopic(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (string, error) { // short-circuit if the topic is a full uri so we don't need to GetProject - ok, err := regexp.MatchString(PubsubTopicRegex, v.(string)) + ok, err := regexp.MatchString(pubsub.PubsubTopicRegex, v.(string)) if err != nil { return "", err } @@ -23,5 +24,5 @@ func expandSourceRepoRepositoryPubsubConfigsTopic(v interface{}, d tpgresource.T return "", err } - return getComputedTopicName(project, v.(string)), err + return pubsub.GetComputedTopicName(project, v.(string)), err } diff --git a/mmv1/third_party/terraform/data_sources/data_source_spanner_instance.go b/mmv1/third_party/terraform/services/spanner/data_source_spanner_instance.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_spanner_instance.go rename to mmv1/third_party/terraform/services/spanner/data_source_spanner_instance.go index 554952b76d3a..26dff8a17c7d 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_spanner_instance.go +++ b/mmv1/third_party/terraform/services/spanner/data_source_spanner_instance.go @@ -1,4 +1,4 @@ -package google +package spanner import ( "fmt" diff --git a/mmv1/third_party/terraform/services/spanner/resource_spanner_database_internal_test.go b/mmv1/third_party/terraform/services/spanner/resource_spanner_database_internal_test.go new file mode 100644 index 000000000000..4a7800ccf8fe --- /dev/null +++ b/mmv1/third_party/terraform/services/spanner/resource_spanner_database_internal_test.go @@ -0,0 +1,85 @@ +package spanner + +import ( + "testing" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" +) + +// Unit Tests for ForceNew when the change in ddl +func TestSpannerDatabase_resourceSpannerDBDdlCustomDiffFuncForceNew(t *testing.T) { + t.Parallel() + + cases := map[string]struct { + before interface{} + after interface{} + forcenew bool + }{ + 
"remove_old_statements": { + before: []interface{}{ + "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)"}, + after: []interface{}{ + "CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)"}, + forcenew: true, + }, + "append_new_statements": { + before: []interface{}{ + "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)"}, + after: []interface{}{ + "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", + "CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)", + }, + forcenew: false, + }, + "no_change": { + before: []interface{}{ + "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)"}, + after: []interface{}{ + "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)"}, + forcenew: false, + }, + "order_of_statments_change": { + before: []interface{}{ + "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", + "CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)", + "CREATE TABLE t3 (t3 INT64 NOT NULL,) PRIMARY KEY(t3)", + }, + after: []interface{}{ + "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", + "CREATE TABLE t3 (t3 INT64 NOT NULL,) PRIMARY KEY(t3)", + "CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)", + }, + forcenew: true, + }, + "missing_an_old_statement": { + before: []interface{}{ + "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", + "CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)", + "CREATE TABLE t3 (t3 INT64 NOT NULL,) PRIMARY KEY(t3)", + }, + after: []interface{}{ + "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", + "CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)", + }, + forcenew: true, + }, + } + + for tn, tc := range cases { + d := &tpgresource.ResourceDiffMock{ + Before: map[string]interface{}{ + "ddl": tc.before, + }, + After: map[string]interface{}{ + "ddl": tc.after, + }, + } + err := resourceSpannerDBDdlCustomDiffFunc(d) + if err != nil { + t.Errorf("failed, expected no error but received - %s for the condition %s", err, tn) + } + if d.IsForceNew != tc.forcenew { + t.Errorf("ForceNew not setup correctly for the condition-'%s', expected:%v;actual:%v", tn, tc.forcenew, d.IsForceNew) + } + } +} diff --git a/mmv1/third_party/terraform/data_sources/data_source_sql_database.go b/mmv1/third_party/terraform/services/sql/data_source_sql_database.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_sql_database.go rename to mmv1/third_party/terraform/services/sql/data_source_sql_database.go index 192d59a87331..264cde5a1f4f 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_sql_database.go +++ b/mmv1/third_party/terraform/services/sql/data_source_sql_database.go @@ -1,4 +1,4 @@ -package google +package sql import ( "fmt" diff --git a/mmv1/third_party/terraform/data_sources/data_source_sql_databases.go b/mmv1/third_party/terraform/services/sql/data_source_sql_databases.go similarity index 86% rename from mmv1/third_party/terraform/data_sources/data_source_sql_databases.go rename to mmv1/third_party/terraform/services/sql/data_source_sql_databases.go index 2d1473ac7a59..ed4ecc4f4f9d 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_sql_databases.go +++ b/mmv1/third_party/terraform/services/sql/data_source_sql_databases.go @@ -1,4 +1,4 @@ -package google +package sql import ( "fmt" @@ -50,10 +50,14 @@ func dataSourceSqlDatabasesRead(d *schema.ResourceData, meta interface{}) error return err } var databases *sqladmin.DatabasesListResponse - err = transport_tpg.RetryTimeDuration(func() (rerr error) { - databases, rerr = config.NewSqlAdminClient(userAgent).Databases.List(project, 
d.Get("instance").(string)).Do() - return rerr - }, d.Timeout(schema.TimeoutRead), transport_tpg.IsSqlOperationInProgressError) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (rerr error) { + databases, rerr = config.NewSqlAdminClient(userAgent).Databases.List(project, d.Get("instance").(string)).Do() + return rerr + }, + Timeout: d.Timeout(schema.TimeoutRead), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsSqlOperationInProgressError}, + }) if err != nil { return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("Databases in %q instance", d.Get("instance").(string))) diff --git a/mmv1/third_party/terraform/utils/sql_utils.go b/mmv1/third_party/terraform/services/sql/sql_utils.go similarity index 98% rename from mmv1/third_party/terraform/utils/sql_utils.go rename to mmv1/third_party/terraform/services/sql/sql_utils.go index d47d8bfadd15..d56158c7d03f 100644 --- a/mmv1/third_party/terraform/utils/sql_utils.go +++ b/mmv1/third_party/terraform/services/sql/sql_utils.go @@ -1,4 +1,4 @@ -package google +package sql import ( "log" diff --git a/mmv1/third_party/terraform/services/sql/sqladmin_operation.go b/mmv1/third_party/terraform/services/sql/sqladmin_operation.go new file mode 100644 index 000000000000..842b4739b3a1 --- /dev/null +++ b/mmv1/third_party/terraform/services/sql/sqladmin_operation.go @@ -0,0 +1,151 @@ +package sql + +import ( + "bytes" + "fmt" + "log" + "time" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + + sqladmin "google.golang.org/api/sqladmin/v1beta4" +) + +type SqlAdminOperationWaiter struct { + Service *sqladmin.Service + Op *sqladmin.Operation + Project string +} + +func (w *SqlAdminOperationWaiter) State() string { + if w == nil { + return "Operation Waiter is nil!" + } + + if w.Op == nil { + return "Operation is nil!" + } + + return w.Op.Status +} + +func (w *SqlAdminOperationWaiter) Error() error { + if w != nil && w.Op != nil && w.Op.Error != nil { + return SqlAdminOperationError(*w.Op.Error) + } + return nil +} + +func (w *SqlAdminOperationWaiter) IsRetryable(error) bool { + return false +} + +func (w *SqlAdminOperationWaiter) SetOp(op interface{}) error { + if op == nil { + // Starting as a log statement, this may be a useful error in the future + log.Printf("[DEBUG] attempted to set nil op") + } + + sqlOp, ok := op.(*sqladmin.Operation) + w.Op = sqlOp + if !ok { + return fmt.Errorf("Unable to set operation. 
Bad type!") + } + + return nil +} + +func (w *SqlAdminOperationWaiter) QueryOp() (interface{}, error) { + if w == nil { + return nil, fmt.Errorf("Cannot query operation, waiter is unset or nil.") + } + + if w.Op == nil { + return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") + } + + if w.Service == nil { + return nil, fmt.Errorf("Cannot query operation, service is nil.") + } + + var op interface{} + var err error + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + op, err = w.Service.Operations.Get(w.Project, w.Op.Name).Do() + return err + }, + Timeout: transport_tpg.DefaultRequestTimeout, + }) + + return op, err +} + +func (w *SqlAdminOperationWaiter) OpName() string { + if w == nil { + return "" + } + + if w.Op == nil { + return "" + } + + return w.Op.Name +} + +func (w *SqlAdminOperationWaiter) PendingStates() []string { + return []string{"PENDING", "RUNNING"} +} + +func (w *SqlAdminOperationWaiter) TargetStates() []string { + return []string{"DONE"} +} + +func SqlAdminOperationWaitTime(config *transport_tpg.Config, res interface{}, project, activity, userAgent string, timeout time.Duration) error { + op := &sqladmin.Operation{} + err := tpgresource.Convert(res, op) + if err != nil { + return err + } + + w := &SqlAdminOperationWaiter{ + Service: config.NewSqlAdminClient(userAgent), + Op: op, + Project: project, + } + if err := w.SetOp(op); err != nil { + return err + } + return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) +} + +// SqlAdminOperationError wraps sqladmin.OperationError and implements the +// error interface so it can be returned. +type SqlAdminOperationError sqladmin.OperationErrors + +func (e SqlAdminOperationError) Error() string { + var buf bytes.Buffer + + for _, err := range e.Errors { + buf.WriteString(err.Message + "\n") + } + + return buf.String() +} + +// Retry if Cloud SQL operation returns a 429 with a specific message for +// concurrent operations. +func IsSqlInternalError(err error) (bool, string) { + if gerr, ok := err.(*SqlAdminOperationError); ok { + // SqlAdminOperationError is a non-interface type so we need to cast it through + // a layer of interface{}. :) + var ierr interface{} + ierr = gerr + if serr, ok := ierr.(*sqladmin.OperationErrors); ok && serr.Errors[0].Code == "INTERNAL_ERROR" { + return true, "Received an internal error, which is sometimes retryable for some SQL resources. Optimistically retrying." 
+ } + + } + return false, "" +} diff --git a/mmv1/third_party/terraform/services/tags/tags_location_operation.go b/mmv1/third_party/terraform/services/tags/tags_location_operation.go new file mode 100644 index 000000000000..2ae9b733852e --- /dev/null +++ b/mmv1/third_party/terraform/services/tags/tags_location_operation.go @@ -0,0 +1,89 @@ +package tags + +import ( + "encoding/json" + "fmt" + "regexp" + "strings" + "time" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +type TagsLocationOperationWaiter struct { + Config *transport_tpg.Config + UserAgent string + tpgresource.CommonOperationWaiter +} + +func (w *TagsLocationOperationWaiter) QueryOp() (interface{}, error) { + if w == nil { + return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") + } + location := GetLocationFromOpName(w.CommonOperationWaiter.Op.Name) + if location != w.CommonOperationWaiter.Op.Name { + // Found location in Op.Name, fill it in TagsLocationBasePath and rewrite URL + url := fmt.Sprintf("%s%s", strings.Replace(w.Config.TagsLocationBasePath, "{{location}}", location, 1), w.CommonOperationWaiter.Op.Name) + return transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: w.Config, + Method: "GET", + RawURL: url, + UserAgent: w.UserAgent, + }) + } else { + url := fmt.Sprintf("%s%s", w.Config.TagsBasePath, w.CommonOperationWaiter.Op.Name) + return transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: w.Config, + Method: "GET", + RawURL: url, + UserAgent: w.UserAgent, + }) + } +} + +func createTagsLocationWaiter(config *transport_tpg.Config, op map[string]interface{}, activity, userAgent string) (*TagsLocationOperationWaiter, error) { + w := &TagsLocationOperationWaiter{ + Config: config, + UserAgent: userAgent, + } + if err := w.CommonOperationWaiter.SetOp(op); err != nil { + return nil, err + } + return w, nil +} + +func TagsLocationOperationWaitTimeWithResponse(config *transport_tpg.Config, op map[string]interface{}, response *map[string]interface{}, activity, userAgent string, timeout time.Duration) error { + w, err := createTagsLocationWaiter(config, op, activity, userAgent) + if err != nil { + return err + } + if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { + return err + } + return json.Unmarshal([]byte(w.CommonOperationWaiter.Op.Response), response) +} + +func TagsLocationOperationWaitTime(config *transport_tpg.Config, op map[string]interface{}, activity, userAgent string, timeout time.Duration) error { + if val, ok := op["name"]; !ok || val == "" { + // This was a synchronous call - there is no operation to wait for. + return nil + } + w, err := createTagsLocationWaiter(config, op, activity, userAgent) + if err != nil { + // If w is nil, the op was synchronous. 
+ return err + } + return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) +} + +func GetLocationFromOpName(opName string) string { + re := regexp.MustCompile("operations/(?:rctb|rdtb)\\.([a-zA-Z0-9-]*)\\.([0-9]*)") + switch { + case re.MatchString(opName): + if res := re.FindStringSubmatch(opName); len(res) == 3 && res[1] != "" { + return res[1] + } + } + return opName +} diff --git a/mmv1/third_party/terraform/data_sources/data_source_vertex_ai_index.go b/mmv1/third_party/terraform/services/vertexai/data_source_vertex_ai_index.go similarity index 70% rename from mmv1/third_party/terraform/data_sources/data_source_vertex_ai_index.go rename to mmv1/third_party/terraform/services/vertexai/data_source_vertex_ai_index.go index 39a51e6fb817..50e58e12c221 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_vertex_ai_index.go +++ b/mmv1/third_party/terraform/services/vertexai/data_source_vertex_ai_index.go @@ -1,4 +1,4 @@ -package google +package vertexai import ( "fmt" @@ -8,12 +8,12 @@ import ( transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) -func dataSourceVertexAIIndex() *schema.Resource { +func DataSourceVertexAIIndex() *schema.Resource { - dsSchema := datasourceSchemaFromResourceSchema(ResourceVertexAIIndex().Schema) + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceVertexAIIndex().Schema) - addRequiredFieldsToSchema(dsSchema, "name", "region") - addOptionalFieldsToSchema(dsSchema, "project") + tpgresource.AddRequiredFieldsToSchema(dsSchema, "name", "region") + tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") return &schema.Resource{ Read: dataSourceVertexAIIndexRead, diff --git a/mmv1/third_party/terraform/services/vertexai/vertex_ai_operation.go.erb b/mmv1/third_party/terraform/services/vertexai/vertex_ai_operation.go.erb new file mode 100644 index 000000000000..e1287e4d6069 --- /dev/null +++ b/mmv1/third_party/terraform/services/vertexai/vertex_ai_operation.go.erb @@ -0,0 +1,78 @@ +<% autogen_exception -%> +package vertexai + +import ( + "encoding/json" + "fmt" + "time" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +type VertexAIOperationWaiter struct { + Config *transport_tpg.Config + UserAgent string + Project string + tpgresource.CommonOperationWaiter +} + +func (w *VertexAIOperationWaiter) QueryOp() (interface{}, error) { + if w == nil { + return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") + } + + region := tpgresource.GetRegionFromRegionalSelfLink(w.CommonOperationWaiter.Op.Name) + + // Returns the proper get. 
+<% if version == 'ga' -%> + url := fmt.Sprintf("https://%s-aiplatform.googleapis.com/v1/%s", region, w.CommonOperationWaiter.Op.Name) +<% else -%> + url := fmt.Sprintf("https://%s-aiplatform.googleapis.com/v1beta1/%s", region, w.CommonOperationWaiter.Op.Name) +<% end -%> + + return transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: w.Config, + Method: "GET", + Project: w.Project, + RawURL: url, + UserAgent: w.UserAgent, + }) +} + +func createVertexAIWaiter(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string) (*VertexAIOperationWaiter, error) { + w := &VertexAIOperationWaiter{ + Config: config, + UserAgent: userAgent, + Project: project, + } + if err := w.CommonOperationWaiter.SetOp(op); err != nil { + return nil, err + } + return w, nil +} + +// nolint: deadcode,unused +func VertexAIOperationWaitTimeWithResponse(config *transport_tpg.Config, op map[string]interface{}, response *map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { + w, err := createVertexAIWaiter(config, op, project, activity, userAgent) + if err != nil { + return err + } + if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { + return err + } + return json.Unmarshal([]byte(w.CommonOperationWaiter.Op.Response), response) +} + +func VertexAIOperationWaitTime(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { + if val, ok := op["name"]; !ok || val == "" { + // This was a synchronous call - there is no operation to wait for. + return nil + } + w, err := createVertexAIWaiter(config, op, project, activity, userAgent) + if err != nil { + // If w is nil, the op was synchronous. + return err + } + return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) +} diff --git a/mmv1/third_party/terraform/services/vmwareengine/data_source_google_vmwareengine_network.go.erb b/mmv1/third_party/terraform/services/vmwareengine/data_source_google_vmwareengine_network.go.erb new file mode 100644 index 000000000000..dbbdfa4f2fd8 --- /dev/null +++ b/mmv1/third_party/terraform/services/vmwareengine/data_source_google_vmwareengine_network.go.erb @@ -0,0 +1,35 @@ +<% autogen_exception -%> +package vmwareengine +<% unless version == 'ga' -%> +import ( + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceVmwareengineNetwork() *schema.Resource { + + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceVmwareengineNetwork().Schema) + tpgresource.AddRequiredFieldsToSchema(dsSchema, "location", "name") + tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") + + return &schema.Resource{ + Read: dataSourceVmwareengineNetworkRead, + Schema: dsSchema, + } +} + +func dataSourceVmwareengineNetworkRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + + // Store the ID now + id, err := tpgresource.ReplaceVars(d, config, "projects/{{project}}/locations/{{location}}/vmwareEngineNetworks/{{name}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + return resourceVmwareengineNetworkRead(d, meta) +} +<% end -%> \ No newline at end of file diff --git a/mmv1/third_party/terraform/data_sources/data_source_vpc_access_connector.go 
b/mmv1/third_party/terraform/services/vpcaccess/data_source_vpc_access_connector.go similarity index 98% rename from mmv1/third_party/terraform/data_sources/data_source_vpc_access_connector.go rename to mmv1/third_party/terraform/services/vpcaccess/data_source_vpc_access_connector.go index 61e4cdafa28f..7caa852d8ee7 100644 --- a/mmv1/third_party/terraform/data_sources/data_source_vpc_access_connector.go +++ b/mmv1/third_party/terraform/services/vpcaccess/data_source_vpc_access_connector.go @@ -1,4 +1,4 @@ -package google +package vpcaccess import ( "fmt" diff --git a/mmv1/third_party/terraform/tests/data_source_google_billing_account_test.go b/mmv1/third_party/terraform/tests/data_source_google_billing_account_test.go index a65c552aa839..d8184218265f 100644 --- a/mmv1/third_party/terraform/tests/data_source_google_billing_account_test.go +++ b/mmv1/third_party/terraform/tests/data_source_google_billing_account_test.go @@ -85,6 +85,7 @@ func testAccCheckGoogleBillingAccount_byName(name string) string { return fmt.Sprintf(` data "google_billing_account" "acct" { billing_account = "%s" + lookup_projects = false } `, name) } diff --git a/mmv1/third_party/terraform/tests/data_source_google_iam_testable_permissions_test.go b/mmv1/third_party/terraform/tests/data_source_google_iam_testable_permissions_test.go index 03b483ada03a..a4cfa04637e8 100644 --- a/mmv1/third_party/terraform/tests/data_source_google_iam_testable_permissions_test.go +++ b/mmv1/third_party/terraform/tests/data_source_google_iam_testable_permissions_test.go @@ -6,6 +6,7 @@ import ( "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" @@ -133,7 +134,7 @@ func testAccCheckGoogleIamTestablePermissionsMeta(project string, n string, expe for s := 0; s < len(expectedStages); s++ { stageKey := "permissions." + strconv.Itoa(i) + ".stage" supportKey := "permissions." 
+ strconv.Itoa(i) + ".custom_support_level" - if stringInSlice(expectedStages, attrs[stageKey]) { + if tpgresource.StringInSlice(expectedStages, attrs[stageKey]) { foundStageCounter -= 1 } if attrs[supportKey] == expectedSupportLevel { diff --git a/mmv1/third_party/terraform/tests/data_source_google_kms_secret_test.go b/mmv1/third_party/terraform/tests/data_source_google_kms_secret_test.go index 4bf76357addd..359d79d64aae 100644 --- a/mmv1/third_party/terraform/tests/data_source_google_kms_secret_test.go +++ b/mmv1/third_party/terraform/tests/data_source_google_kms_secret_test.go @@ -9,6 +9,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/kms" "google.golang.org/api/cloudkms/v1" ) @@ -85,7 +86,7 @@ func TestAccKmsSecret_basic(t *testing.T) { }) } -func testAccEncryptSecretDataWithCryptoKey(t *testing.T, s *terraform.State, cryptoKeyResourceName, plaintext, aad string) (string, *KmsCryptoKeyId, error) { +func testAccEncryptSecretDataWithCryptoKey(t *testing.T, s *terraform.State, cryptoKeyResourceName, plaintext, aad string) (string, *kms.KmsCryptoKeyId, error) { config := GoogleProviderConfig(t) rs, ok := s.RootModule().Resources[cryptoKeyResourceName] @@ -93,7 +94,7 @@ func testAccEncryptSecretDataWithCryptoKey(t *testing.T, s *terraform.State, cry return "", nil, fmt.Errorf("Resource not found: %s", cryptoKeyResourceName) } - cryptoKeyId, err := ParseKmsCryptoKeyId(rs.Primary.Attributes["id"], config) + cryptoKeyId, err := kms.ParseKmsCryptoKeyId(rs.Primary.Attributes["id"], config) if err != nil { return "", nil, err diff --git a/mmv1/third_party/terraform/tests/data_source_google_logging_project_cmek_settings_test.go b/mmv1/third_party/terraform/tests/data_source_google_logging_project_cmek_settings_test.go index 67c7dd33c376..9e3641e8a075 100644 --- a/mmv1/third_party/terraform/tests/data_source_google_logging_project_cmek_settings_test.go +++ b/mmv1/third_party/terraform/tests/data_source_google_logging_project_cmek_settings_test.go @@ -45,8 +45,13 @@ resource "google_project" "default" { billing_account = "%{billing_account}" } +resource "google_project_service" "logging_service" { + project = google_project.default.project_id + service = "logging.googleapis.com" +} + data "google_logging_project_cmek_settings" "cmek_settings" { - project = google_project.default.name + project = google_project_service.logging_service.project } `, context) } diff --git a/mmv1/third_party/terraform/tests/data_source_google_vmwareengine_network_test.go.erb b/mmv1/third_party/terraform/tests/data_source_google_vmwareengine_network_test.go.erb new file mode 100644 index 000000000000..e35c2be1ffa2 --- /dev/null +++ b/mmv1/third_party/terraform/tests/data_source_google_vmwareengine_network_test.go.erb @@ -0,0 +1,86 @@ +<% autogen_exception -%> +package google +<% unless version == 'ga' -%> + +import ( + "github.com/hashicorp/terraform-provider-google/google/acctest" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccDataSourceVmwareEngineNetwork_basic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "region": acctest.GetTestRegionFromEnv(), + "random_suffix": RandString(t, 10), + "organization": acctest.GetTestOrgFromEnv(t), + "billing_account": acctest.GetTestBillingAccountFromEnv(t), + } + + VcrTest(t, resource.TestCase{ + 
PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckVmwareengineNetworkDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccDataSourceVmwareEngineNetworkConfig(context), + Check: resource.ComposeTestCheckFunc( + acctest.CheckDataSourceStateMatchesResourceStateWithIgnores("data.google_vmwareengine_network.ds", "google_vmwareengine_network.nw", map[string]struct{}{}), + ), + }, + }, + }) +} + +func testAccDataSourceVmwareEngineNetworkConfig(context map[string]interface{}) string { + return Nprintf(` + # there can be only 1 Legacy network per region for a given project, so creating new project to isolate tests. +resource "google_project" "acceptance" { + name = "tf-test-%{random_suffix}" + provider = google-beta + project_id = "tf-test-%{random_suffix}" + org_id = "%{organization}" + billing_account = "%{billing_account}" +} + +resource "google_project_service" "acceptance" { + project = google_project.acceptance.project_id + provider = google-beta + service = "vmwareengine.googleapis.com" + + # Needed for CI tests for permissions to propagate, should not be needed for actual usage + depends_on = [time_sleep.wait_60_seconds] +} + +resource "time_sleep" "wait_60_seconds" { + depends_on = [google_project.acceptance] + + create_duration = "60s" +} + +resource "google_vmwareengine_network" "nw" { + project = google_project_service.acceptance.project + name = "%{region}-default" #Legacy network IDs are in the format: {region-id}-default + provider = google-beta + location = "%{region}" + type = "LEGACY" + description = "VMwareEngine legacy network sample" +} + +data "google_vmwareengine_network" "ds" { + name = google_vmwareengine_network.nw.name + project = google_project_service.acceptance.project + provider = google-beta + location = "%{region}" + depends_on = [ + google_vmwareengine_network.nw, + ] +} +`, context) +} +<% end -%> \ No newline at end of file diff --git a/mmv1/third_party/terraform/tests/resource_access_context_manager_access_policy_test.go.erb b/mmv1/third_party/terraform/tests/resource_access_context_manager_access_policy_test.go.erb index dcda4cefc190..6f846e77042d 100644 --- a/mmv1/third_party/terraform/tests/resource_access_context_manager_access_policy_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_access_context_manager_access_policy_test.go.erb @@ -92,21 +92,21 @@ func testSweepAccessContextManagerPolicies(region string) error { // can exist, they need to be run serially func TestAccAccessContextManager(t *testing.T) { testCases := map[string]func(t *testing.T){ - "access_policy": testAccAccessContextManagerAccessPolicy_basicTest, - "access_policy_scoped": testAccAccessContextManagerAccessPolicy_scopedTest, - "service_perimeter": testAccAccessContextManagerServicePerimeter_basicTest, - "service_perimeter_update": testAccAccessContextManagerServicePerimeter_updateTest, - "service_perimeter_resource": testAccAccessContextManagerServicePerimeterResource_basicTest, - "access_level": testAccAccessContextManagerAccessLevel_basicTest, - "access_level_full": testAccAccessContextManagerAccessLevel_fullTest, - "access_level_custom": testAccAccessContextManagerAccessLevel_customTest, - "access_levels": testAccAccessContextManagerAccessLevels_basicTest, - "access_level_condition": testAccAccessContextManagerAccessLevelCondition_basicTest, - "egress_policy": 
testAccAccessContextManagerEgressPolicy_basicTest, - "ingress_policy": testAccAccessContextManagerIngressPolicy_basicTest, - "service_perimeters": testAccAccessContextManagerServicePerimeters_basicTest, - "gcp_user_access_binding": testAccAccessContextManagerGcpUserAccessBinding_basicTest, - "authorized_orgs_desc": testAccAccessContextManagerAuthorizedOrgsDesc_basicTest, + "access_policy": testAccAccessContextManagerAccessPolicy_basicTest, + "access_policy_scoped": testAccAccessContextManagerAccessPolicy_scopedTest, + "service_perimeter": testAccAccessContextManagerServicePerimeter_basicTest, + "service_perimeter_update": testAccAccessContextManagerServicePerimeter_updateTest, + "service_perimeter_resource": testAccAccessContextManagerServicePerimeterResource_basicTest, + "access_level": testAccAccessContextManagerAccessLevel_basicTest, + "access_level_full": testAccAccessContextManagerAccessLevel_fullTest, + "access_level_custom": testAccAccessContextManagerAccessLevel_customTest, + "access_levels": testAccAccessContextManagerAccessLevels_basicTest, + "access_level_condition": testAccAccessContextManagerAccessLevelCondition_basicTest, + "service_perimeter_egress_policy": testAccAccessContextManagerServicePerimeterEgressPolicy_basicTest, + "service_perimeter_ingress_policy": testAccAccessContextManagerServicePerimeterIngressPolicy_basicTest, + "service_perimeters": testAccAccessContextManagerServicePerimeters_basicTest, + "gcp_user_access_binding": testAccAccessContextManagerGcpUserAccessBinding_basicTest, + "authorized_orgs_desc": testAccAccessContextManagerAuthorizedOrgsDesc_basicTest, } for name, tc := range testCases { diff --git a/mmv1/third_party/terraform/tests/resource_access_context_manager_egress_policy_test.go b/mmv1/third_party/terraform/tests/resource_access_context_manager_service_perimeter_egress_policy_test.go similarity index 57% rename from mmv1/third_party/terraform/tests/resource_access_context_manager_egress_policy_test.go rename to mmv1/third_party/terraform/tests/resource_access_context_manager_service_perimeter_egress_policy_test.go index cdc11ad7f5ab..9611d4e4e09a 100644 --- a/mmv1/third_party/terraform/tests/resource_access_context_manager_egress_policy_test.go +++ b/mmv1/third_party/terraform/tests/resource_access_context_manager_service_perimeter_egress_policy_test.go @@ -14,11 +14,11 @@ import ( // Since each test here is acting on the same organization and only one AccessPolicy // can exist, they need to be run serially. See AccessPolicy for the test runner. 
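+// Provisions an access policy and perimeter, attaches standalone service perimeter egress policy resources to it, and verifies the perimeter's egress policies are cleared once those resources are destroyed.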
-func testAccAccessContextManagerEgressPolicy_basicTest(t *testing.T) { +func testAccAccessContextManagerServicePerimeterEgressPolicy_basicTest(t *testing.T) { // Multiple fine-grained resources acctest.SkipIfVcr(t) org := acctest.GetTestOrgFromEnv(t) - projects := BootstrapServicePerimeterProjects(t, 1) + //projects := BootstrapServicePerimeterProjects(t, 1) policyTitle := RandString(t, 10) perimeterTitle := "perimeter" @@ -27,31 +27,31 @@ func testAccAccessContextManagerEgressPolicy_basicTest(t *testing.T) { ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { - Config: testAccAccessContextManagerEgressPolicy_basic(org, policyTitle, perimeterTitle, projects[0].ProjectNumber), + Config: testAccAccessContextManagerServicePerimeterEgressPolicy_basic(org, policyTitle, perimeterTitle), }, { - ResourceName: "google_access_context_manager_egress_policy.test-access1", + ResourceName: "google_access_context_manager_service_perimeter.test-access", ImportState: true, ImportStateVerify: true, }, { - Config: testAccAccessContextManagerEgressPolicy_destroy(org, policyTitle, perimeterTitle), - Check: testAccCheckAccessContextManagerEgressPolicyDestroyProducer(t), + Config: testAccAccessContextManagerServicePerimeterEgressPolicy_destroy(org, policyTitle, perimeterTitle), + Check: testAccCheckAccessContextManagerServicePerimeterEgressPolicyDestroyProducer(t), }, }, }) } -func testAccCheckAccessContextManagerEgressPolicyDestroyProducer(t *testing.T) func(s *terraform.State) error { +func testAccCheckAccessContextManagerServicePerimeterEgressPolicyDestroyProducer(t *testing.T) func(s *terraform.State) error { return func(s *terraform.State) error { for _, rs := range s.RootModule().Resources { - if rs.Type != "google_access_context_manager_egress_policy" { + if rs.Type != "google_access_context_manager_service_perimeter_egress_policy" { continue } config := GoogleProviderConfig(t) - url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{AccessContextManagerBasePath}}{{egress_policy_name}}") + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{AccessContextManagerBasePath}}{{perimeter}}") if err != nil { return err } @@ -72,7 +72,7 @@ func testAccCheckAccessContextManagerEgressPolicyDestroyProducer(t *testing.T) f } res = v.(map[string]interface{}) - v, ok = res["resources"] + v, ok = res["egress_policies"] if !ok || v == nil { return nil } @@ -89,19 +89,37 @@ func testAccCheckAccessContextManagerEgressPolicyDestroyProducer(t *testing.T) f } } -func testAccAccessContextManagerEgressPolicy_basic(org, policyTitle, perimeterTitleName string, projectNumber1 int64) string { +func testAccAccessContextManagerServicePerimeterEgressPolicy_basic(org, policyTitle, perimeterTitleName string) string { return fmt.Sprintf(` %s -resource "google_access_context_manager_egress_policy" "test-access1" { - egress_policy_name = google_access_context_manager_service_perimeter.test-access.name - resource = "projects/%d" +resource "google_access_context_manager_service_perimeter_egress_policy" "test-access1" { + perimeter = google_access_context_manager_service_perimeter.test-access.name + egress_from { + identity_type = "ANY_USER_ACCOUNT" + } + egress_to { + operations { + service_name = "storage.googleapis.com" + method_selectors { + method = "*" + } + } + } + +} + +resource "google_access_context_manager_service_perimeter_egress_policy" "test-access2" { + perimeter = google_access_context_manager_service_perimeter.test-access.name + egress_from { + identity_type = "ANY_USER_ACCOUNT" + } 
} -`, testAccAccessContextManagerEgressPolicy_destroy(org, policyTitle, perimeterTitleName), projectNumber1) +`, testAccAccessContextManagerServicePerimeterEgressPolicy_destroy(org, policyTitle, perimeterTitleName)) } -func testAccAccessContextManagerEgressPolicy_destroy(org, policyTitle, perimeterTitleName string) string { +func testAccAccessContextManagerServicePerimeterEgressPolicy_destroy(org, policyTitle, perimeterTitleName string) string { return fmt.Sprintf(` resource "google_access_context_manager_access_policy" "test-access" { parent = "organizations/%s" @@ -114,15 +132,10 @@ resource "google_access_context_manager_service_perimeter" "test-access" { title = "%s" status { restricted_services = ["storage.googleapis.com"] - egress_policies { - egress_from { - identity_type = "ANY_USER_ACCOUNT" - } - } } lifecycle { - ignore_changes = [status[0].resources] + ignore_changes = [status[0].egress_policies] } } `, org, policyTitle, perimeterTitleName, perimeterTitleName) diff --git a/mmv1/third_party/terraform/tests/resource_access_context_manager_ingress_policy_test.go b/mmv1/third_party/terraform/tests/resource_access_context_manager_service_perimeter_ingress_policy_test.go similarity index 54% rename from mmv1/third_party/terraform/tests/resource_access_context_manager_ingress_policy_test.go rename to mmv1/third_party/terraform/tests/resource_access_context_manager_service_perimeter_ingress_policy_test.go index 0f14aa1bc3b4..411f640ac9c7 100644 --- a/mmv1/third_party/terraform/tests/resource_access_context_manager_ingress_policy_test.go +++ b/mmv1/third_party/terraform/tests/resource_access_context_manager_service_perimeter_ingress_policy_test.go @@ -14,11 +14,11 @@ import ( // Since each test here is acting on the same organization and only one AccessPolicy // can exist, they need to be run serially. See AccessPolicy for the test runner. 
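+// Same pattern as the egress test above: standalone service perimeter ingress policy resources are attached to a perimeter, and the destroy step checks that they are removed from it.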
-func testAccAccessContextManagerIngressPolicy_basicTest(t *testing.T) { +func testAccAccessContextManagerServicePerimeterIngressPolicy_basicTest(t *testing.T) { // Multiple fine-grained resources acctest.SkipIfVcr(t) org := acctest.GetTestOrgFromEnv(t) - projects := BootstrapServicePerimeterProjects(t, 1) + //projects := BootstrapServicePerimeterProjects(t, 1) policyTitle := RandString(t, 10) perimeterTitle := "perimeter" @@ -27,31 +27,31 @@ func testAccAccessContextManagerIngressPolicy_basicTest(t *testing.T) { ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { - Config: testAccAccessContextManagerIngressPolicy_basic(org, policyTitle, perimeterTitle, projects[0].ProjectNumber), + Config: testAccAccessContextManagerServicePerimeterIngressPolicy_basic(org, policyTitle, perimeterTitle), }, { - ResourceName: "google_access_context_manager_ingress_policy.test-access1", + ResourceName: "google_access_context_manager_service_perimeter.test-access", ImportState: true, ImportStateVerify: true, }, { - Config: testAccAccessContextManagerIngressPolicy_destroy(org, policyTitle, perimeterTitle), - Check: testAccCheckAccessContextManagerIngressPolicyDestroyProducer(t), + Config: testAccAccessContextManagerServicePerimeterIngressPolicy_destroy(org, policyTitle, perimeterTitle), + Check: testAccCheckAccessContextManagerServicePerimeterIngressPolicyDestroyProducer(t), }, }, }) } -func testAccCheckAccessContextManagerIngressPolicyDestroyProducer(t *testing.T) func(s *terraform.State) error { +func testAccCheckAccessContextManagerServicePerimeterIngressPolicyDestroyProducer(t *testing.T) func(s *terraform.State) error { return func(s *terraform.State) error { for _, rs := range s.RootModule().Resources { - if rs.Type != "google_access_context_manager_ingress_policy" { + if rs.Type != "google_access_context_manager_service_perimeter_ingress_policy" { continue } config := GoogleProviderConfig(t) - url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{AccessContextManagerBasePath}}{{ingress_policy_name}}") + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{AccessContextManagerBasePath}}{{perimeter}}") if err != nil { return err } @@ -72,7 +72,7 @@ func testAccCheckAccessContextManagerIngressPolicyDestroyProducer(t *testing.T) } res = v.(map[string]interface{}) - v, ok = res["resources"] + v, ok = res["perimeter"] if !ok || v == nil { return nil } @@ -89,19 +89,54 @@ func testAccCheckAccessContextManagerIngressPolicyDestroyProducer(t *testing.T) } } -func testAccAccessContextManagerIngressPolicy_basic(org, policyTitle, perimeterTitleName string, projectNumber1 int64) string { +func testAccAccessContextManagerServicePerimeterIngressPolicy_basic(org, policyTitle, perimeterTitleName string) string { return fmt.Sprintf(` %s -resource "google_access_context_manager_ingress_policy" "test-access1" { - ingress_policy_name = google_access_context_manager_service_perimeter.test-access.name - resource = "projects/%d" +resource "google_access_context_manager_service_perimeter_ingress_policy" "test-access1" { + perimeter = google_access_context_manager_service_perimeter.test-access.name + ingress_from { + identity_type = "ANY_IDENTITY" + } + ingress_to { + resources = [ "*" ] + operations { + service_name = "bigquery.googleapis.com" + + method_selectors { + method = "BigQueryStorage.ReadRows" + } + + method_selectors { + method = "TableService.ListTables" + } + + method_selectors { + permission = "bigquery.jobs.get" + } + } + + operations { + service_name = 
"storage.googleapis.com" + + method_selectors { + method = "google.storage.objects.create" + } + } + } } -`, testAccAccessContextManagerIngressPolicy_destroy(org, policyTitle, perimeterTitleName), projectNumber1) +resource "google_access_context_manager_service_perimeter_ingress_policy" "test-access2" { + perimeter = google_access_context_manager_service_perimeter.test-access.name + ingress_from { + identity_type = "ANY_IDENTITY" + } } -func testAccAccessContextManagerIngressPolicy_destroy(org, policyTitle, perimeterTitleName string) string { +`, testAccAccessContextManagerServicePerimeterIngressPolicy_destroy(org, policyTitle, perimeterTitleName)) +} + +func testAccAccessContextManagerServicePerimeterIngressPolicy_destroy(org, policyTitle, perimeterTitleName string) string { return fmt.Sprintf(` resource "google_access_context_manager_access_policy" "test-access" { parent = "organizations/%s" @@ -114,42 +149,10 @@ resource "google_access_context_manager_service_perimeter" "test-access" { title = "%s" status { restricted_services = ["storage.googleapis.com"] - ingress_policies { - ingress_from { - identity_type = "ANY_IDENTITY" - } - - ingress_to { - resources = [ "*" ] - operations { - service_name = "bigquery.googleapis.com" - - method_selectors { - method = "BigQueryStorage.ReadRows" - } - - method_selectors { - method = "TableService.ListTables" - } - - method_selectors { - permission = "bigquery.jobs.get" - } - } - - operations { - service_name = "storage.googleapis.com" - - method_selectors { - method = "google.storage.objects.create" - } - } - } - } } lifecycle { - ignore_changes = [status[0].resources] + ignore_changes = [status[0].ingress_policies] } } `, org, policyTitle, perimeterTitleName, perimeterTitleName) diff --git a/mmv1/third_party/terraform/tests/resource_alloydb_backup_test.go b/mmv1/third_party/terraform/tests/resource_alloydb_backup_test.go index 0ed9350b7ee5..2a7a4fb5f307 100644 --- a/mmv1/third_party/terraform/tests/resource_alloydb_backup_test.go +++ b/mmv1/third_party/terraform/tests/resource_alloydb_backup_test.go @@ -1,7 +1,6 @@ package google import ( - "regexp" "testing" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" @@ -93,72 +92,6 @@ data "google_compute_network" "default" { `, context) } -// We expect an error when creating an on-demand backup without location. -// Location is a `required` field. 
-func TestAccAlloydbBackup_missingLocation(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": RandString(t, 10), - } - - VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckAlloydbBackupDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccAlloydbBackup_missingLocation(context), - ExpectError: regexp.MustCompile("Missing required argument"), - }, - }, - }) -} - -func testAccAlloydbBackup_missingLocation(context map[string]interface{}) string { - return Nprintf(` -resource "google_alloydb_backup" "default" { - backup_id = "tf-test-alloydb-backup%{random_suffix}" - cluster_name = google_alloydb_cluster.default.name - depends_on = [google_alloydb_instance.default] -} - -resource "google_alloydb_cluster" "default" { - location = "us-central1" - cluster_id = "tf-test-alloydb-cluster%{random_suffix}" - network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" -} - -data "google_project" "project" { } - -resource "google_compute_network" "default" { - name = "tf-test-alloydb-cluster%{random_suffix}" -} - -resource "google_alloydb_instance" "default" { - cluster = google_alloydb_cluster.default.name - instance_id = "tf-test-alloydb-instance%{random_suffix}" - instance_type = "PRIMARY" - - depends_on = [google_service_networking_connection.vpc_connection] -} - -resource "google_compute_global_address" "private_ip_alloc" { - name = "tf-test-alloydb-cluster%{random_suffix}" - address_type = "INTERNAL" - purpose = "VPC_PEERING" - prefix_length = 16 - network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" -} - -resource "google_service_networking_connection" "vpc_connection" { - network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" - service = "servicenetworking.googleapis.com" - reserved_peering_ranges = [google_compute_global_address.private_ip_alloc.name] -} -`, context) -} - // Test to create on-demand backup with mandatory fields. func TestAccAlloydbBackup_createBackupWithMandatoryFields(t *testing.T) { t.Parallel() @@ -238,7 +171,7 @@ func TestAccAlloydbBackup_usingCMEK(t *testing.T) { t.Parallel() context := map[string]interface{}{ - "network_name": BootstrapSharedTestNetwork(t, "alloydb-update"), + "network_name": BootstrapSharedTestNetwork(t, "alloydb-cmek"), "random_suffix": RandString(t, 10), "key_name": "tf-test-key-" + RandString(t, 10), } diff --git a/mmv1/third_party/terraform/tests/resource_alloydb_cluster_test.go b/mmv1/third_party/terraform/tests/resource_alloydb_cluster_test.go index 737c0addb6ff..664f4ef2e2c2 100644 --- a/mmv1/third_party/terraform/tests/resource_alloydb_cluster_test.go +++ b/mmv1/third_party/terraform/tests/resource_alloydb_cluster_test.go @@ -1,7 +1,6 @@ package google import ( - "regexp" "testing" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" @@ -220,43 +219,6 @@ resource "google_compute_network" "default" { `, context) } -// We expect an error when creating a cluster without location. -// Location is a `required` field. 
-func TestAccAlloydbCluster_missingLocation(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": RandString(t, 10), - } - - VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckAlloydbClusterDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccAlloydbCluster_missingLocation(context), - ExpectError: regexp.MustCompile("Missing required argument"), - }, - }, - }) -} - -func testAccAlloydbCluster_missingLocation(context map[string]interface{}) string { - return Nprintf(` -resource "google_alloydb_cluster" "default" { - cluster_id = "tf-test-alloydb-cluster%{random_suffix}" - network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" -} - -data "google_project" "project" { } - -resource "google_compute_network" "default" { - name = "tf-test-alloydb-cluster%{random_suffix}" -} -`, context) -} - // The cluster creation should work fine even without a weekly schedule. func TestAccAlloydbCluster_missingWeeklySchedule(t *testing.T) { t.Parallel() diff --git a/mmv1/third_party/terraform/tests/resource_apigee_instance_test.go b/mmv1/third_party/terraform/tests/resource_apigee_instance_test.go index e5cb47ce8f55..15b89ff21592 100644 --- a/mmv1/third_party/terraform/tests/resource_apigee_instance_test.go +++ b/mmv1/third_party/terraform/tests/resource_apigee_instance_test.go @@ -3,6 +3,7 @@ package google import ( "testing" + "github.com/hashicorp/terraform-provider-google/google/services/apigee" "github.com/hashicorp/terraform-provider-google/google/tpgresource" ) @@ -105,7 +106,7 @@ func (tc *ApigeeInstanceDiffSuppressTestCase) Test(t *testing.T) { } for key := range keysHavingDiff { - actual := ProjectListDiffSuppressFunc(mockResourceDiff) + actual := apigee.ProjectListDiffSuppressFunc(mockResourceDiff) if actual != keySuppressionMap[key] { t.Errorf("Test %s: expected key `%s` to be suppressed", tc.Name, key) } diff --git a/mmv1/third_party/terraform/tests/resource_apigee_sharedflow_deployment_test.go b/mmv1/third_party/terraform/tests/resource_apigee_sharedflow_deployment_test.go index c550302b668f..06840b8f15a2 100644 --- a/mmv1/third_party/terraform/tests/resource_apigee_sharedflow_deployment_test.go +++ b/mmv1/third_party/terraform/tests/resource_apigee_sharedflow_deployment_test.go @@ -29,7 +29,16 @@ func TestAccApigeeSharedflowDeployment_apigeeSharedflowDeploymentTestExample(t * CheckDestroy: testAccCheckApigeeSharedflowDeploymentDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccApigeeSharedflowDeployment_apigeeSharedflowDeploymentTestExample(context), + Config: testAccApigeeSharedflowDeployment_apigeeSharedflowDeploymentTestExample(context, "./test-fixtures/apigee/apigee_sharedflow_bundle.zip"), + }, + { + ResourceName: "google_apigee_sharedflow_deployment.sharedflow_deployment_test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{}, + }, + { + Config: testAccApigeeSharedflowDeployment_apigeeSharedflowDeploymentTestExample(context, "./test-fixtures/apigee/apigee_sharedflow_bundle2.zip"), }, { ResourceName: "google_apigee_sharedflow_deployment.sharedflow_deployment_test", @@ -41,7 +50,9 @@ func TestAccApigeeSharedflowDeployment_apigeeSharedflowDeploymentTestExample(t * }) } -func testAccApigeeSharedflowDeployment_apigeeSharedflowDeploymentTestExample(context map[string]interface{}) string { +func 
testAccApigeeSharedflowDeployment_apigeeSharedflowDeploymentTestExample(context map[string]interface{}, configBundle string) string { + context["config_bundle"] = configBundle + return Nprintf(` resource "google_project" "project" { project_id = "tf-test%{random_suffix}" @@ -109,7 +120,7 @@ resource "google_apigee_environment" "apigee_environment" { resource "google_apigee_sharedflow" "test_apigee_sharedflow" { name = "tf-test-apigee-sharedflow" org_id = google_project.project.project_id - config_bundle = "./test-fixtures/apigee/apigee_sharedflow_bundle.zip" + config_bundle = "%{config_bundle}" depends_on = [google_apigee_organization.apigee_org] } diff --git a/mmv1/third_party/terraform/tests/resource_bigquery_data_transfer_config_test.go b/mmv1/third_party/terraform/tests/resource_bigquery_data_transfer_config_test.go index fdf3a8f2c1fb..08614391a7d4 100644 --- a/mmv1/third_party/terraform/tests/resource_bigquery_data_transfer_config_test.go +++ b/mmv1/third_party/terraform/tests/resource_bigquery_data_transfer_config_test.go @@ -9,6 +9,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/bigquerydatatransfer" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) @@ -141,7 +142,7 @@ func TestBigqueryDataTransferConfig_resourceBigqueryDTCParamsCustomDiffFuncForce "data_source_id": tc.after["data_source_id"], }, } - err := ParamsCustomizeDiffFunc(d) + err := bigquerydatatransfer.ParamsCustomizeDiffFunc(d) if err != nil { t.Errorf("failed, expected no error but received - %s for the condition %s", err, tn) } diff --git a/mmv1/third_party/terraform/tests/resource_billing_budget_test.go b/mmv1/third_party/terraform/tests/resource_billing_budget_test.go index d8dbe963a9c4..d09d51dd9ad7 100644 --- a/mmv1/third_party/terraform/tests/resource_billing_budget_test.go +++ b/mmv1/third_party/terraform/tests/resource_billing_budget_test.go @@ -6,6 +6,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/billing" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) @@ -523,7 +524,7 @@ func TestBillingBudgetStateUpgradeV0(t *testing.T) { } for tn, tc := range cases { t.Run(tn, func(t *testing.T) { - actual, err := ResourceBillingBudgetUpgradeV0(context.Background(), tc.Attributes, tc.Meta) + actual, err := billing.ResourceBillingBudgetUpgradeV0(context.Background(), tc.Attributes, tc.Meta) if err != nil { t.Error(err) diff --git a/mmv1/third_party/terraform/tests/resource_binary_authorization_attestor_test.go.erb b/mmv1/third_party/terraform/tests/resource_binary_authorization_attestor_test.go.erb index 6ae5001465d0..2a3c49bf07a5 100644 --- a/mmv1/third_party/terraform/tests/resource_binary_authorization_attestor_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_binary_authorization_attestor_test.go.erb @@ -5,7 +5,7 @@ import ( "fmt" "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" - + "github.com/hashicorp/terraform-provider-google/google/services/binaryauthorization" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" ) @@ -47,7 +47,7 @@ func 
TestSignatureAlgorithmDiffSuppress(t *testing.T) { } for tn, tc := range cases { - if CompareSignatureAlgorithm("signature_algorithm", tc.Old, tc.New, nil) != tc.ExpectDiffSuppress { + if binaryauthorization.CompareSignatureAlgorithm("signature_algorithm", tc.Old, tc.New, nil) != tc.ExpectDiffSuppress { t.Errorf("bad: %s, %q => %q expect DiffSuppress to return %t", tn, tc.Old, tc.New, tc.ExpectDiffSuppress) } } diff --git a/mmv1/third_party/terraform/tests/resource_binary_authorization_policy_test.go.erb b/mmv1/third_party/terraform/tests/resource_binary_authorization_policy_test.go.erb index 403f2ab41c86..d48cd49f509c 100644 --- a/mmv1/third_party/terraform/tests/resource_binary_authorization_policy_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_binary_authorization_policy_test.go.erb @@ -4,11 +4,13 @@ package google import ( "fmt" "reflect" - "github.com/hashicorp/terraform-provider-google/google/acctest" "testing" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/binaryauthorization" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) @@ -176,7 +178,7 @@ func testAccCheckBinaryAuthorizationPolicyDefault(t *testing.T, pid string) reso delete(pol, "updateTime") delete(pol, "etag") - defaultPol := DefaultBinaryAuthorizationPolicy(pid) + defaultPol := binaryauthorization.DefaultBinaryAuthorizationPolicy(pid) if !reflect.DeepEqual(pol, defaultPol) { return fmt.Errorf("Policy for project %s was %v, expected default policy %v", pid, pol, defaultPol) } diff --git a/mmv1/third_party/terraform/tests/resource_cloud_run_domain_mapping_test.go b/mmv1/third_party/terraform/tests/resource_cloud_run_domain_mapping_test.go index ec8693113920..b46e096f0f80 100644 --- a/mmv1/third_party/terraform/tests/resource_cloud_run_domain_mapping_test.go +++ b/mmv1/third_party/terraform/tests/resource_cloud_run_domain_mapping_test.go @@ -7,61 +7,6 @@ import ( "github.com/hashicorp/terraform-provider-google/google/acctest" ) -func TestDomainMappingLabelDiffSuppress(t *testing.T) { - cases := map[string]struct { - K, Old, New string - ExpectDiffSuppress bool - }{ - "missing run.googleapis.com/overrideAt": { - K: "metadata.0.labels.run.googleapis.com/overrideAt", - Old: "2021-04-20T22:38:23.584Z", - New: "", - ExpectDiffSuppress: true, - }, - "explicit run.googleapis.com/overrideAt": { - K: "metadata.0.labels.run.googleapis.com/overrideAt", - Old: "2021-04-20T22:38:23.584Z", - New: "2022-04-20T22:38:23.584Z", - ExpectDiffSuppress: false, - }, - "missing cloud.googleapis.com/location": { - K: "metadata.0.labels.cloud.googleapis.com/location", - Old: "us-central1", - New: "", - ExpectDiffSuppress: true, - }, - "explicit cloud.googleapis.com/location": { - K: "metadata.0.labels.cloud.googleapis.com/location", - Old: "us-central1", - New: "us-central2", - ExpectDiffSuppress: false, - }, - "labels.%": { - K: "metadata.0.labels.%", - Old: "3", - New: "1", - ExpectDiffSuppress: true, - }, - "deleted custom key": { - K: "metadata.0.labels.my-label", - Old: "my-value", - New: "", - ExpectDiffSuppress: false, - }, - "added custom key": { - K: "metadata.0.labels.my-label", - Old: "", - New: "my-value", - ExpectDiffSuppress: false, - }, - } - for tn, tc := range cases { - if DomainMappingLabelDiffSuppress(tc.K, tc.Old, tc.New, nil) != tc.ExpectDiffSuppress { - t.Errorf("bad: %s, %q: %q => 
%q expect DiffSuppress to return %t", tn, tc.K, tc.Old, tc.New, tc.ExpectDiffSuppress) - } - } -} - // Destroy and recreate the mapping, testing that Terraform doesn't return a 409 func TestAccCloudRunDomainMapping_foregroundDeletion(t *testing.T) { t.Parallel() diff --git a/mmv1/third_party/terraform/tests/resource_cloud_run_service_test.go.erb b/mmv1/third_party/terraform/tests/resource_cloud_run_service_test.go.erb index 72aac61c7b36..cab5d0f0b1b3 100644 --- a/mmv1/third_party/terraform/tests/resource_cloud_run_service_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_cloud_run_service_test.go.erb @@ -9,39 +9,6 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" ) -func TestCloudrunAnnotationDiffSuppress(t *testing.T) { - cases := map[string]struct { - K, Old, New string - ExpectDiffSuppress bool - }{ - "missing run.googleapis.com/operation-id": { - K: "metadata.0.annotations.run.googleapis.com/operation-id", - Old: "12345abc", - New: "", - ExpectDiffSuppress: true, - }, - "missing run.googleapis.com/ingress": { - K: "metadata.0.annotations.run.googleapis.com/ingress", - Old: "all", - New: "", - ExpectDiffSuppress: true, - }, - "explicit run.googleapis.com/ingress": { - K: "metadata.0.annotations.run.googleapis.com/ingress", - Old: "all", - New: "internal", - ExpectDiffSuppress: false, - }, - } - for tn, tc := range cases { - t.Run(tn, func(t *testing.T) { - if got := cloudrunAnnotationDiffSuppress(tc.K, tc.Old, tc.New, nil); got != tc.ExpectDiffSuppress { - t.Errorf("got %t; want %t", got, tc.ExpectDiffSuppress) - } - }) - } -} - func TestAccCloudRunService_cloudRunServiceUpdate(t *testing.T) { t.Parallel() diff --git a/mmv1/third_party/terraform/tests/resource_cloud_run_v2_service_test.go b/mmv1/third_party/terraform/tests/resource_cloud_run_v2_service_test.go index e4c2a0521a79..e4544cdd0380 100644 --- a/mmv1/third_party/terraform/tests/resource_cloud_run_v2_service_test.go +++ b/mmv1/third_party/terraform/tests/resource_cloud_run_v2_service_test.go @@ -8,6 +8,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/cloudrunv2" ) func TestAccCloudRunV2Service_cloudrunv2ServiceFullUpdate(t *testing.T) { @@ -345,7 +346,7 @@ func testAccCheckCloudRunV2ServiceDestroyByNameProducer(t *testing.T, serviceNam t.Errorf("Error while deleting the Cloud Run service: %s", err) return } - err = runAdminV2OperationWaitTime(config, op, config.Project, "Waiting for Cloud Run service to be deleted", config.UserAgent, 5*time.Minute) + err = cloudrunv2.RunAdminV2OperationWaitTime(config, op, config.Project, "Waiting for Cloud Run service to be deleted", config.UserAgent, 5*time.Minute) if err != nil { t.Errorf("Error while waiting for Cloud Run service delete operation to complete: %s", err.Error()) } diff --git a/mmv1/third_party/terraform/tests/resource_cloud_scheduler_job_test.go b/mmv1/third_party/terraform/tests/resource_cloud_scheduler_job_test.go index b1a65d2f1ed2..32e252495a57 100644 --- a/mmv1/third_party/terraform/tests/resource_cloud_scheduler_job_test.go +++ b/mmv1/third_party/terraform/tests/resource_cloud_scheduler_job_test.go @@ -1,102 +1,12 @@ package google import ( - "reflect" "testing" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-provider-google/google/acctest" - transport_tpg 
"github.com/hashicorp/terraform-provider-google/google/transport" ) -func TestCloudScheduler_FlattenHttpHeaders(t *testing.T) { - - cases := []struct { - Input map[string]interface{} - Output map[string]interface{} - }{ - // simple, no headers included - { - Input: map[string]interface{}{ - "My-Header": "my-header-value", - }, - Output: map[string]interface{}{ - "My-Header": "my-header-value", - }, - }, - - // include the User-Agent header value Google-Cloud-Scheduler - // Tests Removing User-Agent header - { - Input: map[string]interface{}{ - "User-Agent": "Google-Cloud-Scheduler", - "My-Header": "my-header-value", - }, - Output: map[string]interface{}{ - "My-Header": "my-header-value", - }, - }, - - // include the User-Agent header - // Tests removing value AppEngine-Google; (+http://code.google.com/appengine) - { - Input: map[string]interface{}{ - "User-Agent": "My-User-Agent AppEngine-Google; (+http://code.google.com/appengine)", - "My-Header": "my-header-value", - }, - Output: map[string]interface{}{ - "User-Agent": "My-User-Agent", - "My-Header": "my-header-value", - }, - }, - - // include the Content-Type header value application/octet-stream. - // Tests Removing Content-Type header - { - Input: map[string]interface{}{ - "Content-Type": "application/octet-stream", - "My-Header": "my-header-value", - }, - Output: map[string]interface{}{ - "My-Header": "my-header-value", - }, - }, - - // include the Content-Length header - // Tests Removing Content-Length header - { - Input: map[string]interface{}{ - "Content-Length": 7, - "My-Header": "my-header-value", - }, - Output: map[string]interface{}{ - "My-Header": "my-header-value", - }, - }, - - // include the X-Google- header - // Tests Removing X-Google- header - { - Input: map[string]interface{}{ - "X-Google-My-Header": "x-google-my-header-value", - "My-Header": "my-header-value", - }, - Output: map[string]interface{}{ - "My-Header": "my-header-value", - }, - }, - } - - for _, c := range cases { - d := &schema.ResourceData{} - output := flattenCloudSchedulerJobAppEngineHttpTargetHeaders(c.Input, d, &transport_tpg.Config{}) - if !reflect.DeepEqual(output, c.Output) { - t.Fatalf("Error matching output and expected: %#v vs %#v", output, c.Output) - } - } -} - func TestAccCloudSchedulerJob_schedulerPausedExample(t *testing.T) { t.Parallel() diff --git a/mmv1/third_party/terraform/tests/resource_cloudiot_device_registry_id_test.go b/mmv1/third_party/terraform/tests/resource_cloudiot_device_registry_id_test.go index 6b821aac6c75..be868abe72fb 100644 --- a/mmv1/third_party/terraform/tests/resource_cloudiot_device_registry_id_test.go +++ b/mmv1/third_party/terraform/tests/resource_cloudiot_device_registry_id_test.go @@ -4,6 +4,7 @@ import ( "strings" "testing" + "github.com/hashicorp/terraform-provider-google/google/services/cloudiot" "github.com/hashicorp/terraform-provider-google/google/verify" ) @@ -25,7 +26,7 @@ func TestValidateCloudIoTDeviceRegistryId(t *testing.T) { {TestName: "too long", Value: strings.Repeat("f", 260), ExpectError: true}, } - es := verify.TestStringValidationCases(x, ValidateCloudIotDeviceRegistryID) + es := verify.TestStringValidationCases(x, cloudiot.ValidateCloudIotDeviceRegistryID) if len(es) > 0 { t.Errorf("Failed to validate CloudIoT ID names: %v", es) } diff --git a/mmv1/third_party/terraform/tests/resource_compute_disk_async_replication_test.go.erb b/mmv1/third_party/terraform/tests/resource_compute_disk_async_replication_test.go.erb index 10efd814ee98..16e06a9cb75f 100644 --- 
a/mmv1/third_party/terraform/tests/resource_compute_disk_async_replication_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_compute_disk_async_replication_test.go.erb @@ -7,13 +7,15 @@ import ( "testing" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" ) func TestAccComputeDiskAsyncReplication(t *testing.T) { t.Parallel() region := GetTestRegionFromEnv() - if !stringInSlice([]string{"europe-west2", "europe-west1", "us-central1", "us-east1", "us-west1", "us-east4", "asia-east1", "australia-southeast1"}, region) { + if !tpgresource.StringInSlice([]string{"europe-west2", "europe-west1", "us-central1", "us-east1", "us-west1", "us-east4", "asia-east1", "australia-southeast1"}, region) { return } secondaryRegion := region diff --git a/mmv1/third_party/terraform/tests/resource_compute_disk_test.go.erb b/mmv1/third_party/terraform/tests/resource_compute_disk_test.go.erb index bc09e6b7d265..7909e73c90bd 100644 --- a/mmv1/third_party/terraform/tests/resource_compute_disk_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_compute_disk_test.go.erb @@ -9,6 +9,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" "github.com/hashicorp/terraform-provider-google/google/acctest" + tpgcompute "github.com/hashicorp/terraform-provider-google/google/services/compute" <% if version == "ga" -%> "google.golang.org/api/compute/v1" @@ -289,7 +290,7 @@ func TestDiskImageDiffSuppress(t *testing.T) { tc := tc t.Run(tn, func(t *testing.T) { t.Parallel() - if DiskImageDiffSuppress("image", tc.Old, tc.New, nil) != tc.ExpectDiffSuppress { + if tpgcompute.DiskImageDiffSuppress("image", tc.Old, tc.New, nil) != tc.ExpectDiffSuppress { t.Fatalf("%q => %q expect DiffSuppress to return %t", tc.Old, tc.New, tc.ExpectDiffSuppress) } }) @@ -306,7 +307,7 @@ func TestAccComputeDisk_imageDiffSuppressPublicVendorsFamilyNames(t *testing.T) config := getInitializedConfig(t) - for _, publicImageProject := range imageMap { + for _, publicImageProject := range tpgcompute.ImageMap { token := "" for paginate := true; paginate; { resp, err := config.NewComputeClient(config.UserAgent).Images.List(publicImageProject).Filter("deprecated.replacement ne .*images.*").PageToken(token).Do() @@ -315,7 +316,7 @@ func TestAccComputeDisk_imageDiffSuppressPublicVendorsFamilyNames(t *testing.T) } for _, image := range resp.Items { - if !DiskImageDiffSuppress("image", image.SelfLink, "family/"+image.Family, nil) { + if !tpgcompute.DiskImageDiffSuppress("image", image.SelfLink, "family/"+image.Family, nil) { t.Errorf("should suppress diff for image %q and family %q", image.SelfLink, image.Family) } } @@ -353,6 +354,60 @@ func TestAccComputeDisk_update(t *testing.T) { }, }) } +func TestAccComputeDisk_pdHyperDiskProvisionedIopsLifeCycle(t *testing.T) { + t.Parallel() + + context_1 := map[string]interface{}{ + "random_suffix": RandString(t, 10), + "provisioned_iops": 10000, + "disk_size": 64, + "lifecycle_bool": true, + } + context_2 := map[string]interface{}{ + "random_suffix": context_1["random_suffix"], + "provisioned_iops": 11000, + "disk_size": 64, + "lifecycle_bool": true, + } + context_3 := map[string]interface{}{ + "random_suffix": context_1["random_suffix"], + "provisioned_iops": 11000, + "disk_size": 64, + "lifecycle_bool": false, + } + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: 
ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeDiskDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeDisk_pdHyperDiskProvisionedIopsLifeCycle(context_1), + }, + { + ResourceName: "google_compute_disk.foobar", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeDisk_pdHyperDiskProvisionedIopsLifeCycle(context_2), + }, + { + ResourceName: "google_compute_disk.foobar", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeDisk_pdHyperDiskProvisionedIopsLifeCycle(context_3), + }, + { + ResourceName: "google_compute_disk.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} func TestAccComputeDisk_fromSnapshot(t *testing.T) { t.Parallel() @@ -919,6 +974,21 @@ resource "google_compute_instance_group_manager" "manager" { `, diskName, mgrName) } +func testAccComputeDisk_pdHyperDiskProvisionedIopsLifeCycle(context map[string]interface{}) string { + return Nprintf(` + resource "google_compute_disk" "foobar" { + name = "tf-test-hyperdisk-%{random_suffix}" + type = "hyperdisk-extreme" + provisioned_iops = %{provisioned_iops} + size = %{disk_size} + lifecycle { + prevent_destroy = %{lifecycle_bool} + } + } +`, context) +} + + func testAccComputeDisk_pdExtremeImplicitProvisionedIops(diskName string) string { return fmt.Sprintf(` resource "google_compute_disk" "foobar" { diff --git a/mmv1/third_party/terraform/tests/resource_compute_forwarding_rule_test.go.erb b/mmv1/third_party/terraform/tests/resource_compute_forwarding_rule_test.go.erb index 15d0490e6b17..c940610be671 100644 --- a/mmv1/third_party/terraform/tests/resource_compute_forwarding_rule_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_compute_forwarding_rule_test.go.erb @@ -161,6 +161,7 @@ func TestAccComputeForwardingRule_serviceDirectoryRegistrations(t *testing.T) { }, }) } +<% end -%> func TestAccComputeForwardingRule_forwardingRuleVpcPscExampleUpdate(t *testing.T) { t.Parallel() @@ -171,7 +172,7 @@ func TestAccComputeForwardingRule_forwardingRuleVpcPscExampleUpdate(t *testing.T VcrTest(t, resource.TestCase{ PreCheck: func() { AccTestPreCheck(t) }, - ProtoV5ProviderFactories: ProtoV5ProviderBetaFactories(t), + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), CheckDestroy: testAccCheckComputeForwardingRuleDestroyProducer(t), Steps: []resource.TestStep{ resource.TestStep{ @@ -193,7 +194,6 @@ func TestAccComputeForwardingRule_forwardingRuleVpcPscExampleUpdate(t *testing.T }, }) } -<% end -%> func TestAccComputeForwardingRule_forwardingRuleRegionalSteeringExampleUpdate(t *testing.T) { t.Parallel() @@ -547,12 +547,12 @@ resource "google_service_directory_service" "examplesvc" { } `, poolName, ruleName, svcDirNamespace, serviceName) } +<% end -%> func testAccComputeForwardingRule_forwardingRuleVpcPscExampleUpdate(context map[string]interface{}) string { return Nprintf(` // Forwarding rule for VPC private service connect resource "google_compute_forwarding_rule" "default" { - provider = google-beta name = "tf-test-psc-endpoint%{random_suffix}" region = "us-central1" load_balancing_scheme = "" @@ -565,13 +565,11 @@ resource "google_compute_forwarding_rule" "default" { // Consumer service endpoint resource "google_compute_network" "consumer_net" { - provider = google-beta name = "tf-test-consumer-net%{random_suffix}" auto_create_subnetworks = false } resource "google_compute_subnetwork" "consumer_subnet" { - provider = google-beta name = "tf-test-consumer-net%{random_suffix}" ip_cidr_range = "10.0.0.0/16" region 
= "us-central1" @@ -579,7 +577,6 @@ resource "google_compute_subnetwork" "consumer_subnet" { } resource "google_compute_address" "consumer_address" { - provider = google-beta name = "tf-test-website-ip%{random_suffix}-1" region = "us-central1" subnetwork = google_compute_subnetwork.consumer_subnet.id @@ -590,13 +587,11 @@ resource "google_compute_address" "consumer_address" { // Producer service attachment resource "google_compute_network" "producer_net" { - provider = google-beta name = "tf-test-producer-net%{random_suffix}" auto_create_subnetworks = false } resource "google_compute_subnetwork" "producer_subnet" { - provider = google-beta name = "tf-test-producer-net%{random_suffix}" ip_cidr_range = "10.0.0.0/16" region = "us-central1" @@ -604,7 +599,6 @@ resource "google_compute_subnetwork" "producer_subnet" { } resource "google_compute_subnetwork" "psc_producer_subnet" { - provider = google-beta name = "tf-test-producer-psc-net%{random_suffix}" ip_cidr_range = "10.1.0.0/16" region = "us-central1" @@ -614,7 +608,6 @@ resource "google_compute_subnetwork" "psc_producer_subnet" { } resource "google_compute_service_attachment" "producer_service_attachment" { - provider = google-beta name = "tf-test-producer-service%{random_suffix}" region = "us-central1" description = "A service attachment configured with Terraform" @@ -626,7 +619,6 @@ resource "google_compute_service_attachment" "producer_service_attachment" { } resource "google_compute_forwarding_rule" "producer_target_service" { - provider = google-beta name = "tf-test-producer-forwarding-rule%{random_suffix}" region = "us-central1" @@ -638,7 +630,6 @@ resource "google_compute_forwarding_rule" "producer_target_service" { } resource "google_compute_region_backend_service" "producer_service_backend" { - provider = google-beta name = "tf-test-producer-service-backend%{random_suffix}" region = "us-central1" @@ -646,7 +637,6 @@ resource "google_compute_region_backend_service" "producer_service_backend" { } resource "google_compute_health_check" "producer_service_health_check" { - provider = google-beta name = "tf-test-producer-service-health-check%{random_suffix}" check_interval_sec = 1 @@ -657,7 +647,6 @@ resource "google_compute_health_check" "producer_service_health_check" { } `, context) } -<% end -%> func testAccComputeForwardingRule_forwardingRuleRegionalSteeringExampleUpdate(context map[string]interface{}) string { return Nprintf(` diff --git a/mmv1/third_party/terraform/tests/resource_compute_image_test.go.erb b/mmv1/third_party/terraform/tests/resource_compute_image_test.go.erb index 8353ee976957..ae1c60785da2 100644 --- a/mmv1/third_party/terraform/tests/resource_compute_image_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_compute_image_test.go.erb @@ -6,6 +6,7 @@ import ( "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" + tpgcompute "github.com/hashicorp/terraform-provider-google/google/services/compute" "github.com/hashicorp/terraform-provider-google/google/tpgresource" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" @@ -293,7 +294,7 @@ func testAccCheckComputeImageResolution(t *testing.T, n string) resource.TestChe } for input, expectation := range images { - result, err := resolveImage(config, project, input, config.UserAgent) + result, err := tpgcompute.ResolveImage(config, project, input, config.UserAgent) if err != nil { return fmt.Errorf("Error resolving input %s to image: %+v\n", input, err) } diff --git 
a/mmv1/third_party/terraform/tests/resource_compute_instance_group_manager_test.go.erb b/mmv1/third_party/terraform/tests/resource_compute_instance_group_manager_test.go.erb index 6825cce6e146..4117b0854189 100644 --- a/mmv1/third_party/terraform/tests/resource_compute_instance_group_manager_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_compute_instance_group_manager_test.go.erb @@ -70,79 +70,6 @@ func testSweepComputeInstanceGroupManager(region string) error { return nil } -func TestInstanceGroupManager_parseUniqueId(t *testing.T) { - expectations:= map[string][]string { - "projects/imre-test/global/instanceTemplates/example-template-custom?uniqueId=123": []string{"projects/imre-test/global/instanceTemplates/example-template-custom", "123"}, - "https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom?uniqueId=123": []string{"https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom", "123"}, - "projects/imre-test/global/instanceTemplates/example-template-custom": []string{"projects/imre-test/global/instanceTemplates/example-template-custom", ""}, - "https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom": []string{"https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom", ""}, - "example-template-custom?uniqueId=123": []string{"example-template-custom", "123"}, - - // this test demonstrates that uniqueIds can't override eachother - "projects/imre-test/global/instanceTemplates/example?uniqueId=123?uniqueId=456": []string{"projects/imre-test/global/instanceTemplates/example", "123?uniqueId=456"}, - } - - for k, v:= range expectations { - aName, aUniqueId := parseUniqueId(k) - if v[0] != aName { - t.Errorf("parseUniqueId failed; name of %v should be %v, not %v", k, v[0], aName) - } - if v[1] != aUniqueId { - t.Errorf("parseUniqueId failed; uniqueId of %v should be %v, not %v", k, v[1], aUniqueId) - } - } -} - -func TestInstanceGroupManager_compareInstanceTemplate(t *testing.T) { - shouldAllMatch := []string { - // uniqueId not present - "https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom", - "projects/imre-test/global/instanceTemplates/example-template-custom", - // uniqueId present - "https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom?uniqueId=123", - "projects/imre-test/global/instanceTemplates/example-template-custom?uniqueId=123", - } - shouldNotMatch := map[string]string { - // mismatching name - "https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom": "projects/imre-test/global/instanceTemplates/example-template-custom2", - "projects/imre-test/global/instanceTemplates/example-template-custom": "https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom2", - // matching name, but mismatching uniqueId - "https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom?uniqueId=123": "projects/imre-test/global/instanceTemplates/example-template-custom?uniqueId=1234", - "projects/imre-test/global/instanceTemplates/example-template-custom?uniqueId=123": "https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom?uniqueId=1234", - } - for _, v1 := range shouldAllMatch { - for _, v2:= range 
shouldAllMatch { - if !compareSelfLinkRelativePathsIgnoreParams("", v1, v2, nil) { - t.Fatalf("compareSelfLinkRelativePathsIgnoreParams did not match (and should have) %v and %v", v1, v2) - } - } - } - - for v1, v2 := range shouldNotMatch { - if compareSelfLinkRelativePathsIgnoreParams("", v1, v2, nil) { - t.Fatalf("compareSelfLinkRelativePathsIgnoreParams did match (and shouldn't) %v and %v", v1, v2) - } - } -} - -func TestInstanceGroupManager_convertUniqueId(t *testing.T) { - matches:= map[string]string { - // uniqueId not present (should return the same) - "https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom": "https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom", - "projects/imre-test/global/instanceTemplates/example-template-custom": "projects/imre-test/global/instanceTemplates/example-template-custom", - // uniqueId present (should return the last component replaced) - "https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/example-template-custom?uniqueId=123": "https://www.googleapis.com/compute/v1/projects/imre-test/global/instanceTemplates/123", - "projects/imre-test/global/instanceTemplates/example-template-custom?uniqueId=123": "projects/imre-test/global/instanceTemplates/123", - "tf-test-igm-8amncgtq22?uniqueId=8361222501423044003": "8361222501423044003", - } - for input, expected := range matches { - actual:= ConvertToUniqueIdWhenPresent(input) - if actual != expected { - t.Fatalf("invalid return value by ConvertToUniqueIdWhenPresent for input %v; expected: %v, actual: %v", input, expected, actual) - } - } -} - func TestAccInstanceGroupManager_basic(t *testing.T) { t.Parallel() diff --git a/mmv1/third_party/terraform/tests/resource_compute_instance_migrate_test.go.erb b/mmv1/third_party/terraform/tests/resource_compute_instance_migrate_test.go.erb index dbfb7e88182f..077ff361fc41 100644 --- a/mmv1/third_party/terraform/tests/resource_compute_instance_migrate_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_compute_instance_migrate_test.go.erb @@ -11,6 +11,7 @@ import ( "time" "github.com/hashicorp/terraform-provider-google/google/acctest" + tpgcompute "github.com/hashicorp/terraform-provider-google/google/services/compute" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" @@ -137,7 +138,7 @@ func TestAccComputeInstanceMigrateState_empty(t *testing.T) { var meta interface{} // should handle nil - is, err := resourceComputeInstanceMigrateState(0, is, meta) + is, err := tpgcompute.ResourceComputeInstanceMigrateState(0, is, meta) if err != nil { t.Fatalf("err: %#v", err) @@ -148,7 +149,7 @@ func TestAccComputeInstanceMigrateState_empty(t *testing.T) { // should handle non-nil but empty is = &terraform.InstanceState{} - _, err = resourceComputeInstanceMigrateState(0, is, meta) + _, err = tpgcompute.ResourceComputeInstanceMigrateState(0, is, meta) if err != nil { t.Fatalf("err: %#v", err) @@ -868,7 +869,7 @@ func runInstanceMigrateTest(t *testing.T, id, testName string, version int, attr ID: id, Attributes: attributes, } - _, err := resourceComputeInstanceMigrateState(version, is, meta) + _, err := tpgcompute.ResourceComputeInstanceMigrateState(version, is, meta) if err != nil { t.Fatal(err) } diff --git a/mmv1/third_party/terraform/tests/resource_compute_instance_template_test.go.erb 
b/mmv1/third_party/terraform/tests/resource_compute_instance_template_test.go.erb index 41203b976dd3..941a48d04be0 100644 --- a/mmv1/third_party/terraform/tests/resource_compute_instance_template_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_compute_instance_template_test.go.erb @@ -4,7 +4,9 @@ package google import ( "fmt" +<% unless version == "ga" -%> "reflect" +<% end -%> "regexp" "strconv" "strings" @@ -25,218 +27,6 @@ import ( const DEFAULT_MIN_CPU_TEST_VALUE = "Intel Haswell" -func TestComputeInstanceTemplate_reorderDisks(t *testing.T) { - t.Parallel() - - cBoot := map[string]interface{}{ - "source": "boot-source", - } - cFallThrough := map[string]interface{}{ - "auto_delete": true, - } - cDeviceName := map[string]interface{}{ - "device_name": "disk-1", - } - cScratch := map[string]interface{}{ - "type": "SCRATCH", - } - cSource := map[string]interface{}{ - "source": "disk-source", - } - cScratchNvme := map[string]interface{}{ - "type": "SCRATCH", - "interface": "NVME", - } - - aBoot := map[string]interface{}{ - "source": "boot-source", - "boot": true, - } - aScratchNvme := map[string]interface{}{ - "device_name": "scratch-1", - "type": "SCRATCH", - "interface": "NVME", - } - aSource := map[string]interface{}{ - "device_name": "disk-2", - "source": "disk-source", - } - aScratchScsi := map[string]interface{}{ - "device_name": "scratch-2", - "type": "SCRATCH", - "interface": "SCSI", - } - aFallThrough := map[string]interface{}{ - "device_name": "disk-3", - "auto_delete": true, - "source": "fake-source", - } - aFallThrough2 := map[string]interface{}{ - "device_name": "disk-4", - "auto_delete": true, - "source": "fake-source", - } - aDeviceName := map[string]interface{}{ - "device_name": "disk-1", - "auto_delete": true, - "source": "fake-source-2", - } - aNoMatch := map[string]interface{}{ - "device_name": "disk-2", - "source": "disk-source-doesn't-match", - } - - cases := map[string]struct { - ConfigDisks []interface{} - ApiDisks []map[string]interface{} - ExpectedResult []map[string]interface{} - }{ - "all disks represented": { - ApiDisks: []map[string]interface{}{ - aBoot, aScratchNvme, aSource, aScratchScsi, aFallThrough, aDeviceName, - }, - ConfigDisks: []interface{}{ - cBoot, cFallThrough, cDeviceName, cScratch, cSource, cScratchNvme, - }, - ExpectedResult: []map[string]interface{}{ - aBoot, aFallThrough, aDeviceName, aScratchScsi, aSource, aScratchNvme, - }, - }, - "one non-match": { - ApiDisks: []map[string]interface{}{ - aBoot, aNoMatch, aScratchNvme, aScratchScsi, aFallThrough, aDeviceName, - }, - ConfigDisks: []interface{}{ - cBoot, cFallThrough, cDeviceName, cScratch, cSource, cScratchNvme, - }, - ExpectedResult: []map[string]interface{}{ - aBoot, aFallThrough, aDeviceName, aScratchScsi, aScratchNvme, aNoMatch, - }, - }, - "two fallthroughs": { - ApiDisks: []map[string]interface{}{ - aBoot, aScratchNvme, aFallThrough, aSource, aScratchScsi, aFallThrough2, aDeviceName, - }, - ConfigDisks: []interface{}{ - cBoot, cFallThrough, cDeviceName, cScratch, cFallThrough, cSource, cScratchNvme, - }, - ExpectedResult: []map[string]interface{}{ - aBoot, aFallThrough, aDeviceName, aScratchScsi, aFallThrough2, aSource, aScratchNvme, - }, - }, - } - - for tn, tc := range cases { - t.Run(tn, func(t *testing.T) { - // Disks read using d.Get will always have values for all keys, so set those values - for _, disk := range tc.ConfigDisks { - d := disk.(map[string]interface{}) - for _, k := range []string{"auto_delete", "boot"} { - if _, ok := d[k]; !ok { - d[k] = false - } - } - 
for _, k := range []string{"device_name", "disk_name", "interface", "mode", "source", "type"} { - if _, ok := d[k]; !ok { - d[k] = "" - } - } - } - - // flattened disks always set auto_delete, boot, device_name, interface, mode, source, and type - for _, d := range tc.ApiDisks { - for _, k := range []string{"auto_delete", "boot"} { - if _, ok := d[k]; !ok { - d[k] = false - } - } - - for _, k := range []string{"device_name", "interface", "mode", "source"} { - if _, ok := d[k]; !ok { - d[k] = "" - } - } - if _, ok := d["type"]; !ok { - d["type"] = "PERSISTENT" - } - } - - result := reorderDisks(tc.ConfigDisks, tc.ApiDisks) - if !reflect.DeepEqual(tc.ExpectedResult, result) { - t.Errorf("reordering did not match\nExpected: %+v\nActual: %+v", tc.ExpectedResult, result) - } - }) - } -} - -func TestComputeInstanceTemplate_scratchDiskSizeCustomizeDiff(t *testing.T) { - t.Parallel() - - cases := map[string]struct { - Typee string // misspelled on purpose, type is a special symbol - DiskType string - DiskSize int - Interfacee string - ExpectError bool - }{ - "scratch disk correct size 1": { - Typee: "SCRATCH", - DiskType: "local-ssd", - DiskSize: 375, - Interfacee: "NVME", - ExpectError: false, - }, - "scratch disk correct size 2": { - Typee: "SCRATCH", - DiskType: "local-ssd", - DiskSize: 3000, - Interfacee: "NVME", - ExpectError: false, - }, - "scratch disk incorrect size": { - Typee: "SCRATCH", - DiskType: "local-ssd", - DiskSize: 300, - Interfacee: "NVME", - ExpectError: true, - }, - "scratch disk incorrect interface": { - Typee: "SCRATCH", - DiskType: "local-ssd", - DiskSize: 3000, - Interfacee: "SCSI", - ExpectError: true, - }, - "non-scratch disk": { - Typee: "PERSISTENT", - DiskType: "", - DiskSize: 300, - Interfacee: "NVME", - ExpectError: false, - }, - - } - - for tn, tc := range cases { - d := &tpgresource.ResourceDiffMock{ - After: map[string]interface{}{ - "disk.#": 1, - "disk.0.type": tc.Typee, - "disk.0.disk_type": tc.DiskType, - "disk.0.disk_size_gb": tc.DiskSize, - "disk.0.interface": tc.Interfacee, - }, - } - err := resourceComputeInstanceTemplateScratchDiskCustomizeDiffFunc(d) - if tc.ExpectError && err == nil { - t.Errorf("%s failed, expected error but was none", tn) - } - if !tc.ExpectError && err != nil { - t.Errorf("%s failed, found unexpected error: %s", tn, err) - } - } -} - func TestAccComputeInstanceTemplate_basic(t *testing.T) { t.Parallel() diff --git a/mmv1/third_party/terraform/tests/resource_compute_instance_test.go.erb b/mmv1/third_party/terraform/tests/resource_compute_instance_test.go.erb index 7f70cb42b5d3..ca598ed72b2c 100644 --- a/mmv1/third_party/terraform/tests/resource_compute_instance_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_compute_instance_test.go.erb @@ -2378,135 +2378,6 @@ func TestAccComputeInstance_spotVM_maxRunDuration_update(t *testing.T) { } <% end -%> - -func TestComputeInstance_networkIPCustomizedDiff(t *testing.T) { - t.Parallel() - - d := &tpgresource.ResourceDiffMock{ - Before: map[string]interface{}{ - "network_interface.#": 0, - }, - After: map[string]interface{}{ - "network_interface.#": 1, - }, - } - - err := forceNewIfNetworkIPNotUpdatableFunc(d) - if err != nil { - t.Error(err) - } - - if d.IsForceNew { - t.Errorf("Expected not force new if network_interface array size changes") - } - - type NetworkInterface struct { - Network string - Subnetwork string - SubnetworkProject string - NetworkIP string - } - NIBefore := NetworkInterface{ - Network: "a", - Subnetwork: "a", - SubnetworkProject: "a", - NetworkIP: "a", - } - 
- cases := map[string]struct { - ExpectedForceNew bool - Before NetworkInterface - After NetworkInterface - }{ - "NetworkIP only change": { - ExpectedForceNew: true, - Before: NIBefore, - After: NetworkInterface{ - Network: "a", - Subnetwork: "a", - SubnetworkProject: "a", - NetworkIP: "b", - }, - }, - "NetworkIP and Network change": { - ExpectedForceNew: false, - Before: NIBefore, - After: NetworkInterface{ - Network: "b", - Subnetwork: "a", - SubnetworkProject: "a", - NetworkIP: "b", - }, - }, - "NetworkIP and Subnetwork change": { - ExpectedForceNew: false, - Before: NIBefore, - After: NetworkInterface{ - Network: "a", - Subnetwork: "b", - SubnetworkProject: "a", - NetworkIP: "b", - }, - }, - "NetworkIP and SubnetworkProject change": { - ExpectedForceNew: false, - Before: NIBefore, - After: NetworkInterface{ - Network: "a", - Subnetwork: "a", - SubnetworkProject: "b", - NetworkIP: "b", - }, - }, - "All change": { - ExpectedForceNew: false, - Before: NIBefore, - After: NetworkInterface{ - Network: "b", - Subnetwork: "b", - SubnetworkProject: "b", - NetworkIP: "b", - }, - }, - "No change": { - ExpectedForceNew: false, - Before: NIBefore, - After: NetworkInterface{ - Network: "a", - Subnetwork: "a", - SubnetworkProject: "a", - NetworkIP: "a", - }, - }, - } - - for tn, tc := range cases { - d := &tpgresource.ResourceDiffMock{ - Before: map[string]interface{}{ - "network_interface.#": 1, - "network_interface.0.network": tc.Before.Network, - "network_interface.0.subnetwork": tc.Before.Subnetwork, - "network_interface.0.subnetwork_project": tc.Before.SubnetworkProject, - "network_interface.0.network_ip": tc.Before.NetworkIP, - }, - After: map[string]interface{}{ - "network_interface.#": 1, - "network_interface.0.network": tc.After.Network, - "network_interface.0.subnetwork": tc.After.Subnetwork, - "network_interface.0.subnetwork_project": tc.After.SubnetworkProject, - "network_interface.0.network_ip": tc.After.NetworkIP, - }, - } - err := forceNewIfNetworkIPNotUpdatableFunc(d) - if err != nil { - t.Error(err) - } - if tc.ExpectedForceNew != d.IsForceNew { - t.Errorf("%v: expected d.IsForceNew to be %v, but was %v", tn, tc.ExpectedForceNew, d.IsForceNew) - } - } -} - func TestAccComputeInstance_metadataStartupScript_update(t *testing.T) { t.Parallel() diff --git a/mmv1/third_party/terraform/tests/resource_compute_project_metadata_item_test.go b/mmv1/third_party/terraform/tests/resource_compute_project_metadata_item_test.go index cbbb25f5c2d3..6969524a0b82 100644 --- a/mmv1/third_party/terraform/tests/resource_compute_project_metadata_item_test.go +++ b/mmv1/third_party/terraform/tests/resource_compute_project_metadata_item_test.go @@ -6,6 +6,7 @@ import ( "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" + tpgcompute "github.com/hashicorp/terraform-provider-google/google/services/compute" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" @@ -159,7 +160,7 @@ func testAccCheckProjectMetadataItemDestroyProducer(t *testing.T) func(s *terraf return err } - metadata := flattenMetadata(project.CommonInstanceMetadata) + metadata := tpgcompute.FlattenMetadata(project.CommonInstanceMetadata) for _, rs := range s.RootModule().Resources { if rs.Type != "google_compute_project_metadata_item" { diff --git a/mmv1/third_party/terraform/tests/resource_compute_region_instance_template_test.go.erb b/mmv1/third_party/terraform/tests/resource_compute_region_instance_template_test.go.erb index f86848cdd6df..98a6bf9cc736 
100644 --- a/mmv1/third_party/terraform/tests/resource_compute_region_instance_template_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_compute_region_instance_template_test.go.erb @@ -26,217 +26,6 @@ import ( <% end -%> ) -func TestComputeRegionInstanceTemplate_reorderDisks(t *testing.T) { - t.Parallel() - - cBoot := map[string]interface{}{ - "source": "boot-source", - } - cFallThrough := map[string]interface{}{ - "auto_delete": true, - } - cDeviceName := map[string]interface{}{ - "device_name": "disk-1", - } - cScratch := map[string]interface{}{ - "type": "SCRATCH", - } - cSource := map[string]interface{}{ - "source": "disk-source", - } - cScratchNvme := map[string]interface{}{ - "type": "SCRATCH", - "interface": "NVME", - } - - aBoot := map[string]interface{}{ - "source": "boot-source", - "boot": true, - } - aScratchNvme := map[string]interface{}{ - "device_name": "scratch-1", - "type": "SCRATCH", - "interface": "NVME", - } - aSource := map[string]interface{}{ - "device_name": "disk-2", - "source": "disk-source", - } - aScratchScsi := map[string]interface{}{ - "device_name": "scratch-2", - "type": "SCRATCH", - "interface": "SCSI", - } - aFallThrough := map[string]interface{}{ - "device_name": "disk-3", - "auto_delete": true, - "source": "fake-source", - } - aFallThrough2 := map[string]interface{}{ - "device_name": "disk-4", - "auto_delete": true, - "source": "fake-source", - } - aDeviceName := map[string]interface{}{ - "device_name": "disk-1", - "auto_delete": true, - "source": "fake-source-2", - } - aNoMatch := map[string]interface{}{ - "device_name": "disk-2", - "source": "disk-source-doesn't-match", - } - - cases := map[string]struct { - ConfigDisks []interface{} - ApiDisks []map[string]interface{} - ExpectedResult []map[string]interface{} - }{ - "all disks represented": { - ApiDisks: []map[string]interface{}{ - aBoot, aScratchNvme, aSource, aScratchScsi, aFallThrough, aDeviceName, - }, - ConfigDisks: []interface{}{ - cBoot, cFallThrough, cDeviceName, cScratch, cSource, cScratchNvme, - }, - ExpectedResult: []map[string]interface{}{ - aBoot, aFallThrough, aDeviceName, aScratchScsi, aSource, aScratchNvme, - }, - }, - "one non-match": { - ApiDisks: []map[string]interface{}{ - aBoot, aNoMatch, aScratchNvme, aScratchScsi, aFallThrough, aDeviceName, - }, - ConfigDisks: []interface{}{ - cBoot, cFallThrough, cDeviceName, cScratch, cSource, cScratchNvme, - }, - ExpectedResult: []map[string]interface{}{ - aBoot, aFallThrough, aDeviceName, aScratchScsi, aScratchNvme, aNoMatch, - }, - }, - "two fallthroughs": { - ApiDisks: []map[string]interface{}{ - aBoot, aScratchNvme, aFallThrough, aSource, aScratchScsi, aFallThrough2, aDeviceName, - }, - ConfigDisks: []interface{}{ - cBoot, cFallThrough, cDeviceName, cScratch, cFallThrough, cSource, cScratchNvme, - }, - ExpectedResult: []map[string]interface{}{ - aBoot, aFallThrough, aDeviceName, aScratchScsi, aFallThrough2, aSource, aScratchNvme, - }, - }, - } - - for tn, tc := range cases { - t.Run(tn, func(t *testing.T) { - // Disks read using d.Get will always have values for all keys, so set those values - for _, disk := range tc.ConfigDisks { - d := disk.(map[string]interface{}) - for _, k := range []string{"auto_delete", "boot"} { - if _, ok := d[k]; !ok { - d[k] = false - } - } - for _, k := range []string{"device_name", "disk_name", "interface", "mode", "source", "type"} { - if _, ok := d[k]; !ok { - d[k] = "" - } - } - } - - // flattened disks always set auto_delete, boot, device_name, interface, mode, source, and type - for _, d := 
range tc.ApiDisks { - for _, k := range []string{"auto_delete", "boot"} { - if _, ok := d[k]; !ok { - d[k] = false - } - } - - for _, k := range []string{"device_name", "interface", "mode", "source"} { - if _, ok := d[k]; !ok { - d[k] = "" - } - } - if _, ok := d["type"]; !ok { - d["type"] = "PERSISTENT" - } - } - - result := reorderDisks(tc.ConfigDisks, tc.ApiDisks) - if !reflect.DeepEqual(tc.ExpectedResult, result) { - t.Errorf("reordering did not match\nExpected: %+v\nActual: %+v", tc.ExpectedResult, result) - } - }) - } -} - -func TestComputeRegionInstanceTemplate_scratchDiskSizeCustomizeDiff(t *testing.T) { - t.Parallel() - - cases := map[string]struct { - Typee string // misspelled on purpose, type is a special symbol - DiskType string - DiskSize int - Interfacee string - ExpectError bool - }{ - "scratch disk correct size 1": { - Typee: "SCRATCH", - DiskType: "local-ssd", - DiskSize: 375, - Interfacee: "NVME", - ExpectError: false, - }, - "scratch disk correct size 2": { - Typee: "SCRATCH", - DiskType: "local-ssd", - DiskSize: 3000, - Interfacee: "NVME", - ExpectError: false, - }, - "scratch disk incorrect size": { - Typee: "SCRATCH", - DiskType: "local-ssd", - DiskSize: 300, - Interfacee: "NVME", - ExpectError: true, - }, - "scratch disk incorrect interface": { - Typee: "SCRATCH", - DiskType: "local-ssd", - DiskSize: 3000, - Interfacee: "SCSI", - ExpectError: true, - }, - "non-scratch disk": { - Typee: "PERSISTENT", - DiskType: "", - DiskSize: 300, - Interfacee: "NVME", - ExpectError: false, - }, - } - - for tn, tc := range cases { - d := &tpgresource.ResourceDiffMock{ - After: map[string]interface{}{ - "disk.#": 1, - "disk.0.type": tc.Typee, - "disk.0.disk_type": tc.DiskType, - "disk.0.disk_size_gb": tc.DiskSize, - "disk.0.interface": tc.Interfacee, - }, - } - err := resourceComputeInstanceTemplateScratchDiskCustomizeDiffFunc(d) - if tc.ExpectError && err == nil { - t.Errorf("%s failed, expected error but was none", tn) - } - if !tc.ExpectError && err != nil { - t.Errorf("%s failed, found unexpected error: %s", tn, err) - } - } -} - func TestAccComputeRegionInstanceTemplate_basic(t *testing.T) { t.Parallel() diff --git a/mmv1/third_party/terraform/tests/resource_compute_subnetwork_test.go.erb b/mmv1/third_party/terraform/tests/resource_compute_subnetwork_test.go.erb index 5ba58e365ccb..ebcec9ce8e69 100644 --- a/mmv1/third_party/terraform/tests/resource_compute_subnetwork_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_compute_subnetwork_test.go.erb @@ -9,6 +9,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" "github.com/hashicorp/terraform-provider-google/google/acctest" + tpgcompute "github.com/hashicorp/terraform-provider-google/google/services/compute" <% if version == "ga" -%> "google.golang.org/api/compute/v1" @@ -47,7 +48,7 @@ func TestIsShrinkageIpCidr(t *testing.T) { } for tn, tc := range cases { - if IsShrinkageIpCidr(context.Background(), tc.Old, tc.New, nil) != tc.Shrinkage { + if tpgcompute.IsShrinkageIpCidr(context.Background(), tc.Old, tc.New, nil) != tc.Shrinkage { t.Errorf("%s failed: Shrinkage should be %t", tn, tc.Shrinkage) } } diff --git a/mmv1/third_party/terraform/tests/resource_data_loss_prevention_deidentify_template_test.go b/mmv1/third_party/terraform/tests/resource_data_loss_prevention_deidentify_template_test.go index 50d3f29ec974..5f1bd1509965 100644 --- a/mmv1/third_party/terraform/tests/resource_data_loss_prevention_deidentify_template_test.go +++ 
b/mmv1/third_party/terraform/tests/resource_data_loss_prevention_deidentify_template_test.go @@ -1015,13 +1015,13 @@ func TestAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_imageTran t.Parallel() context := map[string]interface{}{ - "organization": GetTestOrgFromEnv(t), + "organization": acctest.GetTestOrgFromEnv(t), "random_suffix": RandString(t, 10), "kms_key_name": BootstrapKMSKey(t).CryptoKey.Name, // global KMS key } VcrTest(t, resource.TestCase{ - PreCheck: func() { AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), CheckDestroy: testAccCheckDataLossPreventionDeidentifyTemplateDestroyProducer(t), Steps: []resource.TestStep{ @@ -1109,3 +1109,3563 @@ resource "google_data_loss_prevention_deidentify_template" "basic" { } `, context) } + +func TestAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "organization": acctest.GetTestOrgFromEnv(t), + "random_suffix": RandString(t, 10), + "kms_key_name": BootstrapKMSKey(t).CryptoKey.Name, // global KMS key + } + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataLossPreventionDeidentifyTemplateDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformationsStart(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.config", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformationsUpdate(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.config", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformationsStart(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "config" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + info_type_transformations { + transformations { + info_types { + name = "PHONE_NUMBER" + } + info_types { + name = "DATE_SHIFT_EXAMPLE" + } + + primitive_transformation { + date_shift_config { + upper_bound_days = 30 + lower_bound_days = -30 + context { + name = "DATE_SHIFT_EXAMPLE" + } + crypto_key { + transient { + name = "beep" + } + } + } + } + } + + transformations { + info_types { + name = "EMAIL_ADDRESS" + } + info_types { + name = "FIXED_BUCKETING_EXAMPLE" + } + + primitive_transformation { + fixed_size_bucketing_config { + lower_bound { + integer_value = 0 + } + upper_bound { + integer_value = 100 + } + bucket_size = 10 + } + } + } + + transformations { + info_types { + name = "BUCKETING_EXAMPLE" + } + + primitive_transformation { + bucketing_config { + buckets { + min { + string_value = "00:00:00" + } + max { + string_value = "11:59:59" + } + replacement_value { + string_value = "AM" + } + } + buckets { + min { + string_value = "12:00:00" + } + max { + string_value = "23:59:59" + } + replacement_value { + string_value = "PM" + } + } + } + } + } + + transformations { + info_types { + name = "TIME_PART_EXAMPLE" + } 
+ + primitive_transformation { + time_part_config { + part_to_extract = "YEAR" + } + } + } + + transformations { + info_types { + name = "REDACT_EXAMPLE" + } + + primitive_transformation { + redact_config {} + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformationsUpdate(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "config" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + info_type_transformations { + transformations { + info_types { + name = "DATE_SHIFT_EXAMPLE" + } + + primitive_transformation { + date_shift_config { + # update values + upper_bound_days = 60 + lower_bound_days = -60 + context { + name = "DATE_SHIFT_EXAMPLE" + } + crypto_key { + transient { + # update value + name = "beepy-beep-updated" + } + } + } + } + } + + transformations { + info_types { + name = "EMAIL_ADDRESS" + } + info_types { + name = "FIXED_BUCKETING_EXAMPLE" + } + + primitive_transformation { + # update values + fixed_size_bucketing_config { + lower_bound { + integer_value = 0 + } + upper_bound { + integer_value = 200 + } + bucket_size = 20 + } + } + } + + transformations { + info_types { + name = "BUCKETING_EXAMPLE" + } + + primitive_transformation { + bucketing_config { + buckets { + min { + string_value = "00:00:00" + } + max { + string_value = "11:59:59" + } + replacement_value { + string_value = "AM" + } + } + # Add new bucket + buckets { + min { + string_value = "12:00:00" + } + max { + string_value = "13:59:59" + } + replacement_value { + string_value = "Lunchtime" + } + } + buckets { + min { + string_value = "14:00:00" + } + max { + string_value = "23:59:59" + } + replacement_value { + string_value = "PM" + } + } + } + } + } + + transformations { + info_types { + name = "TIME_PART_EXAMPLE" + } + + primitive_transformation { + time_part_config { + part_to_extract = "MONTH" + } + } + } + + # update to remove transformations block using redact_config + } + } +} +`, context) +} + +func TestAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_bucketingConfig(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "organization": acctest.GetTestOrgFromEnv(t), + "random_suffix": RandString(t, 10), + "kms_key_name": BootstrapKMSKey(t).CryptoKey.Name, // global KMS key + } + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataLossPreventionDeidentifyTemplateDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_bucketingConfig_integerValue(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.config", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_bucketingConfig_floatValue(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.config", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_bucketingConfig_timestampValue(context), 
+ }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.config", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_bucketingConfig_timeValue(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.config", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_bucketingConfig_dateValue(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.config", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_bucketingConfig_dayOfWeekValue(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.config", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_bucketingConfig_integerValue(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "config" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + info_type_transformations { + transformations { + info_types { + name = "BUCKETING_EXAMPLE" + } + + primitive_transformation { + bucketing_config { + buckets { + min { + integer_value = 921 + } + max { + integer_value = 3010 + } + replacement_value { + integer_value = 1212 + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_bucketingConfig_floatValue(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "config" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + info_type_transformations { + transformations { + info_types { + name = "BUCKETING_EXAMPLE" + } + + primitive_transformation { + bucketing_config { + buckets { + min { + float_value = 10.50 + } + max { + float_value = 310.75 + } + replacement_value { + float_value = 5.37 + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_bucketingConfig_timestampValue(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "config" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + info_type_transformations { + transformations { + info_types { + name = "BUCKETING_EXAMPLE" + } + + primitive_transformation { + bucketing_config { + buckets { + min { + timestamp_value = "2014-10-02T15:01:23Z" + } + max { + timestamp_value = "2015-06-29T18:46:39Z" + } + replacement_value { + timestamp_value = "2014-12-24T09:19:50Z" + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_bucketingConfig_timeValue(context map[string]interface{}) string { + return Nprintf(` +resource 
"google_data_loss_prevention_deidentify_template" "config" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + info_type_transformations { + transformations { + info_types { + name = "BUCKETING_EXAMPLE" + } + + primitive_transformation { + bucketing_config { + buckets { + min { + time_value { + hours = 09 + minutes = 30 + seconds = 45 + nanos = 123412 + } + } + max { + time_value { + hours = 15 + minutes = 45 + seconds = 00 + nanos = 523278 + } + } + replacement_value { + time_value { + hours = 23 + minutes = 59 + seconds = 59 + nanos = 999999 + } + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_bucketingConfig_dateValue(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "config" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + info_type_transformations { + transformations { + info_types { + name = "BUCKETING_EXAMPLE" + } + + primitive_transformation { + bucketing_config { + buckets { + min { + date_value { + year = 1969 + month = 11 + day = 23 + } + } + max { + date_value { + year = 2010 + month = 12 + day = 31 + } + } + replacement_value { + date_value { + year = 2011 + month = 05 + day = 19 + } + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_bucketingConfig_dayOfWeekValue(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "config" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + info_type_transformations { + transformations { + info_types { + name = "BUCKETING_EXAMPLE" + } + + primitive_transformation { + bucketing_config { + buckets { + min { + day_of_week_value = "FRIDAY" + } + max { + day_of_week_value = "SUNDAY" + } + replacement_value { + day_of_week_value = "MONDAY" + } + } + } + } + } + } + } +} +`, context) +} + +func TestAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_fixedSizeBucketingConfig(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "organization": acctest.GetTestOrgFromEnv(t), + "random_suffix": RandString(t, 10), + "kms_key_name": BootstrapKMSKey(t).CryptoKey.Name, // global KMS key + } + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataLossPreventionDeidentifyTemplateDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_fixedSizeBucketingConfig_integerValue(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.config", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_fixedSizeBucketingConfig_floatValue(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.config", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + 
+func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_fixedSizeBucketingConfig_integerValue(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "config" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + info_type_transformations { + transformations { + info_types { + name = "FIXED_BUCKETING_EXAMPLE" + } + + primitive_transformation { + fixed_size_bucketing_config { + lower_bound { + integer_value = 0 + } + upper_bound { + integer_value = 200 + } + bucket_size = 20 + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_fixedSizeBucketingConfig_floatValue(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "config" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + info_type_transformations { + transformations { + info_types { + name = "FIXED_BUCKETING_EXAMPLE" + } + + primitive_transformation { + fixed_size_bucketing_config { + lower_bound { + float_value = 0.5 + } + upper_bound { + float_value = 20.5 + } + bucket_size = 20 + } + } + } + } + } +} +`, context) +} + +func TestAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_dateShiftConfig(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "organization": acctest.GetTestOrgFromEnv(t), + "random_suffix": RandString(t, 10), + "kms_key_name": BootstrapKMSKey(t).CryptoKey.Name, // global KMS key + } + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataLossPreventionDeidentifyTemplateDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_dateShiftConfig_transient(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.config", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_dateShiftConfig_unwrapped(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.config", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_dateShiftConfig_kmsWrapped(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.config", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_dateShiftConfig_transient(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "config" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + info_type_transformations { + transformations { + info_types { + name = "DATE_SHIFT_EXAMPLE" + } + + primitive_transformation { + date_shift_config { + 
upper_bound_days = 30 + lower_bound_days = -30 + context { + name = "some-context-field" + } + crypto_key { + transient { + name = "someRandomTerraformKey" + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_dateShiftConfig_unwrapped(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "config" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + info_type_transformations { + transformations { + info_types { + name = "DATE_SHIFT_EXAMPLE" + } + + primitive_transformation { + date_shift_config { + upper_bound_days = 30 + lower_bound_days = -30 + context { + name = "some-context-field" + } + crypto_key { + unwrapped { + key = "0836c61118ac590243bdadb25f0bb08e" + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_infoTypeTransformations_primitiveTransformations_dateShiftConfig_kmsWrapped(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "config" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + info_type_transformations { + transformations { + info_types { + name = "DATE_SHIFT_EXAMPLE" + } + + primitive_transformation { + date_shift_config { + upper_bound_days = 30 + lower_bound_days = -30 + context { + name = "some-context-field" + } + crypto_key { + kms_wrapped { + wrapped_key = "B64/WRAPPED/TOKENIZATION/KEY" + crypto_key_name = "%{kms_key_name}" + } + } + } + } + } + } + } +} +`, context) +} + +func TestAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "organization": acctest.GetTestOrgFromEnv(t), + "kms_key_name": BootstrapKMSKey(t).CryptoKey.Name, // global KMS key + } + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataLossPreventionDeidentifyTemplateDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_start(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_update(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_start(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field3" + } + operator = "EQUAL_TO" + value { + 
string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "fizzbuzz" + } + } + } + } + } + } + field_transformations { + fields { + name = "details.pii.email" + } + condition { + expressions { + conditions { + conditions { + field { + name = "details.pii.country_code" + } + operator = "EQUAL_TO" + value { + string_value = "US" + } + } + conditions { + field { + name = "details.pii.date_of_birth" + } + operator = "GREATER_THAN_OR_EQUALS" + value { + date_value { + year = 2001 + month = 6 + day = 29 + } + } + } + } + } + } + info_type_transformations { + transformations { + info_types { + name = "PHONE_NUMBER" + version = "0.1" + } + info_types { + name = "CREDIT_CARD_NUMBER" + version = "1.2" + } + primitive_transformation { + replace_config { + new_value { + integer_value = 9 + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-redacted-field" + } + info_type_transformations { + transformations { + primitive_transformation { + redact_config {} + } + } + } + } + field_transformations { + fields { + name = "unconditionally-char-masked-field" + } + info_type_transformations { + transformations { + info_types { + name = "EMAIL_ADDRESS" + version = "latest" + } + info_types { + name = "LAST_NAME" + } + primitive_transformation { + character_mask_config { + masking_character = "x" + number_to_mask = 8 + characters_to_ignore { + characters_to_skip = "-" + } + reverse_order = true + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-crypto-replace-ffx-fpe-field" + } + info_type_transformations { + transformations { + info_types { + name = "SSN" + } + primitive_transformation { + crypto_replace_ffx_fpe_config { + context { + name = "someTweak" + } + crypto_key { + kms_wrapped { + wrapped_key = "B64/WRAPPED/TOKENIZATION/KEY" + crypto_key_name = "%{kms_key_name}" + } + } + radix = 10 + surrogate_info_type { + name = "CUSTOM_INFO_TYPE" + version = "version-1" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-fixed-size-bucketing-field" + } + info_type_transformations { + transformations { + info_types { + name = "AGE" + } + primitive_transformation { + fixed_size_bucketing_config { + lower_bound { + integer_value = 0 + } + upper_bound { + integer_value = 100 + } + bucket_size = 10 + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-bucketing-field" + } + info_type_transformations { + transformations { + info_types { + name = "CREATED_TIME" + } + primitive_transformation { + bucketing_config { + buckets { + min { + string_value = "00:00:00" + } + max { + string_value = "11:59:59" + } + replacement_value { + string_value = "AM" + } + } + buckets { + min { + string_value = "12:00:00" + } + max { + string_value = "23:59:59" + } + replacement_value { + string_value = "PM" + } + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-time-part-field" + } + info_type_transformations { + transformations { + info_types { + name = "DATE_OF_BIRTH" + } + primitive_transformation { + time_part_config { + part_to_extract = "YEAR" + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-crypto-hash-field" + } + info_type_transformations { + transformations { + info_types { + name = "CREDIT_CARD_SECRET" + } + primitive_transformation { + 
crypto_hash_config { + crypto_key { + transient { + name = "beep" # Copy-pasting from existing test that uses this field + } + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-date-shift-field" + } + info_type_transformations { + transformations { + info_types { + name = "EXTRACT_DATE" + } + primitive_transformation { + date_shift_config { + upper_bound_days = 30 + lower_bound_days = -30 + context { + name = "unconditionally-date-shift-field" + } + crypto_key { + transient { + name = "beep" + } + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-crypto-deterministic-field" + } + info_type_transformations { + transformations { + info_types { + name = "CREDIT_CARD_SECRET1234" + } + primitive_transformation { + crypto_deterministic_config { + crypto_key { + transient { + name = "beep" + } + } + surrogate_info_type { + name = "CREDIT_CARD_NUMBER" + version = "version-1" + } + context { + name = "unconditionally-crypto-deterministic-field" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-replace-dictionary-field" + } + info_type_transformations { + transformations { + info_types { + name = "RANDOM_FIELD" + } + primitive_transformation { + replace_dictionary_config { + word_list { + words = [ + "foo", + "bar", + "baz", + ] + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-replace-with-info-type-config" + } + info_type_transformations { + transformations { + info_types { + name = "FIRST_NAME" + } + primitive_transformation { + replace_with_info_type_config {} + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_update(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field3" + } + operator = "EQUAL_TO" + value { + + # update the condition for field3 + + string_value = "FOO-BAR-updated" + } + } + + # update includes deleting condition affecting field2 + + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + + # update the condition for field1 + + string_value = "fizzbuzz-updated" + } + } + } + } + } + } + field_transformations { + fields { + name = "details.pii.email" + } + condition { + expressions { + conditions { + conditions { + + # update to remove condition checking the details.pii.country_code field + # update to add a new condition + + field { + name = "details.pii.gender" + } + operator = "EQUAL_TO" + value { + string_value = "M" + } + } + conditions { + field { + name = "details.pii.date_of_birth" + } + operator = "GREATER_THAN_OR_EQUALS" + value { + + # update date values + + date_value { + year = 2007 + month = 11 + day = 9 + } + } + } + } + } + } + info_type_transformations { + transformations { + + # removing the info_types + + info_types { + name = "CREDIT_CARD_NUMBER" + version = "1.5" + } + primitive_transformation { + + # update values inside replace_config + + replace_config { + new_value { + float_value = 652.23 + } + } + } + } + } + } + + # update to remove field_transformations block using redact_config + + field_transformations { + 
fields { + name = "unconditionally-char-masked-field" + } + info_type_transformations { + transformations { + info_types { + name = "EMAIL_ADDRESS" + version = "latest" + } + + # adding the info_types + + info_types { + name = "FIRST_NAME" + } + info_types { + name = "LAST_NAME" + version = "0.5" + } + primitive_transformation { + character_mask_config { + masking_character = "x" + number_to_mask = 8 + + # update to delete old characters_to_ignore block and add new ones + + characters_to_ignore { + common_characters_to_ignore = "PUNCTUATION" + } + characters_to_ignore { + common_characters_to_ignore = "ALPHA_UPPER_CASE" + } + characters_to_ignore { + common_characters_to_ignore = "ALPHA_LOWER_CASE" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-crypto-replace-ffx-fpe-field" + } + info_type_transformations { + transformations { + + # updated the info_types + + info_types { + name = "SSN33" + } + primitive_transformation { + crypto_replace_ffx_fpe_config { + common_alphabet = "UPPER_CASE_ALPHA_NUMERIC" + context { + name = "someTweak2" + } + crypto_key { + transient { + name = "beep" + } + } + surrogate_info_type { + name = "CUSTOM_INFO_TYPE" + version = "version-2" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-fixed-size-bucketing-field" + } + info_type_transformations { + transformations { + info_types { + name = "AGE" + } + primitive_transformation { + + # update values inside fixed_size_bucketing_config + + fixed_size_bucketing_config { + lower_bound { + float_value = 23.5 + } + upper_bound { + float_value = 71.75 + } + bucket_size = 20 + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-bucketing-field" + } + info_type_transformations { + transformations { + info_types { + name = "CREATED_TIME" + } + primitive_transformation { + bucketing_config { + buckets { + min { + string_value = "00:00:00" + } + max { + string_value = "11:59:59" + } + replacement_value { + string_value = "AM" + } + } + + # Add new bucket + + buckets { + min { + string_value = "12:00:00" + } + max { + string_value = "13:59:59" + } + replacement_value { + string_value = "Lunchtime" + } + } + buckets { + min { + string_value = "14:00:00" + } + max { + string_value = "23:59:59" + } + replacement_value { + string_value = "PM" + } + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-time-part-field" + } + info_type_transformations { + transformations { + info_types { + name = "DATE_OF_BIRTH" + } + primitive_transformation { + time_part_config { + part_to_extract = "MONTH" + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-crypto-hash-field" + } + info_type_transformations { + transformations { + info_types { + name = "CREDIT_CARD_SECRET" + } + primitive_transformation { + crypto_hash_config { + crypto_key { + transient { + + # update the value + + name = "beepy-beep-updated" + } + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-date-shift-field" + } + info_type_transformations { + transformations { + info_types { + name = "EXTRACT_DATE" + } + primitive_transformation { + + # update the value + + date_shift_config { + upper_bound_days = 60 + lower_bound_days = -60 + context { + name = "unconditionally-date-shift-field" + } + crypto_key { + transient { + name = "beepy-beep-updated" + } + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-crypto-deterministic-field" + } + 
info_type_transformations { + transformations { + info_types { + name = "CREDIT_CARD_SECRET1234" + } + primitive_transformation { + crypto_deterministic_config { + crypto_key { + transient { + + # update the value + + name = "beepy-beep-updated" + } + } + surrogate_info_type { + + # update info type + + name = "CREDIT_CARD_TRACK_NUMBER" + version = "version-2" + } + context { + name = "unconditionally-crypto-deterministic-field" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-replace-dictionary-field" + } + info_type_transformations { + transformations { + info_types { + name = "RANDOM_FIELD" + } + primitive_transformation { + replace_dictionary_config { + word_list { + words = [ + + # update the list + + "foo", + "fizz", + "some", + "bar", + ] + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-replace-with-info-type-config" + } + info_type_transformations { + transformations { + info_types { + + # updated the value + + name = "LAST_NAME" + } + primitive_transformation { + replace_with_info_type_config {} + } + } + } + } + } + } +} +`, context) +} + +func TestAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_replaceConfig(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": acctest.GetTestProjectFromEnv(), + } + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataLossPreventionDeidentifyTemplateDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_replaceConfigString(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_replaceConfigBoolean(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_replaceConfigTimestamp(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_replaceConfigTimevalue(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_replaceConfigDatevalue(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_replaceConfigDayOfWeek(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func 
testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_replaceConfigString(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "projects/%{project}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-replace-config" + } + + info_type_transformations { + transformations { + info_types { + name = "PHONE_NUMBER" + } + info_types { + name = "CREDIT_CARD_NUMBER" + } + primitive_transformation { + replace_config { + new_value { + string_value = "someVal" + } + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_replaceConfigBoolean(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "projects/%{project}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-replace-config" + } + + info_type_transformations { + transformations { + info_types { + name = "PHONE_NUMBER" + } + info_types { + name = "CREDIT_CARD_NUMBER" + } + primitive_transformation { + replace_config { + new_value { + boolean_value = true + } + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_replaceConfigTimestamp(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "projects/%{project}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-replace-config" + } + + info_type_transformations { + transformations { + info_types { + name = "PHONE_NUMBER" + } + info_types { + name = "CREDIT_CARD_NUMBER" + } + primitive_transformation { + replace_config { + new_value { + timestamp_value = "2021-11-16T17:28:52Z" + } + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_replaceConfigTimevalue(context 
map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "projects/%{project}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-replace-config" + } + + info_type_transformations { + transformations { + info_types { + name = "PHONE_NUMBER" + } + info_types { + name = "CREDIT_CARD_NUMBER" + } + primitive_transformation { + replace_config { + new_value { + time_value { + hours = 22 + minutes = 43 + seconds = 54 + nanos = 428947264 + } + } + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_replaceConfigDatevalue(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "projects/%{project}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-replace-config" + } + + info_type_transformations { + transformations { + info_types { + name = "PHONE_NUMBER" + } + info_types { + name = "CREDIT_CARD_NUMBER" + } + primitive_transformation { + replace_config { + new_value { + date_value { + day = 24 + month = 8 + year = 2020 + } + } + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_replaceConfigDayOfWeek(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "projects/%{project}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-replace-config" + } + + info_type_transformations { + transformations { + info_types { + name = "PHONE_NUMBER" + } + info_types { + name = "CREDIT_CARD_NUMBER" + } + primitive_transformation { + replace_config { + new_value { + day_of_week_value = "WEDNESDAY" + } + } + } + } + } + } + } + } +} +`, context) +} + +func TestAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_cryptoReplaceFfxFpeConfig(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ 
+ "organization": acctest.GetTestOrgFromEnv(t), + "kms_key_name": BootstrapKMSKey(t).CryptoKey.Name, // global KMS key + } + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataLossPreventionDeidentifyTemplateDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_cryptoReplaceFfxFpeConfigTransient(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_cryptoReplaceFfxFpeConfigUnwrapped(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_cryptoReplaceFfxFpeConfigKmswrapped(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_cryptoReplaceFfxFpeConfigTransient(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-crypto-replace-ffx-fpe-config" + } + + info_type_transformations { + transformations { + primitive_transformation { + crypto_replace_ffx_fpe_config { + context { + name = "someTweak" + } + crypto_key { + transient { + name = "someRandomTerraformKey" + } + } + custom_alphabet = "ASE13RT76" + surrogate_info_type { + name = "CUSTOM_INFO_TYPE" + } + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_cryptoReplaceFfxFpeConfigUnwrapped(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-crypto-replace-ffx-fpe-config" + } + + info_type_transformations { + transformations { + primitive_transformation { + 
crypto_replace_ffx_fpe_config { + context { + name = "someTweak2" + } + crypto_key { + unwrapped { + key = "0836c61118ac590243bdadb25f0bb08e" + } + } + common_alphabet = "HEXADECIMAL" + surrogate_info_type { + name = "CUSTOM_INFO_TYPE" + version = "version-1" + } + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_cryptoReplaceFfxFpeConfigKmswrapped(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-crypto-replace-ffx-fpe-config" + } + + info_type_transformations { + transformations { + primitive_transformation { + crypto_replace_ffx_fpe_config { + context { + name = "someTweak3" + } + crypto_key { + kms_wrapped { + wrapped_key = "B64/WRAPPED/TOKENIZATION/KEY" + crypto_key_name = "%{kms_key_name}" + } + } + radix = 57 + surrogate_info_type { + name = "CUSTOM_INFO_TYPE" + version = "version-2" + } + } + } + } + } + } + } + } +} +`, context) +} + +func TestAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_bucketingConfig(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": acctest.GetTestProjectFromEnv(), + } + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataLossPreventionDeidentifyTemplateDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_bucketingConfigInteger(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_bucketingConfigFloat(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_bucketingConfigTimestamp(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_bucketingConfigTimeValue(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_bucketingConfigDateValue(context), + }, + { + ResourceName: 
"google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_bucketingConfigDayOfTheWeek(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_bucketingConfigInteger(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "projects/%{project}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-bucketing-config" + } + + info_type_transformations { + transformations { + info_types { + name = "CREATED_TIME" + } + primitive_transformation { + bucketing_config { + buckets { + min { + integer_value = 921 + } + max { + integer_value = 3010 + } + replacement_value { + integer_value = 1212 + } + } + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_bucketingConfigFloat(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "projects/%{project}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-bucketing-config" + } + + info_type_transformations { + transformations { + info_types { + name = "CREATED_TIME" + } + primitive_transformation { + bucketing_config { + buckets { + min { + float_value = 10.50 + } + max { + float_value = 310.75 + } + replacement_value { + float_value = 5.37 + } + } + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_bucketingConfigTimestamp(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "projects/%{project}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + } + } + } + } + 
field_transformations { + fields { + name = "unconditionally-bucketing-config" + } + + info_type_transformations { + transformations { + info_types { + name = "CREATED_TIME" + } + primitive_transformation { + bucketing_config { + buckets { + min { + timestamp_value = "2014-10-02T15:01:23Z" + } + max { + timestamp_value = "2015-06-29T18:46:39Z" + } + replacement_value { + timestamp_value = "2014-12-24T09:19:50Z" + } + } + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_bucketingConfigTimeValue(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "projects/%{project}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-bucketing-config" + } + + info_type_transformations { + transformations { + info_types { + name = "CREATED_TIME" + } + primitive_transformation { + bucketing_config { + buckets { + min { + time_value { + hours = 09 + minutes = 30 + seconds = 45 + nanos = 123412 + } + } + max { + time_value { + hours = 15 + minutes = 45 + seconds = 00 + nanos = 523278 + } + } + replacement_value { + time_value { + hours = 23 + minutes = 59 + seconds = 59 + nanos = 999999 + } + } + } + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_bucketingConfigDateValue(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "projects/%{project}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-bucketing-config" + } + + info_type_transformations { + transformations { + info_types { + name = "CREATED_TIME" + } + primitive_transformation { + bucketing_config{ + buckets { + min { + date_value { + year = 1969 + month = 11 + day = 23 + } + } + max { + date_value { + year = 2010 + month = 12 + day = 31 + } + } + replacement_value { + date_value { + year = 2011 + month = 05 + day = 19 + } + } + } + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_bucketingConfigDayOfTheWeek(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "projects/%{project}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + 
condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-bucketing-config" + } + + info_type_transformations { + transformations { + info_types { + name = "CREATED_TIME" + } + primitive_transformation { + bucketing_config { + buckets { + min { + day_of_week_value = "MONDAY" + } + max { + day_of_week_value = "THURSDAY" + } + replacement_value { + day_of_week_value = "FRIDAY" + } + } + } + } + } + } + } + } + } +} +`, context) +} + +func TestAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_cryptoHashConfig(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "organization": acctest.GetTestOrgFromEnv(t), + "kms_key_name": BootstrapKMSKey(t).CryptoKey.Name, // global KMS key + } + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataLossPreventionDeidentifyTemplateDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_cryptoHashConfigTransient(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_cryptoHashConfigUnwrapped(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_cryptoHashConfigKmswrapped(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_cryptoHashConfigTransient(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-crypto-hash-config" + } + + info_type_transformations { + transformations { + primitive_transformation { + crypto_hash_config { + crypto_key { + transient { + name = "someRandomTerraformKey" + } + } + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_cryptoHashConfigUnwrapped(context map[string]interface{}) string { + return 
Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-crypto-hash-config" + } + + info_type_transformations { + transformations { + primitive_transformation { + crypto_hash_config { + crypto_key { + unwrapped { + key = "0836c61118ac590243bdadb25f0bb08e" + } + } + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_cryptoHashConfigKmswrapped(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-crypto-hash-config" + } + + info_type_transformations { + transformations { + primitive_transformation { + crypto_hash_config { + crypto_key { + kms_wrapped { + wrapped_key = "B64/WRAPPED/TOKENIZATION/KEY" + crypto_key_name = "%{kms_key_name}" + } + } + } + } + } + } + } + } + } +} +`, context) +} + +func TestAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_dateShiftConfig(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "organization": acctest.GetTestOrgFromEnv(t), + "kms_key_name": BootstrapKMSKey(t).CryptoKey.Name, // global KMS key + } + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataLossPreventionDeidentifyTemplateDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_dateShiftConfigTransient(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_dateShiftConfigUnwrapped(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_dateShiftConfigKmswrapped(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func 
testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_dateShiftConfigTransient(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-date-shift-config" + } + + info_type_transformations { + transformations { + primitive_transformation { + date_shift_config { + upper_bound_days = 30 + lower_bound_days = -30 + context { + name = "some-context-field" + } + crypto_key { + transient { + name = "someRandomTerraformKey" + } + } + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_dateShiftConfigUnwrapped(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-date-shift-config" + } + + info_type_transformations { + transformations { + primitive_transformation { + date_shift_config { + upper_bound_days = 30 + lower_bound_days = -30 + context { + name = "some-context-field" + } + crypto_key { + unwrapped { + key = "0836c61118ac590243bdadb25f0bb08e" + } + } + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_dateShiftConfigKmswrapped(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-date-shift-config" + } + + info_type_transformations { + transformations { + primitive_transformation { + date_shift_config { + upper_bound_days = 30 + lower_bound_days = -30 + context { + name = "some-context-field" + } + crypto_key { + kms_wrapped { + wrapped_key = "B64/WRAPPED/TOKENIZATION/KEY" + crypto_key_name = "%{kms_key_name}" + } + } + } + 
} + } + } + } + } + } +} +`, context) +} + +func TestAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_cryptoDeterministicConfig(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "organization": acctest.GetTestOrgFromEnv(t), + "kms_key_name": BootstrapKMSKey(t).CryptoKey.Name, // global KMS key + } + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataLossPreventionDeidentifyTemplateDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_cryptoDeterministicConfigTransient(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_cryptoDeterministicConfigUnwrapped(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_cryptoDeterministicConfigKmswrapped(context), + }, + { + ResourceName: "google_data_loss_prevention_deidentify_template.basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_cryptoDeterministicConfigTransient(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-crypto-deterministic-config" + } + + info_type_transformations { + transformations { + primitive_transformation { + crypto_deterministic_config { + surrogate_info_type { + name = "SECRET_NUMBER" + } + crypto_key { + transient { + name = "someRandomTerraformKey" + } + } + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_cryptoDeterministicConfigUnwrapped(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + } + } + } + } + 
field_transformations { + fields { + name = "unconditionally-crypto-deterministic-config" + } + + info_type_transformations { + transformations { + primitive_transformation { + crypto_deterministic_config { + surrogate_info_type { + name = "SECRET_NUMBER" + version = "1.0" + } + context { + name = "some-context-field" + } + crypto_key { + unwrapped { + key = "0836c61118ac590243bdadb25f0bb08e" + } + } + } + } + } + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionDeidentifyTemplate_dlpDeidentifyTemplate_recordTransformations_with_infoTypeTransformations_cryptoDeterministicConfigKmswrapped(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_deidentify_template" "basic" { + parent = "organizations/%{organization}" + description = "Description" + display_name = "Displayname" + + deidentify_config { + record_transformations { + record_suppressions { + condition { + expressions { + logical_operator = "AND" + conditions { + conditions { + field { + name = "field1" + } + operator = "EQUAL_TO" + value { + string_value = "FOO-BAR" + } + } + conditions { + field { + name = "field2" + } + operator = "EQUAL_TO" + value { + string_value = "foobar" + } + } + } + } + } + } + field_transformations { + fields { + name = "unconditionally-crypto-deterministic-config" + } + + info_type_transformations { + transformations { + primitive_transformation { + crypto_deterministic_config { + surrogate_info_type { + name = "SECRET_NUMBER" + version = "2.0" + } + context { + name = "updated-context-field" + } + crypto_key { + kms_wrapped { + wrapped_key = "B64/WRAPPED/TOKENIZATION/KEY" + crypto_key_name = "%{kms_key_name}" + } + } + } + } + } + } + } + } + } +} +`, context) +} diff --git a/mmv1/third_party/terraform/tests/resource_data_loss_prevention_job_trigger_test.go b/mmv1/third_party/terraform/tests/resource_data_loss_prevention_job_trigger_test.go index fc22651be1d9..4d0f553f3d97 100644 --- a/mmv1/third_party/terraform/tests/resource_data_loss_prevention_job_trigger_test.go +++ b/mmv1/third_party/terraform/tests/resource_data_loss_prevention_job_trigger_test.go @@ -319,6 +319,164 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerInspectCustomInfoTypes(t * }) } +func TestAccDataLossPreventionJobTrigger_dlpJobTriggerUpdateExample3(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": acctest.GetTestProjectFromEnv(), + "random_suffix": RandString(t, 10), + } + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataLossPreventionJobTriggerDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataLossPreventionJobTrigger_dlpJobTriggerIncludedFields(context), + }, + { + ResourceName: "google_data_loss_prevention_job_trigger.included_fields", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"parent"}, + }, + { + Config: testAccDataLossPreventionJobTrigger_dlpJobTriggerIncludedFieldsUpdate(context), + }, + { + ResourceName: "google_data_loss_prevention_job_trigger.included_fields_update", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"parent"}, + }, + }, + }) +} + +func TestAccDataLossPreventionJobTrigger_dlpJobTriggerUpdateExample4(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": acctest.GetTestProjectFromEnv(), + "random_suffix": RandString(t, 10), + } + + VcrTest(t, 
resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataLossPreventionJobTriggerDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataLossPreventionJobTrigger_dlpJobTriggerExcludedFields(context), + }, + { + ResourceName: "google_data_loss_prevention_job_trigger.excluded_fields", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"parent"}, + }, + { + Config: testAccDataLossPreventionJobTrigger_dlpJobTriggerExcludedFieldsUpdate(context), + }, + { + ResourceName: "google_data_loss_prevention_job_trigger.excluded_fields_update", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"parent"}, + }, + }, + }) +} + +func TestAccDataLossPreventionJobTrigger_dlpJobTriggerActionsOptionalExample(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": acctest.GetTestProjectFromEnv(), + "random_suffix": RandString(t, 10), + } + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataLossPreventionJobTriggerDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataLossPreventionJobTrigger_dlpJobTriggerActionsOptionalBasic(context), + }, + { + ResourceName: "google_data_loss_prevention_job_trigger.basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"parent"}, + }, + { + Config: testAccDataLossPreventionJobTrigger_dlpJobTriggerBasic(context), + }, + { + ResourceName: "google_data_loss_prevention_job_trigger.basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"parent"}, + }, + { + Config: testAccDataLossPreventionJobTrigger_dlpJobTriggerActionsOptionalBasic(context), + }, + { + ResourceName: "google_data_loss_prevention_job_trigger.basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"parent"}, + }, + }, + }) +} + +func TestAccDataLossPreventionJobTrigger_dlpJobTriggerInspectOptionalExample(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": acctest.GetTestProjectFromEnv(), + "random_suffix": RandString(t, 10), + } + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataLossPreventionJobTriggerDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataLossPreventionJobTrigger_dlpJobTriggerInspectOptionalBasic(context), + }, + { + ResourceName: "google_data_loss_prevention_job_trigger.basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"parent"}, + }, + { + Config: testAccDataLossPreventionJobTrigger_dlpJobTriggerBasic(context), + }, + { + ResourceName: "google_data_loss_prevention_job_trigger.basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"parent"}, + }, + { + Config: testAccDataLossPreventionJobTrigger_dlpJobTriggerInspectOptionalBasic(context), + }, + { + ResourceName: "google_data_loss_prevention_job_trigger.basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"parent"}, + }, + }, + }) +} + func testAccDataLossPreventionJobTrigger_dlpJobTriggerBasic(context map[string]interface{}) string { return Nprintf(` resource "google_data_loss_prevention_job_trigger" 
"basic" { @@ -400,6 +558,94 @@ resource "google_data_loss_prevention_job_trigger" "identifying_fields" { `, context) } +func testAccDataLossPreventionJobTrigger_dlpJobTriggerIncludedFields(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_job_trigger" "included_fields" { + parent = "projects/%{project}" + description = "Starting description" + display_name = "display" + + triggers { + schedule { + recurrence_period_duration = "86400s" + } + } + + inspect_job { + inspect_template_name = "fake" + actions { + save_findings { + output_config { + table { + project_id = "project" + dataset_id = "dataset123" + } + } + } + } + storage_config { + big_query_options { + table_reference { + project_id = "project" + dataset_id = "dataset" + table_id = "table_to_scan" + } + rows_limit = 1000 + sample_method = "RANDOM_START" + included_fields { + name = "field" + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionJobTrigger_dlpJobTriggerExcludedFields(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_job_trigger" "excluded_fields" { + parent = "projects/%{project}" + description = "Starting description" + display_name = "display" + + triggers { + schedule { + recurrence_period_duration = "86400s" + } + } + + inspect_job { + inspect_template_name = "fake" + actions { + save_findings { + output_config { + table { + project_id = "project" + dataset_id = "dataset123" + } + } + } + } + storage_config { + big_query_options { + table_reference { + project_id = "project" + dataset_id = "dataset" + table_id = "table_to_scan" + } + rows_limit = 1000 + sample_method = "RANDOM_START" + excluded_fields { + name = "field" + } + } + } + } +} +`, context) +} + func testAccDataLossPreventionJobTrigger_dlpJobTriggerUpdate(context map[string]interface{}) string { return Nprintf(` resource "google_data_loss_prevention_job_trigger" "basic" { @@ -481,6 +727,94 @@ resource "google_data_loss_prevention_job_trigger" "identifying_fields_update" { `, context) } +func testAccDataLossPreventionJobTrigger_dlpJobTriggerIncludedFieldsUpdate(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_job_trigger" "included_fields_update" { + parent = "projects/%{project}" + description = "An updated description" + display_name = "Different" + + triggers { + schedule { + recurrence_period_duration = "86400s" + } + } + + inspect_job { + inspect_template_name = "fake" + actions { + save_findings { + output_config { + table { + project_id = "project" + dataset_id = "dataset123" + } + } + } + } + storage_config { + big_query_options { + table_reference { + project_id = "project" + dataset_id = "dataset" + table_id = "table_to_scan" + } + rows_limit = 1000 + sample_method = "RANDOM_START" + included_fields { + name = "different" + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionJobTrigger_dlpJobTriggerExcludedFieldsUpdate(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_job_trigger" "excluded_fields_update" { + parent = "projects/%{project}" + description = "An updated description" + display_name = "Different" + + triggers { + schedule { + recurrence_period_duration = "86400s" + } + } + + inspect_job { + inspect_template_name = "fake" + actions { + save_findings { + output_config { + table { + project_id = "project" + dataset_id = "dataset123" + } + } + } + } + storage_config { + big_query_options { + table_reference { + 
project_id = "project" + dataset_id = "dataset" + table_id = "table_to_scan" + } + rows_limit = 1000 + sample_method = "RANDOM_START" + excluded_fields { + name = "different" + } + } + } + } +} +`, context) +} + func testAccDataLossPreventionJobTrigger_publishToPubSub(context map[string]interface{}) string { return Nprintf(` resource "google_data_loss_prevention_job_trigger" "pubsub" { @@ -1954,3 +2288,66 @@ resource "google_data_loss_prevention_job_trigger" "inspect" { } `, context) } + +func testAccDataLossPreventionJobTrigger_dlpJobTriggerActionsOptionalBasic(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_job_trigger" "basic" { + parent = "projects/%{project}" + description = "Starting description" + display_name = "display" + + triggers { + schedule { + recurrence_period_duration = "86400s" + } + } + + inspect_job { + inspect_template_name = "fake" + storage_config { + cloud_storage_options { + file_set { + url = "gs://mybucket/directory/" + } + } + } + } +} +`, context) +} + +func testAccDataLossPreventionJobTrigger_dlpJobTriggerInspectOptionalBasic(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_job_trigger" "basic" { + parent = "projects/%{project}" + description = "Starting description" + display_name = "display" + + triggers { + schedule { + recurrence_period_duration = "86400s" + } + } + + inspect_job { + actions { + save_findings { + output_config { + table { + project_id = "project" + dataset_id = "dataset123" + } + } + } + } + storage_config { + cloud_storage_options { + file_set { + url = "gs://mybucket/directory/" + } + } + } + } +} +`, context) +} diff --git a/mmv1/third_party/terraform/tests/resource_data_loss_prevention_stored_info_type_test.go b/mmv1/third_party/terraform/tests/resource_data_loss_prevention_stored_info_type_test.go index 6fdf4ec0cd9a..422d84925955 100644 --- a/mmv1/third_party/terraform/tests/resource_data_loss_prevention_stored_info_type_test.go +++ b/mmv1/third_party/terraform/tests/resource_data_loss_prevention_stored_info_type_test.go @@ -5,212 +5,8 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" ) -func TestAccDataLossPreventionStoredInfoType_dlpStoredInfoTypeCustomDiffFuncForceNew(t *testing.T) { - t.Parallel() - - cases := map[string]struct { - before map[string]interface{} - after map[string]interface{} - forcenew bool - }{ - "updating_dictionary": { - before: map[string]interface{}{ - "dictionary": map[string]interface{}{ - "word_list": map[string]interface{}{ - "word": []string{"word", "word2"}, - }, - }, - }, - after: map[string]interface{}{ - "dictionary": map[string]interface{}{ - "word_list": map[string]interface{}{ - "word": []string{"wordnew", "word2"}, - }, - }, - }, - forcenew: false, - }, - "updating_large_custom_dictionary": { - before: map[string]interface{}{ - "large_custom_dictionary": map[string]interface{}{ - "output_path": map[string]interface{}{ - "path": "gs://sample-dlp-bucket/something.json", - }, - }, - }, - after: map[string]interface{}{ - "large_custom_dictionary": map[string]interface{}{ - "output_path": map[string]interface{}{ - "path": "gs://sample-dlp-bucket/somethingnew.json", - }, - }, - }, - forcenew: false, - }, - "updating_regex": { - before: map[string]interface{}{ - "regex": map[string]interface{}{ - "pattern": "patient", - }, - }, - after: 
map[string]interface{}{ - "regex": map[string]interface{}{ - "pattern": "newpatient", - }, - }, - forcenew: false, - }, - "changing_from_dictionary_to_large_custom_dictionary": { - before: map[string]interface{}{ - "dictionary": map[string]interface{}{ - "word_list": map[string]interface{}{ - "word": []string{"word", "word2"}, - }, - }, - }, - after: map[string]interface{}{ - "large_custom_dictionary": map[string]interface{}{ - "output_path": map[string]interface{}{ - "path": "gs://sample-dlp-bucket/something.json", - }, - }, - }, - forcenew: true, - }, - "changing_from_dictionary_to_regex": { - before: map[string]interface{}{ - "dictionary": map[string]interface{}{ - "word_list": map[string]interface{}{ - "word": []string{"word", "word2"}, - }, - }, - }, - after: map[string]interface{}{ - "regex": map[string]interface{}{ - "pattern": "patient", - }, - }, - forcenew: true, - }, - "changing_from_large_custom_dictionary_to_regex": { - before: map[string]interface{}{ - "large_custom_dictionary": map[string]interface{}{ - "output_path": map[string]interface{}{ - "path": "gs://sample-dlp-bucket/something.json", - }, - }, - }, - after: map[string]interface{}{ - "regex": map[string]interface{}{ - "pattern": "patient", - }, - }, - forcenew: true, - }, - "changing_from_large_custom_dictionary_to_dictionary": { - before: map[string]interface{}{ - "large_custom_dictionary": map[string]interface{}{ - "output_path": map[string]interface{}{ - "path": "gs://sample-dlp-bucket/something.json", - }, - }, - }, - after: map[string]interface{}{ - "dictionary": map[string]interface{}{ - "word_list": map[string]interface{}{ - "word": []string{"word", "word2"}, - }, - }, - }, - forcenew: true, - }, - "changing_from_regex_to_dictionary": { - before: map[string]interface{}{ - "regex": map[string]interface{}{ - "pattern": "patient", - }, - }, - after: map[string]interface{}{ - "dictionary": map[string]interface{}{ - "word_list": map[string]interface{}{ - "word": []string{"word", "word2"}, - }, - }, - }, - forcenew: true, - }, - "changing_from_regex_to_large_custom_dictionary": { - before: map[string]interface{}{ - "regex": map[string]interface{}{ - "pattern": "patient", - }, - }, - after: map[string]interface{}{ - "large_custom_dictionary": map[string]interface{}{ - "output_path": map[string]interface{}{ - "path": "gs://sample-dlp-bucket/something.json", - }, - }, - }, - forcenew: true, - }, - } - - for tn, tc := range cases { - - fieldBefore := "" - fieldAfter := "" - switch tn { - case "updating_dictionary": - fieldBefore = "dictionary" - fieldAfter = fieldBefore - case "updating_large_custom_dictionary": - fieldBefore = "large_custom_dictionary" - fieldAfter = fieldBefore - case "updating_regex": - fieldBefore = "regex" - fieldAfter = fieldBefore - case "changing_from_dictionary_to_large_custom_dictionary": - fieldBefore = "dictionary" - fieldAfter = "large_custom_dictionary" - case "changing_from_dictionary_to_regex": - fieldBefore = "dictionary" - fieldAfter = "regex" - case "changing_from_large_custom_dictionary_to_regex": - fieldBefore = "large_custom_dictionary" - fieldAfter = "regex" - case "changing_from_large_custom_dictionary_to_dictionary": - fieldBefore = "large_custom_dictionary" - fieldAfter = "dictionary" - case "changing_from_regex_to_dictionary": - fieldBefore = "regex" - fieldAfter = "dictionary" - case "changing_from_regex_to_large_custom_dictionary": - fieldBefore = "regex" - fieldAfter = "large_custom_dictionary" - } - - d := &tpgresource.ResourceDiffMock{ - Before: map[string]interface{}{ - 
fieldBefore: tc.before[fieldBefore], - }, - After: map[string]interface{}{ - fieldAfter: tc.after[fieldAfter], - }, - } - err := storedInfoTypeCustomizeDiffFunc(d) - if err != nil { - t.Errorf("failed, expected no error but received - %s for the condition %s", err, tn) - } - if d.IsForceNew != tc.forcenew { - t.Errorf("ForceNew not setup correctly for the condition-'%s', expected:%v; actual:%v", tn, tc.forcenew, d.IsForceNew) - } - } -} - func TestAccDataLossPreventionStoredInfoType_dlpStoredInfoTypeUpdate(t *testing.T) { t.Parallel() @@ -351,3 +147,68 @@ resource "google_data_loss_prevention_stored_info_type" "basic" { } `, context) } + +func TestAccDataLossPreventionStoredInfoType_dlpStoredInfoTypeStoredInfoTypeId(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": acctest.GetTestProjectFromEnv(), + "random_suffix": RandString(t, 10), + } + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataLossPreventionStoredInfoTypeDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataLossPreventionStoredInfoType_dlpStoredInfoTypeStoredInfoTypeId(context), + }, + { + ResourceName: "google_data_loss_prevention_stored_info_type.basic", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataLossPreventionStoredInfoType_dlpStoredInfoTypeStoredInfoTypeIdUpdate(context), + }, + { + ResourceName: "google_data_loss_prevention_stored_info_type.basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccDataLossPreventionStoredInfoType_dlpStoredInfoTypeStoredInfoTypeId(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_stored_info_type" "basic" { + parent = "projects/%{project}" + description = "Description" + display_name = "Displayname" + stored_info_type_id = "tf-test-%{random_suffix}" + + regex { + pattern = "patient" + group_indexes = [2] + } +} +`, context) +} + +func testAccDataLossPreventionStoredInfoType_dlpStoredInfoTypeStoredInfoTypeIdUpdate(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_loss_prevention_stored_info_type" "basic" { + parent = "projects/%{project}" + description = "Description" + display_name = "Displayname" + stored_info_type_id = "tf-test-update-%{random_suffix}" + + regex { + pattern = "patient" + group_indexes = [2] + } +} +`, context) +} diff --git a/mmv1/third_party/terraform/tests/resource_dataform_repository_test.go.erb b/mmv1/third_party/terraform/tests/resource_dataform_repository_test.go.erb new file mode 100644 index 000000000000..542219eb2021 --- /dev/null +++ b/mmv1/third_party/terraform/tests/resource_dataform_repository_test.go.erb @@ -0,0 +1,129 @@ +<% autogen_exception -%> +package google +<% unless version == 'ga' -%> + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccDataformRepository_updated(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": RandString(t, 10), + } + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckDataformRepositoryDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataformRepository_basic(context), + }, + { + ResourceName: 
"google_dataform_repository.dataform_respository", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"region"}, + }, + { + Config: testAccDataformRepository_updated(context), + }, + { + ResourceName: "google_dataform_repository.dataform_respository", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"region"}, + }, + }, + }) +} + +func testAccDataformRepository_basic(context map[string]interface{}) string { + return Nprintf(` +resource "google_sourcerepo_repository" "git_repository" { + provider = google-beta + name = "my/repository%{random_suffix}" +} + +resource "google_secret_manager_secret" "secret" { + provider = google-beta + secret_id = "secret" + + replication { + automatic = true + } +} + +resource "google_secret_manager_secret_version" "secret_version" { + provider = google-beta + secret = google_secret_manager_secret.secret.id + + secret_data = "tf-test-secret-data%{random_suffix}" +} + +resource "google_dataform_repository" "dataform_respository" { + provider = google-beta + name = "tf_test_dataform_repository%{random_suffix}" + + git_remote_settings { + url = google_sourcerepo_repository.git_repository.url + default_branch = "main" + authentication_token_secret_version = google_secret_manager_secret_version.secret_version.id + } + + workspace_compilation_overrides { + default_database = "database" + schema_suffix = "_suffix" + table_prefix = "prefix_" + } +} +`, context) +} + +func testAccDataformRepository_updated(context map[string]interface{}) string { + return Nprintf(` +resource "google_sourcerepo_repository" "git_repository" { + provider = google-beta + name = "my/repository%{random_suffix}" +} + +resource "google_secret_manager_secret" "secret" { + provider = google-beta + secret_id = "secret" + + replication { + automatic = true + } +} + +resource "google_secret_manager_secret_version" "secret_version" { + provider = google-beta + secret = google_secret_manager_secret.secret.id + + secret_data = "tf-test-secret-data%{random_suffix}" +} + +resource "google_dataform_repository" "dataform_respository" { + provider = google-beta + name = "tf_test_dataform_repository%{random_suffix}" + + git_remote_settings { + url = google_sourcerepo_repository.git_repository.url + default_branch = "main" + authentication_token_secret_version = google_secret_manager_secret_version.secret_version.id + } + + workspace_compilation_overrides { + schema_suffix = "_suffix_v2" + table_prefix = "prefix_v2_" + } +} +`, context) +} +<% end -%> \ No newline at end of file diff --git a/mmv1/third_party/terraform/tests/resource_dataproc_job_test.go.erb b/mmv1/third_party/terraform/tests/resource_dataproc_job_test.go.erb index ee81c0da278e..9939064b435e 100644 --- a/mmv1/third_party/terraform/tests/resource_dataproc_job_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_dataproc_job_test.go.erb @@ -15,6 +15,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" "github.com/hashicorp/terraform-provider-google/google/acctest" + tpgdataproc "github.com/hashicorp/terraform-provider-google/google/services/dataproc" "google.golang.org/api/googleapi" "google.golang.org/api/dataproc/v1" ) @@ -351,7 +352,7 @@ func testAccCheckDataprocJobCompletesSuccessfully(t *testing.T, n string, job *d } jobCompleteTimeoutMins := 5 * time.Minute - waitErr := dataprocJobOperationWait(config, region, project, job.Reference.JobId, + waitErr := 
tpgdataproc.DataprocJobOperationWait(config, region, project, job.Reference.JobId, "Awaiting Dataproc job completion", config.UserAgent, jobCompleteTimeoutMins) if waitErr != nil { return waitErr diff --git a/mmv1/third_party/terraform/tests/resource_datastream_stream_test.go b/mmv1/third_party/terraform/tests/resource_datastream_stream_test.go index c1e7ee1905f0..62402b9c4413 100644 --- a/mmv1/third_party/terraform/tests/resource_datastream_stream_test.go +++ b/mmv1/third_party/terraform/tests/resource_datastream_stream_test.go @@ -1,140 +1,12 @@ package google import ( - "fmt" "testing" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" ) -func TestDatastreamStreamCustomDiff(t *testing.T) { - t.Parallel() - - cases := []struct { - isNew bool - old string - new string - wantError bool - }{ - { - isNew: true, - new: "NOT_STARTED", - wantError: false, - }, - { - isNew: true, - new: "RUNNING", - wantError: false, - }, - { - isNew: true, - new: "PAUSED", - wantError: true, - }, - { - isNew: true, - new: "MAINTENANCE", - wantError: true, - }, - { - // Normally this transition is okay, but if the resource is "new" - // (for example being recreated) it's not. - isNew: true, - old: "RUNNING", - new: "PAUSED", - wantError: true, - }, - { - old: "NOT_STARTED", - new: "RUNNING", - wantError: false, - }, - { - old: "NOT_STARTED", - new: "MAINTENANCE", - wantError: true, - }, - { - old: "NOT_STARTED", - new: "PAUSED", - wantError: true, - }, - { - old: "NOT_STARTED", - new: "NOT_STARTED", - wantError: false, - }, - { - old: "RUNNING", - new: "PAUSED", - wantError: false, - }, - { - old: "RUNNING", - new: "NOT_STARTED", - wantError: true, - }, - { - old: "RUNNING", - new: "RUNNING", - wantError: false, - }, - { - old: "RUNNING", - new: "MAINTENANCE", - wantError: true, - }, - { - old: "PAUSED", - new: "PAUSED", - wantError: false, - }, - { - old: "PAUSED", - new: "NOT_STARTED", - wantError: true, - }, - { - old: "PAUSED", - new: "RUNNING", - wantError: false, - }, - { - old: "PAUSED", - new: "MAINTENANCE", - wantError: true, - }, - } - for _, tc := range cases { - name := "whatever" - tn := fmt.Sprintf("%s => %s", tc.old, tc.new) - if tc.isNew { - name = "" - tn = fmt.Sprintf("(new) %s => %s", tc.old, tc.new) - } - t.Run(tn, func(t *testing.T) { - diff := &tpgresource.ResourceDiffMock{ - Before: map[string]interface{}{ - "desired_state": tc.old, - }, - After: map[string]interface{}{ - "name": name, - "desired_state": tc.new, - }, - } - err := resourceDatastreamStreamCustomDiffFunc(diff) - if tc.wantError && err == nil { - t.Fatalf("want error, got nil") - } - if !tc.wantError && err != nil { - t.Fatalf("got unexpected error: %v", err) - } - }) - } -} - func TestAccDatastreamStream_update(t *testing.T) { // this test uses the random provider acctest.SkipIfVcr(t) diff --git a/mmv1/third_party/terraform/tests/resource_dns_managed_zone_test.go.erb b/mmv1/third_party/terraform/tests/resource_dns_managed_zone_test.go.erb index 4f128232ebc6..7c11cc907467 100644 --- a/mmv1/third_party/terraform/tests/resource_dns_managed_zone_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_dns_managed_zone_test.go.erb @@ -5,6 +5,7 @@ import ( "fmt" "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" + tpgdns "github.com/hashicorp/terraform-provider-google/google/services/dns" "github.com/hashicorp/terraform-provider-google/google/tpgresource" 
transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" @@ -271,7 +272,7 @@ func testAccCheckManagedZoneCreateRRs(t *testing.T, zoneSuffix string, project s return fmt.Errorf("Error creating DNS RecordSet: %s", err) } - w := &DnsChangeWaiter{ + w := &tpgdns.DnsChangeWaiter{ Service: config.NewDnsClient(config.UserAgent), Change: chg, Project: project, diff --git a/mmv1/third_party/terraform/tests/resource_dns_record_set_test.go.erb b/mmv1/third_party/terraform/tests/resource_dns_record_set_test.go.erb index 028c6e16e7d3..c5606c55e633 100644 --- a/mmv1/third_party/terraform/tests/resource_dns_record_set_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_dns_record_set_test.go.erb @@ -11,27 +11,11 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" "github.com/hashicorp/terraform-provider-google/google/acctest" + tpgdns "github.com/hashicorp/terraform-provider-google/google/services/dns" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" - "github.com/hashicorp/terraform-provider-google/google/verify" ) -func TestValidateRecordNameTrailingDot(t *testing.T) { - cases := []verify.StringValidationTestCase{ - // No errors - {TestName: "trailing dot", Value: "test-record.hashicorptest.com."}, - - // With errors - {TestName: "empty string", Value: "", ExpectError: true}, - {TestName: "no trailing dot", Value: "test-record.hashicorptest.com", ExpectError: true}, - } - - es := verify.TestStringValidationCases(cases, validateRecordNameTrailingDot) - if len(es) > 0 { - t.Errorf("Failed to validate DNS Record name with value: %v", es) - } -} - func TestIpv6AddressDiffSuppress(t *testing.T) { cases := map[string]struct { Old, New []string @@ -69,7 +53,7 @@ func TestIpv6AddressDiffSuppress(t *testing.T) { } for tn, tc := range cases { - shouldSuppress := RrdatasListDiffSuppress(tc.Old, tc.New, parseFunc, nil) + shouldSuppress := tpgdns.RrdatasListDiffSuppress(tc.Old, tc.New, parseFunc, nil) if shouldSuppress != tc.ShouldSuppress { t.Errorf("%s: expected %t", tn, tc.ShouldSuppress) } diff --git a/mmv1/third_party/terraform/tests/resource_filestore_instance_test.go b/mmv1/third_party/terraform/tests/resource_filestore_instance_test.go index c9b78d3e326a..a4383ad16161 100644 --- a/mmv1/third_party/terraform/tests/resource_filestore_instance_test.go +++ b/mmv1/third_party/terraform/tests/resource_filestore_instance_test.go @@ -7,6 +7,7 @@ import ( "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/filestore" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" ) @@ -28,7 +29,7 @@ func testResourceFilestoreInstanceStateDataV1() map[string]interface{} { func TestFilestoreInstanceStateUpgradeV0(t *testing.T) { expected := testResourceFilestoreInstanceStateDataV1() // linter complains about nil context even in a test setting - actual, err := ResourceFilestoreInstanceUpgradeV0(context.Background(), testResourceFilestoreInstanceStateDataV0(), nil) + actual, err := filestore.ResourceFilestoreInstanceUpgradeV0(context.Background(), testResourceFilestoreInstanceStateDataV0(), nil) if err != nil { t.Fatalf("error migrating state: %s", err) } diff --git a/mmv1/third_party/terraform/tests/resource_firestore_index_test.go b/mmv1/third_party/terraform/tests/resource_firestore_index_test.go index 6b3df170c928..df3f622aa21d 100644 --- 
a/mmv1/third_party/terraform/tests/resource_firestore_index_test.go +++ b/mmv1/third_party/terraform/tests/resource_firestore_index_test.go @@ -4,6 +4,7 @@ import ( "fmt" "testing" + "github.com/hashicorp/terraform-provider-google/google/services/firestore" "github.com/hashicorp/terraform-provider-google/google/tpgresource" ) @@ -118,7 +119,7 @@ func (tc *FirestoreIndexDiffSuppressTestCase) Test(t *testing.T) { if !ok { newValue = "" } - suppressed := FirestoreIFieldsDiffSuppressFunc(key, fmt.Sprintf("%v", oldValue), fmt.Sprintf("%v", newValue), mockResourceDiff) + suppressed := firestore.FirestoreIFieldsDiffSuppressFunc(key, fmt.Sprintf("%v", oldValue), fmt.Sprintf("%v", newValue), mockResourceDiff) if suppressed != tcSuppress { var expectation string if tcSuppress { diff --git a/mmv1/third_party/terraform/tests/resource_gkeonprem_bare_metal_cluster_test.go.erb b/mmv1/third_party/terraform/tests/resource_gkeonprem_bare_metal_cluster_test.go.erb index 048d61df5290..93d167f9d724 100644 --- a/mmv1/third_party/terraform/tests/resource_gkeonprem_bare_metal_cluster_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_gkeonprem_bare_metal_cluster_test.go.erb @@ -2,12 +2,21 @@ package google <% unless version == 'ga' -%> import ( + "fmt" + "strings" "testing" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) func TestAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBasic(t *testing.T) { + // TODO: https://github.com/hashicorp/terraform-provider-google/issues/14417 + t.Skip() + t.Parallel() context := map[string]interface{}{} @@ -38,6 +47,9 @@ func TestAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBasic(t *testing.T) } func TestAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLb(t *testing.T) { + // TODO: https://github.com/hashicorp/terraform-provider-google/issues/14417 + t.Skip() + t.Parallel() context := map[string]interface{}{} @@ -68,6 +80,9 @@ func TestAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLb(t *testing. 
} func TestAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLb(t *testing.T) { + // TODO: https://github.com/hashicorp/terraform-provider-google/issues/14417 + t.Skip() + t.Parallel() context := map[string]interface{}{} @@ -553,4 +568,43 @@ func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLb(context map[st } `, context) } + +func testAccCheckGkeonpremBareMetalClusterDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_gkeonprem_bare_metal_cluster" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := GoogleProviderConfig(t) + + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{GkeonpremBasePath}}projects/{{project}}/locations/{{location}}/bareMetalClusters/{{name}}") + if err != nil { + return err + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: config.UserAgent, + }) + if err == nil { + return fmt.Errorf("GkeonpremBareMetalCluster still exists at %s", url) + } + } + + return nil + } +} <% end -%> \ No newline at end of file diff --git a/mmv1/third_party/terraform/tests/resource_gkeonprem_bare_metal_node_pool_test.go.erb b/mmv1/third_party/terraform/tests/resource_gkeonprem_bare_metal_node_pool_test.go.erb index 8b678edda8b0..12b23644be01 100644 --- a/mmv1/third_party/terraform/tests/resource_gkeonprem_bare_metal_node_pool_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_gkeonprem_bare_metal_node_pool_test.go.erb @@ -2,12 +2,21 @@ package google <% unless version == 'ga' -%> import ( + "fmt" + "strings" "testing" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) func TestAccGkeonpremBareMetalNodePool_bareMetalNodePoolUpdate(t *testing.T) { + // TODO: https://github.com/hashicorp/terraform-provider-google/issues/14417 + t.Skip() + t.Parallel() context := map[string]interface{}{} @@ -221,4 +230,43 @@ func testAccGkeonpremBareMetalNodePool_bareMetalNodePoolUpdate(context map[strin `, context) } +func testAccCheckGkeonpremBareMetalNodePoolDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_gkeonprem_bare_metal_node_pool" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := GoogleProviderConfig(t) + + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{GkeonpremBasePath}}projects/{{project}}/locations/{{location}}/bareMetalClusters/{{bare_metal_cluster}}/bareMetalNodePools/{{name}}") + if err != nil { + return err + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: config.UserAgent, + }) + if err == nil { + return fmt.Errorf("GkeonpremBareMetalNodePool still exists at %s", url) + } + } + + return nil + } +} + <% end -%> \ No newline at end of file diff --git 
a/mmv1/third_party/terraform/tests/resource_gkeonprem_vmware_cluster_test.go.erb b/mmv1/third_party/terraform/tests/resource_gkeonprem_vmware_cluster_test.go.erb index e725424ab292..ce81cfa182fb 100644 --- a/mmv1/third_party/terraform/tests/resource_gkeonprem_vmware_cluster_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_gkeonprem_vmware_cluster_test.go.erb @@ -2,13 +2,22 @@ package google <% unless version == 'ga' -%> import ( + "fmt" + "strings" "testing" - "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) func TestAccGkeonpremVmwareCluster_vmwareClusterUpdateBasic(t *testing.T) { + // TODO: https://github.com/hashicorp/terraform-provider-google/issues/14417 + t.Skip() + t.Parallel() context := map[string]interface{}{} @@ -39,6 +48,9 @@ func TestAccGkeonpremVmwareCluster_vmwareClusterUpdateBasic(t *testing.T) { } func TestAccGkeonpremVmwareCluster_vmwareClusterUpdateF5Lb(t *testing.T) { + // TODO: https://github.com/hashicorp/terraform-provider-google/issues/14417 + t.Skip() + t.Parallel() context := map[string]interface{}{} @@ -69,6 +81,9 @@ func TestAccGkeonpremVmwareCluster_vmwareClusterUpdateF5Lb(t *testing.T) { } func TestAccGkeonpremVmwareCluster_vmwareClusterUpdateManualLb(t *testing.T) { + // TODO: https://github.com/hashicorp/terraform-provider-google/issues/14417 + t.Skip() + // VCR fails to handle batched project services acctest.SkipIfVcr(t) t.Parallel() @@ -464,4 +479,43 @@ func testAccGkeonpremVmwareCluster_vmwareClusterUpdateManualLb(context map[strin `, context) } +func testAccCheckGkeonpremVmwareClusterDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_gkeonprem_vmware_cluster" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := GoogleProviderConfig(t) + + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{GkeonpremBasePath}}projects/{{project}}/locations/{{location}}/vmwareClusters/{{name}}") + if err != nil { + return err + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: config.UserAgent, + }) + if err == nil { + return fmt.Errorf("GkeonpremVmwareCluster still exists at %s", url) + } + } + + return nil + } +} + <% end -%> diff --git a/mmv1/third_party/terraform/tests/resource_gkeonprem_vmware_node_pool_test.go.erb b/mmv1/third_party/terraform/tests/resource_gkeonprem_vmware_node_pool_test.go.erb index 08b450ba9d85..df86105b3693 100644 --- a/mmv1/third_party/terraform/tests/resource_gkeonprem_vmware_node_pool_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_gkeonprem_vmware_node_pool_test.go.erb @@ -2,12 +2,21 @@ package google <% unless version == 'ga' -%> import ( + "fmt" + "strings" "testing" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg 
"github.com/hashicorp/terraform-provider-google/google/transport" ) func TestAccGkeonpremVmwareNodePool_vmwareNodePoolUpdate(t *testing.T) { + // TODO: https://github.com/hashicorp/terraform-provider-google/issues/14417 + t.Skip() + t.Parallel() context := map[string]interface{}{} @@ -183,4 +192,43 @@ func testAccGkeonpremVmwareNodePool_vmwareNodePoolUpdate(context map[string]inte `, context) } +func testAccCheckGkeonpremVmwareNodePoolDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_gkeonprem_vmware_node_pool" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := GoogleProviderConfig(t) + + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{GkeonpremBasePath}}projects/{{project}}/locations/{{location}}/vmwareClusters/{{vmware_cluster}}/vmwareNodePools/{{name}}") + if err != nil { + return err + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: config.UserAgent, + }) + if err == nil { + return fmt.Errorf("GkeonpremVmwareNodePool still exists at %s", url) + } + } + + return nil + } +} + <% end -%> \ No newline at end of file diff --git a/mmv1/third_party/terraform/tests/resource_google_billing_account_iam_test.go b/mmv1/third_party/terraform/tests/resource_google_billing_account_iam_test.go index 311dbaf34310..125e84506da4 100644 --- a/mmv1/third_party/terraform/tests/resource_google_billing_account_iam_test.go +++ b/mmv1/third_party/terraform/tests/resource_google_billing_account_iam_test.go @@ -70,11 +70,6 @@ func TestAccBillingAccountIam(t *testing.T) { ImportState: true, ImportStateVerify: true, }, - { - // Test Iam Policy creation - Config: testAccBillingAccountDatasetIamPolicy(account, billing, role), - Check: resource.TestCheckResourceAttrSet("data.google_billing_account_iam_policy.policy", "policy_data"), - }, }, }) } @@ -199,28 +194,3 @@ resource "google_billing_account_iam_member" "foo" { } `, account, billingAccountId, role) } - -func testAccBillingAccountDatasetIamPolicy(account, billing, role string) string { - return fmt.Sprintf(testBigqueryDatasetIam+` -resource "google_service_account" "test-account" { - account_id = "%s" - display_name = "Bigquery Dataset IAM Testing Account" -} - -data "google_iam_policy" "policy" { - binding { - role = "%s" - members = ["serviceAccount:${google_service_account.test-account.email}"] - } -} - -resource "google_billing_account_iam_policy" "policy" { - billing_account_id = "%s" - policy_data = data.google_iam_policy.policy.policy_data -} - -data "google_billing_account_iam_policy" "policy" { - billing_account_id = "%s" -} -`, account, role, billing, billing) -} diff --git a/mmv1/third_party/terraform/tests/resource_healthcare_dataset_iam_test.go b/mmv1/third_party/terraform/tests/resource_healthcare_dataset_iam_test.go index 38bf61968c0b..1d4b6b1cc0cf 100644 --- a/mmv1/third_party/terraform/tests/resource_healthcare_dataset_iam_test.go +++ b/mmv1/third_party/terraform/tests/resource_healthcare_dataset_iam_test.go @@ -7,6 +7,7 @@ import ( "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/healthcare" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" 
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform" @@ -22,7 +23,7 @@ func TestAccHealthcareDatasetIamBinding(t *testing.T) { roleId := "roles/healthcare.datasetAdmin" datasetName := fmt.Sprintf("tf-test-%s", RandString(t, 10)) - datasetId := &HealthcareDatasetId{ + datasetId := &healthcare.HealthcareDatasetId{ Project: projectId, Location: DEFAULT_HEALTHCARE_TEST_LOCATION, Name: datasetName, @@ -71,7 +72,7 @@ func TestAccHealthcareDatasetIamMember(t *testing.T) { roleId := "roles/healthcare.datasetViewer" datasetName := fmt.Sprintf("tf-test-%s", RandString(t, 10)) - datasetId := &HealthcareDatasetId{ + datasetId := &healthcare.HealthcareDatasetId{ Project: projectId, Location: DEFAULT_HEALTHCARE_TEST_LOCATION, Name: datasetName, @@ -106,7 +107,7 @@ func TestAccHealthcareDatasetIamPolicy(t *testing.T) { roleId := "roles/healthcare.datasetAdmin" datasetName := fmt.Sprintf("tf-test-%s", RandString(t, 10)) - datasetId := &HealthcareDatasetId{ + datasetId := &healthcare.HealthcareDatasetId{ Project: projectId, Location: DEFAULT_HEALTHCARE_TEST_LOCATION, Name: datasetName, diff --git a/mmv1/third_party/terraform/tests/resource_healthcare_dataset_test.go b/mmv1/third_party/terraform/tests/resource_healthcare_dataset_test.go index 8b7201f1648d..d390a3c41ca9 100644 --- a/mmv1/third_party/terraform/tests/resource_healthcare_dataset_test.go +++ b/mmv1/third_party/terraform/tests/resource_healthcare_dataset_test.go @@ -7,6 +7,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/healthcare" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) @@ -48,7 +49,7 @@ func TestAccHealthcareDatasetIdParsing(t *testing.T) { } for tn, tc := range cases { - datasetId, err := ParseHealthcareDatasetId(tc.ImportId, tc.Config) + datasetId, err := healthcare.ParseHealthcareDatasetId(tc.ImportId, tc.Config) if tc.ExpectedError && err == nil { t.Fatalf("bad: %s, expected an error", tn) diff --git a/mmv1/third_party/terraform/tests/resource_healthcare_dicom_store_iam_test.go b/mmv1/third_party/terraform/tests/resource_healthcare_dicom_store_iam_test.go index d7ac82d2b44c..a23471207e26 100644 --- a/mmv1/third_party/terraform/tests/resource_healthcare_dicom_store_iam_test.go +++ b/mmv1/third_party/terraform/tests/resource_healthcare_dicom_store_iam_test.go @@ -7,6 +7,7 @@ import ( "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/healthcare" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" @@ -19,7 +20,7 @@ func TestAccHealthcareDicomStoreIamBinding(t *testing.T) { account := fmt.Sprintf("tf-test-%d", RandInt(t)) roleId := "roles/healthcare.dicomStoreAdmin" datasetName := fmt.Sprintf("tf-test-%s", RandString(t, 10)) - datasetId := &HealthcareDatasetId{ + datasetId := &healthcare.HealthcareDatasetId{ Project: projectId, Location: DEFAULT_HEALTHCARE_TEST_LOCATION, Name: datasetName, @@ -68,7 +69,7 @@ func TestAccHealthcareDicomStoreIamMember(t *testing.T) { account := fmt.Sprintf("tf-test-%d", RandInt(t)) roleId := "roles/healthcare.dicomEditor" datasetName := fmt.Sprintf("tf-test-%s", RandString(t, 10)) - datasetId := &HealthcareDatasetId{ + datasetId := 
&healthcare.HealthcareDatasetId{ Project: projectId, Location: DEFAULT_HEALTHCARE_TEST_LOCATION, Name: datasetName, @@ -103,7 +104,7 @@ func TestAccHealthcareDicomStoreIamPolicy(t *testing.T) { account := fmt.Sprintf("tf-test-%d", RandInt(t)) roleId := "roles/healthcare.dicomViewer" datasetName := fmt.Sprintf("tf-test-%s", RandString(t, 10)) - datasetId := &HealthcareDatasetId{ + datasetId := &healthcare.HealthcareDatasetId{ Project: projectId, Location: DEFAULT_HEALTHCARE_TEST_LOCATION, Name: datasetName, @@ -142,7 +143,7 @@ func testAccCheckGoogleHealthcareDicomStoreIamBindingExists(t *testing.T, bindin } config := GoogleProviderConfig(t) - dicomStoreId, err := ParseHealthcareDicomStoreId(bindingRs.Primary.Attributes["dicom_store_id"], config) + dicomStoreId, err := healthcare.ParseHealthcareDicomStoreId(bindingRs.Primary.Attributes["dicom_store_id"], config) if err != nil { return err @@ -178,7 +179,7 @@ func testAccCheckGoogleHealthcareDicomStoreIamMemberExists(t *testing.T, n, role } config := GoogleProviderConfig(t) - dicomStoreId, err := ParseHealthcareDicomStoreId(rs.Primary.Attributes["dicom_store_id"], config) + dicomStoreId, err := healthcare.ParseHealthcareDicomStoreId(rs.Primary.Attributes["dicom_store_id"], config) if err != nil { return err @@ -213,7 +214,7 @@ func testAccCheckGoogleHealthcareDicomStoreIamPolicyExists(t *testing.T, n, role } config := GoogleProviderConfig(t) - dicomStoreId, err := ParseHealthcareDicomStoreId(rs.Primary.Attributes["dicom_store_id"], config) + dicomStoreId, err := healthcare.ParseHealthcareDicomStoreId(rs.Primary.Attributes["dicom_store_id"], config) if err != nil { return err diff --git a/mmv1/third_party/terraform/tests/resource_healthcare_dicom_store_test.go b/mmv1/third_party/terraform/tests/resource_healthcare_dicom_store_test.go index 60fcc0170196..6e466948dc7c 100644 --- a/mmv1/third_party/terraform/tests/resource_healthcare_dicom_store_test.go +++ b/mmv1/third_party/terraform/tests/resource_healthcare_dicom_store_test.go @@ -6,6 +6,7 @@ import ( "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/healthcare" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" @@ -50,7 +51,7 @@ func TestAccHealthcareDicomStoreIdParsing(t *testing.T) { } for tn, tc := range cases { - dicomStoreId, err := ParseHealthcareDicomStoreId(tc.ImportId, tc.Config) + dicomStoreId, err := healthcare.ParseHealthcareDicomStoreId(tc.ImportId, tc.Config) if tc.ExpectedError && err == nil { t.Fatalf("bad: %s, expected an error", tn) diff --git a/mmv1/third_party/terraform/tests/resource_healthcare_fhir_store_iam_test.go b/mmv1/third_party/terraform/tests/resource_healthcare_fhir_store_iam_test.go index a0e8f29c6e88..591564f75d04 100644 --- a/mmv1/third_party/terraform/tests/resource_healthcare_fhir_store_iam_test.go +++ b/mmv1/third_party/terraform/tests/resource_healthcare_fhir_store_iam_test.go @@ -7,6 +7,7 @@ import ( "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/healthcare" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" @@ -19,7 +20,7 @@ func TestAccHealthcareFhirStoreIamBinding(t *testing.T) { account := fmt.Sprintf("tf-test-%d", RandInt(t)) roleId := "roles/healthcare.fhirStoreAdmin" datasetName := 
fmt.Sprintf("tf-test-%s", RandString(t, 10)) - datasetId := &HealthcareDatasetId{ + datasetId := &healthcare.HealthcareDatasetId{ Project: projectId, Location: DEFAULT_HEALTHCARE_TEST_LOCATION, Name: datasetName, @@ -68,7 +69,7 @@ func TestAccHealthcareFhirStoreIamMember(t *testing.T) { account := fmt.Sprintf("tf-test-%d", RandInt(t)) roleId := "roles/healthcare.fhirResourceEditor" datasetName := fmt.Sprintf("tf-test-%s", RandString(t, 10)) - datasetId := &HealthcareDatasetId{ + datasetId := &healthcare.HealthcareDatasetId{ Project: projectId, Location: DEFAULT_HEALTHCARE_TEST_LOCATION, Name: datasetName, @@ -103,7 +104,7 @@ func TestAccHealthcareFhirStoreIamPolicy(t *testing.T) { account := fmt.Sprintf("tf-test-%d", RandInt(t)) roleId := "roles/healthcare.fhirResourceEditor" datasetName := fmt.Sprintf("tf-test-%s", RandString(t, 10)) - datasetId := &HealthcareDatasetId{ + datasetId := &healthcare.HealthcareDatasetId{ Project: projectId, Location: DEFAULT_HEALTHCARE_TEST_LOCATION, Name: datasetName, @@ -142,7 +143,7 @@ func testAccCheckGoogleHealthcareFhirStoreIamBindingExists(t *testing.T, binding } config := GoogleProviderConfig(t) - fhirStoreId, err := ParseHealthcareFhirStoreId(bindingRs.Primary.Attributes["fhir_store_id"], config) + fhirStoreId, err := healthcare.ParseHealthcareFhirStoreId(bindingRs.Primary.Attributes["fhir_store_id"], config) if err != nil { return err @@ -178,7 +179,7 @@ func testAccCheckGoogleHealthcareFhirStoreIamMemberExists(t *testing.T, n, role, } config := GoogleProviderConfig(t) - fhirStoreId, err := ParseHealthcareFhirStoreId(rs.Primary.Attributes["fhir_store_id"], config) + fhirStoreId, err := healthcare.ParseHealthcareFhirStoreId(rs.Primary.Attributes["fhir_store_id"], config) if err != nil { return err @@ -213,7 +214,7 @@ func testAccCheckGoogleHealthcareFhirStoreIamPolicyExists(t *testing.T, n, role, } config := GoogleProviderConfig(t) - fhirStoreId, err := ParseHealthcareFhirStoreId(rs.Primary.Attributes["fhir_store_id"], config) + fhirStoreId, err := healthcare.ParseHealthcareFhirStoreId(rs.Primary.Attributes["fhir_store_id"], config) if err != nil { return err diff --git a/mmv1/third_party/terraform/tests/resource_healthcare_fhir_store_test.go b/mmv1/third_party/terraform/tests/resource_healthcare_fhir_store_test.go index e771dd1acb91..1fb31757c077 100644 --- a/mmv1/third_party/terraform/tests/resource_healthcare_fhir_store_test.go +++ b/mmv1/third_party/terraform/tests/resource_healthcare_fhir_store_test.go @@ -6,6 +6,7 @@ import ( "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/healthcare" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" @@ -50,7 +51,7 @@ func TestAccHealthcareFhirStoreIdParsing(t *testing.T) { } for tn, tc := range cases { - fhirStoreId, err := ParseHealthcareFhirStoreId(tc.ImportId, tc.Config) + fhirStoreId, err := healthcare.ParseHealthcareFhirStoreId(tc.ImportId, tc.Config) if tc.ExpectedError && err == nil { t.Fatalf("bad: %s, expected an error", tn) diff --git a/mmv1/third_party/terraform/tests/resource_healthcare_hl7_v2_store_iam_test.go b/mmv1/third_party/terraform/tests/resource_healthcare_hl7_v2_store_iam_test.go index ff3c0fe4f355..e159a265ac49 100644 --- a/mmv1/third_party/terraform/tests/resource_healthcare_hl7_v2_store_iam_test.go +++ b/mmv1/third_party/terraform/tests/resource_healthcare_hl7_v2_store_iam_test.go @@ -7,6 +7,7 @@ 
import ( "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/healthcare" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" @@ -19,7 +20,7 @@ func TestAccHealthcareHl7V2StoreIamBinding(t *testing.T) { account := fmt.Sprintf("tf-test-%d", RandInt(t)) roleId := "roles/healthcare.hl7V2StoreAdmin" datasetName := fmt.Sprintf("tf-test-%s", RandString(t, 10)) - datasetId := &HealthcareDatasetId{ + datasetId := &healthcare.HealthcareDatasetId{ Project: projectId, Location: DEFAULT_HEALTHCARE_TEST_LOCATION, Name: datasetName, @@ -68,7 +69,7 @@ func TestAccHealthcareHl7V2StoreIamMember(t *testing.T) { account := fmt.Sprintf("tf-test-%d", RandInt(t)) roleId := "roles/healthcare.hl7V2Editor" datasetName := fmt.Sprintf("tf-test-%s", RandString(t, 10)) - datasetId := &HealthcareDatasetId{ + datasetId := &healthcare.HealthcareDatasetId{ Project: projectId, Location: DEFAULT_HEALTHCARE_TEST_LOCATION, Name: datasetName, @@ -103,7 +104,7 @@ func TestAccHealthcareHl7V2StoreIamPolicy(t *testing.T) { account := fmt.Sprintf("tf-test-%d", RandInt(t)) roleId := "roles/healthcare.hl7V2Consumer" datasetName := fmt.Sprintf("tf-test-%s", RandString(t, 10)) - datasetId := &HealthcareDatasetId{ + datasetId := &healthcare.HealthcareDatasetId{ Project: projectId, Location: DEFAULT_HEALTHCARE_TEST_LOCATION, Name: datasetName, @@ -142,7 +143,7 @@ func testAccCheckGoogleHealthcareHl7V2StoreIamBindingExists(t *testing.T, bindin } config := GoogleProviderConfig(t) - hl7V2StoreId, err := ParseHealthcareHl7V2StoreId(bindingRs.Primary.Attributes["hl7_v2_store_id"], config) + hl7V2StoreId, err := healthcare.ParseHealthcareHl7V2StoreId(bindingRs.Primary.Attributes["hl7_v2_store_id"], config) if err != nil { return err @@ -178,7 +179,7 @@ func testAccCheckGoogleHealthcareHl7V2StoreIamMemberExists(t *testing.T, n, role } config := GoogleProviderConfig(t) - hl7V2StoreId, err := ParseHealthcareHl7V2StoreId(rs.Primary.Attributes["hl7_v2_store_id"], config) + hl7V2StoreId, err := healthcare.ParseHealthcareHl7V2StoreId(rs.Primary.Attributes["hl7_v2_store_id"], config) if err != nil { return err @@ -213,7 +214,7 @@ func testAccCheckGoogleHealthcareHl7V2StoreIamPolicyExists(t *testing.T, n, role } config := GoogleProviderConfig(t) - hl7V2StoreId, err := ParseHealthcareHl7V2StoreId(rs.Primary.Attributes["hl7_v2_store_id"], config) + hl7V2StoreId, err := healthcare.ParseHealthcareHl7V2StoreId(rs.Primary.Attributes["hl7_v2_store_id"], config) if err != nil { return err diff --git a/mmv1/third_party/terraform/tests/resource_healthcare_hl7_v2_store_test.go.erb b/mmv1/third_party/terraform/tests/resource_healthcare_hl7_v2_store_test.go.erb index 3c6d6c8b353a..1b4981df2592 100644 --- a/mmv1/third_party/terraform/tests/resource_healthcare_hl7_v2_store_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_healthcare_hl7_v2_store_test.go.erb @@ -4,13 +4,15 @@ package google import ( "fmt" "path" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" "testing" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/healthcare" + 
"github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) func TestAccHealthcareHl7V2StoreIdParsing(t *testing.T) { @@ -50,7 +52,7 @@ func TestAccHealthcareHl7V2StoreIdParsing(t *testing.T) { } for tn, tc := range cases { - hl7V2StoreId, err := ParseHealthcareHl7V2StoreId(tc.ImportId, tc.Config) + hl7V2StoreId, err := healthcare.ParseHealthcareHl7V2StoreId(tc.ImportId, tc.Config) if tc.ExpectedError && err == nil { t.Fatalf("bad: %s, expected an error", tn) diff --git a/mmv1/third_party/terraform/tests/resource_iam_beta_workload_identity_pool_id_test.go.erb b/mmv1/third_party/terraform/tests/resource_iam_beta_workload_identity_pool_id_test.go.erb index 5b322d4c66e8..0a681078db03 100644 --- a/mmv1/third_party/terraform/tests/resource_iam_beta_workload_identity_pool_id_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_iam_beta_workload_identity_pool_id_test.go.erb @@ -6,6 +6,7 @@ import ( "strings" "testing" + "github.com/hashicorp/terraform-provider-google/google/services/iambeta" "github.com/hashicorp/terraform-provider-google/google/verify" ) @@ -28,7 +29,7 @@ func TestValidateIAMBetaWorkloadIdentityPoolId(t *testing.T) { {TestName: "too long", Value: strings.Repeat("f", 33), ExpectError: true}, } - es := verify.TestStringValidationCases(x, ValidateWorkloadIdentityPoolId) + es := verify.TestStringValidationCases(x, iambeta.ValidateWorkloadIdentityPoolId) if len(es) > 0 { t.Errorf("Failed to validate WorkloadIdentityPool names: %v", es) } diff --git a/mmv1/third_party/terraform/tests/resource_iam_beta_workload_identity_pool_provider_id_test.go.erb b/mmv1/third_party/terraform/tests/resource_iam_beta_workload_identity_pool_provider_id_test.go.erb index bf21b09a087c..23319dc17ac0 100644 --- a/mmv1/third_party/terraform/tests/resource_iam_beta_workload_identity_pool_provider_id_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_iam_beta_workload_identity_pool_provider_id_test.go.erb @@ -6,6 +6,7 @@ import ( "strings" "testing" + "github.com/hashicorp/terraform-provider-google/google/services/iambeta" "github.com/hashicorp/terraform-provider-google/google/verify" ) @@ -28,7 +29,7 @@ func TestValidateIAMBetaWorkloadIdentityPoolProviderId(t *testing.T) { {TestName: "too long", Value: strings.Repeat("f", 33), ExpectError: true}, } - es := verify.TestStringValidationCases(x, ValidateWorkloadIdentityPoolProviderId) + es := verify.TestStringValidationCases(x, iambeta.ValidateWorkloadIdentityPoolProviderId) if len(es) > 0 { t.Errorf("Failed to validate WorkloadIdentityPoolProvider names: %v", es) } diff --git a/mmv1/third_party/terraform/tests/resource_iam_workforce_pool_workforce_pool_id_test.go.erb b/mmv1/third_party/terraform/tests/resource_iam_workforce_pool_workforce_pool_id_test.go.erb index ee22cbe60d3f..4d926f806300 100644 --- a/mmv1/third_party/terraform/tests/resource_iam_workforce_pool_workforce_pool_id_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_iam_workforce_pool_workforce_pool_id_test.go.erb @@ -5,6 +5,7 @@ import ( "strings" "testing" + "github.com/hashicorp/terraform-provider-google/google/services/iamworkforcepool" "github.com/hashicorp/terraform-provider-google/google/verify" ) @@ -28,7 +29,7 @@ func TestValidateIAMWorkforcePoolWorkforcePoolId(t *testing.T) { {TestName: "ends with a hyphen", Value: "foobar-", ExpectError: true}, } - es := verify.TestStringValidationCases(x, ValidateWorkforcePoolId) + es := verify.TestStringValidationCases(x, 
iamworkforcepool.ValidateWorkforcePoolId) if len(es) > 0 { t.Errorf("Failed to validate WorkforcePool names: %v", es) } diff --git a/mmv1/third_party/terraform/tests/resource_iam_workforce_pool_workforce_pool_provider_id_test.go.erb b/mmv1/third_party/terraform/tests/resource_iam_workforce_pool_workforce_pool_provider_id_test.go.erb index 02a076ea0c9b..1d3fdb5e404a 100644 --- a/mmv1/third_party/terraform/tests/resource_iam_workforce_pool_workforce_pool_provider_id_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_iam_workforce_pool_workforce_pool_provider_id_test.go.erb @@ -5,6 +5,7 @@ import ( "strings" "testing" + "github.com/hashicorp/terraform-provider-google/google/services/iamworkforcepool" "github.com/hashicorp/terraform-provider-google/google/verify" ) @@ -26,7 +27,7 @@ func TestValidateIAMWorkforcePoolWorkforcePoolProviderId(t *testing.T) { {TestName: "too long", Value: strings.Repeat("f", 33), ExpectError: true}, } - es := verify.TestStringValidationCases(x, ValidateWorkforcePoolProviderId) + es := verify.TestStringValidationCases(x, iamworkforcepool.ValidateWorkforcePoolProviderId) if len(es) > 0 { t.Errorf("Failed to validate WorkforcePoolProvider names: %v", es) } diff --git a/mmv1/third_party/terraform/tests/resource_kms_crypto_key_iam_test.go.erb b/mmv1/third_party/terraform/tests/resource_kms_crypto_key_iam_test.go.erb index 58a71f92d308..d0c3d0f0b91b 100644 --- a/mmv1/third_party/terraform/tests/resource_kms_crypto_key_iam_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_kms_crypto_key_iam_test.go.erb @@ -4,12 +4,14 @@ package google import ( "fmt" "reflect" - "github.com/hashicorp/terraform-provider-google/google/acctest" "sort" "testing" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/kms" ) func TestAccKmsCryptoKeyIamBinding(t *testing.T) { @@ -21,7 +23,7 @@ func TestAccKmsCryptoKeyIamBinding(t *testing.T) { account := fmt.Sprintf("tf-test-%d", RandInt(t)) roleId := "roles/cloudkms.cryptoKeyDecrypter" keyRingName := fmt.Sprintf("tf-test-%s", RandString(t, 10)) - keyRingId := &KmsKeyRingId{ + keyRingId := &kms.KmsKeyRingId{ Project: projectId, Location: DEFAULT_KMS_TEST_LOCATION, Name: keyRingName, @@ -73,7 +75,7 @@ func TestAccKmsCryptoKeyIamBinding_withCondition(t *testing.T) { account := fmt.Sprintf("tf-test-%d", RandInt(t)) roleId := "roles/cloudkms.cryptoKeyDecrypter" keyRingName := fmt.Sprintf("tf-test-%s", RandString(t, 10)) - keyRingId := &KmsKeyRingId{ + keyRingId := &kms.KmsKeyRingId{ Project: projectId, Location: DEFAULT_KMS_TEST_LOCATION, Name: keyRingName, @@ -108,7 +110,7 @@ func TestAccKmsCryptoKeyIamMember(t *testing.T) { account := fmt.Sprintf("tf-test-%d", RandInt(t)) roleId := "roles/cloudkms.cryptoKeyEncrypter" keyRingName := fmt.Sprintf("tf-test-%s", RandString(t, 10)) - keyRingId := &KmsKeyRingId{ + keyRingId := &kms.KmsKeyRingId{ Project: projectId, Location: DEFAULT_KMS_TEST_LOCATION, Name: keyRingName, @@ -146,7 +148,7 @@ func TestAccKmsCryptoKeyIamMember_withCondition(t *testing.T) { account := fmt.Sprintf("tf-test-%d", RandInt(t)) roleId := "roles/cloudkms.cryptoKeyEncrypter" keyRingName := fmt.Sprintf("tf-test-%s", RandString(t, 10)) - keyRingId := &KmsKeyRingId{ + keyRingId := &kms.KmsKeyRingId{ Project: projectId, Location: DEFAULT_KMS_TEST_LOCATION, Name: keyRingName, @@ -182,7 +184,7 @@ func 
TestAccKmsCryptoKeyIamPolicy(t *testing.T) { roleId := "roles/cloudkms.cryptoKeyEncrypter" keyRingName := fmt.Sprintf("tf-test-%s", RandString(t, 10)) - keyRingId := &KmsKeyRingId{ + keyRingId := &kms.KmsKeyRingId{ Project: projectId, Location: DEFAULT_KMS_TEST_LOCATION, Name: keyRingName, @@ -223,7 +225,7 @@ func TestAccKmsCryptoKeyIamPolicy_withCondition(t *testing.T) { roleId := "roles/cloudkms.cryptoKeyEncrypter" keyRingName := fmt.Sprintf("tf-test-%s", RandString(t, 10)) - keyRingId := &KmsKeyRingId{ + keyRingId := &kms.KmsKeyRingId{ Project: projectId, Location: DEFAULT_KMS_TEST_LOCATION, Name: keyRingName, @@ -257,7 +259,7 @@ func testAccCheckGoogleKmsCryptoKeyIamBindingExists(t *testing.T, bindingResourc } config := GoogleProviderConfig(t) - cryptoKeyId, err := ParseKmsCryptoKeyId(bindingRs.Primary.Attributes["crypto_key_id"], config) + cryptoKeyId, err := kms.ParseKmsCryptoKeyId(bindingRs.Primary.Attributes["crypto_key_id"], config) if err != nil { return err @@ -293,7 +295,7 @@ func testAccCheckGoogleKmsCryptoKeyIamMemberExists(t *testing.T, n, role, member } config := GoogleProviderConfig(t) - cryptoKeyId, err := ParseKmsCryptoKeyId(rs.Primary.Attributes["crypto_key_id"], config) + cryptoKeyId, err := kms.ParseKmsCryptoKeyId(rs.Primary.Attributes["crypto_key_id"], config) if err != nil { return err @@ -328,7 +330,7 @@ func testAccCheckGoogleCryptoKmsKeyIam(t *testing.T, n, role string, members []s } config := GoogleProviderConfig(t) - cryptoKeyId, err := ParseKmsCryptoKeyId(rs.Primary.Attributes["crypto_key_id"], config) + cryptoKeyId, err := kms.ParseKmsCryptoKeyId(rs.Primary.Attributes["crypto_key_id"], config) if err != nil { return err diff --git a/mmv1/third_party/terraform/tests/resource_kms_crypto_key_test.go b/mmv1/third_party/terraform/tests/resource_kms_crypto_key_test.go index b35a56905aea..569bcae043e9 100644 --- a/mmv1/third_party/terraform/tests/resource_kms_crypto_key_test.go +++ b/mmv1/third_party/terraform/tests/resource_kms_crypto_key_test.go @@ -4,9 +4,9 @@ import ( "context" "fmt" "testing" - "time" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/kms" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" @@ -54,7 +54,7 @@ func TestCryptoKeyIdParsing(t *testing.T) { } for tn, tc := range cases { - cryptoKeyId, err := ParseKmsCryptoKeyId(tc.ImportId, tc.Config) + cryptoKeyId, err := kms.ParseKmsCryptoKeyId(tc.ImportId, tc.Config) if tc.ExpectedError && err == nil { t.Fatalf("bad: %s, expected an error", tn) @@ -77,41 +77,6 @@ func TestCryptoKeyIdParsing(t *testing.T) { } } -func TestCryptoKeyNextRotationCalculation(t *testing.T) { - t.Parallel() - - now := time.Now().UTC() - period, _ := time.ParseDuration("1000000s") - - expected := now.Add(period).Format(time.RFC3339Nano) - - timestamp, err := kmsCryptoKeyNextRotation(now, "1000000s") - - if err != nil { - t.Fatalf("unexpected failure parsing time %s and duration 1000s: %s", now, err.Error()) - } - - if expected != timestamp { - t.Fatalf("expected %s to equal %s", timestamp, expected) - } -} - -func TestCryptoKeyNextRotationCalculation_validation(t *testing.T) { - t.Parallel() - - _, errs := validateKmsCryptoKeyRotationPeriod("86399s", "rotation_period") - - if len(errs) == 0 { - t.Fatalf("Periods of less than a day should be invalid") - } - - _, errs = validateKmsCryptoKeyRotationPeriod("100000.0000000001s", "rotation_period") - - if 
len(errs) == 0 { - t.Fatalf("Numbers with more than 9 fractional digits are invalid") - } -} - func TestCryptoKeyStateUpgradeV0(t *testing.T) { t.Parallel() @@ -152,7 +117,7 @@ func TestCryptoKeyStateUpgradeV0(t *testing.T) { } for tn, tc := range cases { t.Run(tn, func(t *testing.T) { - actual, err := ResourceKMSCryptoKeyUpgradeV0(context.Background(), tc.Attributes, tc.Meta) + actual, err := kms.ResourceKMSCryptoKeyUpgradeV0(context.Background(), tc.Attributes, tc.Meta) if err != nil { t.Error(err) diff --git a/mmv1/third_party/terraform/tests/resource_kms_key_ring_iam_test.go.erb b/mmv1/third_party/terraform/tests/resource_kms_key_ring_iam_test.go.erb index c78219aa5441..00fa6273847b 100644 --- a/mmv1/third_party/terraform/tests/resource_kms_key_ring_iam_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_kms_key_ring_iam_test.go.erb @@ -4,12 +4,14 @@ package google import ( "fmt" "reflect" - "github.com/hashicorp/terraform-provider-google/google/acctest" "sort" "testing" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/kms" ) const DEFAULT_KMS_TEST_LOCATION = "us-central1" @@ -24,7 +26,7 @@ func TestAccKmsKeyRingIamBinding(t *testing.T) { roleId := "roles/cloudkms.cryptoKeyDecrypter" keyRingName := fmt.Sprintf("tf-test-%s", RandString(t, 10)) - keyRingId := &KmsKeyRingId{ + keyRingId := &kms.KmsKeyRingId{ Project: projectId, Location: DEFAULT_KMS_TEST_LOCATION, Name: keyRingName, @@ -77,7 +79,7 @@ func TestAccKmsKeyRingIamBinding_withCondition(t *testing.T) { keyRingName := fmt.Sprintf("tf-test-%s", RandString(t, 10)) conditionTitle := "expires_after_2019_12_31" - keyRingId := &KmsKeyRingId{ + keyRingId := &kms.KmsKeyRingId{ Project: projectId, Location: DEFAULT_KMS_TEST_LOCATION, Name: keyRingName, @@ -111,7 +113,7 @@ func TestAccKmsKeyRingIamMember(t *testing.T) { roleId := "roles/cloudkms.cryptoKeyEncrypter" keyRingName := fmt.Sprintf("tf-test-%s", RandString(t, 10)) - keyRingId := &KmsKeyRingId{ + keyRingId := &kms.KmsKeyRingId{ Project: projectId, Location: DEFAULT_KMS_TEST_LOCATION, Name: keyRingName, @@ -150,7 +152,7 @@ func TestAccKmsKeyRingIamMember_withCondition(t *testing.T) { keyRingName := fmt.Sprintf("tf-test-%s", RandString(t, 10)) conditionTitle := "expires_after_2019_12_31" - keyRingId := &KmsKeyRingId{ + keyRingId := &kms.KmsKeyRingId{ Project: projectId, Location: DEFAULT_KMS_TEST_LOCATION, Name: keyRingName, @@ -184,7 +186,7 @@ func TestAccKmsKeyRingIamPolicy(t *testing.T) { roleId := "roles/cloudkms.cryptoKeyEncrypter" keyRingName := fmt.Sprintf("tf-test-%s", RandString(t, 10)) - keyRingId := &KmsKeyRingId{ + keyRingId := &kms.KmsKeyRingId{ Project: projectId, Location: DEFAULT_KMS_TEST_LOCATION, Name: keyRingName, @@ -222,7 +224,7 @@ func TestAccKmsKeyRingIamPolicy_withCondition(t *testing.T) { keyRingName := fmt.Sprintf("tf-test-%s", RandString(t, 10)) conditionTitle := "expires_after_2019_12_31" - keyRingId := &KmsKeyRingId{ + keyRingId := &kms.KmsKeyRingId{ Project: projectId, Location: DEFAULT_KMS_TEST_LOCATION, Name: keyRingName, diff --git a/mmv1/third_party/terraform/tests/resource_kms_key_ring_test.go b/mmv1/third_party/terraform/tests/resource_kms_key_ring_test.go index 75d673775cda..b67b615e29b1 100644 --- a/mmv1/third_party/terraform/tests/resource_kms_key_ring_test.go +++ b/mmv1/third_party/terraform/tests/resource_kms_key_ring_test.go @@ 
-7,71 +7,8 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" "github.com/hashicorp/terraform-provider-google/google/acctest" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) -func TestKeyRingIdParsing(t *testing.T) { - cases := map[string]struct { - ImportId string - ExpectedError bool - ExpectedTerraformId string - ExpectedKeyRingId string - Config *transport_tpg.Config - }{ - "id is in project/location/keyRingName format": { - ImportId: "test-project/us-central1/test-key-ring", - ExpectedError: false, - ExpectedTerraformId: "test-project/us-central1/test-key-ring", - ExpectedKeyRingId: "projects/test-project/locations/us-central1/keyRings/test-key-ring", - }, - "id is in domain:project/location/keyRingName format": { - ImportId: "example.com:test-project/us-central1/test-key-ring", - ExpectedError: false, - ExpectedTerraformId: "example.com:test-project/us-central1/test-key-ring", - ExpectedKeyRingId: "projects/example.com:test-project/locations/us-central1/keyRings/test-key-ring", - }, - "id contains name that is longer than 63 characters": { - ImportId: "test-project/us-central1/can-you-believe-that-this-key-ring-name-is-exactly-64-characters", - ExpectedError: true, - }, - "id is in location/keyRingName format": { - ImportId: "us-central1/test-key-ring", - ExpectedError: false, - ExpectedTerraformId: "test-project/us-central1/test-key-ring", - ExpectedKeyRingId: "projects/test-project/locations/us-central1/keyRings/test-key-ring", - Config: &transport_tpg.Config{Project: "test-project"}, - }, - "id is in location/keyRingName format without project in config": { - ImportId: "us-central1/test-key-ring", - ExpectedError: true, - Config: &transport_tpg.Config{Project: ""}, - }, - } - - for tn, tc := range cases { - keyRingId, err := parseKmsKeyRingId(tc.ImportId, tc.Config) - - if tc.ExpectedError && err == nil { - t.Fatalf("bad: %s, expected an error", tn) - } - - if err != nil { - if tc.ExpectedError { - continue - } - t.Fatalf("bad: %s, err: %#v", tn, err) - } - - if keyRingId.TerraformId() != tc.ExpectedTerraformId { - t.Fatalf("bad: %s, expected Terraform ID to be `%s` but is `%s`", tn, tc.ExpectedTerraformId, keyRingId.TerraformId()) - } - - if keyRingId.KeyRingId() != tc.ExpectedKeyRingId { - t.Fatalf("bad: %s, expected KeyRing ID to be `%s` but is `%s`", tn, tc.ExpectedKeyRingId, keyRingId.KeyRingId()) - } - } -} - func TestAccKmsKeyRing_basic(t *testing.T) { projectId := fmt.Sprintf("tf-test-%d", RandInt(t)) projectOrg := acctest.GetTestOrgFromEnv(t) diff --git a/mmv1/third_party/terraform/tests/resource_logging_bucket_config_test.go b/mmv1/third_party/terraform/tests/resource_logging_bucket_config_test.go index f2111d0e5d7d..a4b02ac2a11f 100644 --- a/mmv1/third_party/terraform/tests/resource_logging_bucket_config_test.go +++ b/mmv1/third_party/terraform/tests/resource_logging_bucket_config_test.go @@ -292,8 +292,13 @@ resource "google_project" "default" { billing_account = "%{billing_account}" } +resource "google_project_service" "logging_service" { + project = google_project.default.project_id + service = "logging.googleapis.com" +} + data "google_logging_project_cmek_settings" "cmek_settings" { - project = google_project.default.name + project = google_project_service.logging_service.project } resource "google_kms_key_ring" "keyring" { diff --git a/mmv1/third_party/terraform/tests/resource_monitoring_notification_channel_test.go 
b/mmv1/third_party/terraform/tests/resource_monitoring_notification_channel_test.go index ce18ec9f8a05..74baf0cfbf35 100644 --- a/mmv1/third_party/terraform/tests/resource_monitoring_notification_channel_test.go +++ b/mmv1/third_party/terraform/tests/resource_monitoring_notification_channel_test.go @@ -36,7 +36,9 @@ func TestAccMonitoringNotificationChannel_update(t *testing.T) { }) } -func TestAccMonitoringNotificationChannel_updateSensitiveLabels(t *testing.T) { +func TestAccMonitoringNotificationChannel_updateLabels_slack(t *testing.T) { + // Slack auth_token is required for the test not to fail; skipping until an internal testing Slack can be created + t.Skip() t.Parallel() VcrTest(t, resource.TestCase{ @@ -45,15 +47,28 @@ func TestAccMonitoringNotificationChannel_updateSensitiveLabels(t *testing.T) { CheckDestroy: testAccCheckMonitoringNotificationChannelDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccMonitoringNotificationChannel_updateSensitiveLabels(), + Config: testAccMonitoringNotificationChannel_updateLabels_slack(), }, - // sensitive labels for notification channels are either obfuscated or not returned by the upstream - // API. Therefore when re-importing a resource we cannot know what the value is. { ResourceName: "google_monitoring_notification_channel.slack", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels.%", "labels.auth_token", "sensitive_labels"}, + ImportStateVerifyIgnore: []string{"labels.%", "labels.auth_token"}, + }, + }, + }) +} + +func TestAccMonitoringNotificationChannel_updateLabels(t *testing.T) { + t.Parallel() + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckMonitoringNotificationChannelDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccMonitoringNotificationChannel_updateLabels(), }, { ResourceName: "google_monitoring_notification_channel.pagerduty", @@ -67,15 +82,47 @@ func TestAccMonitoringNotificationChannel_updateSensitiveLabels(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"labels.%", "labels.password", "sensitive_labels"}, }, + }, + }) +} + +func TestAccMonitoringNotificationChannel_updateSensitiveLabels_slack(t *testing.T) { + // Slack auth_token is required for the test not to fail; skipping until an internal testing Slack can be created + t.Skip() + t.Parallel() + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckMonitoringNotificationChannelDestroyProducer(t), + Steps: []resource.TestStep{ { - Config: testAccMonitoringNotificationChannel_updateSensitiveLabels2(), + Config: testAccMonitoringNotificationChannel_updateSensitiveLabels_slack(), }, + // sensitive labels for notification channels are either obfuscated or not returned by the upstream + // API. Therefore when re-importing a resource we cannot know what the value is.
{ ResourceName: "google_monitoring_notification_channel.slack", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"labels.%", "labels.auth_token", "sensitive_labels"}, + ImportStateVerifyIgnore: []string{"labels.%", "sensitive_labels.auth_token"}, + }, + }, + }) +} +func TestAccMonitoringNotificationChannel_updateSensitiveLabels(t *testing.T) { + t.Parallel() + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckMonitoringNotificationChannelDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccMonitoringNotificationChannel_updateSensitiveLabels(), }, + // sensitive labels for notification channels are either obfuscated or not returned by the upstream + // API. Therefore when re-importing a resource we cannot know what the value is. { ResourceName: "google_monitoring_notification_channel.pagerduty", ImportState: true, @@ -107,7 +154,7 @@ resource "google_monitoring_notification_channel" "update" { ) } -func testAccMonitoringNotificationChannel_updateSensitiveLabels() string { +func testAccMonitoringNotificationChannel_updateLabels_slack() string { return fmt.Sprintf(` resource "google_monitoring_notification_channel" "slack" { display_name = "TFTest Slack Channel" @@ -117,6 +164,12 @@ resource "google_monitoring_notification_channel" "slack" { "channel_name" = "#foobar" } } +`) +} + +func testAccMonitoringNotificationChannel_updateLabels() string { + return fmt.Sprintf(` + resource "google_monitoring_notification_channel" "basicauth" { display_name = "TFTest Basicauth Channel" @@ -138,7 +191,7 @@ resource "google_monitoring_notification_channel" "pagerduty" { `) } -func testAccMonitoringNotificationChannel_updateSensitiveLabels2() string { +func testAccMonitoringNotificationChannel_updateSensitiveLabels_slack() string { return fmt.Sprintf(` resource "google_monitoring_notification_channel" "slack" { display_name = "TFTest Slack Channel" @@ -151,6 +204,11 @@ resource "google_monitoring_notification_channel" "slack" { auth_token = "one" } } +`) +} + +func testAccMonitoringNotificationChannel_updateSensitiveLabels() string { + return fmt.Sprintf(` resource "google_monitoring_notification_channel" "basicauth" { display_name = "TFTest Basicauth Channel" diff --git a/mmv1/third_party/terraform/tests/resource_network_services_gateway_test.go.erb b/mmv1/third_party/terraform/tests/resource_network_services_gateway_test.go.erb index 339fd0a68531..d001f4033ad0 100644 --- a/mmv1/third_party/terraform/tests/resource_network_services_gateway_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_network_services_gateway_test.go.erb @@ -67,4 +67,671 @@ resource "google_network_services_gateway" "foobar" { `, gatewayName) } +// TODO(#14600): Enable the test once the api allows to update the fields for secure web gateway type. 
+//func TestAccNetworkServicesGateway_updateSwp(t *testing.T) { +//cmName := fmt.Sprintf("tf-test-gateway-swp-cm-%s", RandString(t, 10)) +// netName := fmt.Sprintf("tf-test-gateway-swp-net-%s", RandString(t, 10)) +// subnetName := fmt.Sprintf("tf-test-gateway-swp-subnet-%s", RandString(t, 10)) +// pSubnetName := fmt.Sprintf("tf-test-gateway-swp-proxyonly-%s", RandString(t, 10)) +// policyName := fmt.Sprintf("tf-test-gateway-swp-policy-%s", RandString(t, 10)) +// ruleName := fmt.Sprintf("tf-test-gateway-swp-rule-%s", RandString(t, 10)) +// gatewayScope := fmt.Sprintf("tf-test-gateway-swp-scope-%s", RandString(t, 10)) +// gatewayName := fmt.Sprintf("tf-test-gateway-swp-%s", RandString(t, 10)) +// // updates +// newCmName := fmt.Sprintf("tf-test-gateway-swp-newcm-%s", RandString(t, 10)) +// newPolicyName := fmt.Sprintf("tf-test-gateway-swp-newpolicy-%s", RandString(t, 10)) +// newRuleName := fmt.Sprintf("tf-test-gateway-swp-newrule-%s", RandString(t, 10)) +// +// VcrTest(t, resource.TestCase{ +// PreCheck: func() { acctest.AccTestPreCheck(t) }, +// ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), +// CheckDestroy: testAccCheckNetworkServicesGatewayDestroyProducer(t), +// Steps: []resource.TestStep{ +// { +// Config: testAccNetworkServicesGateway_basicSwp(cmName, netName, subnetName, pSubnetName, policyName, ruleName, gatewayName, gatewayScope), +// }, +// { +// ResourceName: "google_network_services_gateway.foobar", +// ImportState: true, +// ImportStateVerify: true, +// ImportStateVerifyIgnore: []string{"name", "location", "delete_swg_autogen_router_on_destroy"}, +// }, +// { +// Config: testAccNetworkServicesGateway_updateSwp(cmName, newCmName, netName, subnetName, pSubnetName, policyName, newPolicyName, ruleName, newRuleName, gatewayName, gatewayScope), +// }, +// { +// ResourceName: "google_network_services_gateway.foobar", +// ImportState: true, +// ImportStateVerify: true, +// ImportStateVerifyIgnore: []string{"name", "location", "delete_swg_autogen_router_on_destroy"}, +// }, +// }, +// }) +//} + +//func testAccNetworkServicesGateway_basicSwp(cmName, netName, subnetName, pSubnetName, policyName, ruleName, gatewayName, gatewayScope string) string { +// return fmt.Sprintf(` +//resource "google_certificate_manager_certificate" "default" { +// name = "%s" +// location = "us-east1" +// self_managed { +// pem_certificate = file("test-fixtures/certificatemanager/cert.pem") +// pem_private_key = file("test-fixtures/certificatemanager/private-key.pem") +// } +//} +// +//resource "google_compute_network" "default" { +// name = "%s" +// routing_mode = "REGIONAL" +// auto_create_subnetworks = false +//} +// +//resource "google_compute_subnetwork" "proxyonlysubnet" { +// name = "%s" +// purpose = "REGIONAL_MANAGED_PROXY" +// ip_cidr_range = "192.168.0.0/23" +// region = "us-east1" +// network = google_compute_network.default.id +// role = "ACTIVE" +//} +// +//resource "google_compute_subnetwork" "default" { +// name = "%s" +// purpose = "PRIVATE" +// ip_cidr_range = "10.128.0.0/20" +// region = "us-east1" +// network = google_compute_network.default.id +// role = "ACTIVE" +//} +// +//resource "google_network_security_gateway_security_policy" "default" { +// name = "%s" +// location = "us-east1" +//} +// +//resource "google_network_security_gateway_security_policy_rule" "default" { +// name = "%s" +// location = "us-east1" +// gateway_security_policy = google_network_security_gateway_security_policy.default.name +// enabled = true +// priority = 1 +// session_matcher = "host() == 
'example.com'" +// basic_profile = "ALLOW" +//} +// +//resource "google_network_services_gateway" "foobar" { +// name = "%s" +// location = "us-east1" +// addresses = ["10.128.0.99"] +// type = "SECURE_WEB_GATEWAY" +// ports = [443] +// description = "my description" +// scope = "%s" +// certificate_urls = [google_certificate_manager_certificate.default.id] +// gateway_security_policy = google_network_security_gateway_security_policy.default.id +// network = google_compute_network.default.id +// subnetwork = google_compute_subnetwork.default.id +// delete_swg_autogen_router_on_destroy = true +// depends_on = [google_compute_subnetwork.proxyonlysubnet] +// +//} +//`, cmName, netName, subnetName, pSubnetName, policyName, ruleName, gatewayName, gatewayScope) +//} + +//func testAccNetworkServicesGateway_updateSwp(cmName, newCmName, netName, subnetName, pSubnetName, policyName, newPolicyName, ruleName, newRuleName, gatewayName, gatewayScope string) string { +// return fmt.Sprintf(` +//resource "google_certificate_manager_certificate" "default" { +// name = "%s" +// location = "us-east1" +// self_managed { +// pem_certificate = file("test-fixtures/certificatemanager/cert.pem") +// pem_private_key = file("test-fixtures/certificatemanager/private-key.pem") +// } +//} +// +//resource "google_certificate_manager_certificate" "newcm" { +// name = "%s" +// location = "us-east1" +// self_managed { +// pem_certificate = file("test-fixtures/certificatemanager/cert.pem") +// pem_private_key = file("test-fixtures/certificatemanager/private-key.pem") +// } +//} +// +//resource "google_compute_network" "default" { +// name = "%s" +// routing_mode = "REGIONAL" +// auto_create_subnetworks = false +//} +// +//resource "google_compute_subnetwork" "proxyonlysubnet" { +// name = "%s" +// purpose = "REGIONAL_MANAGED_PROXY" +// ip_cidr_range = "192.168.0.0/23" +// region = "us-east1" +// network = google_compute_network.default.id +// role = "ACTIVE" +//} +// +//resource "google_compute_subnetwork" "default" { +// name = "%s" +// purpose = "PRIVATE" +// ip_cidr_range = "10.128.0.0/20" +// region = "us-east1" +// network = google_compute_network.default.id +// role = "ACTIVE" +//} +// +//resource "google_network_security_gateway_security_policy" "default" { +// name = "%s" +// location = "us-east1" +//} +// +//resource "google_network_security_gateway_security_policy_rule" "default" { +// name = "%s" +// location = "us-east1" +// gateway_security_policy = google_network_security_gateway_security_policy.default.name +// enabled = true +// priority = 1 +// session_matcher = "host() == 'example.com'" +// basic_profile = "ALLOW" +//} +// +//# TODO(#14600): this field will be updatable soon so this test should also cover it. 
+//# resource "google_network_security_gateway_security_policy" "newpolicy" { +//# name = "%s" +//# location = "us-east1" +//# } +// +//# resource "google_network_security_gateway_security_policy_rule" "newrule" { +//# name = "%s" +//# location = "us-east1" +//# gateway_security_policy = google_network_security_gateway_security_policy.newpolicy.name +//# enabled = true +//# priority = 1 +//# session_matcher = "host() == 'example.com'" +//# basic_profile = "ALLOW" +//# } +// +//resource "google_network_services_gateway" "foobar" { +// name = "%s" +// location = "us-east1" +// addresses = ["10.128.0.99"] +// type = "SECURE_WEB_GATEWAY" +// ports = [443] +// description = "updated description" +// scope = "%s" +// certificate_urls = [google_certificate_manager_certificate.default.id, google_certificate_manager_certificate.newcm.id] +// gateway_security_policy = google_network_security_gateway_security_policy.default.id +// # TODO(#14600): this field will be updatable soon so this test should also cover it. +// # gateway_security_policy = google_network_security_gateway_security_policy.newpolicy.id +// network = google_compute_network.default.id +// subnetwork = google_compute_subnetwork.default.id +// delete_swg_autogen_router_on_destroy = true +// depends_on = [google_compute_subnetwork.proxyonlysubnet] +// +//} +//`, cmName, newCmName, netName, subnetName, pSubnetName, policyName, newPolicyName, ruleName, newRuleName, gatewayName, gatewayScope) +//} + +func TestAccNetworkServicesGateway_multipleSwpGatewaysDifferentSubnetwork(t *testing.T) { + cmName := fmt.Sprintf("tf-test-gateway-multiswp-cm-%s", RandString(t, 10)) + netName := fmt.Sprintf("tf-test-gateway-multiswp-net-%s", RandString(t, 10)) + subnetName := fmt.Sprintf("tf-test-gateway-multiswp-subnet-%s", RandString(t, 10)) + pSubnetName := fmt.Sprintf("tf-test-gateway-multiswp-proxyonly-%s", RandString(t, 10)) + policyName := fmt.Sprintf("tf-test-gateway-multiswp-policy-%s", RandString(t, 10)) + ruleName := fmt.Sprintf("tf-test-gateway-multiswp-rule-%s", RandString(t, 10)) + gatewayScope := fmt.Sprintf("tf-test-gateway-multiswp-scope-%s", RandString(t, 10)) + gatewayName := fmt.Sprintf("tf-test-gateway-multiswp-%s", RandString(t, 10)) + subnet2Name := fmt.Sprintf("tf-test-gateway-multiswp-subnet2-%s", RandString(t, 10)) + gateway2Name := fmt.Sprintf("tf-test-gateway-multiswp2-%s", RandString(t, 10)) + gateway2Scope := fmt.Sprintf("tf-test-gateway-multiswp-scope2-%s", RandString(t, 10)) + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetworkServicesGatewayDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkServicesGateway_multipleSwpGatewaysDifferentSubnetwork(cmName, netName, subnetName, pSubnetName, policyName, ruleName, gatewayName, gatewayScope, subnet2Name, gateway2Name, gateway2Scope), + }, + { + ResourceName: "google_network_services_gateway.gateway1", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "delete_swg_autogen_router_on_destroy"}, + }, + { + Config: testAccNetworkServicesGateway_multipleSwpGatewaysDifferentSubnetworkRemoveGateway2(cmName, netName, subnetName, pSubnetName, policyName, ruleName, gatewayName, gatewayScope, subnet2Name), + }, + { + ResourceName: "google_network_services_gateway.gateway1", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", 
"delete_swg_autogen_router_on_destroy"}, + }, + }, + }) +} + +func testAccNetworkServicesGateway_multipleSwpGatewaysDifferentSubnetwork(cmName, netName, subnetName, pSubnetName, policyName, ruleName, gatewayName, gatewayScope, subnet2Name, gateway2Name, gateway2Scope string) string { + return fmt.Sprintf(` +resource "google_certificate_manager_certificate" "default" { + name = "%s" + location = "us-west1" + self_managed { + pem_certificate = file("test-fixtures/certificatemanager/cert.pem") + pem_private_key = file("test-fixtures/certificatemanager/private-key.pem") + } +} + +resource "google_compute_network" "default" { + name = "%s" + routing_mode = "REGIONAL" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "proxyonlysubnet" { + name = "%s" + purpose = "REGIONAL_MANAGED_PROXY" + ip_cidr_range = "192.168.0.0/23" + region = "us-west1" + network = google_compute_network.default.id + role = "ACTIVE" +} + +resource "google_compute_subnetwork" "subnet1" { + name = "%s" + purpose = "PRIVATE" + ip_cidr_range = "10.128.0.0/20" + region = "us-west1" + network = google_compute_network.default.id + role = "ACTIVE" +} + +resource "google_network_security_gateway_security_policy" "default" { + name = "%s" + location = "us-west1" +} + +resource "google_network_security_gateway_security_policy_rule" "default" { + name = "%s" + location = "us-west1" + gateway_security_policy = google_network_security_gateway_security_policy.default.name + enabled = true + priority = 1 + session_matcher = "host() == 'example.com'" + basic_profile = "ALLOW" +} + +resource "google_network_services_gateway" "gateway1" { + name = "%s" + location = "us-west1" + addresses = ["10.128.0.99"] + type = "SECURE_WEB_GATEWAY" + ports = [443] + description = "gateway1_subnet1" + scope = "%s" + certificate_urls = [google_certificate_manager_certificate.default.id] + gateway_security_policy = google_network_security_gateway_security_policy.default.id + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.subnet1.id + delete_swg_autogen_router_on_destroy = true + depends_on = [google_compute_subnetwork.proxyonlysubnet] +} + +resource "google_compute_subnetwork" "subnet2" { + name = "%s" + purpose = "PRIVATE" + ip_cidr_range = "10.142.0.0/20" + region = "us-west1" + network = google_compute_network.default.id + role = "ACTIVE" +} + +resource "google_network_services_gateway" "gateway2" { + name = "%s" + location = "us-west1" + addresses = ["10.142.0.99"] + type = "SECURE_WEB_GATEWAY" + ports = [443] + description = "gateway2_subnet2" + scope = "%s" + certificate_urls = [google_certificate_manager_certificate.default.id] + gateway_security_policy = google_network_security_gateway_security_policy.default.id + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.subnet2.id + delete_swg_autogen_router_on_destroy = true + depends_on = [google_compute_subnetwork.proxyonlysubnet] +} + +`, cmName, netName, subnetName, pSubnetName, policyName, ruleName, gatewayName, gatewayScope, subnet2Name, gateway2Name, gateway2Scope) +} + +func testAccNetworkServicesGateway_multipleSwpGatewaysDifferentSubnetworkRemoveGateway2(cmName, netName, subnetName, pSubnetName, policyName, ruleName, gatewayName, gatewayScope, subnet2Name string) string { + return fmt.Sprintf(` +resource "google_certificate_manager_certificate" "default" { + name = "%s" + location = "us-west1" + self_managed { + pem_certificate = file("test-fixtures/certificatemanager/cert.pem") + pem_private_key = 
file("test-fixtures/certificatemanager/private-key.pem") + } +} + +resource "google_compute_network" "default" { + name = "%s" + routing_mode = "REGIONAL" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "proxyonlysubnet" { + name = "%s" + purpose = "REGIONAL_MANAGED_PROXY" + ip_cidr_range = "192.168.0.0/23" + region = "us-west1" + network = google_compute_network.default.id + role = "ACTIVE" +} + +resource "google_compute_subnetwork" "subnet1" { + name = "%s" + purpose = "PRIVATE" + ip_cidr_range = "10.128.0.0/20" + region = "us-west1" + network = google_compute_network.default.id + role = "ACTIVE" +} + +resource "google_network_security_gateway_security_policy" "default" { + name = "%s" + location = "us-west1" +} + +resource "google_network_security_gateway_security_policy_rule" "default" { + name = "%s" + location = "us-west1" + gateway_security_policy = google_network_security_gateway_security_policy.default.name + enabled = true + priority = 1 + session_matcher = "host() == 'example.com'" + basic_profile = "ALLOW" +} + +resource "google_network_services_gateway" "gateway1" { + name = "%s" + location = "us-west1" + addresses = ["10.128.0.99"] + type = "SECURE_WEB_GATEWAY" + ports = [443] + description = "gateway1_subnet1" + scope = "%s" + certificate_urls = [google_certificate_manager_certificate.default.id] + gateway_security_policy = google_network_security_gateway_security_policy.default.id + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.subnet1.id + delete_swg_autogen_router_on_destroy = true + depends_on = [google_compute_subnetwork.proxyonlysubnet] +} + +resource "google_compute_subnetwork" "subnet2" { + name = "%s" + purpose = "PRIVATE" + ip_cidr_range = "10.142.0.0/20" + region = "us-west1" + network = google_compute_network.default.id + role = "ACTIVE" +} + +# Destroying gateway2 so it allows to test if there is still a gateway remaining under the same network so the swg_autogen_router is kept. 
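+# gateway1 keeps using the same network, so this step verifies that the shared swg autogen router is preserved while at least one gateway remains.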
+ +`, cmName, netName, subnetName, pSubnetName, policyName, ruleName, gatewayName, gatewayScope, subnet2Name) +} + +func TestAccNetworkServicesGateway_multipleSwpGatewaysDifferentNetwork(t *testing.T) { + cmName := fmt.Sprintf("tf-test-gateway-diffswp-cm-%s", RandString(t, 10)) + netName := fmt.Sprintf("tf-test-gateway-diffswp-net-%s", RandString(t, 10)) + subnetName := fmt.Sprintf("tf-test-gateway-diffswp-subnet-%s", RandString(t, 10)) + pSubnetName := fmt.Sprintf("tf-test-gateway-diffswp-proxyonly-%s", RandString(t, 10)) + policyName := fmt.Sprintf("tf-test-gateway-diffswp-policy-%s", RandString(t, 10)) + ruleName := fmt.Sprintf("tf-test-gateway-diffswp-rule-%s", RandString(t, 10)) + gatewayName := fmt.Sprintf("tf-test-gateway-diffswp-%s", RandString(t, 10)) + gatewayScope := fmt.Sprintf("tf-test-gateway-diffswp-scope-%s", RandString(t, 10)) + net2Name := fmt.Sprintf("tf-test-gateway-diffswp-net2-%s", RandString(t, 10)) + subnet2Name := fmt.Sprintf("tf-test-gateway-diffswp-subnet2-%s", RandString(t, 10)) + pSubnet2Name := fmt.Sprintf("tf-test-gateway-diffswp-proxyonly2-%s", RandString(t, 10)) + gateway2Name := fmt.Sprintf("tf-test-gateway-diffswp2-%s", RandString(t, 10)) + gateway2Scope := fmt.Sprintf("tf-test-gateway-diffswp-scope2-%s", RandString(t, 10)) + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetworkServicesGatewayDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkServicesGateway_multipleSwpGatewaysDifferentNetwork(cmName, netName, subnetName, pSubnetName, policyName, ruleName, gatewayName, gatewayScope, net2Name, subnet2Name, pSubnet2Name, gateway2Name, gateway2Scope), + }, + { + ResourceName: "google_network_services_gateway.gateway1", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "delete_swg_autogen_router_on_destroy"}, + }, + { + Config: testAccNetworkServicesGateway_multipleSwpGatewaysDifferentNetworkRemoveGateway2(cmName, netName, subnetName, pSubnetName, policyName, ruleName, gatewayName, gatewayScope, net2Name, subnet2Name, pSubnet2Name), + }, + { + ResourceName: "google_network_services_gateway.gateway1", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "delete_swg_autogen_router_on_destroy"}, + }, + }, + }) +} + +func testAccNetworkServicesGateway_multipleSwpGatewaysDifferentNetwork(cmName, netName, subnetName, pSubnetName, policyName, ruleName, gatewayName, gatewayScope, net2Name, subnet2Name, pSubnet2Name, gateway2Name, gateway2Scope string) string { + return fmt.Sprintf(` +resource "google_certificate_manager_certificate" "default" { + name = "%s" + location = "us-west2" + self_managed { + pem_certificate = file("test-fixtures/certificatemanager/cert.pem") + pem_private_key = file("test-fixtures/certificatemanager/private-key.pem") + } +} + +resource "google_compute_network" "default" { + name = "%s" + routing_mode = "REGIONAL" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "proxyonlysubnet" { + name = "%s" + purpose = "REGIONAL_MANAGED_PROXY" + ip_cidr_range = "192.168.0.0/23" + region = "us-west2" + network = google_compute_network.default.id + role = "ACTIVE" +} + +resource "google_compute_subnetwork" "subnet1" { + name = "%s" + purpose = "PRIVATE" + ip_cidr_range = "10.128.0.0/20" + region = "us-west2" + network = google_compute_network.default.id + role = "ACTIVE" +} + 
+resource "google_network_security_gateway_security_policy" "default" { + name = "%s" + location = "us-west2" +} + +resource "google_network_security_gateway_security_policy_rule" "default" { + name = "%s" + location = "us-west2" + gateway_security_policy = google_network_security_gateway_security_policy.default.name + enabled = true + priority = 1 + session_matcher = "host() == 'example.com'" + basic_profile = "ALLOW" +} + +resource "google_network_services_gateway" "gateway1" { + name = "%s" + location = "us-west2" + addresses = ["10.128.0.99"] + type = "SECURE_WEB_GATEWAY" + ports = [443] + description = "gateway1_subnet1" + scope = "%s" + certificate_urls = [google_certificate_manager_certificate.default.id] + gateway_security_policy = google_network_security_gateway_security_policy.default.id + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.subnet1.id + delete_swg_autogen_router_on_destroy = true + depends_on = [google_compute_subnetwork.proxyonlysubnet] +} + +resource "google_compute_network" "network2" { + name = "%s" + routing_mode = "REGIONAL" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "subnet2" { + name = "%s" + purpose = "PRIVATE" + ip_cidr_range = "10.142.0.0/20" + region = "us-west2" + network = google_compute_network.network2.id + role = "ACTIVE" +} + +resource "google_compute_subnetwork" "proxyonlysubnet2" { + region = "us-west2" + name = "%s" + purpose = "REGIONAL_MANAGED_PROXY" + ip_cidr_range = "192.170.0.0/23" + network = google_compute_network.network2.id + role = "ACTIVE" +} + +resource "google_network_services_gateway" "gateway2" { + name = "%s" + location = "us-west2" + addresses = ["10.142.0.99"] + type = "SECURE_WEB_GATEWAY" + ports = [443] + description = "gateway2_subnet2" + scope = "%s" + certificate_urls = [google_certificate_manager_certificate.default.id] + gateway_security_policy = google_network_security_gateway_security_policy.default.id + network = google_compute_network.network2.id + subnetwork = google_compute_subnetwork.subnet2.id + delete_swg_autogen_router_on_destroy = true + depends_on = [google_compute_subnetwork.proxyonlysubnet2] +} + +`, cmName, netName, subnetName, pSubnetName, policyName, ruleName, gatewayName, gatewayScope, net2Name, subnet2Name, pSubnet2Name, gateway2Name, gateway2Scope) +} + +func testAccNetworkServicesGateway_multipleSwpGatewaysDifferentNetworkRemoveGateway2(cmName, netName, subnetName, pSubnetName, policyName, ruleName, gatewayName, gatewayScope, net2Name, subnet2Name, pSubnet2Name string) string { + return fmt.Sprintf(` +resource "google_certificate_manager_certificate" "default" { + name = "%s" + location = "us-west2" + self_managed { + pem_certificate = file("test-fixtures/certificatemanager/cert.pem") + pem_private_key = file("test-fixtures/certificatemanager/private-key.pem") + } +} + +resource "google_compute_network" "default" { + name = "%s" + routing_mode = "REGIONAL" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "proxyonlysubnet" { + name = "%s" + purpose = "REGIONAL_MANAGED_PROXY" + ip_cidr_range = "192.168.0.0/23" + region = "us-west2" + network = google_compute_network.default.id + role = "ACTIVE" +} + +resource "google_compute_subnetwork" "subnet1" { + name = "%s" + purpose = "PRIVATE" + ip_cidr_range = "10.128.0.0/20" + region = "us-west2" + network = google_compute_network.default.id + role = "ACTIVE" +} + +resource "google_network_security_gateway_security_policy" "default" { + name = "%s" + location = 
"us-west2" +} + +resource "google_network_security_gateway_security_policy_rule" "default" { + name = "%s" + location = "us-west2" + gateway_security_policy = google_network_security_gateway_security_policy.default.name + enabled = true + priority = 1 + session_matcher = "host() == 'example.com'" + basic_profile = "ALLOW" +} + +resource "google_network_services_gateway" "gateway1" { + name = "%s" + location = "us-west2" + addresses = ["10.128.0.99"] + type = "SECURE_WEB_GATEWAY" + ports = [443] + description = "gateway1_subnet1" + scope = "%s" + certificate_urls = [google_certificate_manager_certificate.default.id] + gateway_security_policy = google_network_security_gateway_security_policy.default.id + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.subnet1.id + delete_swg_autogen_router_on_destroy = true + depends_on = [google_compute_subnetwork.proxyonlysubnet] +} + +resource "google_compute_network" "network2" { + name = "%s" + routing_mode = "REGIONAL" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "subnet2" { + name = "%s" + purpose = "PRIVATE" + ip_cidr_range = "10.142.0.0/20" + region = "us-west2" + network = google_compute_network.network2.id + role = "ACTIVE" +} + +resource "google_compute_subnetwork" "proxyonlysubnet2" { + region = "us-west2" + name = "%s" + purpose = "REGIONAL_MANAGED_PROXY" + ip_cidr_range = "192.170.0.0/23" + network = google_compute_network.network2.id + role = "ACTIVE" +} + +# Destroying gateway2 so it allows to test that there is no gateway remaining under the same network so the swg_autogen_router is deleted. + +`, cmName, netName, subnetName, pSubnetName, policyName, ruleName, gatewayName, gatewayScope, net2Name, subnet2Name, pSubnet2Name) +} + <% end -%> \ No newline at end of file diff --git a/mmv1/third_party/terraform/tests/resource_privateca_ca_pool_iam_test.go b/mmv1/third_party/terraform/tests/resource_privateca_ca_pool_iam_test.go index 0937bfb4fb72..6863d89270b3 100644 --- a/mmv1/third_party/terraform/tests/resource_privateca_ca_pool_iam_test.go +++ b/mmv1/third_party/terraform/tests/resource_privateca_ca_pool_iam_test.go @@ -9,6 +9,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/privateca" "github.com/hashicorp/terraform-provider-google/google/tpgresource" ) @@ -43,13 +44,13 @@ func testAccCheckPrivatecaCaPoolIam(t *testing.T, capool, region, project, role "member": "", }, } - u := &PrivatecaCaPoolIamUpdater{ - project: project, - location: region, - caPool: capool, - d: d, - Config: GoogleProviderConfig(t), + u := &privateca.PrivatecaCaPoolIamUpdater{ + Config: GoogleProviderConfig(t), } + u.SetProject(project) + u.SetLocation(region) + u.SetCaPool(capool) + u.SetResourceData(d) p, err := u.GetResourceIamPolicy() if err != nil { return err diff --git a/mmv1/third_party/terraform/tests/resource_pubsub_subscription_iam_test.go b/mmv1/third_party/terraform/tests/resource_pubsub_subscription_iam_test.go index 0cdd6387ec46..582c9b0f5b9e 100644 --- a/mmv1/third_party/terraform/tests/resource_pubsub_subscription_iam_test.go +++ b/mmv1/third_party/terraform/tests/resource_pubsub_subscription_iam_test.go @@ -6,10 +6,11 @@ import ( "sort" "testing" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" 
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/pubsub" ) func TestAccPubsubSubscriptionIamBinding(t *testing.T) { @@ -40,7 +41,7 @@ func TestAccPubsubSubscriptionIamBinding(t *testing.T) { }, { ResourceName: "google_pubsub_subscription_iam_binding.foo", - ImportStateId: fmt.Sprintf("%s roles/pubsub.subscriber", getComputedSubscriptionName(acctest.GetTestProjectFromEnv(), subscription)), + ImportStateId: fmt.Sprintf("%s roles/pubsub.subscriber", pubsub.GetComputedSubscriptionName(acctest.GetTestProjectFromEnv(), subscription)), ImportState: true, ImportStateVerify: true, }, @@ -69,7 +70,7 @@ func TestAccPubsubSubscriptionIamMember(t *testing.T) { }, { ResourceName: "google_pubsub_subscription_iam_member.foo", - ImportStateId: fmt.Sprintf("%s roles/pubsub.subscriber serviceAccount:%s", getComputedSubscriptionName(acctest.GetTestProjectFromEnv(), subscription), accountEmail), + ImportStateId: fmt.Sprintf("%s roles/pubsub.subscriber serviceAccount:%s", pubsub.GetComputedSubscriptionName(acctest.GetTestProjectFromEnv(), subscription), accountEmail), ImportState: true, ImportStateVerify: true, }, @@ -105,7 +106,7 @@ func TestAccPubsubSubscriptionIamPolicy(t *testing.T) { }, { ResourceName: "google_pubsub_subscription_iam_policy.foo", - ImportStateId: getComputedSubscriptionName(acctest.GetTestProjectFromEnv(), subscription), + ImportStateId: pubsub.GetComputedSubscriptionName(acctest.GetTestProjectFromEnv(), subscription), ImportState: true, ImportStateVerify: true, }, @@ -116,7 +117,7 @@ func TestAccPubsubSubscriptionIamPolicy(t *testing.T) { func testAccCheckPubsubSubscriptionIam(t *testing.T, subscription, role string, members []string) resource.TestCheckFunc { return func(s *terraform.State) error { config := GoogleProviderConfig(t) - p, err := config.NewPubsubClient(config.UserAgent).Projects.Subscriptions.GetIamPolicy(getComputedSubscriptionName(acctest.GetTestProjectFromEnv(), subscription)).Do() + p, err := config.NewPubsubClient(config.UserAgent).Projects.Subscriptions.GetIamPolicy(pubsub.GetComputedSubscriptionName(acctest.GetTestProjectFromEnv(), subscription)).Do() if err != nil { return err } diff --git a/mmv1/third_party/terraform/tests/resource_pubsub_subscription_test.go b/mmv1/third_party/terraform/tests/resource_pubsub_subscription_test.go index 5756e4d7d75c..90bcb1e9cc64 100644 --- a/mmv1/third_party/terraform/tests/resource_pubsub_subscription_test.go +++ b/mmv1/third_party/terraform/tests/resource_pubsub_subscription_test.go @@ -7,6 +7,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/pubsub" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) @@ -275,7 +276,7 @@ func TestGetComputedTopicName(t *testing.T) { } for _, testCase := range testCases { - computedTopicName := getComputedTopicName(testCase.project, testCase.topic) + computedTopicName := pubsub.GetComputedTopicName(testCase.project, testCase.topic) if computedTopicName != testCase.expected { t.Fatalf("bad computed topic name: %s' => expected %s", computedTopicName, testCase.expected) } diff --git a/mmv1/third_party/terraform/tests/resource_pubsub_topic_iam_test.go b/mmv1/third_party/terraform/tests/resource_pubsub_topic_iam_test.go 
index 37a8745e1fb2..3475104d1519 100644 --- a/mmv1/third_party/terraform/tests/resource_pubsub_topic_iam_test.go +++ b/mmv1/third_party/terraform/tests/resource_pubsub_topic_iam_test.go @@ -7,6 +7,7 @@ import ( "testing" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/pubsub" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" @@ -31,7 +32,7 @@ func TestAccPubsubTopicIamBinding(t *testing.T) { }, { ResourceName: "google_pubsub_topic_iam_binding.foo", - ImportStateId: fmt.Sprintf("%s roles/pubsub.publisher", getComputedTopicName(acctest.GetTestProjectFromEnv(), topic)), + ImportStateId: fmt.Sprintf("%s roles/pubsub.publisher", pubsub.GetComputedTopicName(acctest.GetTestProjectFromEnv(), topic)), ImportState: true, ImportStateVerify: true, }, @@ -45,7 +46,7 @@ func TestAccPubsubTopicIamBinding(t *testing.T) { }, { ResourceName: "google_pubsub_topic_iam_binding.foo", - ImportStateId: fmt.Sprintf("%s roles/pubsub.publisher", getComputedTopicName(acctest.GetTestProjectFromEnv(), topic)), + ImportStateId: fmt.Sprintf("%s roles/pubsub.publisher", pubsub.GetComputedTopicName(acctest.GetTestProjectFromEnv(), topic)), ImportState: true, ImportStateVerify: true, }, @@ -95,7 +96,7 @@ func TestAccPubsubTopicIamMember(t *testing.T) { }, { ResourceName: "google_pubsub_topic_iam_member.foo", - ImportStateId: fmt.Sprintf("%s roles/pubsub.publisher serviceAccount:%s", getComputedTopicName(acctest.GetTestProjectFromEnv(), topic), accountEmail), + ImportStateId: fmt.Sprintf("%s roles/pubsub.publisher serviceAccount:%s", pubsub.GetComputedTopicName(acctest.GetTestProjectFromEnv(), topic), accountEmail), ImportState: true, ImportStateVerify: true, }, @@ -127,7 +128,7 @@ func TestAccPubsubTopicIamPolicy(t *testing.T) { }, { ResourceName: "google_pubsub_topic_iam_policy.foo", - ImportStateId: getComputedTopicName(acctest.GetTestProjectFromEnv(), topic), + ImportStateId: pubsub.GetComputedTopicName(acctest.GetTestProjectFromEnv(), topic), ImportState: true, ImportStateVerify: true, }, @@ -138,7 +139,7 @@ func TestAccPubsubTopicIamPolicy(t *testing.T) { func testAccCheckPubsubTopicIam(t *testing.T, topic, role string, members []string) resource.TestCheckFunc { return func(s *terraform.State) error { config := GoogleProviderConfig(t) - p, err := config.NewPubsubClient(config.UserAgent).Projects.Topics.GetIamPolicy(getComputedTopicName(acctest.GetTestProjectFromEnv(), topic)).Do() + p, err := config.NewPubsubClient(config.UserAgent).Projects.Topics.GetIamPolicy(pubsub.GetComputedTopicName(acctest.GetTestProjectFromEnv(), topic)).Do() if err != nil { return err } diff --git a/mmv1/third_party/terraform/tests/resource_pubsub_topic_test.go b/mmv1/third_party/terraform/tests/resource_pubsub_topic_test.go index ac13194a33f4..dfd65c55e84f 100644 --- a/mmv1/third_party/terraform/tests/resource_pubsub_topic_test.go +++ b/mmv1/third_party/terraform/tests/resource_pubsub_topic_test.go @@ -67,6 +67,40 @@ func TestAccPubsubTopic_cmek(t *testing.T) { }) } +func TestAccPubsubTopic_schema(t *testing.T) { + t.Parallel() + + schema1 := fmt.Sprintf("tf-test-schema-%s", RandString(t, 10)) + schema2 := fmt.Sprintf("tf-test-schema-%s", RandString(t, 10)) + topic := fmt.Sprintf("tf-test-topic-%s", RandString(t, 10)) + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: 
testAccCheckPubsubTopicDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccPubsubTopic_updateWithSchema(topic, schema1), + }, + { + ResourceName: "google_pubsub_topic.bar", + ImportStateId: topic, + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccPubsubTopic_updateWithNewSchema(topic, schema2), + }, + { + ResourceName: "google_pubsub_topic.bar", + ImportStateId: topic, + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + func testAccPubsubTopic_update(topic, key, value string) string { return fmt.Sprintf(` resource "google_pubsub_topic" "foo" { @@ -103,3 +137,39 @@ resource "google_pubsub_topic" "topic" { } `, topicName, kmsKey) } + +func testAccPubsubTopic_updateWithSchema(topic, schema string) string { + return fmt.Sprintf(` +resource "google_pubsub_schema" "foo" { + name = "%s" + type = "PROTOCOL_BUFFER" + definition = "syntax = \"proto3\";\nmessage Results {\nstring f1 = 1;\n}" +} + +resource "google_pubsub_topic" "bar" { + name = "%s" + schema_settings { + schema = google_pubsub_schema.foo.id + encoding = "BINARY" + } +} +`, schema, topic) +} + +func testAccPubsubTopic_updateWithNewSchema(topic, schema string) string { + return fmt.Sprintf(` +resource "google_pubsub_schema" "foo" { + name = "%s" + type = "PROTOCOL_BUFFER" + definition = "syntax = \"proto3\";\nmessage Results {\nstring f1 = 1;\n}" +} + +resource "google_pubsub_topic" "bar" { + name = "%s" + schema_settings { + schema = google_pubsub_schema.foo.id + encoding = "JSON" + } +} +`, schema, topic) +} diff --git a/mmv1/third_party/terraform/tests/resource_redis_instance_test.go b/mmv1/third_party/terraform/tests/resource_redis_instance_test.go index ffa44c2dbedd..764df0c07514 100644 --- a/mmv1/third_party/terraform/tests/resource_redis_instance_test.go +++ b/mmv1/third_party/terraform/tests/resource_redis_instance_test.go @@ -252,45 +252,6 @@ func TestAccRedisInstance_redisInstanceAuthEnabled(t *testing.T) { }) } -func TestSecondaryIpDiffSuppress(t *testing.T) { - cases := map[string]struct { - Old, New string - ExpectDiffSuppress bool - }{ - "empty strings": { - Old: "", - New: "", - ExpectDiffSuppress: true, - }, - "auto range": { - Old: "", - New: "auto", - ExpectDiffSuppress: false, - }, - "auto on already applied range": { - Old: "10.0.0.0/28", - New: "auto", - ExpectDiffSuppress: true, - }, - "same ranges": { - Old: "10.0.0.0/28", - New: "10.0.0.0/28", - ExpectDiffSuppress: true, - }, - "different ranges": { - Old: "10.0.0.0/28", - New: "10.1.2.3/28", - ExpectDiffSuppress: false, - }, - } - - for tn, tc := range cases { - if secondaryIpDiffSuppress("whatever", tc.Old, tc.New, nil) != tc.ExpectDiffSuppress { - t.Fatalf("bad: %s, '%s' => '%s' expect %t", tn, tc.Old, tc.New, tc.ExpectDiffSuppress) - } - } -} - func TestAccRedisInstance_downgradeRedisVersion(t *testing.T) { t.Parallel() @@ -321,55 +282,6 @@ func TestAccRedisInstance_downgradeRedisVersion(t *testing.T) { }) } -func TestUnitRedisInstance_redisVersionIsDecreasing(t *testing.T) { - t.Parallel() - type testcase struct { - name string - old interface{} - new interface{} - decreasing bool - } - tcs := []testcase{ - { - name: "stays the same", - old: "REDIS_4_0", - new: "REDIS_4_0", - decreasing: false, - }, - { - name: "increases", - old: "REDIS_4_0", - new: "REDIS_5_0", - decreasing: false, - }, - { - name: "nil vals", - old: nil, - new: "REDIS_4_0", - decreasing: false, - }, - { - name: "corrupted", - old: "REDIS_4_0", - new: "REDIS_banana", - decreasing: false, - }, - { - name: "decreases", - old: 
"REDIS_6_0", - new: "REDIS_4_0", - decreasing: true, - }, - } - - for _, tc := range tcs { - decreasing := isRedisVersionDecreasingFunc(tc.old, tc.new) - if decreasing != tc.decreasing { - t.Errorf("%s: expected decreasing to be %v, but was %v", tc.name, tc.decreasing, decreasing) - } - } -} - func testAccRedisInstance_update(name string, preventDestroy bool) string { lifecycleBlock := "" if preventDestroy { diff --git a/mmv1/third_party/terraform/tests/resource_spanner_database_test.go.erb b/mmv1/third_party/terraform/tests/resource_spanner_database_test.go.erb index 7a9726acfb42..7734e62e78d2 100644 --- a/mmv1/third_party/terraform/tests/resource_spanner_database_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_spanner_database_test.go.erb @@ -8,7 +8,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" + "github.com/hashicorp/terraform-provider-google/google/services/spanner" ) func TestAccSpannerDatabase_basic(t *testing.T) { @@ -401,84 +401,6 @@ func TestDatabaseNameForApi(t *testing.T) { expectEquals(t, expected, actual) } -// Unit Tests for ForceNew when the change in ddl -func TestSpannerDatabase_resourceSpannerDBDdlCustomDiffFuncForceNew(t *testing.T) { - t.Parallel() - - cases := map[string]struct { - before interface{} - after interface{} - forcenew bool - }{ - "remove_old_statements": { - before: []interface{}{ - "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)"}, - after: []interface{}{ - "CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)"}, - forcenew: true, - }, - "append_new_statements": { - before: []interface{}{ - "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)"}, - after: []interface{}{ - "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", - "CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)", - }, - forcenew: false, - }, - "no_change": { - before: []interface{}{ - "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)"}, - after: []interface{}{ - "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)"}, - forcenew: false, - }, - "order_of_statments_change": { - before: []interface{}{ - "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", - "CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)", - "CREATE TABLE t3 (t3 INT64 NOT NULL,) PRIMARY KEY(t3)", - }, - after: []interface{}{ - "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", - "CREATE TABLE t3 (t3 INT64 NOT NULL,) PRIMARY KEY(t3)", - "CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)", - }, - forcenew: true, - }, - "missing_an_old_statement": { - before: []interface{}{ - "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", - "CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)", - "CREATE TABLE t3 (t3 INT64 NOT NULL,) PRIMARY KEY(t3)", - }, - after: []interface{}{ - "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", - "CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)", - }, - forcenew: true, - }, - } - - for tn, tc := range cases { - d := &tpgresource.ResourceDiffMock{ - Before: map[string]interface{}{ - "ddl": tc.before, - }, - After: map[string]interface{}{ - "ddl": tc.after, - }, - } - err := resourceSpannerDBDdlCustomDiffFunc(d) - if err != nil { - t.Errorf("failed, expected no error but received - %s for the condition %s", err, tn) - } - if d.IsForceNew != tc.forcenew { - t.Errorf("ForceNew not setup correctly for the condition-'%s', expected:%v;actual:%v", tn, tc.forcenew, d.IsForceNew) - } - } -} - 
// Unit Tests for validation of retention period argument func TestValidateDatabaseRetentionPeriod(t *testing.T) { t.Parallel() @@ -536,7 +458,7 @@ func TestValidateDatabaseRetentionPeriod(t *testing.T) { for tn, tc := range testCases { t.Run(tn, func(t *testing.T) { - _, errs := ValidateDatabaseRetentionPeriod(tc.input, "foobar") + _, errs := spanner.ValidateDatabaseRetentionPeriod(tc.input, "foobar") var wantErrCount string if tc.expectError { wantErrCount = "1+" diff --git a/mmv1/third_party/terraform/tests/resource_vertex_ai_index_endpoint_test.go b/mmv1/third_party/terraform/tests/resource_vertex_ai_index_endpoint_test.go new file mode 100644 index 000000000000..32bddcf1cab4 --- /dev/null +++ b/mmv1/third_party/terraform/tests/resource_vertex_ai_index_endpoint_test.go @@ -0,0 +1,111 @@ +package google + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccVertexAIIndexEndpoint_updated(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "network_name": BootstrapSharedTestNetwork(t, "vertex-ai-index-endpoint-update"), + "random_suffix": RandString(t, 10), + } + + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckVertexAIIndexEndpointDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccVertexAIIndexEndpoint_basic(context), + }, + { + ResourceName: "google_vertex_ai_index_endpoint.index_endpoint", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "region"}, + }, + { + Config: testAccVertexAIIndexEndpoint_updated(context), + }, + { + ResourceName: "google_vertex_ai_index_endpoint.index_endpoint", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "region"}, + }, + }, + }) +} + +func testAccVertexAIIndexEndpoint_basic(context map[string]interface{}) string { + return Nprintf(` +resource "google_vertex_ai_index_endpoint" "index_endpoint" { + display_name = "sample-endpoint" + description = "A sample vertex endpoint" + region = "us-central1" + labels = { + label-one = "value-one" + } + network = "projects/${data.google_project.project.number}/global/networks/${data.google_compute_network.vertex_network.name}" + depends_on = [ + google_service_networking_connection.vertex_vpc_connection + ] +} +resource "google_service_networking_connection" "vertex_vpc_connection" { + network = data.google_compute_network.vertex_network.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.vertex_range.name] +} +resource "google_compute_global_address" "vertex_range" { + name = "tf-test-address-name%{random_suffix}" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 24 + network = data.google_compute_network.vertex_network.id +} +data "google_compute_network" "vertex_network" { + name = "%{network_name}" +} +data "google_project" "project" {} +`, context) +} + +func testAccVertexAIIndexEndpoint_updated(context map[string]interface{}) string { + return Nprintf(` +resource "google_vertex_ai_index_endpoint" "index_endpoint" { + display_name = "sample-endpoint-updated" + description = "A sample vertex endpoint (updated)" + region = "us-central1" + labels = { + label-one = "value-one" + label-two = "value-two" + } + network = 
"projects/${data.google_project.project.number}/global/networks/${data.google_compute_network.vertex_network.name}" + depends_on = [ + google_service_networking_connection.vertex_vpc_connection + ] +} +resource "google_service_networking_connection" "vertex_vpc_connection" { + network = data.google_compute_network.vertex_network.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.vertex_range.name] +} +resource "google_compute_global_address" "vertex_range" { + name = "tf-test-address-name%{random_suffix}" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 24 + network = data.google_compute_network.vertex_network.id +} +data "google_compute_network" "vertex_network" { + name = "%{network_name}" +} +data "google_project" "project" {} +`, context) +} diff --git a/mmv1/third_party/terraform/tests/resource_vmwareengine_network_test.go.erb b/mmv1/third_party/terraform/tests/resource_vmwareengine_network_test.go.erb new file mode 100644 index 000000000000..726538228b2c --- /dev/null +++ b/mmv1/third_party/terraform/tests/resource_vmwareengine_network_test.go.erb @@ -0,0 +1,91 @@ +<% autogen_exception -%> +package google +<% unless version == 'ga' -%> + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccVmwareengineNetwork_vmwareEngineNetworkUpdate(t *testing.T) { + t.Parallel() + context := map[string]interface{}{ + "region": acctest.GetTestRegionFromEnv(), + "random_suffix": RandString(t, 10), + "organization": acctest.GetTestOrgFromEnv(t), + "billing_account": acctest.GetTestBillingAccountFromEnv(t), + } + + configTemplate := vmwareEngineNetworkConfigTemplate(context) + VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckVmwareengineNetworkDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "random": {}, + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: fmt.Sprintf(configTemplate, "description1"), + }, + { + ResourceName: "google_vmwareengine_network.default-nw", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "name"}, + }, + { + Config: fmt.Sprintf(configTemplate, "description2"), + }, + { + ResourceName: "google_vmwareengine_network.default-nw", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "name"}, + }, + }, + }) +} + +func vmwareEngineNetworkConfigTemplate(context map[string]interface{}) string { + return Nprintf(` +resource "google_vmwareengine_network" "default-nw" { + provider = google-beta + project = google_project_service.acceptance.project + name = "%{region}-default" + location = "%{region}" + type = "LEGACY" + description = "%s" +} + +# there can be only 1 Legacy network per region for a given project, so creating new project to isolate tests. 
+resource "google_project" "acceptance" { + name = "tf-test-%{random_suffix}" + provider = google-beta + project_id = "tf-test-%{random_suffix}" + org_id = "%{organization}" + billing_account = "%{billing_account}" +} + +resource "google_project_service" "acceptance" { + project = google_project.acceptance.project_id + provider = google-beta + service = "vmwareengine.googleapis.com" + + # Needed for CI tests for permissions to propagate, should not be needed for actual usage + depends_on = [time_sleep.wait_60_seconds] +} + +resource "time_sleep" "wait_60_seconds" { + depends_on = [google_project.acceptance] + + create_duration = "60s" +} +`, context) +} +<% end -%> \ No newline at end of file diff --git a/mmv1/third_party/terraform/tests/resource_workflows_workflow_test.go b/mmv1/third_party/terraform/tests/resource_workflows_workflow_test.go index 8ed648800e42..205bf95a6d5f 100644 --- a/mmv1/third_party/terraform/tests/resource_workflows_workflow_test.go +++ b/mmv1/third_party/terraform/tests/resource_workflows_workflow_test.go @@ -5,9 +5,10 @@ import ( "fmt" "testing" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/workflows" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) @@ -135,7 +136,7 @@ func TestWorkflowsWorkflowStateUpgradeV0(t *testing.T) { } for tn, tc := range cases { t.Run(tn, func(t *testing.T) { - actual, err := ResourceWorkflowsWorkflowUpgradeV0(context.Background(), tc.Attributes, tc.Meta) + actual, err := workflows.ResourceWorkflowsWorkflowUpgradeV0(context.Background(), tc.Attributes, tc.Meta) if err != nil { t.Error(err) diff --git a/mmv1/third_party/terraform/tests/resource_workstations_workstation_config_test.go.erb b/mmv1/third_party/terraform/tests/resource_workstations_workstation_config_test.go.erb index 8d2ac64fbf52..ca656731144b 100644 --- a/mmv1/third_party/terraform/tests/resource_workstations_workstation_config_test.go.erb +++ b/mmv1/third_party/terraform/tests/resource_workstations_workstation_config_test.go.erb @@ -611,4 +611,159 @@ resource "google_workstations_workstation_config" "default" { } `, context) } + +func TestAccWorkstationsWorkstationConfig_updatePersistentDirectorySourceSnapshot(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": RandString(t, 10), + } + + VcrTest(t, resource.TestCase{ + PreCheck: func() { AccTestPreCheck(t) }, + ProtoV5ProviderFactories: ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckWorkstationsWorkstationConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkstationsWorkstationConfig_withSourceDiskSnapshot(context), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag"}, + }, + { + Config: testAccWorkstationsWorkstationConfig_withUpdatedSourceDiskSnapshot(context), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag"}, + }, + }, + }) +} + +func testAccWorkstationsWorkstationConfig_withSourceDiskSnapshot(context map[string]interface{}) string { + return Nprintf(` +resource "google_compute_network" "default" { + provider = google-beta + name = 
"tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name +} + +resource "google_compute_disk" "test_source_disk" { + provider = google-beta + name = "tf-test-workstation-source-disk%{random_suffix}" + size = 10 + type = "pd-ssd" + zone = "us-central1-a" +} + +resource "google_compute_snapshot" "test_source_snapshot" { + provider = google-beta + name = "tf-test-workstation-source-snapshot%{random_suffix}" + source_disk = google_compute_disk.test_source_disk.name + zone = "us-central1-a" +} + +resource "google_workstations_workstation_cluster" "default" { + provider = google-beta + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + location = "us-central1" +} + +resource "google_workstations_workstation_config" "default" { + provider = google-beta + workstation_config_id = "tf-test-workstation-config%{random_suffix}" + workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + + persistent_directories { + mount_path = "/home" + + gce_pd { + source_snapshot = google_compute_snapshot.test_source_snapshot.id + reclaim_policy = "DELETE" + } + } +} +`, context) +} + +func testAccWorkstationsWorkstationConfig_withUpdatedSourceDiskSnapshot(context map[string]interface{}) string { + return Nprintf(` +resource "google_compute_network" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name +} + +resource "google_compute_disk" "test_source_disk" { + provider = google-beta + name = "tf-test-workstation-source-disk%{random_suffix}" + size = 10 + type = "pd-ssd" + zone = "us-central1-a" +} + +resource "google_compute_snapshot" "test_source_snapshot" { + provider = google-beta + name = "tf-test-workstation-source-snapshot%{random_suffix}" + source_disk = google_compute_disk.test_source_disk.name + zone = "us-central1-a" +} + +resource "google_compute_snapshot" "test_source_snapshot2" { + provider = google-beta + name = "tf-test-workstation-source-snapshot2%{random_suffix}" + source_disk = google_compute_disk.test_source_disk.name + zone = "us-central1-a" +} + +resource "google_workstations_workstation_cluster" "default" { + provider = google-beta + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + location = "us-central1" +} + +resource "google_workstations_workstation_config" "default" { + provider = google-beta + workstation_config_id = "tf-test-workstation-config%{random_suffix}" + workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + + persistent_directories { + mount_path = "/home" + + gce_pd { + source_snapshot = google_compute_snapshot.test_source_snapshot2.id + reclaim_policy = "RETAIN" + } + } +} +`, context) +} <% end -%> diff --git 
a/mmv1/third_party/terraform/tpgiamresource/iam.go.erb b/mmv1/third_party/terraform/tpgiamresource/iam.go.erb index 1f2f3334e0c8..6dcb6c1ba8f7 100644 --- a/mmv1/third_party/terraform/tpgiamresource/iam.go.erb +++ b/mmv1/third_party/terraform/tpgiamresource/iam.go.erb @@ -67,10 +67,13 @@ func iamPolicyReadWithRetry(updater ResourceIamUpdater) (*cloudresourcemanager.P log.Printf("[DEBUG] Retrieving policy for %s\n", updater.DescribeResource()) var policy *cloudresourcemanager.Policy - err := transport_tpg.RetryTime(func() (perr error) { - policy, perr = updater.GetResourceIamPolicy() - return perr - }, 10) + err := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (perr error) { + policy, perr = updater.GetResourceIamPolicy() + return perr + }, + Timeout: 10 * time.Minute, + }) if err != nil { return nil, err } @@ -557,4 +560,4 @@ func MissingBindings(a, b []*cloudresourcemanager.Binding) []*cloudresourcemanag }) } return results -} \ No newline at end of file +} diff --git a/mmv1/third_party/terraform/tpgresource/common_diff_suppress.go.erb b/mmv1/third_party/terraform/tpgresource/common_diff_suppress.go.erb index a4664ca4d3b9..cf435835a5a8 100644 --- a/mmv1/third_party/terraform/tpgresource/common_diff_suppress.go.erb +++ b/mmv1/third_party/terraform/tpgresource/common_diff_suppress.go.erb @@ -248,3 +248,24 @@ func ProjectNumberDiffSuppress(_, old, new string, _ *schema.ResourceData) bool b2 = string(re.ReplaceAll([]byte(new), replacement)) return a2 == b2 } + +func CompareCaseInsensitive(k, old, new string, d *schema.ResourceData) bool { + return strings.ToLower(old) == strings.ToLower(new) +} + +func IsNewResource(diff TerraformResourceDiff) bool { + name := diff.Get("name") + return name.(string) == "" +} + +func CompareCryptoKeyVersions(_, old, new string, _ *schema.ResourceData) bool { + // The API can return cryptoKeyVersions even though it wasn't specified. 
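+ // Strip that suffix from the old value before comparing, so the diff is suppressed when only the version differs.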
+ // format: projects//locations//keyRings//cryptoKeys//cryptoKeyVersions/1 + + kmsKeyWithoutVersions := strings.Split(old, "/cryptoKeyVersions")[0] + if kmsKeyWithoutVersions == new { + return true + } + + return false +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/tpgresource/common_operation.go b/mmv1/third_party/terraform/tpgresource/common_operation.go index 8317e4426293..c5d8df356fbe 100644 --- a/mmv1/third_party/terraform/tpgresource/common_operation.go +++ b/mmv1/third_party/terraform/tpgresource/common_operation.go @@ -112,7 +112,7 @@ func CommonRefreshFunc(w Waiter) resource.StateRefreshFunc { op, err := w.QueryOp() if err != nil { // Retry 404 when getting operation (not resource state) - if transport_tpg.IsRetryableError(err, transport_tpg.IsNotFoundRetryableError("GET operation")) { + if transport_tpg.IsRetryableError(err, []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsNotFoundRetryableError("GET operation")}, nil) { log.Printf("[DEBUG] Dismissed retryable error on GET operation %q: %s", w.OpName(), err) return nil, "done: false", nil } diff --git a/mmv1/third_party/terraform/transport/error_retry_predicates.go b/mmv1/third_party/terraform/transport/error_retry_predicates.go index b41439c5810a..89602914b93e 100644 --- a/mmv1/third_party/terraform/transport/error_retry_predicates.go +++ b/mmv1/third_party/terraform/transport/error_retry_predicates.go @@ -234,6 +234,20 @@ func ServiceUsageServiceBeingActivated(err error) (bool, string) { return false, "" } +// See https://github.com/hashicorp/terraform-provider-google/issues/14691 for +// details on the error message this handles. +// This is a post-operation error, so it uses tpgresource.CommonOpError instead of googleapi.Error. +func ServiceUsageInternalError160009(err error) (bool, string) { + // A cyclical dependency between the transport and tpgresource packages blocks using tpgresource.CommonOpError here, + // so we work off the error string instead. Ideally, we'd use that type. + s := err.Error() + if strings.Contains(s, "encountered internal error") && strings.Contains(s, "160009") && strings.Contains(s, "with failed services") { + return true, "retrying internal error 160009." + } + + return false, "" +} + // Retry if Bigquery operation returns a 403 with a specific message for // concurrent operations (which are implemented in terms of 'edit quota'). func IsBigqueryIAMQuotaError(err error) (bool, string) { @@ -445,3 +459,17 @@ func IsBigTableRetryableError(err error) (bool, string) { return false, "" } + +// A gateway of type 'SECURE_WEB_GATEWAY' automatically creates a router but does not delete it. +// This router might be re-used by other gateways located in the same network. +// When multiple gateways are being deleted at the same time, multiple attempts to delete the +// same router will be triggered and the API returns an error saying "The resource is not ready".
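+// IsSwgAutogenRouterRetryable treats a 400 response whose body contains "not ready" as retryable so the deletion can be re-attempted.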
+func IsSwgAutogenRouterRetryable(err error) (bool, string) { + if gerr, ok := err.(*googleapi.Error); ok { + if gerr.Code == 400 && strings.Contains(strings.ToLower(gerr.Body), "not ready") { + return true, "Waiting swg autogen router to be ready" + } + } + + return false, "" +} diff --git a/mmv1/third_party/terraform/transport/error_retry_predicates_test.go b/mmv1/third_party/terraform/transport/error_retry_predicates_test.go index 2cb9e9ac2d26..8288bfc1e19d 100644 --- a/mmv1/third_party/terraform/transport/error_retry_predicates_test.go +++ b/mmv1/third_party/terraform/transport/error_retry_predicates_test.go @@ -148,3 +148,25 @@ func TestBigtableError_okIsNotRetryable(t *testing.T) { t.Errorf("Error incorrectly detected as retryable") } } + +func TestIsSwgAutogenRouterRetryableError_otherError(t *testing.T) { + err := googleapi.Error{ + Code: 400, + Body: "another error.", + } + isRetryable, _ := IsSwgAutogenRouterRetryable(&err) + if isRetryable { + t.Errorf("Error incorrectly detected as retryable") + } +} + +func TestIsSwgAutogenRouterRetryableError_notReady(t *testing.T) { + err := googleapi.Error{ + Code: 400, + Body: "The resource 'projects/project123/regions/us-central1/routers/swg-autogen-router-123456789' is not ready", + } + isRetryable, _ := IsSwgAutogenRouterRetryable(&err) + if !isRetryable { + t.Errorf("Error not detected as retryable") + } +} diff --git a/mmv1/third_party/terraform/transport/retry_test.go b/mmv1/third_party/terraform/transport/retry_test.go index fb8dec67e8b1..e94e29f0aa01 100644 --- a/mmv1/third_party/terraform/transport/retry_test.go +++ b/mmv1/third_party/terraform/transport/retry_test.go @@ -9,7 +9,7 @@ import ( "google.golang.org/api/googleapi" ) -func TestRetryTimeDuration(t *testing.T) { +func TestRetry(t *testing.T) { i := 0 f := func() error { i++ @@ -17,7 +17,10 @@ func TestRetryTimeDuration(t *testing.T) { Code: 500, } } - if err := RetryTimeDuration(f, time.Duration(1000)*time.Millisecond); err == nil || err.(*googleapi.Error).Code != 500 { + if err := Retry(RetryOptions{ + RetryFunc: f, + Timeout: time.Duration(1000) * time.Millisecond, + }); err == nil || err.(*googleapi.Error).Code != 500 { t.Errorf("unexpected error retrying: %v", err) } if i < 2 { @@ -25,7 +28,7 @@ func TestRetryTimeDuration(t *testing.T) { } } -func TestRetryTimeDuration_wrapped(t *testing.T) { +func TestRetry_wrapped(t *testing.T) { i := 0 f := func() error { i++ @@ -34,7 +37,10 @@ func TestRetryTimeDuration_wrapped(t *testing.T) { } return errwrap.Wrapf("nested error: {{err}}", err) } - if err := RetryTimeDuration(f, time.Duration(1000)*time.Millisecond); err == nil { + if err := Retry(RetryOptions{ + RetryFunc: f, + Timeout: time.Duration(1000) * time.Millisecond, + }); err == nil { t.Errorf("unexpected nil error, expected an error") } else { innerErr := errwrap.GetType(err, &googleapi.Error{}) @@ -51,7 +57,7 @@ func TestRetryTimeDuration_wrapped(t *testing.T) { } } -func TestRetryTimeDuration_noretry(t *testing.T) { +func TestRetry_noretry(t *testing.T) { i := 0 f := func() error { i++ @@ -59,7 +65,10 @@ func TestRetryTimeDuration_noretry(t *testing.T) { Code: 400, } } - if err := RetryTimeDuration(f, time.Duration(1000)*time.Millisecond); err == nil || err.(*googleapi.Error).Code != 400 { + if err := Retry(RetryOptions{ + RetryFunc: f, + Timeout: time.Duration(1000) * time.Millisecond, + }); err == nil || err.(*googleapi.Error).Code != 400 { t.Errorf("unexpected error retrying: %v", err) } if i != 1 { @@ -67,7 +76,7 @@ func TestRetryTimeDuration_noretry(t 
*testing.T) { } } -func TestRetryTimeDuration_URLTimeoutsShouldRetry(t *testing.T) { +func TestRetry_URLTimeoutsShouldRetry(t *testing.T) { runCount := 0 retryFunc := func() error { runCount++ @@ -78,7 +87,10 @@ func TestRetryTimeDuration_URLTimeoutsShouldRetry(t *testing.T) { } return nil } - err := RetryTimeDuration(retryFunc, 1*time.Minute) + err := Retry(RetryOptions{ + RetryFunc: retryFunc, + Timeout: 1 * time.Minute, + }) if err != nil { t.Errorf("unexpected error: got '%v' want 'nil'", err) } @@ -90,15 +102,19 @@ func TestRetryTimeDuration_URLTimeoutsShouldRetry(t *testing.T) { func TestRetryWithPolling_noRetry(t *testing.T) { retryCount := 0 - retryFunc := func() (interface{}, error) { + retryFunc := func() error { retryCount++ - return "", &googleapi.Error{ + return &googleapi.Error{ Code: 400, } } - result, err := RetryWithPolling(retryFunc, time.Duration(1000)*time.Millisecond, time.Duration(100)*time.Millisecond) - if err == nil || err.(*googleapi.Error).Code != 400 || result.(string) != "" { - t.Errorf("unexpected error %v and result %v", err, result) + err := Retry(RetryOptions{ + RetryFunc: retryFunc, + Timeout: time.Duration(1000) * time.Millisecond, + PollInterval: time.Duration(100) * time.Millisecond, + }) + if err == nil || err.(*googleapi.Error).Code != 400 { + t.Errorf("unexpected error %v", err) } if retryCount != 1 { t.Errorf("expected error function to be called exactly once, but was called %d times", retryCount) @@ -107,9 +123,9 @@ func TestRetryWithPolling_noRetry(t *testing.T) { func TestRetryWithPolling_notRetryable(t *testing.T) { retryCount := 0 - retryFunc := func() (interface{}, error) { + retryFunc := func() error { retryCount++ - return "", &googleapi.Error{ + return &googleapi.Error{ Code: 400, } } @@ -117,9 +133,14 @@ func TestRetryWithPolling_notRetryable(t *testing.T) { isRetryableFunc := func(err error) (bool, string) { return err.(*googleapi.Error).Code != 400, "" } - result, err := RetryWithPolling(retryFunc, time.Duration(1000)*time.Millisecond, time.Duration(100)*time.Millisecond, isRetryableFunc) - if err == nil || err.(*googleapi.Error).Code != 400 || result.(string) != "" { - t.Errorf("unexpected error %v and result %v", err, result) + err := Retry(RetryOptions{ + RetryFunc: retryFunc, + Timeout: time.Duration(1000) * time.Millisecond, + PollInterval: time.Duration(100) * time.Millisecond, + ErrorRetryPredicates: []RetryErrorPredicateFunc{isRetryableFunc}, + }) + if err == nil || err.(*googleapi.Error).Code != 400 { + t.Errorf("unexpected error %v", err) } if retryCount != 1 { t.Errorf("expected error function to be called exactly once, but was called %d times", retryCount) @@ -129,27 +150,29 @@ func TestRetryWithPolling_notRetryable(t *testing.T) { func TestRetryWithPolling_retriedAndSucceeded(t *testing.T) { retryCount := 0 // Retry once and succeeds. - retryFunc := func() (interface{}, error) { + retryFunc := func() error { retryCount++ // Error code of 200 is retryable. if retryCount < 2 { - return "", &googleapi.Error{ + return &googleapi.Error{ Code: 200, } } - return "Ok", nil + return nil } // Retryable if the error code is not 400. 
isRetryableFunc := func(err error) (bool, string) { return err.(*googleapi.Error).Code != 400, "" } - result, err := RetryWithPolling(retryFunc, time.Duration(1000)*time.Millisecond, time.Duration(100)*time.Millisecond, isRetryableFunc) + err := Retry(RetryOptions{ + RetryFunc: retryFunc, + Timeout: time.Duration(1000) * time.Millisecond, + PollInterval: time.Duration(100) * time.Millisecond, + ErrorRetryPredicates: []RetryErrorPredicateFunc{isRetryableFunc}, + }) if err != nil { t.Errorf("unexpected error %v", err) } - if result.(string) != "Ok" { - t.Errorf("unexpected result %v", result) - } if retryCount != 2 { t.Errorf("expected error function to be called exactly twice, but was called %d times", retryCount) } @@ -158,15 +181,15 @@ func TestRetryWithPolling_retriedAndSucceeded(t *testing.T) { func TestRetryWithPolling_retriedAndFailed(t *testing.T) { retryCount := 0 // Retry once and fails. - retryFunc := func() (interface{}, error) { + retryFunc := func() error { retryCount++ // Error code of 200 is retryable. if retryCount < 2 { - return "", &googleapi.Error{ + return &googleapi.Error{ Code: 200, } } - return "", &googleapi.Error{ + return &googleapi.Error{ Code: 400, } } @@ -174,9 +197,14 @@ func TestRetryWithPolling_retriedAndFailed(t *testing.T) { isRetryableFunc := func(err error) (bool, string) { return err.(*googleapi.Error).Code != 400, "" } - result, err := RetryWithPolling(retryFunc, time.Duration(1000)*time.Millisecond, time.Duration(100)*time.Millisecond, isRetryableFunc) - if err == nil || err.(*googleapi.Error).Code != 400 || result.(string) != "" { - t.Errorf("unexpected error %v and result %v", err, result) + err := Retry(RetryOptions{ + RetryFunc: retryFunc, + Timeout: time.Duration(1000) * time.Millisecond, + PollInterval: time.Duration(100) * time.Millisecond, + ErrorRetryPredicates: []RetryErrorPredicateFunc{isRetryableFunc}, + }) + if err == nil || err.(*googleapi.Error).Code != 400 { + t.Errorf("unexpected error %v", err) } if retryCount != 2 { t.Errorf("expected error function to be called exactly twice, but was called %d times", retryCount) diff --git a/mmv1/third_party/terraform/transport/retry_transport.go b/mmv1/third_party/terraform/transport/retry_transport.go index 44de9a5b3be4..1649a15d7582 100644 --- a/mmv1/third_party/terraform/transport/retry_transport.go +++ b/mmv1/third_party/terraform/transport/retry_transport.go @@ -4,7 +4,7 @@ // Do not use for waiting on operations or polling of resource state, // especially if the expected state (operation done, resource ready, etc) // takes longer to reach than the default client Timeout. -// In those cases, RetryTimeDuration(...)/resource.Retry with appropriate timeout +// In those cases, Retry(...)/resource.Retry with appropriate timeout // and error predicates/handling should be used as a wrapper around the request // instead. // @@ -204,7 +204,7 @@ func (t *retryTransport) checkForRetryableError(resp *http.Response, respErr err if errToCheck == nil { return nil } - if IsRetryableError(errToCheck, t.retryPredicates...) 
{ + if IsRetryableError(errToCheck, t.retryPredicates, nil) { return resource.RetryableError(errToCheck) } return resource.NonRetryableError(errToCheck) diff --git a/mmv1/third_party/terraform/transport/retry_utils.go b/mmv1/third_party/terraform/transport/retry_utils.go index 5efca2ac55a7..fc386a56ebf5 100644 --- a/mmv1/third_party/terraform/transport/retry_utils.go +++ b/mmv1/third_party/terraform/transport/retry_utils.go @@ -8,36 +8,86 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" ) -func Retry(retryFunc func() error) error { - return RetryTime(retryFunc, 1) +type RetryOptions struct { + RetryFunc func() error + Timeout time.Duration + PollInterval time.Duration + ErrorRetryPredicates []RetryErrorPredicateFunc + ErrorAbortPredicates []RetryErrorPredicateFunc } -func RetryTime(retryFunc func() error, minutes int) error { - return RetryTimeDuration(retryFunc, time.Duration(minutes)*time.Minute) -} +func Retry(opt RetryOptions) error { + if opt.Timeout == 0 { + opt.Timeout = 1 * time.Minute + } + + if opt.PollInterval != 0 { + refreshFunc := func() (interface{}, string, error) { + err := opt.RetryFunc() + if err == nil { + return "", "done", nil + } -func RetryTimeDuration(retryFunc func() error, duration time.Duration, errorRetryPredicates ...RetryErrorPredicateFunc) error { - return resource.Retry(duration, func() *resource.RetryError { - err := retryFunc() + // Check if it is a retryable error. + if IsRetryableError(err, opt.ErrorRetryPredicates, opt.ErrorAbortPredicates) { + return "", "retrying", nil + } + + // The error is not retryable. + return "", "done", err + } + stateChange := &resource.StateChangeConf{ + Pending: []string{ + "retrying", + }, + Target: []string{ + "done", + }, + Refresh: refreshFunc, + Timeout: opt.Timeout, + PollInterval: opt.PollInterval, + } + + _, err := stateChange.WaitForState() + return err + } + + return resource.Retry(opt.Timeout, func() *resource.RetryError { + err := opt.RetryFunc() if err == nil { return nil } - if IsRetryableError(err, errorRetryPredicates...) { + if IsRetryableError(err, opt.ErrorRetryPredicates, opt.ErrorAbortPredicates) { return resource.RetryableError(err) } return resource.NonRetryableError(err) }) } -func IsRetryableError(topErr error, customPredicates ...RetryErrorPredicateFunc) bool { +func IsRetryableError(topErr error, retryPredicates, abortPredicates []RetryErrorPredicateFunc) bool { if topErr == nil { return false } - retryPredicates := append( + retryPredicates = append( // Global error retry predicates are registered in this default list. defaultErrorRetryPredicates, - customPredicates...) + retryPredicates...) + + // Check all wrapped errors for an abortable error status. + isAbortable := false + errwrap.Walk(topErr, func(werr error) { + for _, pred := range abortPredicates { + if predAbort, predReason := pred(werr); predAbort { + log.Printf("[DEBUG] Dismissed an error as abortable. %s - %s", predReason, werr) + isAbortable = true + return + } + } + }) + if isAbortable { + return false + } // Check all wrapped errors for a retryable error status. isRetryable := false @@ -52,34 +102,3 @@ func IsRetryableError(topErr error, customPredicates ...RetryErrorPredicateFunc) }) return isRetryable } - -// The polling overrides the default backoff logic with max backoff of 10s. The poll interval can be greater than 10s. 
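A minimal migration sketch for the new helper above (an assumed caller, not lines from this patch): code that previously called RetryTimeDuration(f, timeout, predicates...) or RetryWithPolling(f, timeout, interval, predicates...) now passes a single RetryOptions value. doRequest is a hypothetical function name; the predicate shown already exists in the transport package.

package example // hypothetical caller of the transport package

import (
	"time"

	transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport"
)

func callWithRetry(doRequest func() error) error {
	return transport_tpg.Retry(transport_tpg.RetryOptions{
		RetryFunc:    doRequest,
		Timeout:      2 * time.Minute,  // replaces the old duration argument; a zero value defaults to 1 minute
		PollInterval: 10 * time.Second, // optional; when set, polling runs through resource.StateChangeConf
		// Replaces the old variadic predicate arguments.
		ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsSqlOperationInProgressError},
		// New: an error matching an abort predicate stops retrying immediately.
		ErrorAbortPredicates: nil,
	})
}

Unlike RetryWithPolling, Retry does not return the wrapped function's result, so callers that need it capture it in the enclosing scope, as the bootstrap changes below do.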
-func RetryWithPolling(retryFunc func() (interface{}, error), timeout time.Duration, pollInterval time.Duration, errorRetryPredicates ...RetryErrorPredicateFunc) (interface{}, error) { - refreshFunc := func() (interface{}, string, error) { - result, err := retryFunc() - if err == nil { - return result, "done", nil - } - - // Check if it is a retryable error. - if IsRetryableError(err, errorRetryPredicates...) { - return result, "retrying", nil - } - - // The error is not retryable. - return result, "done", err - } - stateChange := &resource.StateChangeConf{ - Pending: []string{ - "retrying", - }, - Target: []string{ - "done", - }, - Refresh: refreshFunc, - Timeout: timeout, - PollInterval: pollInterval, - } - - return stateChange.WaitForState() -} diff --git a/mmv1/third_party/terraform/transport/transport.go b/mmv1/third_party/terraform/transport/transport.go index d39079c46c61..374841b34ec1 100644 --- a/mmv1/third_party/terraform/transport/transport.go +++ b/mmv1/third_party/terraform/transport/transport.go @@ -25,6 +25,7 @@ type SendRequestOptions struct { Body map[string]any Timeout time.Duration ErrorRetryPredicates []RetryErrorPredicateFunc + ErrorAbortPredicates []RetryErrorPredicateFunc } func SendRequest(opt SendRequestOptions) (map[string]interface{}, error) { @@ -49,8 +50,8 @@ func SendRequest(opt SendRequestOptions) (map[string]interface{}, error) { } var res *http.Response - err := RetryTimeDuration( - func() error { + err := Retry(RetryOptions{ + RetryFunc: func() error { var buf bytes.Buffer if opt.Body != nil { err := json.NewEncoder(&buf).Encode(opt.Body) @@ -81,9 +82,10 @@ func SendRequest(opt SendRequestOptions) (map[string]interface{}, error) { return nil }, - opt.Timeout, - opt.ErrorRetryPredicates..., - ) + Timeout: opt.Timeout, + ErrorRetryPredicates: opt.ErrorRetryPredicates, + ErrorAbortPredicates: opt.ErrorAbortPredicates, + }) if err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/utils/appengine_operation.go b/mmv1/third_party/terraform/utils/appengine_operation.go index de1ec6a70fb2..c36826c193c6 100644 --- a/mmv1/third_party/terraform/utils/appengine_operation.go +++ b/mmv1/third_party/terraform/utils/appengine_operation.go @@ -1,74 +1,20 @@ package google import ( - "encoding/json" - "fmt" - "regexp" "time" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" + "github.com/hashicorp/terraform-provider-google/google/services/appengine" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" - "github.com/hashicorp/terraform-provider-google/google/verify" - - "google.golang.org/api/appengine/v1" -) - -var ( - appEngineOperationIdRegexp = regexp.MustCompile(fmt.Sprintf("apps/%s/operations/(.*)", verify.ProjectRegex)) ) -type AppEngineOperationWaiter struct { - Service *appengine.APIService - AppId string - tpgresource.CommonOperationWaiter -} - -func (w *AppEngineOperationWaiter) QueryOp() (interface{}, error) { - if w == nil { - return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") - } - matches := appEngineOperationIdRegexp.FindStringSubmatch(w.Op.Name) - if len(matches) != 2 { - return nil, fmt.Errorf("Expected %d results of parsing operation name, got %d from %s", 2, len(matches), w.Op.Name) - } - return w.Service.Apps.Operations.Get(w.AppId, matches[1]).Do() -} - +// Deprecated: For backward compatibility AppEngineOperationWaitTimeWithResponse is still working, +// but all new code should use AppEngineOperationWaitTimeWithResponse in the appengine package instead. 
func AppEngineOperationWaitTimeWithResponse(config *transport_tpg.Config, res interface{}, response *map[string]interface{}, appId, activity, userAgent string, timeout time.Duration) error { - op := &appengine.Operation{} - err := tpgresource.Convert(res, op) - if err != nil { - return err - } - - w := &AppEngineOperationWaiter{ - Service: config.NewAppEngineClient(userAgent), - AppId: appId, - } - - if err := w.SetOp(op); err != nil { - return err - } - if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { - return err - } - return json.Unmarshal([]byte(w.CommonOperationWaiter.Op.Response), response) + return appengine.AppEngineOperationWaitTimeWithResponse(config, res, response, appId, activity, userAgent, timeout) } +// Deprecated: For backward compatibility AppEngineOperationWaitTime is still working, +// but all new code should use AppEngineOperationWaitTime in the appengine package instead. func AppEngineOperationWaitTime(config *transport_tpg.Config, res interface{}, appId, activity, userAgent string, timeout time.Duration) error { - op := &appengine.Operation{} - err := tpgresource.Convert(res, op) - if err != nil { - return err - } - - w := &AppEngineOperationWaiter{ - Service: config.NewAppEngineClient(userAgent), - AppId: appId, - } - - if err := w.SetOp(op); err != nil { - return err - } - return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) + return appengine.AppEngineOperationWaitTime(config, res, appId, activity, userAgent, timeout) } diff --git a/mmv1/third_party/terraform/utils/bootstrap_test_utils.go b/mmv1/third_party/terraform/utils/bootstrap_test_utils.go index be0f905e3a2b..7d3d253f4bcb 100644 --- a/mmv1/third_party/terraform/utils/bootstrap_test_utils.go +++ b/mmv1/third_party/terraform/utils/bootstrap_test_utils.go @@ -10,6 +10,8 @@ import ( "time" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/privateca" + "github.com/hashicorp/terraform-provider-google/google/services/sql" "github.com/hashicorp/terraform-provider-google/google/tpgiamresource" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" @@ -527,20 +529,26 @@ func BootstrapProject(t *testing.T, projectIDPrefix, billingAccount string, serv if billingAccount != "" { billingClient := config.NewBillingClient(config.UserAgent) var pbi *cloudbilling.ProjectBillingInfo - err = transport_tpg.RetryTimeDuration(func() error { - var reqErr error - pbi, reqErr = billingClient.Projects.GetBillingInfo(PrefixedProject(projectID)).Do() - return reqErr - }, 30*time.Second) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + var reqErr error + pbi, reqErr = billingClient.Projects.GetBillingInfo(PrefixedProject(projectID)).Do() + return reqErr + }, + Timeout: 30 * time.Second, + }) if err != nil { t.Fatalf("Error getting billing info for project %q: %v", projectID, err) } if strings.TrimPrefix(pbi.BillingAccountName, "billingAccounts/") != billingAccount { pbi.BillingAccountName = "billingAccounts/" + billingAccount - err := transport_tpg.RetryTimeDuration(func() error { - _, err := config.NewBillingClient(config.UserAgent).Projects.UpdateBillingInfo(PrefixedProject(projectID), pbi).Do() - return err - }, 2*time.Minute) + err := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + _, err := 
config.NewBillingClient(config.UserAgent).Projects.UpdateBillingInfo(PrefixedProject(projectID), pbi).Do() + return err + }, + Timeout: 2 * time.Minute, + }) if err != nil { t.Fatalf("Error setting billing account for project %q to %q: %s", projectID, billingAccount, err) } @@ -643,14 +651,18 @@ func BootstrapSharedSQLInstanceBackupRun(t *testing.T) string { } var op *sqladmin.Operation - err = transport_tpg.RetryTimeDuration(func() (operr error) { - op, operr = config.NewSqlAdminClient(config.UserAgent).Instances.Insert(project, bootstrapInstance).Do() - return operr - }, time.Duration(20)*time.Minute, transport_tpg.IsSqlOperationInProgressError) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (operr error) { + op, operr = config.NewSqlAdminClient(config.UserAgent).Instances.Insert(project, bootstrapInstance).Do() + return operr + }, + Timeout: 20 * time.Minute, + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsSqlOperationInProgressError}, + }) if err != nil { t.Fatalf("Error, failed to create instance %s: %s", bootstrapInstance.Name, err) } - err = SqlAdminOperationWaitTime(config, op, project, "Create Instance", config.UserAgent, time.Duration(40)*time.Minute) + err = sql.SqlAdminOperationWaitTime(config, op, project, "Create Instance", config.UserAgent, 40*time.Minute) if err != nil { t.Fatalf("Error, failed to create instance %s: %s", bootstrapInstance.Name, err) } @@ -669,14 +681,18 @@ func BootstrapSharedSQLInstanceBackupRun(t *testing.T) string { } var op *sqladmin.Operation - err = transport_tpg.RetryTimeDuration(func() (operr error) { - op, operr = config.NewSqlAdminClient(config.UserAgent).BackupRuns.Insert(project, bootstrapInstance.Name, backupRun).Do() - return operr - }, time.Duration(20)*time.Minute, transport_tpg.IsSqlOperationInProgressError) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (operr error) { + op, operr = config.NewSqlAdminClient(config.UserAgent).BackupRuns.Insert(project, bootstrapInstance.Name, backupRun).Do() + return operr + }, + Timeout: 20 * time.Minute, + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsSqlOperationInProgressError}, + }) if err != nil { t.Fatalf("Error, failed to create instance backup: %s", err) } - err = SqlAdminOperationWaitTime(config, op, project, "Backup Instance", config.UserAgent, time.Duration(20)*time.Minute) + err = sql.SqlAdminOperationWaitTime(config, op, project, "Backup Instance", config.UserAgent, 20*time.Minute) if err != nil { t.Fatalf("Error, failed to create instance backup: %s", err) } @@ -724,7 +740,7 @@ func BootstrapSharedCaPoolInLocation(t *testing.T, location string) string { log.Printf("[DEBUG] Waiting for CA pool creation to finish") var opRes map[string]interface{} - err = PrivatecaOperationWaitTimeWithResponse( + err = privateca.PrivatecaOperationWaitTimeWithResponse( config, res, &opRes, project, "Creating CA pool", config.UserAgent, 4*time.Minute) if err != nil { @@ -758,10 +774,13 @@ func setupProjectsAndGetAccessToken(org, billing, pid, service string, config *t } var op *cloudresourcemanager.Operation - err := transport_tpg.RetryTimeDuration(func() (reqErr error) { - op, reqErr = rmService.Projects.Create(project).Do() - return reqErr - }, 5*time.Minute) + err := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (reqErr error) { + op, reqErr = rmService.Projects.Create(project).Do() + return reqErr + }, + Timeout: 5 * time.Minute, + }) if err != nil { return "", err } 
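A companion sketch of the relocation pattern the utils operation files in this patch follow (assumed caller and names, not lines from the patch): new code imports operation helpers from their google/services/<service> package directly, while the wrappers left behind in package google merely forward to them and are marked deprecated. The appengine helper's signature is the one shown in this patch.

package example // hypothetical caller

import (
	"time"

	"github.com/hashicorp/terraform-provider-google/google/services/appengine"
	transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport"
)

// waitForAppEngineOp waits on an App Engine operation via the relocated helper rather than
// the deprecated shim of the same name in package google.
func waitForAppEngineOp(config *transport_tpg.Config, res interface{}, appId, userAgent string) error {
	return appengine.AppEngineOperationWaitTime(config, res, appId, "Creating App", userAgent, 10*time.Minute)
}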
@@ -789,10 +808,13 @@ func setupProjectsAndGetAccessToken(org, billing, pid, service string, config *t project.ProjectId = p2 project.Name = fmt.Sprintf("%s-2", pid) - err = transport_tpg.RetryTimeDuration(func() (reqErr error) { - op, reqErr = rmService.Projects.Create(project).Do() - return reqErr - }, 5*time.Minute) + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (reqErr error) { + op, reqErr = rmService.Projects.Create(project).Do() + return reqErr + }, + Timeout: 5 * time.Minute, + }) if err != nil { return "", err } diff --git a/mmv1/third_party/terraform/utils/cloudfunctions_operation.go b/mmv1/third_party/terraform/utils/cloudfunctions_operation.go index 37f57aba2fc6..d210bd8d83b9 100644 --- a/mmv1/third_party/terraform/utils/cloudfunctions_operation.go +++ b/mmv1/third_party/terraform/utils/cloudfunctions_operation.go @@ -1,41 +1,21 @@ package google import ( - "fmt" "time" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" + tpgcloudfunctions "github.com/hashicorp/terraform-provider-google/google/services/cloudfunctions" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" "google.golang.org/api/cloudfunctions/v1" ) -type CloudFunctionsOperationWaiter struct { - Service *cloudfunctions.Service - tpgresource.CommonOperationWaiter -} - -func (w *CloudFunctionsOperationWaiter) QueryOp() (interface{}, error) { - if w == nil { - return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") - } - return w.Service.Operations.Get(w.Op.Name).Do() -} - +// Deprecated: For backward compatibility cloudFunctionsOperationWait is still working, +// but all new code should use CloudFunctionsOperationWait in the tpgcloudfunctions package instead. func cloudFunctionsOperationWait(config *transport_tpg.Config, op *cloudfunctions.Operation, activity, userAgent string, timeout time.Duration) error { - w := &CloudFunctionsOperationWaiter{ - Service: config.NewCloudFunctionsClient(userAgent), - } - if err := w.SetOp(op); err != nil { - return err - } - return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) + return tpgcloudfunctions.CloudFunctionsOperationWait(config, op, activity, userAgent, timeout) } +// Deprecated: For backward compatibility IsCloudFunctionsSourceCodeError is still working, +// but all new code should use IsCloudFunctionsSourceCodeError in the tpgcloudfunctions package instead. 
func IsCloudFunctionsSourceCodeError(err error) (bool, string) { - if operr, ok := err.(*tpgresource.CommonOpError); ok { - if operr.Code == 3 && operr.Message == "Failed to retrieve function source code" { - return true, fmt.Sprintf("Retry on Function failing to pull code from GCS") - } - } - return false, "" + return tpgcloudfunctions.IsCloudFunctionsSourceCodeError(err) } diff --git a/mmv1/third_party/terraform/utils/cloudrun_polling.go b/mmv1/third_party/terraform/utils/cloudrun_polling.go index 65ba6b237b74..0ac5474e48b6 100644 --- a/mmv1/third_party/terraform/utils/cloudrun_polling.go +++ b/mmv1/third_party/terraform/utils/cloudrun_polling.go @@ -1,90 +1,12 @@ package google import ( - "fmt" - "log" - - "github.com/hashicorp/errwrap" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" + "github.com/hashicorp/terraform-provider-google/google/services/cloudrun" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) -const readyStatusType string = "Ready" -const pendingCertificateReason string = "CertificatePending" - -type Condition struct { - Type string - Status string - Reason string - Message string -} - -// KnativeStatus is a struct that can contain a Knative style resource's Status block. It is not -// intended to be used for anything other than polling for the success of the given resource. -type KnativeStatus struct { - Metadata struct { - Name string - Namespace string - SelfLink string - } - Status struct { - Conditions []Condition - ObservedGeneration float64 - } -} - -func getGeneration(res map[string]interface{}) (int, error) { - metadata, ok := res["metadata"] - if !ok { - return 0, fmt.Errorf("Unable to find knative metadata") - } - m, ok := metadata.(map[string]interface{}) - if !ok { - return 0, fmt.Errorf("Unable to find generation in knative metadata") - } - gen, ok := m["generation"] - if !ok { - return 0, fmt.Errorf("Unable to find generation in knative metadata") - } - return int(gen.(float64)), nil -} - +// Deprecated: For backward compatibility PollCheckKnativeStatusFunc is still working, +// but all new code should use PollCheckKnativeStatusFunc in the cloudrun package instead. func PollCheckKnativeStatusFunc(knativeRestResponse map[string]interface{}) func(resp map[string]interface{}, respErr error) transport_tpg.PollResult { - return func(resp map[string]interface{}, respErr error) transport_tpg.PollResult { - if respErr != nil { - return ErrorPollResult(respErr) - } - s := KnativeStatus{} - if err := tpgresource.Convert(resp, &s); err != nil { - return ErrorPollResult(errwrap.Wrapf("unable to get KnativeStatus: {{err}}", err)) - } - - gen, err := getGeneration(knativeRestResponse) - if err != nil { - return ErrorPollResult(errwrap.Wrapf("unable to find Knative generation: {{err}}", err)) - } - if int(s.Status.ObservedGeneration) < gen { - return PendingStatusPollResult("waiting for observed generation to match") - } - for _, condition := range s.Status.Conditions { - if condition.Type == readyStatusType { - log.Printf("[DEBUG] checking KnativeStatus Ready condition %s: %s", condition.Status, condition.Message) - switch condition.Status { - case "True": - // Resource is ready - return SuccessPollResult() - case "Unknown": - // DomainMapping can enter a 'terminal' state where "Ready" status is "Unknown" - // but the resource is waiting for external verification of DNS records. 
- if condition.Reason == pendingCertificateReason { - return SuccessPollResult() - } - return PendingStatusPollResult(fmt.Sprintf("%s:%s", condition.Status, condition.Message)) - case "False": - return ErrorPollResult(fmt.Errorf(`resource is in failed state "Ready:False", message: %s`, condition.Message)) - } - } - } - return PendingStatusPollResult("no status yet") - } + return cloudrun.PollCheckKnativeStatusFunc(knativeRestResponse) } diff --git a/mmv1/third_party/terraform/utils/common_diff_suppress.go.erb b/mmv1/third_party/terraform/utils/common_diff_suppress.go.erb index 9ae19a814116..3a19997c0f82 100644 --- a/mmv1/third_party/terraform/utils/common_diff_suppress.go.erb +++ b/mmv1/third_party/terraform/utils/common_diff_suppress.go.erb @@ -176,3 +176,7 @@ func LastSlashDiffSuppress(k, old, new string, d *schema.ResourceData) bool { func ProjectNumberDiffSuppress(k, old, new string, d *schema.ResourceData) bool { return tpgresource.ProjectNumberDiffSuppress(k, old, new, d) } + +func compareCryptoKeyVersions(_, old, new string, _ *schema.ResourceData) bool { + return tpgresource.CompareCryptoKeyVersions("", old, new, nil) +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/utils/composer_operation.go.erb b/mmv1/third_party/terraform/utils/composer_operation.go.erb index 1de4d58092b1..f33b11f1b6e6 100644 --- a/mmv1/third_party/terraform/utils/composer_operation.go.erb +++ b/mmv1/third_party/terraform/utils/composer_operation.go.erb @@ -2,10 +2,9 @@ package google import ( - "fmt" "time" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" + tpgcomposer "github.com/hashicorp/terraform-provider-google/google/services/composer" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" <% if version == "ga" -%> @@ -15,24 +14,8 @@ import ( <% end -%> ) -type ComposerOperationWaiter struct { - Service *composer.ProjectsLocationsService - tpgresource.CommonOperationWaiter -} - -func (w *ComposerOperationWaiter) QueryOp() (interface{}, error) { - if w == nil { - return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") - } - return w.Service.Operations.Get(w.Op.Name).Do() -} - +// Deprecated: For backward compatibility ComposerOperationWaitTime is still working, +// but all new code should use ComposerOperationWaitTime in the tpgcomposer package instead. 
func ComposerOperationWaitTime(config *transport_tpg.Config, op *composer.Operation, project, activity, userAgent string, timeout time.Duration) error { - w := &ComposerOperationWaiter{ - Service: config.NewComposerClient(userAgent).Projects.Locations, - } - if err := w.SetOp(op); err != nil { - return err - } - return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) + return tpgcomposer.ComposerOperationWaitTime(config, op, project, activity, userAgent, timeout) } diff --git a/mmv1/third_party/terraform/utils/compute_operation.go.erb b/mmv1/third_party/terraform/utils/compute_operation.go.erb index 06557cc6021d..8c92bb997858 100644 --- a/mmv1/third_party/terraform/utils/compute_operation.go.erb +++ b/mmv1/third_party/terraform/utils/compute_operation.go.erb @@ -2,221 +2,23 @@ package google import ( - "bytes" - "context" -<% unless version == 'ga' -%> - "encoding/json" -<% end -%> - "errors" - "fmt" - "io" - "log" "time" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" + "github.com/hashicorp/terraform-provider-google/google/services/compute" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" - -<% if version == "ga" -%> - "google.golang.org/api/compute/v1" -<% else -%> - compute "google.golang.org/api/compute/v0.beta" -<% end -%> ) -type ComputeOperationWaiter struct { - Service *compute.Service - Op *compute.Operation - Context context.Context - Project string -<% unless version == 'ga' -%> - Parent string -<% end -%> -} - -func (w *ComputeOperationWaiter) State() string { - if w == nil || w.Op == nil { - return "" - } - - return w.Op.Status -} - -func (w *ComputeOperationWaiter) Error() error { - if w != nil && w.Op != nil && w.Op.Error != nil { - return ComputeOperationError(*w.Op.Error) - } - return nil -} - -func (w *ComputeOperationWaiter) IsRetryable(err error) bool { - if oe, ok := err.(ComputeOperationError); ok { - for _, e := range oe.Errors { - if e.Code == "RESOURCE_NOT_READY" { - return true - } - } - } - return false -} - -func (w *ComputeOperationWaiter) SetOp(op interface{}) error { - var ok bool - w.Op, ok = op.(*compute.Operation) - if !ok { - return fmt.Errorf("Unable to set operation. 
Bad type!") - } - return nil -} - -func (w *ComputeOperationWaiter) QueryOp() (interface{}, error) { - if w == nil || w.Op == nil { - return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") - } - if w.Context != nil { - select { - case <-w.Context.Done(): - log.Println("[WARN] request has been cancelled early") - return w.Op, errors.New("unable to finish polling, context has been cancelled") - default: - // default must be here to keep the previous case from blocking - } - } - if w.Op.Zone != "" { - zone := tpgresource.GetResourceNameFromSelfLink(w.Op.Zone) - return w.Service.ZoneOperations.Get(w.Project, zone, w.Op.Name).Do() - } else if w.Op.Region != "" { - region := tpgresource.GetResourceNameFromSelfLink(w.Op.Region) - return w.Service.RegionOperations.Get(w.Project, region, w.Op.Name).Do() -<% unless version == 'ga' -%> - } else if w.Parent != "" { - return w.Service.GlobalOrganizationOperations.Get(w.Op.Name).ParentId(w.Parent).Do() -<% end -%> - } - return w.Service.GlobalOperations.Get(w.Project, w.Op.Name).Do() -} - -func (w *ComputeOperationWaiter) OpName() string { - if w == nil || w.Op == nil { - return " Compute Op" - } - - return w.Op.Name -} - -func (w *ComputeOperationWaiter) PendingStates() []string { - return []string{"PENDING", "RUNNING"} -} - -func (w *ComputeOperationWaiter) TargetStates() []string { - return []string{"DONE"} -} - +// Deprecated: For backward compatibility ComputeOperationWaitTime is still working, +// but all new code should use ComputeOperationWaitTime in the compute package instead. func ComputeOperationWaitTime(config *transport_tpg.Config, res interface{}, project, activity, userAgent string, timeout time.Duration) error { - op := &compute.Operation{} - err := tpgresource.Convert(res, op) - if err != nil { - return err - } - - w := &ComputeOperationWaiter{ - Service: config.NewComputeClient(userAgent), - Context: config.Context, - Op: op, - Project: project, - } - - if err := w.SetOp(op); err != nil { - return err - } - return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) + return compute.ComputeOperationWaitTime(config, res, project, activity, userAgent, timeout) } <% unless version == 'ga' -%> +// Deprecated: For backward compatibility ComputeOrgOperationWaitTimeWithResponse is still working, +// but all new code should use ComputeOrgOperationWaitTimeWithResponse in the compute package instead. func ComputeOrgOperationWaitTimeWithResponse(config *transport_tpg.Config, res interface{}, response *map[string]interface{}, parent, activity, userAgent string, timeout time.Duration) error { - op := &compute.Operation{} - err := tpgresource.Convert(res, op) - if err != nil { - return err - } - - w := &ComputeOperationWaiter{ - Service: config.NewComputeClient(userAgent), - Op: op, - Parent: parent, - } - - if err := w.SetOp(op); err != nil { - return err - } - if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { - return err - } - e, err := json.Marshal(w.Op) - if err != nil { - return err - } - return json.Unmarshal(e, response) + return compute.ComputeOrgOperationWaitTimeWithResponse(config, res, response, parent, activity, userAgent, timeout) } <% end -%> - -// ComputeOperationError wraps compute.OperationError and implements the -// error interface so it can be returned. 
-type ComputeOperationError compute.OperationError - -func (e ComputeOperationError) Error() string { - buf := bytes.NewBuffer(nil) - for _, err := range e.Errors { - writeOperationError(buf, err) - } - - return buf.String() -} - -const errMsgSep = "\n\n" - -func writeOperationError(w io.StringWriter, opError *compute.OperationErrorErrors) { - w.WriteString(opError.Message + "\n") - - var lm *compute.LocalizedMessage - var link *compute.HelpLink - - for _, ed := range opError.ErrorDetails { - if opError.Code == "QUOTA_EXCEEDED" && ed.QuotaInfo != nil { - w.WriteString("\tmetric name = " + ed.QuotaInfo.MetricName + "\n") - w.WriteString("\tlimit name = " + ed.QuotaInfo.LimitName + "\n") - if ed.QuotaInfo.Dimensions != nil { - w.WriteString("\tdimensions = " + fmt.Sprint(ed.QuotaInfo.Dimensions) + "\n") - } - break - } - if lm == nil && ed.LocalizedMessage != nil { - lm = ed.LocalizedMessage - } - - if link == nil && ed.Help != nil && len(ed.Help.Links) > 0 { - link = ed.Help.Links[0] - } - - if lm != nil && link != nil { - break - } - } - - if lm != nil && lm.Message != "" { - w.WriteString(errMsgSep) - w.WriteString(lm.Message + "\n") - } - - if link != nil { - w.WriteString(errMsgSep) - - if link.Description != "" { - w.WriteString(link.Description + "\n") - } - - if link.Url != "" { - w.WriteString(link.Url + "\n") - } - } -} diff --git a/mmv1/third_party/terraform/utils/container_attached_operation.go b/mmv1/third_party/terraform/utils/container_attached_operation.go index 04f05a4bf718..e964bcbd4f71 100644 --- a/mmv1/third_party/terraform/utils/container_attached_operation.go +++ b/mmv1/third_party/terraform/utils/container_attached_operation.go @@ -1,73 +1,22 @@ package google import ( - "encoding/json" - "fmt" "time" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" + "github.com/hashicorp/terraform-provider-google/google/services/containerattached" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) -type ContainerAttachedOperationWaiter struct { - Config *transport_tpg.Config - UserAgent string - Project string - tpgresource.CommonOperationWaiter -} - -func (w *ContainerAttachedOperationWaiter) QueryOp() (interface{}, error) { - if w == nil { - return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") - } - - region := tpgresource.GetRegionFromRegionalSelfLink(w.CommonOperationWaiter.Op.Name) - - // Returns the proper get. - url := fmt.Sprintf("https://%s-gkemulticloud.googleapis.com/v1/%s", region, w.CommonOperationWaiter.Op.Name) - - return transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: w.Config, - Method: "GET", - Project: w.Project, - RawURL: url, - UserAgent: w.UserAgent, - }) -} - -func createContainerAttachedWaiter(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string) (*ContainerAttachedOperationWaiter, error) { - w := &ContainerAttachedOperationWaiter{ - Config: config, - UserAgent: userAgent, - Project: project, - } - if err := w.CommonOperationWaiter.SetOp(op); err != nil { - return nil, err - } - return w, nil -} - // nolint: deadcode,unused +// +// Deprecated: For backward compatibility ContainerAttachedOperationWaitTimeWithResponse is still working, +// but all new code should use ContainerAttachedOperationWaitTimeWithResponse in the containerattached package instead. 
func ContainerAttachedOperationWaitTimeWithResponse(config *transport_tpg.Config, op map[string]interface{}, response *map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { - w, err := createContainerAttachedWaiter(config, op, project, activity, userAgent) - if err != nil { - return err - } - if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { - return err - } - return json.Unmarshal([]byte(w.CommonOperationWaiter.Op.Response), response) + return containerattached.ContainerAttachedOperationWaitTimeWithResponse(config, op, response, project, activity, userAgent, timeout) } +// Deprecated: For backward compatibility ContainerAttachedOperationWaitTime is still working, +// but all new code should use ContainerAttachedOperationWaitTime in the containerattached package instead. func ContainerAttachedOperationWaitTime(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { - if val, ok := op["name"]; !ok || val == "" { - // This was a synchronous call - there is no operation to wait for. - return nil - } - w, err := createContainerAttachedWaiter(config, op, project, activity, userAgent) - if err != nil { - // If w is nil, the op was synchronous. - return err - } - return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) + return containerattached.ContainerAttachedOperationWaitTime(config, op, project, activity, userAgent, timeout) } diff --git a/mmv1/third_party/terraform/utils/container_operation.go.erb b/mmv1/third_party/terraform/utils/container_operation.go.erb index d96dc8c0a65b..7803ebd10982 100644 --- a/mmv1/third_party/terraform/utils/container_operation.go.erb +++ b/mmv1/third_party/terraform/utils/container_operation.go.erb @@ -2,13 +2,9 @@ package google import ( - "context" - "errors" - "fmt" - "log" "time" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" + tpgcontainer "github.com/hashicorp/terraform-provider-google/google/services/container" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" <% if version == "ga" -%> @@ -18,111 +14,8 @@ import ( <% end -%> ) -type ContainerOperationWaiter struct { - Service *container.Service - Context context.Context - Op *container.Operation - Project string - Location string - UserProjectOverride bool -} - -func (w *ContainerOperationWaiter) State() string { - if w == nil || w.Op == nil { - return "" - } - return w.Op.Status -} - -func (w *ContainerOperationWaiter) Error() error { - if w == nil || w.Op == nil { - return nil - } - - // Error gets called during operation polling to see if there is an error. - // Since container's operation doesn't have an "error" field, we must wait - // until it's done and check the status message - for _, pending := range w.PendingStates() { - if w.Op.Status == pending { - return nil - } - } - - if w.Op.StatusMessage != "" { - return fmt.Errorf(w.Op.StatusMessage) - } - - return nil -} - -func (w *ContainerOperationWaiter) IsRetryable(error) bool { - return false -} - -func (w *ContainerOperationWaiter) SetOp(op interface{}) error { - var ok bool - w.Op, ok = op.(*container.Operation) - if !ok { - return fmt.Errorf("Unable to set operation. 
Bad type!") - } - return nil -} - -func (w *ContainerOperationWaiter) QueryOp() (interface{}, error) { - if w == nil || w.Op == nil { - return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") - } - name := fmt.Sprintf("projects/%s/locations/%s/operations/%s", - w.Project, w.Location, w.Op.Name) - - var op *container.Operation - select { - case <-w.Context.Done(): - log.Println("[WARN] request has been cancelled early") - return op, errors.New("unable to finish polling, context has been cancelled") - default: - // default must be here to keep the previous case from blocking - } - err := transport_tpg.RetryTimeDuration(func() (opErr error) { - opGetCall := w.Service.Projects.Locations.Operations.Get(name) - if w.UserProjectOverride { - opGetCall.Header().Add("X-Goog-User-Project", w.Project) - } - op, opErr = opGetCall.Do() - return opErr - }, transport_tpg.DefaultRequestTimeout) - - return op, err -} - -func (w *ContainerOperationWaiter) OpName() string { - if w == nil || w.Op == nil { - return "" - } - return w.Op.Name -} - -func (w *ContainerOperationWaiter) PendingStates() []string { - return []string{"PENDING", "RUNNING"} -} - -func (w *ContainerOperationWaiter) TargetStates() []string { - return []string{"DONE"} -} - +// Deprecated: For backward compatibility ContainerOperationWait is still working, +// but all new code should use ContainerOperationWait in the tpgcontainer package instead. func ContainerOperationWait(config *transport_tpg.Config, op *container.Operation, project, location, activity, userAgent string, timeout time.Duration) error { - w := &ContainerOperationWaiter{ - Service: config.NewContainerClient(userAgent), - Context: config.Context, - Op: op, - Project: project, - Location: location, - UserProjectOverride: config.UserProjectOverride, - } - - if err := w.SetOp(op); err != nil { - return err - } - - return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) + return tpgcontainer.ContainerOperationWait(config, op, project, location, activity, userAgent, timeout) } diff --git a/mmv1/third_party/terraform/utils/dataproc_cluster_operation.go b/mmv1/third_party/terraform/utils/dataproc_cluster_operation.go new file mode 100644 index 000000000000..271a5e89878a --- /dev/null +++ b/mmv1/third_party/terraform/utils/dataproc_cluster_operation.go @@ -0,0 +1,16 @@ +package google + +import ( + "time" + + tpgdataproc "github.com/hashicorp/terraform-provider-google/google/services/dataproc" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + + "google.golang.org/api/dataproc/v1" +) + +// Deprecated: For backward compatibility dataprocClusterOperationWait is still working, +// but all new code should use DataprocClusterOperationWait in the tpgdataproc package instead. 
+func dataprocClusterOperationWait(config *transport_tpg.Config, op *dataproc.Operation, activity, userAgent string, timeout time.Duration) error { + return tpgdataproc.DataprocClusterOperationWait(config, op, activity, userAgent, timeout) +} diff --git a/mmv1/third_party/terraform/utils/dataproc_job_operation.go b/mmv1/third_party/terraform/utils/dataproc_job_operation.go new file mode 100644 index 000000000000..8c492dd5ac67 --- /dev/null +++ b/mmv1/third_party/terraform/utils/dataproc_job_operation.go @@ -0,0 +1,20 @@ +package google + +import ( + "time" + + "github.com/hashicorp/terraform-provider-google/google/services/dataproc" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +// Deprecated: For backward compatibility dataprocJobOperationWait is still working, +// but all new code should use DataprocJobOperationWait in the dataproc package instead. +func dataprocJobOperationWait(config *transport_tpg.Config, region, projectId, jobId, activity, userAgent string, timeout time.Duration) error { + return dataproc.DataprocJobOperationWait(config, region, projectId, jobId, activity, userAgent, timeout) +} + +// Deprecated: For backward compatibility dataprocDeleteOperationWait is still working, +// but all new code should use DataprocDeleteOperationWait in the dataproc package instead. +func dataprocDeleteOperationWait(config *transport_tpg.Config, region, projectId, jobId, activity, userAgent string, timeout time.Duration) error { + return dataproc.DataprocDeleteOperationWait(config, region, projectId, jobId, activity, userAgent, timeout) +} diff --git a/mmv1/third_party/terraform/utils/datastream_operation.go b/mmv1/third_party/terraform/utils/datastream_operation.go index 5c7f842b651f..eec8a44467d9 100644 --- a/mmv1/third_party/terraform/utils/datastream_operation.go +++ b/mmv1/third_party/terraform/utils/datastream_operation.go @@ -1,139 +1,22 @@ package google import ( - "bytes" - "encoding/json" - "fmt" "time" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" + "github.com/hashicorp/terraform-provider-google/google/services/datastream" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" - - datastream "google.golang.org/api/datastream/v1" ) -type DatastreamOperationWaiter struct { - Config *transport_tpg.Config - UserAgent string - Project string - Op datastream.Operation - tpgresource.CommonOperationWaiter -} - -func (w *DatastreamOperationWaiter) QueryOp() (interface{}, error) { - if w == nil { - return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") - } - // Returns the proper get. 
- url := fmt.Sprintf("%s%s", w.Config.DatastreamBasePath, w.Op.Name) - - return transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: w.Config, - Method: "GET", - Project: w.Project, - RawURL: url, - UserAgent: w.UserAgent, - }) -} - -func (w *DatastreamOperationWaiter) Error() error { - if w != nil && w.Op.Error != nil { - return &DatastreamOperationError{Op: w.Op} - } - return nil -} - -func (w *DatastreamOperationWaiter) SetOp(op interface{}) error { - w.CommonOperationWaiter.SetOp(op) - if err := tpgresource.Convert(op, &w.Op); err != nil { - return err - } - return nil -} - -func createDatastreamWaiter(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string) (*DatastreamOperationWaiter, error) { - w := &DatastreamOperationWaiter{ - Config: config, - UserAgent: userAgent, - Project: project, - } - if err := w.SetOp(op); err != nil { - return nil, err - } - return w, nil -} - // nolint: deadcode,unused +// +// Deprecated: For backward compatibility DatastreamOperationWaitTimeWithResponse is still working, +// but all new code should use DatastreamOperationWaitTimeWithResponse in the datastream package instead. func DatastreamOperationWaitTimeWithResponse(config *transport_tpg.Config, op map[string]interface{}, response *map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { - w, err := createDatastreamWaiter(config, op, project, activity, userAgent) - if err != nil { - return err - } - if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { - return err - } - return json.Unmarshal([]byte(w.Op.Response), response) + return datastream.DatastreamOperationWaitTimeWithResponse(config, op, response, project, activity, userAgent, timeout) } +// Deprecated: For backward compatibility DatastreamOperationWaitTime is still working, +// but all new code should use DatastreamOperationWaitTime in the datastream package instead. func DatastreamOperationWaitTime(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { - if val, ok := op["name"]; !ok || val == "" { - // This was a synchronous call - there is no operation to wait for. - return nil - } - w, err := createDatastreamWaiter(config, op, project, activity, userAgent) - if err != nil { - // If w is nil, the op was synchronous. - return err - } - return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) -} - -// DatastreamOperationError wraps datastream.Status and implements the -// error interface so it can be returned. -type DatastreamOperationError struct { - Op datastream.Operation -} - -func (e DatastreamOperationError) Error() string { - var buf bytes.Buffer - - for _, err := range e.Op.Error.Details { - buf.Write(err) - buf.WriteString("\n") - } - if validations := e.extractFailedValidationResult(); validations != nil { - buf.Write(validations) - buf.WriteString("\n") - } - - return buf.String() -} - -// extractFailedValidationResult extracts the internal failed validations -// if there are any. 
-func (e DatastreamOperationError) extractFailedValidationResult() []byte { - var metadata datastream.OperationMetadata - data, err := e.Op.Metadata.MarshalJSON() - if err != nil { - return nil - } - err = json.Unmarshal(data, &metadata) - if err != nil { - return nil - } - if metadata.ValidationResult == nil { - return nil - } - var res []byte - for _, v := range metadata.ValidationResult.Validations { - if v.State == "FAILED" { - data, err := v.MarshalJSON() - if err != nil { - return nil - } - res = append(res, data...) - res = append(res, []byte("\n")...) - } - } - return res + return datastream.DatastreamOperationWaitTime(config, op, project, activity, userAgent, timeout) } diff --git a/mmv1/third_party/terraform/utils/deployment_manager_operation.go.erb b/mmv1/third_party/terraform/utils/deployment_manager_operation.go.erb index f33c24a7ff2b..875b0f522f69 100644 --- a/mmv1/third_party/terraform/utils/deployment_manager_operation.go.erb +++ b/mmv1/third_party/terraform/utils/deployment_manager_operation.go.erb @@ -2,103 +2,14 @@ package google import ( - "bytes" - "fmt" "time" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" + "github.com/hashicorp/terraform-provider-google/google/services/deploymentmanager" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" - -<% if version == "ga" -%> - "google.golang.org/api/compute/v1" -<% else -%> - compute "google.golang.org/api/compute/v0.beta" -<% end -%> ) -type DeploymentManagerOperationWaiter struct { - Config *transport_tpg.Config - UserAgent string - Project string - OperationUrl string - ComputeOperationWaiter -} - -func (w *DeploymentManagerOperationWaiter) IsRetryable(error) bool { - return false -} - -func (w *DeploymentManagerOperationWaiter) QueryOp() (interface{}, error) { - if w == nil || w.Op == nil || w.Op.SelfLink == "" { - return nil, fmt.Errorf("cannot query unset/nil operation") - } - - resp, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: w.Config, - Method: "GET", - Project: w.Project, - RawURL: w.Op.SelfLink, - UserAgent: w.UserAgent, - }) - if err != nil { - return nil, err - } - op := &compute.Operation{} - if err := tpgresource.Convert(resp, op); err != nil { - return nil, fmt.Errorf("could not convert response to operation: %v", err) - } - return op, nil -} - - +// Deprecated: For backward compatibility DeploymentManagerOperationWaitTime is still working, +// but all new code should use DeploymentManagerOperationWaitTime in the deploymentmanager package instead. 
func DeploymentManagerOperationWaitTime(config *transport_tpg.Config, resp interface{}, project, activity, userAgent string, timeout time.Duration) error { - op := &compute.Operation{} - err := tpgresource.Convert(resp, op) - if err != nil { - return err - } - - w := &DeploymentManagerOperationWaiter{ - Config: config, - UserAgent: userAgent, - OperationUrl: op.SelfLink, - ComputeOperationWaiter: ComputeOperationWaiter{ - Project: project, - }, - } - if err := w.SetOp(op); err != nil { - return err - } - - return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) -} - -func (w *DeploymentManagerOperationWaiter) Error() error { - if w != nil && w.Op != nil && w.Op.Error != nil { - return DeploymentManagerOperationError{ - HTTPStatusCode: w.Op.HttpErrorStatusCode, - HTTPMessage: w.Op.HttpErrorMessage, - OperationError: *w.Op.Error, - } - } - return nil -} - -// DeploymentManagerOperationError wraps information from the compute.Operation -// in an implementation of Error. -type DeploymentManagerOperationError struct { - HTTPStatusCode int64 - HTTPMessage string - compute.OperationError -} - -func (e DeploymentManagerOperationError) Error() string { - var buf bytes.Buffer - buf.WriteString("Deployment Manager returned errors for this operation, likely due to invalid configuration.") - buf.WriteString(fmt.Sprintf("Operation failed with HTTP error %d: %s.", e.HTTPStatusCode, e.HTTPMessage)) - buf.WriteString("Errors returned: \n") - for _, err := range e.Errors { - buf.WriteString(err.Message + "\n") - } - return buf.String() + return deploymentmanager.DeploymentManagerOperationWaitTime(config, resp, project, activity, userAgent, timeout) } diff --git a/mmv1/third_party/terraform/utils/dialogflow_cx_operation.go b/mmv1/third_party/terraform/utils/dialogflow_cx_operation.go index 906362459b03..e190aacae00e 100644 --- a/mmv1/third_party/terraform/utils/dialogflow_cx_operation.go +++ b/mmv1/third_party/terraform/utils/dialogflow_cx_operation.go @@ -1,69 +1,22 @@ package google import ( - "encoding/json" - "fmt" "time" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" + "github.com/hashicorp/terraform-provider-google/google/services/dialogflowcx" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) -type DialogflowCXOperationWaiter struct { - Config *transport_tpg.Config - UserAgent string - tpgresource.CommonOperationWaiter - Location string -} - -func (w *DialogflowCXOperationWaiter) QueryOp() (interface{}, error) { - if w == nil { - return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") - } - // Returns the proper get. - url := fmt.Sprintf("https://%s-dialogflow.googleapis.com/v3/%s", w.Location, w.CommonOperationWaiter.Op.Name) - - return transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: w.Config, - Method: "GET", - RawURL: url, - UserAgent: w.UserAgent, - }) -} - -func createDialogflowCXWaiter(config *transport_tpg.Config, op map[string]interface{}, activity, userAgent, location string) (*DialogflowCXOperationWaiter, error) { - w := &DialogflowCXOperationWaiter{ - Config: config, - UserAgent: userAgent, - Location: location, - } - if err := w.CommonOperationWaiter.SetOp(op); err != nil { - return nil, err - } - return w, nil -} - // nolint: deadcode,unused +// +// Deprecated: For backward compatibility DialogflowCXOperationWaitTimeWithResponse is still working, +// but all new code should use DialogflowCXOperationWaitTimeWithResponse in the dialogflowcx package instead. 
func DialogflowCXOperationWaitTimeWithResponse(config *transport_tpg.Config, op map[string]interface{}, response *map[string]interface{}, activity, userAgent, location string, timeout time.Duration) error { - w, err := createDialogflowCXWaiter(config, op, activity, userAgent, location) - if err != nil { - return err - } - if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { - return err - } - return json.Unmarshal([]byte(w.CommonOperationWaiter.Op.Response), response) + return dialogflowcx.DialogflowCXOperationWaitTimeWithResponse(config, op, response, activity, userAgent, location, timeout) } +// Deprecated: For backward compatibility DialogflowCXOperationWaitTime is still working, +// but all new code should use DialogflowCXOperationWaitTime in the dialogflowcx package instead. func DialogflowCXOperationWaitTime(config *transport_tpg.Config, op map[string]interface{}, activity, userAgent, location string, timeout time.Duration) error { - if val, ok := op["name"]; !ok || val == "" { - // This was a synchronous call - there is no operation to wait for. - return nil - } - w, err := createDialogflowCXWaiter(config, op, activity, userAgent, location) - if err != nil { - // If w is nil, the op was synchronous. - return err - } - return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) + return dialogflowcx.DialogflowCXOperationWaitTime(config, op, activity, userAgent, location, timeout) } diff --git a/mmv1/third_party/terraform/utils/error_retry_predicates.go b/mmv1/third_party/terraform/utils/error_retry_predicates.go index 5ee01d7a39fe..4855d5cafe0e 100644 --- a/mmv1/third_party/terraform/utils/error_retry_predicates.go +++ b/mmv1/third_party/terraform/utils/error_retry_predicates.go @@ -157,3 +157,7 @@ func IamServiceAccountNotFound(err error) (bool, string) { func IsApigeeRetryableError(err error) (bool, string) { return transport_tpg.IsApigeeRetryableError(err) } + +func IsSwgAutogenRouterRetryable(err error) (bool, string) { + return transport_tpg.IsSwgAutogenRouterRetryable(err) +} diff --git a/mmv1/third_party/terraform/utils/extract.go b/mmv1/third_party/terraform/utils/extract.go index 2bd37d518bb2..649b59f518b7 100644 --- a/mmv1/third_party/terraform/utils/extract.go +++ b/mmv1/third_party/terraform/utils/extract.go @@ -1,35 +1,11 @@ package google import ( - "fmt" - "regexp" + "github.com/hashicorp/terraform-provider-google/google/services/logging" ) // ExtractFieldByPattern returns the value of a field extracted from a parent field according to the given regular expression pattern. // An error is returned if the field already has a value different than the value extracted. func ExtractFieldByPattern(fieldName, fieldValue, parentFieldValue, pattern string) (string, error) { - var extractedValue string - // Fetch value from container if the container exists. - if parentFieldValue != "" { - r := regexp.MustCompile(pattern) - m := r.FindStringSubmatch(parentFieldValue) - if m != nil && len(m) >= 2 { - extractedValue = m[1] - } else if fieldValue == "" { - // The pattern didn't match and the value doesn't exist. 
- return "", fmt.Errorf("parent of %q has no matching values from pattern %q in value %q", fieldName, pattern, parentFieldValue) - } - } - - // If both values exist and are different, error - if fieldValue != "" && extractedValue != "" && fieldValue != extractedValue { - return "", fmt.Errorf("%q has conflicting values of %q (from parent) and %q (from self)", fieldName, extractedValue, fieldValue) - } - - // If value does not exist, use the value in container. - if fieldValue == "" { - return extractedValue, nil - } - - return fieldValue, nil + return logging.ExtractFieldByPattern(fieldName, fieldValue, parentFieldValue, pattern) } diff --git a/mmv1/third_party/terraform/utils/gkeonprem_operation.go.erb b/mmv1/third_party/terraform/utils/gkeonprem_operation.go.erb index e122efcb3078..41ea24fa6446 100644 --- a/mmv1/third_party/terraform/utils/gkeonprem_operation.go.erb +++ b/mmv1/third_party/terraform/utils/gkeonprem_operation.go.erb @@ -2,146 +2,23 @@ package google <% unless version == 'ga' -%> import ( - "encoding/json" - "fmt" "time" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" + "github.com/hashicorp/terraform-provider-google/google/services/gkeonprem" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" - - cloudresourcemanager "google.golang.org/api/cloudresourcemanager/v1" ) -type gkeonpremOpError struct { - *cloudresourcemanager.Status -} - -func (e gkeonpremOpError) Error() string { - var validationCheck map[string]interface{} - - for _, msg := range e.Details { - detail := make(map[string]interface{}) - if err := json.Unmarshal(msg, &detail); err != nil { - continue - } - - if _, ok := detail["validationCheck"]; ok { - delete(detail, "@type") - validationCheck = detail - } - } - - if validationCheck != nil { - bytes, err := json.MarshalIndent(validationCheck, "", " ") - if err != nil { - return fmt.Sprintf("Error code %v message: %s validation check: %s", e.Code, e.Message, validationCheck) - } - - return fmt.Sprintf("Error code %v message: %s\n %s", e.Code, e.Message, bytes) - } - - return fmt.Sprintf("Error code %v, message: %s", e.Code, e.Message) -} - -type gkeonpremOperationWaiter struct { - Config *transport_tpg.Config - UserAgent string - Project string - Op tpgresource.CommonOperation -} - -func (w *gkeonpremOperationWaiter) State() string { - if w == nil { - return fmt.Sprintf("Operation is nil!") - } - - return fmt.Sprintf("done: %v", w.Op.Done) -} - -func (w *gkeonpremOperationWaiter) Error() error { - if w != nil && w.Op.Error != nil { - return &gkeonpremOpError{w.Op.Error} - } - return nil -} - -func (w *gkeonpremOperationWaiter) IsRetryable(error) bool { - return false -} - -func (w *gkeonpremOperationWaiter) SetOp(op interface{}) error { - if err := tpgresource.Convert(op, &w.Op); err != nil { - return err - } - return nil -} - -func (w *gkeonpremOperationWaiter) OpName() string { - if w == nil { - return "" - } - - return w.Op.Name -} - -func (w *gkeonpremOperationWaiter) PendingStates() []string { - return []string{"done: false"} -} - -func (w *gkeonpremOperationWaiter) TargetStates() []string { - return []string{"done: true"} -} - -func (w *gkeonpremOperationWaiter) QueryOp() (interface{}, error) { - if w == nil { - return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") - } - // Returns the proper get. 
- url := fmt.Sprintf("%s%s", w.Config.GkeonpremBasePath, w.Op.Name) - - return transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: w.Config, - Method: "GET", - Project: w.Project, - RawURL: url, - UserAgent: w.UserAgent, - }) -} - -func creategkeonpremWaiter(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string) (*gkeonpremOperationWaiter, error) { - w := &gkeonpremOperationWaiter{ - Config: config, - UserAgent: userAgent, - Project: project, - } - if err := w.SetOp(op); err != nil { - return nil, err - } - return w, nil -} - // nolint: deadcode,unused +// +// Deprecated: For backward compatibility GkeonpremOperationWaitTimeWithResponse is still working, +// but all new code should use GkeonpremOperationWaitTimeWithResponse in the gkeonprem package instead. func GkeonpremOperationWaitTimeWithResponse(config *transport_tpg.Config, op map[string]interface{}, response *map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { - w, err := creategkeonpremWaiter(config, op, project, activity, userAgent) - if err != nil { - return err - } - if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { - return err - } - return json.Unmarshal([]byte(w.Op.Response), response) + return gkeonprem.GkeonpremOperationWaitTimeWithResponse(config, op, response, project, activity, userAgent, timeout) } +// Deprecated: For backward compatibility GkeonpremOperationWaitTime is still working, +// but all new code should use GkeonpremOperationWaitTime in the gkeonprem package instead. func GkeonpremOperationWaitTime(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { - if val, ok := op["name"]; !ok || val == "" { - // This was a synchronous call - there is no operation to wait for. - return nil - } - w, err := creategkeonpremWaiter(config, op, project, activity, userAgent) - if err != nil { - // If w is nil, the op was synchronous. 
- return err - } - return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) + return gkeonprem.GkeonpremOperationWaitTime(config, op, project, activity, userAgent, timeout) } <% end -%> diff --git a/mmv1/third_party/terraform/utils/iam_pubsub_subscription.go b/mmv1/third_party/terraform/utils/iam_pubsub_subscription.go index 9849b683b499..abae81fc22c7 100644 --- a/mmv1/third_party/terraform/utils/iam_pubsub_subscription.go +++ b/mmv1/third_party/terraform/utils/iam_pubsub_subscription.go @@ -5,6 +5,7 @@ import ( "github.com/hashicorp/errwrap" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + tpgpubsub "github.com/hashicorp/terraform-provider-google/google/services/pubsub" "github.com/hashicorp/terraform-provider-google/google/tpgiamresource" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" @@ -39,7 +40,7 @@ func NewPubsubSubscriptionIamUpdater(d tpgresource.TerraformResourceData, config return nil, err } - subscription := getComputedSubscriptionName(project, d.Get("subscription").(string)) + subscription := tpgpubsub.GetComputedSubscriptionName(project, d.Get("subscription").(string)) return &PubsubSubscriptionIamUpdater{ subscription: subscription, diff --git a/mmv1/third_party/terraform/utils/image.go b/mmv1/third_party/terraform/utils/image.go index 2bb997ead128..cf1bd3aa023f 100644 --- a/mmv1/third_party/terraform/utils/image.go +++ b/mmv1/third_party/terraform/utils/image.go @@ -1,80 +1,13 @@ package google import ( - "fmt" - "regexp" - "strings" - - "github.com/hashicorp/terraform-provider-google/google/tpgresource" + "github.com/hashicorp/terraform-provider-google/google/services/compute" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" - "github.com/hashicorp/terraform-provider-google/google/verify" - - "google.golang.org/api/googleapi" -) - -const ( - resolveImageFamilyRegex = "[-_a-zA-Z0-9]*" - resolveImageImageRegex = "[-_a-zA-Z0-9]*" -) - -var ( - resolveImageProjectImage = regexp.MustCompile(fmt.Sprintf("projects/(%s)/global/images/(%s)$", verify.ProjectRegex, resolveImageImageRegex)) - resolveImageProjectFamily = regexp.MustCompile(fmt.Sprintf("projects/(%s)/global/images/family/(%s)$", verify.ProjectRegex, resolveImageFamilyRegex)) - resolveImageGlobalImage = regexp.MustCompile(fmt.Sprintf("^global/images/(%s)$", resolveImageImageRegex)) - resolveImageGlobalFamily = regexp.MustCompile(fmt.Sprintf("^global/images/family/(%s)$", resolveImageFamilyRegex)) - resolveImageFamilyFamily = regexp.MustCompile(fmt.Sprintf("^family/(%s)$", resolveImageFamilyRegex)) - resolveImageProjectImageShorthand = regexp.MustCompile(fmt.Sprintf("^(%s)/(%s)$", verify.ProjectRegex, resolveImageImageRegex)) - resolveImageProjectFamilyShorthand = regexp.MustCompile(fmt.Sprintf("^(%s)/(%s)$", verify.ProjectRegex, resolveImageFamilyRegex)) - resolveImageFamily = regexp.MustCompile(fmt.Sprintf("^(%s)$", resolveImageFamilyRegex)) - resolveImageImage = regexp.MustCompile(fmt.Sprintf("^(%s)$", resolveImageImageRegex)) - resolveImageLink = regexp.MustCompile(fmt.Sprintf("^https://www.googleapis.com/compute/[a-z0-9]+/projects/(%s)/global/images/(%s)", verify.ProjectRegex, resolveImageImageRegex)) - - windowsSqlImage = regexp.MustCompile("^sql-(?:server-)?([0-9]{4})-([a-z]+)-windows-(?:server-)?([0-9]{4})(?:-r([0-9]+))?-dc-v[0-9]+$") - canonicalUbuntuLtsImage = regexp.MustCompile("^ubuntu-(minimal-)?([0-9]+)(?:.*(arm64))?.*$") - cosLtsImage = 
regexp.MustCompile("^cos-([0-9]+)-") ) // built-in projects to look for images/families containing the string // on the left in -var imageMap = map[string]string{ - "centos": "centos-cloud", - "coreos": "coreos-cloud", - "debian": "debian-cloud", - "opensuse": "opensuse-cloud", - "rhel": "rhel-cloud", - "rocky-linux": "rocky-linux-cloud", - "sles": "suse-cloud", - "ubuntu": "ubuntu-os-cloud", - "windows": "windows-cloud", - "windows-sql": "windows-sql-cloud", -} - -func resolveImageImageExists(c *transport_tpg.Config, project, name, userAgent string) (bool, error) { - if _, err := c.NewComputeClient(userAgent).Images.Get(project, name).Do(); err == nil { - return true, nil - } else if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 404 { - return false, nil - } else { - return false, fmt.Errorf("Error checking if image %s exists: %s", name, err) - } -} - -func resolveImageFamilyExists(c *transport_tpg.Config, project, name, userAgent string) (bool, error) { - if _, err := c.NewComputeClient(userAgent).Images.GetFromFamily(project, name).Do(); err == nil { - return true, nil - } else if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 404 { - return false, nil - } else { - return false, fmt.Errorf("Error checking if family %s exists: %s", name, err) - } -} - -func sanityTestRegexMatches(expected int, got []string, regexType, name string) error { - if len(got)-1 != expected { // subtract one, index zero is the entire matched expression - return fmt.Errorf("Expected %d %s regex matches, got %d for %s", expected, regexType, len(got)-1, name) - } - return nil -} +var imageMap = compute.ImageMap // If the given name is a URL, return it. // If it's in the form projects/{project}/global/images/{image}, return it @@ -94,154 +27,9 @@ func sanityTestRegexMatches(expected int, got []string, regexType, name string) // If not, check if it could be a GCP-provided image, and if it exists. If it does, return it as projects/{project}/global/images/{image}. // If not, check if it's a family in the current project. If it is, return it as global/images/family/{family}. // If not, check if it could be a GCP-provided family, and if it exists. If it does, return it as projects/{project}/global/images/family/{family} +// +// Deprecated: For backward compatibility resolveImage is still working, +// but all new code should use ResolveImage in the compute package instead. 
func resolveImage(c *transport_tpg.Config, project, name, userAgent string) (string, error) { - var builtInProject string - for k, v := range imageMap { - if strings.Contains(name, k) { - builtInProject = v - break - } - } - switch { - case resolveImageLink.MatchString(name): // https://www.googleapis.com/compute/v1/projects/xyz/global/images/xyz - return name, nil - case resolveImageProjectImage.MatchString(name): // projects/xyz/global/images/xyz - res := resolveImageProjectImage.FindStringSubmatch(name) - if err := sanityTestRegexMatches(2, res, "project image", name); err != nil { - return "", err - } - return fmt.Sprintf("projects/%s/global/images/%s", res[1], res[2]), nil - case resolveImageProjectFamily.MatchString(name): // projects/xyz/global/images/family/xyz - res := resolveImageProjectFamily.FindStringSubmatch(name) - if err := sanityTestRegexMatches(2, res, "project family", name); err != nil { - return "", err - } - return fmt.Sprintf("projects/%s/global/images/family/%s", res[1], res[2]), nil - case resolveImageGlobalImage.MatchString(name): // global/images/xyz - res := resolveImageGlobalImage.FindStringSubmatch(name) - if err := sanityTestRegexMatches(1, res, "global image", name); err != nil { - return "", err - } - return fmt.Sprintf("global/images/%s", res[1]), nil - case resolveImageGlobalFamily.MatchString(name): // global/images/family/xyz - res := resolveImageGlobalFamily.FindStringSubmatch(name) - if err := sanityTestRegexMatches(1, res, "global family", name); err != nil { - return "", err - } - return fmt.Sprintf("global/images/family/%s", res[1]), nil - case resolveImageFamilyFamily.MatchString(name): // family/xyz - res := resolveImageFamilyFamily.FindStringSubmatch(name) - if err := sanityTestRegexMatches(1, res, "family family", name); err != nil { - return "", err - } - if ok, err := resolveImageFamilyExists(c, project, res[1], userAgent); err != nil { - return "", err - } else if ok { - return fmt.Sprintf("global/images/family/%s", res[1]), nil - } - if builtInProject != "" { - if ok, err := resolveImageFamilyExists(c, builtInProject, res[1], userAgent); err != nil { - return "", err - } else if ok { - return fmt.Sprintf("projects/%s/global/images/family/%s", builtInProject, res[1]), nil - } - } - case resolveImageProjectImageShorthand.MatchString(name): // xyz/xyz - res := resolveImageProjectImageShorthand.FindStringSubmatch(name) - if err := sanityTestRegexMatches(2, res, "project image shorthand", name); err != nil { - return "", err - } - if ok, err := resolveImageImageExists(c, res[1], res[2], userAgent); err != nil { - return "", err - } else if ok { - return fmt.Sprintf("projects/%s/global/images/%s", res[1], res[2]), nil - } - fallthrough // check if it's a family - case resolveImageProjectFamilyShorthand.MatchString(name): // xyz/xyz - res := resolveImageProjectFamilyShorthand.FindStringSubmatch(name) - if err := sanityTestRegexMatches(2, res, "project family shorthand", name); err != nil { - return "", err - } - if ok, err := resolveImageFamilyExists(c, res[1], res[2], userAgent); err != nil { - return "", err - } else if ok { - return fmt.Sprintf("projects/%s/global/images/family/%s", res[1], res[2]), nil - } - case resolveImageImage.MatchString(name): // xyz - res := resolveImageImage.FindStringSubmatch(name) - if err := sanityTestRegexMatches(1, res, "image", name); err != nil { - return "", err - } - if ok, err := resolveImageImageExists(c, project, res[1], userAgent); err != nil { - return "", err - } else if ok { - return 
fmt.Sprintf("global/images/%s", res[1]), nil - } - if builtInProject != "" { - // check the images GCP provides - if ok, err := resolveImageImageExists(c, builtInProject, res[1], userAgent); err != nil { - return "", err - } else if ok { - return fmt.Sprintf("projects/%s/global/images/%s", builtInProject, res[1]), nil - } - } - fallthrough // check if the name is a family, instead of an image - case resolveImageFamily.MatchString(name): // xyz - res := resolveImageFamily.FindStringSubmatch(name) - if err := sanityTestRegexMatches(1, res, "family", name); err != nil { - return "", err - } - if ok, err := resolveImageFamilyExists(c, c.Project, res[1], userAgent); err != nil { - return "", err - } else if ok { - return fmt.Sprintf("global/images/family/%s", res[1]), nil - } - if builtInProject != "" { - // check the families GCP provides - if ok, err := resolveImageFamilyExists(c, builtInProject, res[1], userAgent); err != nil { - return "", err - } else if ok { - return fmt.Sprintf("projects/%s/global/images/family/%s", builtInProject, res[1]), nil - } - } - } - return "", fmt.Errorf("Could not find image or family %s", name) -} - -// resolveImageRefToRelativeURI takes the output of resolveImage and coerces it -// into a relative URI. In the event that a global/images/IMAGE or -// global/images/family/FAMILY reference is returned from resolveImage, -// providerProject will be used as the project for the self_link. -func resolveImageRefToRelativeURI(providerProject, name string) (string, error) { - switch { - case resolveImageLink.MatchString(name): // https://www.googleapis.com/compute/v1/projects/xyz/global/images/xyz - namePath, err := tpgresource.GetRelativePath(name) - if err != nil { - return "", err - } - - return namePath, nil - case resolveImageProjectImage.MatchString(name): // projects/xyz/global/images/xyz - return name, nil - case resolveImageProjectFamily.MatchString(name): // projects/xyz/global/images/family/xyz - res := resolveImageProjectFamily.FindStringSubmatch(name) - if err := sanityTestRegexMatches(2, res, "project family", name); err != nil { - return "", err - } - return fmt.Sprintf("projects/%s/global/images/family/%s", res[1], res[2]), nil - case resolveImageGlobalImage.MatchString(name): // global/images/xyz - res := resolveImageGlobalImage.FindStringSubmatch(name) - if err := sanityTestRegexMatches(1, res, "global image", name); err != nil { - return "", err - } - return fmt.Sprintf("projects/%s/global/images/%s", providerProject, res[1]), nil - case resolveImageGlobalFamily.MatchString(name): // global/images/family/xyz - res := resolveImageGlobalFamily.FindStringSubmatch(name) - if err := sanityTestRegexMatches(1, res, "global family", name); err != nil { - return "", err - } - return fmt.Sprintf("projects/%s/global/images/family/%s", providerProject, res[1]), nil - } - return "", fmt.Errorf("Could not expand image or family %q into a relative URI", name) - + return compute.ResolveImage(c, project, name, userAgent) } diff --git a/mmv1/third_party/terraform/utils/metadata.go.erb b/mmv1/third_party/terraform/utils/metadata.go.erb index 3720189609a2..9adf91fc7f64 100644 --- a/mmv1/third_party/terraform/utils/metadata.go.erb +++ b/mmv1/third_party/terraform/utils/metadata.go.erb @@ -2,16 +2,13 @@ package google import ( - "errors" - "sort" - <% if version == "ga" -%> "google.golang.org/api/compute/v1" <% else -%> compute "google.golang.org/api/compute/v0.beta" <% end -%> - "github.com/hashicorp/terraform-provider-google/google/tpgresource" + tpgcompute 
"github.com/hashicorp/terraform-provider-google/google/services/compute" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) @@ -24,140 +21,29 @@ func MetadataRetryWrapper(update func() error) error { // Update the metadata (serverMD) according to the provided diff (oldMDMap v // newMDMap). +// +// Deprecated: For backward compatibility MetadataUpdate is still working, +// but all new code should use PollCheckKnaMetadataUpdatetiveStatusFunc in the tpgcompute package instead. func MetadataUpdate(oldMDMap map[string]interface{}, newMDMap map[string]interface{}, serverMD *compute.Metadata) { - curMDMap := make(map[string]string) - // Load metadata on server into map - for _, kv := range serverMD.Items { - // If the server state has a key that we had in our old - // state, but not in our new state, we should delete it - _, okOld := oldMDMap[kv.Key] - _, okNew := newMDMap[kv.Key] - if okOld && !okNew { - continue - } else { - curMDMap[kv.Key] = *kv.Value - } - } - - // Insert new metadata into existing metadata (overwriting when needed) - for key, val := range newMDMap { - curMDMap[key] = val.(string) - } - - // Reformat old metadata into a list - serverMD.Items = nil - for key, val := range curMDMap { - v := val - serverMD.Items = append(serverMD.Items, &compute.MetadataItems{ - Key: key, - Value: &v, - }) - } + tpgcompute.MetadataUpdate(oldMDMap, newMDMap, serverMD) } // Update the beta metadata (serverMD) according to the provided diff (oldMDMap v // newMDMap). +// +// Deprecated: For backward compatibility BetaMetadataUpdate is still working, +// but all new code should use BetaMetadataUpdate in the tpgcompute package instead. func BetaMetadataUpdate(oldMDMap map[string]interface{}, newMDMap map[string]interface{}, serverMD *compute.Metadata) { - curMDMap := make(map[string]string) - // Load metadata on server into map - for _, kv := range serverMD.Items { - // If the server state has a key that we had in our old - // state, but not in our new state, we should delete it - _, okOld := oldMDMap[kv.Key] - _, okNew := newMDMap[kv.Key] - if okOld && !okNew { - continue - } else { - curMDMap[kv.Key] = *kv.Value - } - } - - // Insert new metadata into existing metadata (overwriting when needed) - for key, val := range newMDMap { - curMDMap[key] = val.(string) - } - - // Reformat old metadata into a list - serverMD.Items = nil - for key, val := range curMDMap { - v := val - serverMD.Items = append(serverMD.Items, &compute.MetadataItems{ - Key: key, - Value: &v, - }) - } -} - -func expandComputeMetadata(m map[string]interface{}) []*compute.MetadataItems { - metadata := make([]*compute.MetadataItems, len(m)) - var keys []string - for key := range m { - keys = append(keys, key) - } - sort.Strings(keys) - // Append new metadata to existing metadata - for _, key := range keys { - v := m[key].(string) - metadata = append(metadata, &compute.MetadataItems{ - Key: key, - Value: &v, - }) - } - - return metadata -} - -func flattenMetadataBeta(metadata *compute.Metadata) map[string]string { - metadataMap := make(map[string]string) - for _, item := range metadata.Items { - metadataMap[item.Key] = *item.Value - } - return metadataMap + tpgcompute.BetaMetadataUpdate(oldMDMap, newMDMap, serverMD) } // This function differs from flattenMetadataBeta only in that it takes // compute.metadata rather than compute.metadata as an argument. It should // be removed in favour of flattenMetadataBeta if/when all resources using it get // beta support. 
+// +// Deprecated: For backward compatibility flattenMetadata is still working, +// but all new code should use FlattenMetadata in the tpgcompute package instead. func flattenMetadata(metadata *compute.Metadata) map[string]interface{} { - metadataMap := make(map[string]interface{}) - for _, item := range metadata.Items { - metadataMap[item.Key] = *item.Value - } - return metadataMap -} - -func resourceInstanceMetadata(d tpgresource.TerraformResourceData) (*compute.Metadata, error) { - m := &compute.Metadata{} - mdMap := d.Get("metadata").(map[string]interface{}) - if v, ok := d.GetOk("metadata_startup_script"); ok && v.(string) != "" { - if w, ok := mdMap["startup-script"]; ok { - // metadata.startup-script could be from metadata_startup_script in the first place - if v != w { - return nil, errors.New("Cannot provide both metadata_startup_script and metadata.startup-script.") - } - } - mdMap["startup-script"] = v - } - if len(mdMap) > 0 { - m.Items = make([]*compute.MetadataItems, 0, len(mdMap)) - var keys []string - for k := range mdMap { - keys = append(keys, k) - } - sort.Strings(keys) - for _, k := range keys { - v := mdMap[k].(string) - m.Items = append(m.Items, &compute.MetadataItems{ - Key: k, - Value: &v, - }) - } - - // Set the fingerprint. If the metadata has never been set before - // then this will just be blank. - m.Fingerprint = d.Get("metadata_fingerprint").(string) - } - - return m, nil + return tpgcompute.FlattenMetadata(metadata) } diff --git a/mmv1/third_party/terraform/utils/network_security_address_group_operation.go.erb b/mmv1/third_party/terraform/utils/network_security_address_group_operation.go.erb index 89ec25caa0f3..aa4b6aef5e9b 100644 --- a/mmv1/third_party/terraform/utils/network_security_address_group_operation.go.erb +++ b/mmv1/third_party/terraform/utils/network_security_address_group_operation.go.erb @@ -5,12 +5,13 @@ package google import ( "time" + "github.com/hashicorp/terraform-provider-google/google/services/networksecurity" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) // NetworkSecurityAddressGroupOperationWaitTime is specific for address group resource because the only difference is that it does not need project param. func NetworkSecurityAddressGroupOperationWaitTime(config *transport_tpg.Config, op map[string]interface{}, activity, userAgent string, timeout time.Duration) error { // project is not necessary for this operation. 
- return NetworkSecurityOperationWaitTime(config, op, "", activity, userAgent, timeout) + return networksecurity.NetworkSecurityAddressGroupOperationWaitTime(config, op, activity, userAgent, timeout) } <% end -%> diff --git a/mmv1/third_party/terraform/utils/provider.go.erb b/mmv1/third_party/terraform/utils/provider.go.erb index d668b2df9e1f..6839fba10503 100644 --- a/mmv1/third_party/terraform/utils/provider.go.erb +++ b/mmv1/third_party/terraform/utils/provider.go.erb @@ -11,9 +11,24 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-provider-google/version" - <% unless version == 'ga' -%> - "github.com/hashicorp/terraform-provider-google/google/services/runtimeconfig" +<% products.each do |product| + product_definition = product[:definitions] + if version == 'ga' + some_resource_in_ga = false + product_definition.objects.each do |object| + if !object.exclude && !object.not_in_version?(product_definition.version_obj_or_closest(version)) + some_resource_in_ga = true + break + end + end + + if some_resource_in_ga -%> + "github.com/hashicorp/terraform-provider-google/google/services/<%= product[:definitions].name.downcase -%>" <% end -%> +<% else -%> + "github.com/hashicorp/terraform-provider-google/google/services/<%= product[:definitions].name.downcase -%>" + <% end -%> +<% end -%> "github.com/hashicorp/terraform-provider-google/google/tpgiamresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" @@ -191,61 +206,61 @@ func DatasourceMapWithErrors() (map[string]*schema.Resource, error) { "google_active_folder": DataSourceGoogleActiveFolder(), "google_alloydb_locations": DataSourceAlloydbLocations(), "google_alloydb_supported_database_flags": DataSourceAlloydbSupportedDatabaseFlags(), - "google_artifact_registry_repository": DataSourceArtifactRegistryRepository(), + "google_artifact_registry_repository": artifactregistry.DataSourceArtifactRegistryRepository(), "google_app_engine_default_service_account": DataSourceGoogleAppEngineDefaultServiceAccount(), - "google_beyondcorp_app_connection": DataSourceGoogleBeyondcorpAppConnection(), - "google_beyondcorp_app_connector": DataSourceGoogleBeyondcorpAppConnector(), - "google_beyondcorp_app_gateway": DataSourceGoogleBeyondcorpAppGateway(), + "google_beyondcorp_app_connection": beyondcorp.DataSourceGoogleBeyondcorpAppConnection(), + "google_beyondcorp_app_connector": beyondcorp.DataSourceGoogleBeyondcorpAppConnector(), + "google_beyondcorp_app_gateway": beyondcorp.DataSourceGoogleBeyondcorpAppGateway(), "google_billing_account": DataSourceGoogleBillingAccount(), "google_bigquery_default_service_account": DataSourceGoogleBigqueryDefaultServiceAccount(), - "google_cloudbuild_trigger": DataSourceGoogleCloudBuildTrigger(), + "google_cloudbuild_trigger": cloudbuild.DataSourceGoogleCloudBuildTrigger(), "google_cloudfunctions_function": DataSourceGoogleCloudFunctionsFunction(), - "google_cloudfunctions2_function": DataSourceGoogleCloudFunctions2Function(), + "google_cloudfunctions2_function": cloudfunctions2.DataSourceGoogleCloudFunctions2Function(), <% unless version == 'ga' -%> "google_cloud_asset_resources_search_all": DataSourceGoogleCloudAssetResourcesSearchAll(), <% end -%> - "google_cloud_identity_groups": DataSourceGoogleCloudIdentityGroups(), - "google_cloud_identity_group_memberships": DataSourceGoogleCloudIdentityGroupMemberships(), + "google_cloud_identity_groups": cloudidentity.DataSourceGoogleCloudIdentityGroups(), + 
"google_cloud_identity_group_memberships": cloudidentity.DataSourceGoogleCloudIdentityGroupMemberships(), "google_cloud_run_locations": DataSourceGoogleCloudRunLocations(), - "google_cloud_run_service": DataSourceGoogleCloudRunService(), + "google_cloud_run_service": cloudrun.DataSourceGoogleCloudRunService(), "google_composer_environment": DataSourceGoogleComposerEnvironment(), "google_composer_image_versions": DataSourceGoogleComposerImageVersions(), "google_compute_address": DataSourceGoogleComputeAddress(), "google_compute_addresses": DataSourceGoogleComputeAddresses(), - "google_compute_backend_service": DataSourceGoogleComputeBackendService(), - "google_compute_backend_bucket": DataSourceGoogleComputeBackendBucket(), + "google_compute_backend_service": compute.DataSourceGoogleComputeBackendService(), + "google_compute_backend_bucket": compute.DataSourceGoogleComputeBackendBucket(), "google_compute_default_service_account": DataSourceGoogleComputeDefaultServiceAccount(), - "google_compute_disk": DataSourceGoogleComputeDisk(), - "google_compute_forwarding_rule": DataSourceGoogleComputeForwardingRule(), + "google_compute_disk": compute.DataSourceGoogleComputeDisk(), + "google_compute_forwarding_rule": compute.DataSourceGoogleComputeForwardingRule(), "google_compute_global_address": DataSourceGoogleComputeGlobalAddress(), - "google_compute_global_forwarding_rule": DataSourceGoogleComputeGlobalForwardingRule(), - "google_compute_ha_vpn_gateway": DataSourceGoogleComputeHaVpnGateway(), - "google_compute_health_check": DataSourceGoogleComputeHealthCheck(), + "google_compute_global_forwarding_rule": compute.DataSourceGoogleComputeGlobalForwardingRule(), + "google_compute_ha_vpn_gateway": compute.DataSourceGoogleComputeHaVpnGateway(), + "google_compute_health_check": compute.DataSourceGoogleComputeHealthCheck(), "google_compute_image": DataSourceGoogleComputeImage(), - "google_compute_instance": DataSourceGoogleComputeInstance(), - "google_compute_instance_group": DataSourceGoogleComputeInstanceGroup(), - "google_compute_instance_group_manager": DataSourceGoogleComputeInstanceGroupManager(), + "google_compute_instance": compute.DataSourceGoogleComputeInstance(), + "google_compute_instance_group": compute.DataSourceGoogleComputeInstanceGroup(), + "google_compute_instance_group_manager": compute.DataSourceGoogleComputeInstanceGroupManager(), "google_compute_instance_serial_port": DataSourceGoogleComputeInstanceSerialPort(), - "google_compute_instance_template": DataSourceGoogleComputeInstanceTemplate(), + "google_compute_instance_template": compute.DataSourceGoogleComputeInstanceTemplate(), "google_compute_lb_ip_ranges": DataSourceGoogleComputeLbIpRanges(), "google_compute_network": DataSourceGoogleComputeNetwork(), - "google_compute_network_endpoint_group": DataSourceGoogleComputeNetworkEndpointGroup(), + "google_compute_network_endpoint_group": compute.DataSourceGoogleComputeNetworkEndpointGroup(), "google_compute_network_peering": DataSourceComputeNetworkPeering(), "google_compute_node_types": DataSourceGoogleComputeNodeTypes(), "google_compute_regions": DataSourceGoogleComputeRegions(), - "google_compute_region_network_endpoint_group": DataSourceGoogleComputeRegionNetworkEndpointGroup(), - "google_compute_region_instance_group": DataSourceGoogleComputeRegionInstanceGroup(), + "google_compute_region_network_endpoint_group": compute.DataSourceGoogleComputeRegionNetworkEndpointGroup(), + "google_compute_region_instance_group": compute.DataSourceGoogleComputeRegionInstanceGroup(), <% unless 
version == 'ga' -%> - "google_compute_region_instance_template": DataSourceGoogleComputeRegionInstanceTemplate(), + "google_compute_region_instance_template": compute.DataSourceGoogleComputeRegionInstanceTemplate(), <% end -%> - "google_compute_region_ssl_certificate": DataSourceGoogleRegionComputeSslCertificate(), - "google_compute_resource_policy": DataSourceGoogleComputeResourcePolicy(), - "google_compute_router": DataSourceGoogleComputeRouter(), - "google_compute_router_nat": DataSourceGoogleComputeRouterNat(), - "google_compute_router_status": DataSourceGoogleComputeRouterStatus(), - "google_compute_snapshot": DataSourceGoogleComputeSnapshot(), - "google_compute_ssl_certificate": DataSourceGoogleComputeSslCertificate(), - "google_compute_ssl_policy": DataSourceGoogleComputeSslPolicy(), + "google_compute_region_ssl_certificate": compute.DataSourceGoogleRegionComputeSslCertificate(), + "google_compute_resource_policy": compute.DataSourceGoogleComputeResourcePolicy(), + "google_compute_router": compute.DataSourceGoogleComputeRouter(), + "google_compute_router_nat": compute.DataSourceGoogleComputeRouterNat(), + "google_compute_router_status": compute.DataSourceGoogleComputeRouterStatus(), + "google_compute_snapshot": compute.DataSourceGoogleComputeSnapshot(), + "google_compute_ssl_certificate": compute.DataSourceGoogleComputeSslCertificate(), + "google_compute_ssl_policy": compute.DataSourceGoogleComputeSslPolicy(), "google_compute_subnetwork": DataSourceGoogleComputeSubnetwork(), "google_compute_vpn_gateway": DataSourceGoogleComputeVpnGateway(), "google_compute_zones": DataSourceGoogleComputeZones(), @@ -257,54 +272,54 @@ func DatasourceMapWithErrors() (map[string]*schema.Resource, error) { "google_container_engine_versions": DataSourceGoogleContainerEngineVersions(), "google_container_registry_image": DataSourceGoogleContainerImage(), "google_container_registry_repository": DataSourceGoogleContainerRepo(), - "google_dataproc_metastore_service": DataSourceDataprocMetastoreService(), + "google_dataproc_metastore_service": dataprocmetastore.DataSourceDataprocMetastoreService(), "google_datastream_static_ips": DataSourceGoogleDatastreamStaticIps(), - "google_game_services_game_server_deployment_rollout": DataSourceGameServicesGameServerDeploymentRollout(), + "google_game_services_game_server_deployment_rollout": gameservices.DataSourceGameServicesGameServerDeploymentRollout(), "google_iam_policy": DataSourceGoogleIamPolicy(), "google_iam_role": DataSourceGoogleIamRole(), "google_iam_testable_permissions": DataSourceGoogleIamTestablePermissions(), <% unless version == 'ga' -%> - "google_iam_workload_identity_pool": DataSourceIAMBetaWorkloadIdentityPool(), - "google_iam_workload_identity_pool_provider": DataSourceIAMBetaWorkloadIdentityPoolProvider(), + "google_iam_workload_identity_pool": iambeta.DataSourceIAMBetaWorkloadIdentityPool(), + "google_iam_workload_identity_pool_provider": iambeta.DataSourceIAMBetaWorkloadIdentityPoolProvider(), <% end -%> - "google_iap_client": DataSourceGoogleIapClient(), - "google_kms_crypto_key": DataSourceGoogleKmsCryptoKey(), - "google_kms_crypto_key_version": DataSourceGoogleKmsCryptoKeyVersion(), - "google_kms_key_ring": DataSourceGoogleKmsKeyRing(), - "google_kms_secret": DataSourceGoogleKmsSecret(), - "google_kms_secret_ciphertext": DataSourceGoogleKmsSecretCiphertext(), + "google_iap_client": iap.DataSourceGoogleIapClient(), + "google_kms_crypto_key": kms.DataSourceGoogleKmsCryptoKey(), + "google_kms_crypto_key_version": 
kms.DataSourceGoogleKmsCryptoKeyVersion(), + "google_kms_key_ring": kms.DataSourceGoogleKmsKeyRing(), + "google_kms_secret": kms.DataSourceGoogleKmsSecret(), + "google_kms_secret_ciphertext": kms.DataSourceGoogleKmsSecretCiphertext(), <% unless version == 'ga' -%> "google_kms_secret_asymmetric": DataSourceGoogleKmsSecretAsymmetric(), - "google_firebase_android_app": DataSourceGoogleFirebaseAndroidApp(), - "google_firebase_apple_app": DataSourceGoogleFirebaseAppleApp(), - "google_firebase_hosting_channel": DataSourceGoogleFirebaseHostingChannel(), - "google_firebase_web_app": DataSourceGoogleFirebaseWebApp(), + "google_firebase_android_app": firebase.DataSourceGoogleFirebaseAndroidApp(), + "google_firebase_apple_app": firebase.DataSourceGoogleFirebaseAppleApp(), + "google_firebase_hosting_channel": firebasehosting.DataSourceGoogleFirebaseHostingChannel(), + "google_firebase_web_app": firebase.DataSourceGoogleFirebaseWebApp(), <% end -%> "google_folder": DataSourceGoogleFolder(), "google_folders": DataSourceGoogleFolders(), "google_folder_organization_policy": DataSourceGoogleFolderOrganizationPolicy(), "google_logging_project_cmek_settings": DataSourceGoogleLoggingProjectCmekSettings(), "google_logging_sink": DataSourceGoogleLoggingSink(), - "google_monitoring_notification_channel": DataSourceMonitoringNotificationChannel(), - "google_monitoring_cluster_istio_service": DataSourceMonitoringServiceClusterIstio(), - "google_monitoring_istio_canonical_service": DataSourceMonitoringIstioCanonicalService(), - "google_monitoring_mesh_istio_service": DataSourceMonitoringServiceMeshIstio(), - "google_monitoring_app_engine_service": DataSourceMonitoringServiceAppEngine(), + "google_monitoring_notification_channel": monitoring.DataSourceMonitoringNotificationChannel(), + "google_monitoring_cluster_istio_service": monitoring.DataSourceMonitoringServiceClusterIstio(), + "google_monitoring_istio_canonical_service": monitoring.DataSourceMonitoringIstioCanonicalService(), + "google_monitoring_mesh_istio_service": monitoring.DataSourceMonitoringServiceMeshIstio(), + "google_monitoring_app_engine_service": monitoring.DataSourceMonitoringServiceAppEngine(), "google_monitoring_uptime_check_ips": DataSourceGoogleMonitoringUptimeCheckIps(), "google_netblock_ip_ranges": DataSourceGoogleNetblockIpRanges(), "google_organization": DataSourceGoogleOrganization(), - "google_privateca_certificate_authority": DataSourcePrivatecaCertificateAuthority(), + "google_privateca_certificate_authority": privateca.DataSourcePrivatecaCertificateAuthority(), "google_project": DataSourceGoogleProject(), "google_projects": DataSourceGoogleProjects(), "google_project_organization_policy": DataSourceGoogleProjectOrganizationPolicy(), "google_project_service": DataSourceGoogleProjectService(), - "google_pubsub_subscription": DataSourceGooglePubsubSubscription(), - "google_pubsub_topic": DataSourceGooglePubsubTopic(), + "google_pubsub_subscription": pubsub.DataSourceGooglePubsubSubscription(), + "google_pubsub_topic": pubsub.DataSourceGooglePubsubTopic(), <% unless version == 'ga' -%> "google_runtimeconfig_config": runtimeconfig.DataSourceGoogleRuntimeconfigConfig(), "google_runtimeconfig_variable": runtimeconfig.DataSourceGoogleRuntimeconfigVariable(), <% end -%> - "google_secret_manager_secret": DataSourceSecretManagerSecret(), + "google_secret_manager_secret": secretmanager.DataSourceSecretManagerSecret(), "google_secret_manager_secret_version": DataSourceSecretManagerSecretVersion(), "google_secret_manager_secret_version_access": 
DataSourceSecretManagerSecretVersionAccess(), "google_service_account": DataSourceGoogleServiceAccount(), @@ -312,13 +327,13 @@ func DatasourceMapWithErrors() (map[string]*schema.Resource, error) { "google_service_account_id_token": DataSourceGoogleServiceAccountIdToken(), "google_service_account_jwt": DataSourceGoogleServiceAccountJwt(), "google_service_account_key": DataSourceGoogleServiceAccountKey(), - "google_sourcerepo_repository": DataSourceGoogleSourceRepoRepository(), - "google_spanner_instance": DataSourceSpannerInstance(), + "google_sourcerepo_repository": sourcerepo.DataSourceGoogleSourceRepoRepository(), + "google_spanner_instance": spanner.DataSourceSpannerInstance(), "google_sql_ca_certs": DataSourceGoogleSQLCaCerts(), "google_sql_tiers": DataSourceGoogleSQLTiers(), "google_sql_backup_run": DataSourceSqlBackupRun(), - "google_sql_databases": DataSourceSqlDatabases(), - "google_sql_database": DataSourceSqlDatabase(), + "google_sql_databases": sql.DataSourceSqlDatabases(), + "google_sql_database": sql.DataSourceSqlDatabase(), "google_sql_database_instance": DataSourceSqlDatabaseInstance(), "google_sql_database_instances": DataSourceSqlDatabaseInstances(), "google_service_networking_peered_dns_domain": DataSourceGoogleServiceNetworkingPeeredDNSDomain(), @@ -331,9 +346,12 @@ func DatasourceMapWithErrors() (map[string]*schema.Resource, error) { "google_tags_tag_key": DataSourceGoogleTagsTagKey(), "google_tags_tag_value": DataSourceGoogleTagsTagValue(), "google_tpu_tensorflow_versions": DataSourceTpuTensorflowVersions(), - "google_vpc_access_connector": DataSourceVPCAccessConnector(), - "google_redis_instance": DataSourceGoogleRedisInstance(), - "google_vertex_ai_index": dataSourceVertexAIIndex(), + "google_vpc_access_connector": vpcaccess.DataSourceVPCAccessConnector(), + "google_redis_instance": redis.DataSourceGoogleRedisInstance(), + "google_vertex_ai_index": vertexai.DataSourceVertexAIIndex(), + <% unless version == 'ga' -%> + "google_vmwareengine_network": vmwareengine.DataSourceVmwareengineNetwork(), + <% end -%> // ####### END datasources ########### // ####### END handwritten datasources ########### }, @@ -342,6 +360,7 @@ func DatasourceMapWithErrors() (map[string]*schema.Resource, error) { <% products.each do |product| product_definition = product[:definitions] + service = product_definition.name.downcase config = product[:overrides] sorted = product_definition.objects.sort_by { |obj| obj.name } sorted.each do |object| @@ -355,7 +374,7 @@ func DatasourceMapWithErrors() (map[string]*schema.Resource, error) { (iam_policy.min_version && iam_policy.min_version < version) iam_class_name = product_definition.name + object.name -%> - "<%= terraform_name -%>_iam_policy": tpgiamresource.DataSourceIamPolicy(<%= iam_class_name -%>IamSchema, <%= iam_class_name -%>IamUpdaterProducer), + "<%= terraform_name -%>_iam_policy": tpgiamresource.DataSourceIamPolicy(<%= service -%>.<%= iam_class_name -%>IamSchema, <%= service -%>.<%= iam_class_name -%>IamUpdaterProducer), <% end # unless iam_policy.nil? 
|| iam_policy.exclude end # product_definition.objects.each do @@ -372,12 +391,12 @@ func DatasourceMapWithErrors() (map[string]*schema.Resource, error) { "google_dataproc_cluster_iam_policy": tpgiamresource.DataSourceIamPolicy(IamDataprocClusterSchema, NewDataprocClusterUpdater), "google_dataproc_job_iam_policy": tpgiamresource.DataSourceIamPolicy(IamDataprocJobSchema, NewDataprocJobUpdater), "google_folder_iam_policy": tpgiamresource.DataSourceIamPolicy(IamFolderSchema, NewFolderIamUpdater), - "google_healthcare_dataset_iam_policy": tpgiamresource.DataSourceIamPolicy(IamHealthcareDatasetSchema, NewHealthcareDatasetIamUpdater), - "google_healthcare_dicom_store_iam_policy": tpgiamresource.DataSourceIamPolicy(IamHealthcareDicomStoreSchema, NewHealthcareDicomStoreIamUpdater), - "google_healthcare_fhir_store_iam_policy": tpgiamresource.DataSourceIamPolicy(IamHealthcareFhirStoreSchema, NewHealthcareFhirStoreIamUpdater), - "google_healthcare_hl7_v2_store_iam_policy": tpgiamresource.DataSourceIamPolicy(IamHealthcareHl7V2StoreSchema, NewHealthcareHl7V2StoreIamUpdater), - "google_kms_key_ring_iam_policy": tpgiamresource.DataSourceIamPolicy(IamKmsKeyRingSchema, NewKmsKeyRingIamUpdater), - "google_kms_crypto_key_iam_policy": tpgiamresource.DataSourceIamPolicy(IamKmsCryptoKeySchema, NewKmsCryptoKeyIamUpdater), + "google_healthcare_dataset_iam_policy": tpgiamresource.DataSourceIamPolicy(healthcare.IamHealthcareDatasetSchema, healthcare.NewHealthcareDatasetIamUpdater), + "google_healthcare_dicom_store_iam_policy": tpgiamresource.DataSourceIamPolicy(healthcare.IamHealthcareDicomStoreSchema, healthcare.NewHealthcareDicomStoreIamUpdater), + "google_healthcare_fhir_store_iam_policy": tpgiamresource.DataSourceIamPolicy(healthcare.IamHealthcareFhirStoreSchema, healthcare.NewHealthcareFhirStoreIamUpdater), + "google_healthcare_hl7_v2_store_iam_policy": tpgiamresource.DataSourceIamPolicy(healthcare.IamHealthcareHl7V2StoreSchema, healthcare.NewHealthcareHl7V2StoreIamUpdater), + "google_kms_key_ring_iam_policy": tpgiamresource.DataSourceIamPolicy(kms.IamKmsKeyRingSchema, kms.NewKmsKeyRingIamUpdater), + "google_kms_crypto_key_iam_policy": tpgiamresource.DataSourceIamPolicy(kms.IamKmsCryptoKeySchema, kms.NewKmsCryptoKeyIamUpdater), "google_spanner_instance_iam_policy": tpgiamresource.DataSourceIamPolicy(IamSpannerInstanceSchema, NewSpannerInstanceIamUpdater), "google_spanner_database_iam_policy": tpgiamresource.DataSourceIamPolicy(IamSpannerDatabaseSchema, NewSpannerDatabaseIamUpdater), "google_organization_iam_policy": tpgiamresource.DataSourceIamPolicy(IamOrganizationSchema, NewOrganizationIamUpdater), @@ -417,6 +436,7 @@ func ResourceMapWithErrors() (map[string]*schema.Resource, error) { <% products.each do |product| product_definition = product[:definitions] + service = product_definition.name.downcase config = product[:overrides] sorted = product_definition.objects.sort_by { |obj| obj.name } sorted.each do |object| @@ -425,7 +445,7 @@ products.each do |product| terraform_name = object.legacy_name || "google_#{tf_product}_#{object.name.underscore}" -%> <% unless object&.exclude_resource -%> - "<%= terraform_name -%>": Resource<%= product_definition.name + object.name -%>(), + "<%= terraform_name -%>": <%= service -%>.Resource<%= product_definition.name + object.name -%>(), <% end -%> <% iam_policy = object&.iam_policy @@ -433,9 +453,9 @@ products.each do |product| (iam_policy.min_version && iam_policy.min_version < version) iam_class_name = product_definition.name + object.name -%> - "<%= terraform_name 
-%>_iam_binding": tpgiamresource.ResourceIamBinding(<%= iam_class_name -%>IamSchema, <%= iam_class_name -%>IamUpdaterProducer, <%= iam_class_name -%>IdParseFunc), - "<%= terraform_name -%>_iam_member": tpgiamresource.ResourceIamMember(<%= iam_class_name -%>IamSchema, <%= iam_class_name -%>IamUpdaterProducer, <%= iam_class_name -%>IdParseFunc), - "<%= terraform_name -%>_iam_policy": tpgiamresource.ResourceIamPolicy(<%= iam_class_name -%>IamSchema, <%= iam_class_name -%>IamUpdaterProducer, <%= iam_class_name -%>IdParseFunc), + "<%= terraform_name -%>_iam_binding": tpgiamresource.ResourceIamBinding(<%= service -%>.<%= iam_class_name -%>IamSchema, <%= service -%>.<%= iam_class_name -%>IamUpdaterProducer, <%= service -%>.<%= iam_class_name -%>IdParseFunc), + "<%= terraform_name -%>_iam_member": tpgiamresource.ResourceIamMember(<%= service -%>.<%= iam_class_name -%>IamSchema, <%= service -%>.<%= iam_class_name -%>IamUpdaterProducer, <%= service -%>.<%= iam_class_name -%>IdParseFunc), + "<%= terraform_name -%>_iam_policy": tpgiamresource.ResourceIamPolicy(<%= service -%>.<%= iam_class_name -%>IamSchema, <%= service -%>.<%= iam_class_name -%>IamUpdaterProducer, <%= service -%>.<%= iam_class_name -%>IdParseFunc), <% end # unless iam_policy.nil? || iam_policy.exclude end # product_definition.objects.each do @@ -458,24 +478,24 @@ end # products.each do "google_cloudfunctions_function": ResourceCloudFunctionsFunction(), "google_composer_environment": ResourceComposerEnvironment(), "google_compute_attached_disk": ResourceComputeAttachedDisk(), - "google_compute_instance": ResourceComputeInstance(), + "google_compute_instance": compute.ResourceComputeInstance(), <% unless version == 'ga' -%> "google_compute_disk_async_replication": ResourceComputeDiskAsyncReplication(), - "google_compute_instance_from_machine_image": ResourceComputeInstanceFromMachineImage(), + "google_compute_instance_from_machine_image": compute.ResourceComputeInstanceFromMachineImage(), <% end -%> - "google_compute_instance_from_template": ResourceComputeInstanceFromTemplate(), - "google_compute_instance_group": ResourceComputeInstanceGroup(), - "google_compute_instance_group_manager": ResourceComputeInstanceGroupManager(), - "google_compute_instance_template": ResourceComputeInstanceTemplate(), + "google_compute_instance_from_template": compute.ResourceComputeInstanceFromTemplate(), + "google_compute_instance_group": compute.ResourceComputeInstanceGroup(), + "google_compute_instance_group_manager": compute.ResourceComputeInstanceGroupManager(), + "google_compute_instance_template": compute.ResourceComputeInstanceTemplate(), "google_compute_network_peering": ResourceComputeNetworkPeering(), "google_compute_project_default_network_tier": ResourceComputeProjectDefaultNetworkTier(), - "google_compute_project_metadata": ResourceComputeProjectMetadata(), - "google_compute_project_metadata_item": ResourceComputeProjectMetadataItem(), - "google_compute_region_instance_group_manager": ResourceComputeRegionInstanceGroupManager(), + "google_compute_project_metadata": compute.ResourceComputeProjectMetadata(), + "google_compute_project_metadata_item": compute.ResourceComputeProjectMetadataItem(), + "google_compute_region_instance_group_manager": compute.ResourceComputeRegionInstanceGroupManager(), <% unless version == 'ga' -%> - "google_compute_region_instance_template": ResourceComputeRegionInstanceTemplate(), + "google_compute_region_instance_template": compute.ResourceComputeRegionInstanceTemplate(), <% end -%> - 
"google_compute_router_interface": ResourceComputeRouterInterface(), + "google_compute_router_interface": compute.ResourceComputeRouterInterface(), "google_compute_security_policy": ResourceComputeSecurityPolicy(), "google_compute_shared_vpc_host_project": ResourceComputeSharedVpcHostProject(), "google_compute_shared_vpc_service_project": ResourceComputeSharedVpcServiceProject(), @@ -491,7 +511,7 @@ end # products.each do "google_dataproc_job": ResourceDataprocJob(), "google_dialogflow_cx_version": ResourceDialogflowCXVersion(), "google_dialogflow_cx_environment": ResourceDialogflowCXEnvironment(), - "google_dns_record_set": ResourceDnsRecordSet(), + "google_dns_record_set": dns.ResourceDnsRecordSet(), "google_endpoints_service": ResourceEndpointsService(), "google_folder": ResourceGoogleFolder(), "google_folder_organization_policy": ResourceGoogleFolderOrganizationPolicy(), @@ -564,24 +584,24 @@ end # products.each do "google_folder_iam_member": tpgiamresource.ResourceIamMember(IamFolderSchema, NewFolderIamUpdater, FolderIdParseFunc), "google_folder_iam_policy": tpgiamresource.ResourceIamPolicy(IamFolderSchema, NewFolderIamUpdater, FolderIdParseFunc), "google_folder_iam_audit_config": tpgiamresource.ResourceIamAuditConfig(IamFolderSchema, NewFolderIamUpdater, FolderIdParseFunc), - "google_healthcare_dataset_iam_binding": tpgiamresource.ResourceIamBinding(IamHealthcareDatasetSchema, NewHealthcareDatasetIamUpdater, DatasetIdParseFunc, tpgiamresource.IamWithBatching), - "google_healthcare_dataset_iam_member": tpgiamresource.ResourceIamMember(IamHealthcareDatasetSchema, NewHealthcareDatasetIamUpdater, DatasetIdParseFunc, tpgiamresource.IamWithBatching), - "google_healthcare_dataset_iam_policy": tpgiamresource.ResourceIamPolicy(IamHealthcareDatasetSchema, NewHealthcareDatasetIamUpdater, DatasetIdParseFunc), - "google_healthcare_dicom_store_iam_binding": tpgiamresource.ResourceIamBinding(IamHealthcareDicomStoreSchema, NewHealthcareDicomStoreIamUpdater, DicomStoreIdParseFunc, tpgiamresource.IamWithBatching), - "google_healthcare_dicom_store_iam_member": tpgiamresource.ResourceIamMember(IamHealthcareDicomStoreSchema, NewHealthcareDicomStoreIamUpdater, DicomStoreIdParseFunc, tpgiamresource.IamWithBatching), - "google_healthcare_dicom_store_iam_policy": tpgiamresource.ResourceIamPolicy(IamHealthcareDicomStoreSchema, NewHealthcareDicomStoreIamUpdater, DicomStoreIdParseFunc), - "google_healthcare_fhir_store_iam_binding": tpgiamresource.ResourceIamBinding(IamHealthcareFhirStoreSchema, NewHealthcareFhirStoreIamUpdater, FhirStoreIdParseFunc, tpgiamresource.IamWithBatching), - "google_healthcare_fhir_store_iam_member": tpgiamresource.ResourceIamMember(IamHealthcareFhirStoreSchema, NewHealthcareFhirStoreIamUpdater, FhirStoreIdParseFunc, tpgiamresource.IamWithBatching), - "google_healthcare_fhir_store_iam_policy": tpgiamresource.ResourceIamPolicy(IamHealthcareFhirStoreSchema, NewHealthcareFhirStoreIamUpdater, FhirStoreIdParseFunc), - "google_healthcare_hl7_v2_store_iam_binding": tpgiamresource.ResourceIamBinding(IamHealthcareHl7V2StoreSchema, NewHealthcareHl7V2StoreIamUpdater, Hl7V2StoreIdParseFunc, tpgiamresource.IamWithBatching), - "google_healthcare_hl7_v2_store_iam_member": tpgiamresource.ResourceIamMember(IamHealthcareHl7V2StoreSchema, NewHealthcareHl7V2StoreIamUpdater, Hl7V2StoreIdParseFunc, tpgiamresource.IamWithBatching), - "google_healthcare_hl7_v2_store_iam_policy": tpgiamresource.ResourceIamPolicy(IamHealthcareHl7V2StoreSchema, NewHealthcareHl7V2StoreIamUpdater, Hl7V2StoreIdParseFunc), - 
"google_kms_key_ring_iam_binding": tpgiamresource.ResourceIamBinding(IamKmsKeyRingSchema, NewKmsKeyRingIamUpdater, KeyRingIdParseFunc), - "google_kms_key_ring_iam_member": tpgiamresource.ResourceIamMember(IamKmsKeyRingSchema, NewKmsKeyRingIamUpdater, KeyRingIdParseFunc), - "google_kms_key_ring_iam_policy": tpgiamresource.ResourceIamPolicy(IamKmsKeyRingSchema, NewKmsKeyRingIamUpdater, KeyRingIdParseFunc), - "google_kms_crypto_key_iam_binding": tpgiamresource.ResourceIamBinding(IamKmsCryptoKeySchema, NewKmsCryptoKeyIamUpdater, CryptoIdParseFunc), - "google_kms_crypto_key_iam_member": tpgiamresource.ResourceIamMember(IamKmsCryptoKeySchema, NewKmsCryptoKeyIamUpdater, CryptoIdParseFunc), - "google_kms_crypto_key_iam_policy": tpgiamresource.ResourceIamPolicy(IamKmsCryptoKeySchema, NewKmsCryptoKeyIamUpdater, CryptoIdParseFunc), + "google_healthcare_dataset_iam_binding": tpgiamresource.ResourceIamBinding(healthcare.IamHealthcareDatasetSchema, healthcare.NewHealthcareDatasetIamUpdater, healthcare.DatasetIdParseFunc, tpgiamresource.IamWithBatching), + "google_healthcare_dataset_iam_member": tpgiamresource.ResourceIamMember(healthcare.IamHealthcareDatasetSchema, healthcare.NewHealthcareDatasetIamUpdater, healthcare.DatasetIdParseFunc, tpgiamresource.IamWithBatching), + "google_healthcare_dataset_iam_policy": tpgiamresource.ResourceIamPolicy(healthcare.IamHealthcareDatasetSchema, healthcare.NewHealthcareDatasetIamUpdater, healthcare.DatasetIdParseFunc), + "google_healthcare_dicom_store_iam_binding": tpgiamresource.ResourceIamBinding(healthcare.IamHealthcareDicomStoreSchema, healthcare.NewHealthcareDicomStoreIamUpdater, healthcare.DicomStoreIdParseFunc, tpgiamresource.IamWithBatching), + "google_healthcare_dicom_store_iam_member": tpgiamresource.ResourceIamMember(healthcare.IamHealthcareDicomStoreSchema, healthcare.NewHealthcareDicomStoreIamUpdater, healthcare.DicomStoreIdParseFunc, tpgiamresource.IamWithBatching), + "google_healthcare_dicom_store_iam_policy": tpgiamresource.ResourceIamPolicy(healthcare.IamHealthcareDicomStoreSchema, healthcare.NewHealthcareDicomStoreIamUpdater, healthcare.DicomStoreIdParseFunc), + "google_healthcare_fhir_store_iam_binding": tpgiamresource.ResourceIamBinding(healthcare.IamHealthcareFhirStoreSchema, healthcare.NewHealthcareFhirStoreIamUpdater, healthcare.FhirStoreIdParseFunc, tpgiamresource.IamWithBatching), + "google_healthcare_fhir_store_iam_member": tpgiamresource.ResourceIamMember(healthcare.IamHealthcareFhirStoreSchema, healthcare.NewHealthcareFhirStoreIamUpdater, healthcare.FhirStoreIdParseFunc, tpgiamresource.IamWithBatching), + "google_healthcare_fhir_store_iam_policy": tpgiamresource.ResourceIamPolicy(healthcare.IamHealthcareFhirStoreSchema, healthcare.NewHealthcareFhirStoreIamUpdater, healthcare.FhirStoreIdParseFunc), + "google_healthcare_hl7_v2_store_iam_binding": tpgiamresource.ResourceIamBinding(healthcare.IamHealthcareHl7V2StoreSchema, healthcare.NewHealthcareHl7V2StoreIamUpdater, healthcare.Hl7V2StoreIdParseFunc, tpgiamresource.IamWithBatching), + "google_healthcare_hl7_v2_store_iam_member": tpgiamresource.ResourceIamMember(healthcare.IamHealthcareHl7V2StoreSchema, healthcare.NewHealthcareHl7V2StoreIamUpdater, healthcare.Hl7V2StoreIdParseFunc, tpgiamresource.IamWithBatching), + "google_healthcare_hl7_v2_store_iam_policy": tpgiamresource.ResourceIamPolicy(healthcare.IamHealthcareHl7V2StoreSchema, healthcare.NewHealthcareHl7V2StoreIamUpdater, healthcare.Hl7V2StoreIdParseFunc), + "google_kms_key_ring_iam_binding": 
tpgiamresource.ResourceIamBinding(kms.IamKmsKeyRingSchema, kms.NewKmsKeyRingIamUpdater, kms.KeyRingIdParseFunc), + "google_kms_key_ring_iam_member": tpgiamresource.ResourceIamMember(kms.IamKmsKeyRingSchema, kms.NewKmsKeyRingIamUpdater, kms.KeyRingIdParseFunc), + "google_kms_key_ring_iam_policy": tpgiamresource.ResourceIamPolicy(kms.IamKmsKeyRingSchema, kms.NewKmsKeyRingIamUpdater, kms.KeyRingIdParseFunc), + "google_kms_crypto_key_iam_binding": tpgiamresource.ResourceIamBinding(kms.IamKmsCryptoKeySchema, kms.NewKmsCryptoKeyIamUpdater, kms.CryptoIdParseFunc), + "google_kms_crypto_key_iam_member": tpgiamresource.ResourceIamMember(kms.IamKmsCryptoKeySchema, kms.NewKmsCryptoKeyIamUpdater, kms.CryptoIdParseFunc), + "google_kms_crypto_key_iam_policy": tpgiamresource.ResourceIamPolicy(kms.IamKmsCryptoKeySchema, kms.NewKmsCryptoKeyIamUpdater, kms.CryptoIdParseFunc), "google_spanner_instance_iam_binding": tpgiamresource.ResourceIamBinding(IamSpannerInstanceSchema, NewSpannerInstanceIamUpdater, SpannerInstanceIdParseFunc), "google_spanner_instance_iam_member": tpgiamresource.ResourceIamMember(IamSpannerInstanceSchema, NewSpannerInstanceIamUpdater, SpannerInstanceIdParseFunc), "google_spanner_instance_iam_policy": tpgiamresource.ResourceIamPolicy(IamSpannerInstanceSchema, NewSpannerInstanceIamUpdater, SpannerInstanceIdParseFunc), diff --git a/mmv1/third_party/terraform/utils/pubsub_utils.go b/mmv1/third_party/terraform/utils/pubsub_utils.go index 64d1137e4f21..4568bbd18473 100644 --- a/mmv1/third_party/terraform/utils/pubsub_utils.go +++ b/mmv1/third_party/terraform/utils/pubsub_utils.go @@ -1,24 +1,15 @@ package google import ( - "fmt" - "regexp" + "github.com/hashicorp/terraform-provider-google/google/services/pubsub" ) -const PubsubTopicRegex = "projects\\/.*\\/topics\\/.*" +const PubsubTopicRegex = pubsub.PubsubTopicRegex func getComputedSubscriptionName(project, subscription string) string { - match, _ := regexp.MatchString("projects\\/.*\\/subscriptions\\/.*", subscription) - if match { - return subscription - } - return fmt.Sprintf("projects/%s/subscriptions/%s", project, subscription) + return pubsub.GetComputedSubscriptionName(project, subscription) } func getComputedTopicName(project, topic string) string { - match, _ := regexp.MatchString(PubsubTopicRegex, topic) - if match { - return topic - } - return fmt.Sprintf("projects/%s/topics/%s", project, topic) + return pubsub.GetComputedTopicName(project, topic) } diff --git a/mmv1/third_party/terraform/utils/retry_utils.go b/mmv1/third_party/terraform/utils/retry_utils.go index 6c4f0bdb3776..4640deb63a43 100644 --- a/mmv1/third_party/terraform/utils/retry_utils.go +++ b/mmv1/third_party/terraform/utils/retry_utils.go @@ -9,25 +9,31 @@ import ( // Deprecated: For backward compatibility retry is still working, // but all new code should use Retry in the transport_tpg package instead. func retry(retryFunc func() error) error { - return transport_tpg.Retry(retryFunc) + return transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: retryFunc, + }) } // Deprecated: For backward compatibility retryTime is still working, // but all new code should use RetryTime in the transport_tpg package instead. func retryTime(retryFunc func() error, minutes int) error { - return transport_tpg.RetryTime(retryFunc, minutes) + return RetryTimeDuration(retryFunc, time.Duration(minutes)*time.Minute) } // Deprecated: For backward compatibility RetryTimeDuration is still working, // but all new code should use RetryTimeDuration in the transport_tpg package instead. 
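// Illustrative sketch (hypothetical caller, not part of this diff): the wrappers in this file show the
// migration from the positional retry helpers to the options-struct form of transport_tpg.Retry. A
// caller that previously wrote
//
//	err := RetryTimeDuration(updateFunc, d.Timeout(schema.TimeoutUpdate))
//
// would, with the API used in this diff, write
//
//	err := transport_tpg.Retry(transport_tpg.RetryOptions{
//		RetryFunc: updateFunc,
//		Timeout:   d.Timeout(schema.TimeoutUpdate),
//	})
//
// where updateFunc and the schema timeout are placeholders; RetryOptions and its RetryFunc, Timeout,
// PollInterval and ErrorRetryPredicates fields are the ones visible in the wrapper bodies here.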
func RetryTimeDuration(retryFunc func() error, duration time.Duration, errorRetryPredicates ...transport_tpg.RetryErrorPredicateFunc) error { - return transport_tpg.RetryTimeDuration(retryFunc, duration, errorRetryPredicates...) + return transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: retryFunc, + Timeout: duration, + ErrorRetryPredicates: errorRetryPredicates, + }) } // Deprecated: For backward compatibility isRetryableError is still working, // but all new code should use IsRetryableError in the transport_tpg package instead. func isRetryableError(topErr error, customPredicates ...transport_tpg.RetryErrorPredicateFunc) bool { - return transport_tpg.IsRetryableError(topErr, customPredicates...) + return transport_tpg.IsRetryableError(topErr, customPredicates, nil) } // The polling overrides the default backoff logic with max backoff of 10s. The poll interval can be greater than 10s. @@ -35,5 +41,13 @@ func isRetryableError(topErr error, customPredicates ...transport_tpg.RetryError // Deprecated: For backward compatibility retryWithPolling is still working, // but all new code should use RetryWithPolling in the transport_tpg package instead. func retryWithPolling(retryFunc func() (interface{}, error), timeout time.Duration, pollInterval time.Duration, errorRetryPredicates ...transport_tpg.RetryErrorPredicateFunc) (interface{}, error) { - return transport_tpg.RetryWithPolling(retryFunc, timeout, pollInterval, errorRetryPredicates...) + return "", transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + _, err := retryFunc() + return err + }, + Timeout: timeout, + PollInterval: pollInterval, + ErrorRetryPredicates: errorRetryPredicates, + }) } diff --git a/mmv1/third_party/terraform/utils/runadminv3_operation.go b/mmv1/third_party/terraform/utils/runadminv3_operation.go index c5767c2337cd..2a34a339342a 100644 --- a/mmv1/third_party/terraform/utils/runadminv3_operation.go +++ b/mmv1/third_party/terraform/utils/runadminv3_operation.go @@ -1,68 +1,22 @@ package google import ( - "encoding/json" - "fmt" "time" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" + "github.com/hashicorp/terraform-provider-google/google/services/cloudrunv2" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" "google.golang.org/api/run/v2" ) -type RunAdminV2OperationWaiter struct { - Config *transport_tpg.Config - UserAgent string - Project string - tpgresource.CommonOperationWaiter -} - -func (w *RunAdminV2OperationWaiter) QueryOp() (interface{}, error) { - if w == nil { - return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") - } - url := fmt.Sprintf("%s%s", w.Config.CloudRunV2BasePath, w.CommonOperationWaiter.Op.Name) - - return transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: w.Config, - Method: "GET", - Project: w.Project, - RawURL: url, - UserAgent: w.UserAgent, - }) -} - -func createRunAdminV2Waiter(config *transport_tpg.Config, op *run.GoogleLongrunningOperation, project, activity, userAgent string) (*RunAdminV2OperationWaiter, error) { - w := &RunAdminV2OperationWaiter{ - Config: config, - UserAgent: userAgent, - Project: project, - } - if err := w.CommonOperationWaiter.SetOp(op); err != nil { - return nil, err - } - return w, nil -} - +// Deprecated: For backward compatibility runAdminV2OperationWaitTimeWithResponse is still working, +// but all new code should use RunAdminV2OperationWaitTimeWithResponse in the cloudrunv2 package instead. 
func runAdminV2OperationWaitTimeWithResponse(config *transport_tpg.Config, op *run.GoogleLongrunningOperation, response *map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { - w, err := createRunAdminV2Waiter(config, op, project, activity, userAgent) - if err != nil { - return err - } - if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { - return err - } - return json.Unmarshal([]byte(w.CommonOperationWaiter.Op.Response), response) + return cloudrunv2.RunAdminV2OperationWaitTimeWithResponse(config, op, response, project, activity, userAgent, timeout) } +// Deprecated: For backward compatibility runAdminV2OperationWaitTime is still working, +// but all new code should use RunAdminV2OperationWaitTime in the cloudrunv2 package instead. func runAdminV2OperationWaitTime(config *transport_tpg.Config, op *run.GoogleLongrunningOperation, project, activity, userAgent string, timeout time.Duration) error { - if op.Done { - return nil - } - w, err := createRunAdminV2Waiter(config, op, project, activity, userAgent) - if err != nil { - return err - } - return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) + return cloudrunv2.RunAdminV2OperationWaitTime(config, op, project, activity, userAgent, timeout) } diff --git a/mmv1/third_party/terraform/utils/service_networking_operation.go b/mmv1/third_party/terraform/utils/service_networking_operation.go index ba52e86bc5a6..ef22ca412447 100644 --- a/mmv1/third_party/terraform/utils/service_networking_operation.go +++ b/mmv1/third_party/terraform/utils/service_networking_operation.go @@ -3,35 +3,13 @@ package google import ( "time" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" + tpgservicenetworking "github.com/hashicorp/terraform-provider-google/google/services/servicenetworking" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" "google.golang.org/api/servicenetworking/v1" ) -type ServiceNetworkingOperationWaiter struct { - Service *servicenetworking.APIService - Project string - UserProjectOverride bool - tpgresource.CommonOperationWaiter -} - -func (w *ServiceNetworkingOperationWaiter) QueryOp() (interface{}, error) { - opGetCall := w.Service.Operations.Get(w.Op.Name) - if w.UserProjectOverride { - opGetCall.Header().Add("X-Goog-User-Project", w.Project) - } - return opGetCall.Do() -} - +// Deprecated: For backward compatibility ServiceNetworkingOperationWaitTime is still working, +// but all new code should use ServiceNetworkingOperationWaitTime in the tpgservicenetworking package instead. 
func ServiceNetworkingOperationWaitTime(config *transport_tpg.Config, op *servicenetworking.Operation, activity, userAgent, project string, timeout time.Duration) error { - w := &ServiceNetworkingOperationWaiter{ - Service: config.NewServiceNetworkingClient(userAgent), - Project: project, - UserProjectOverride: config.UserProjectOverride, - } - - if err := w.SetOp(op); err != nil { - return err - } - return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) + return tpgservicenetworking.ServiceNetworkingOperationWaitTime(config, op, activity, userAgent, project, timeout) } diff --git a/mmv1/third_party/terraform/utils/serviceman_operation.go b/mmv1/third_party/terraform/utils/serviceman_operation.go index 9bad15c3f122..a99834034716 100644 --- a/mmv1/third_party/terraform/utils/serviceman_operation.go +++ b/mmv1/third_party/terraform/utils/serviceman_operation.go @@ -1,38 +1,16 @@ package google import ( - "fmt" "time" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" + tpgservicemanagement "github.com/hashicorp/terraform-provider-google/google/services/servicemanagement" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" "google.golang.org/api/googleapi" "google.golang.org/api/servicemanagement/v1" ) -type ServiceManagementOperationWaiter struct { - Service *servicemanagement.APIService - tpgresource.CommonOperationWaiter -} - -func (w *ServiceManagementOperationWaiter) QueryOp() (interface{}, error) { - if w == nil { - return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") - } - return w.Service.Operations.Get(w.Op.Name).Do() -} - +// Deprecated: For backward compatibility ServiceManagementOperationWaitTime is still working, +// but all new code should use ServiceManagementOperationWaitTime in the tpgservicemanagement package instead. func ServiceManagementOperationWaitTime(config *transport_tpg.Config, op *servicemanagement.Operation, activity, userAgent string, timeout time.Duration) (googleapi.RawMessage, error) { - w := &ServiceManagementOperationWaiter{ - Service: config.NewServiceManClient(userAgent), - } - - if err := w.SetOp(op); err != nil { - return nil, err - } - - if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { - return nil, err - } - return w.Op.Response, nil + return tpgservicemanagement.ServiceManagementOperationWaitTime(config, op, activity, userAgent, timeout) } diff --git a/mmv1/third_party/terraform/utils/sqladmin_operation.go b/mmv1/third_party/terraform/utils/sqladmin_operation.go index 1ae9366bc70a..cff7170820a7 100644 --- a/mmv1/third_party/terraform/utils/sqladmin_operation.go +++ b/mmv1/third_party/terraform/utils/sqladmin_operation.go @@ -1,152 +1,23 @@ package google import ( - "bytes" - "fmt" - "log" "time" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" + "github.com/hashicorp/terraform-provider-google/google/services/sql" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" - - sqladmin "google.golang.org/api/sqladmin/v1beta4" ) -type SqlAdminOperationWaiter struct { - Service *sqladmin.Service - Op *sqladmin.Operation - Project string -} - -func (w *SqlAdminOperationWaiter) State() string { - if w == nil { - return "Operation Waiter is nil!" - } - - if w.Op == nil { - return "Operation is nil!" 
- } - - return w.Op.Status -} - -func (w *SqlAdminOperationWaiter) Error() error { - if w != nil && w.Op != nil && w.Op.Error != nil { - return SqlAdminOperationError(*w.Op.Error) - } - return nil -} - -func (w *SqlAdminOperationWaiter) IsRetryable(error) bool { - return false -} - -func (w *SqlAdminOperationWaiter) SetOp(op interface{}) error { - if op == nil { - // Starting as a log statement, this may be a useful error in the future - log.Printf("[DEBUG] attempted to set nil op") - } - - sqlOp, ok := op.(*sqladmin.Operation) - w.Op = sqlOp - if !ok { - return fmt.Errorf("Unable to set operation. Bad type!") - } - - return nil -} - -func (w *SqlAdminOperationWaiter) QueryOp() (interface{}, error) { - if w == nil { - return nil, fmt.Errorf("Cannot query operation, waiter is unset or nil.") - } - - if w.Op == nil { - return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") - } - - if w.Service == nil { - return nil, fmt.Errorf("Cannot query operation, service is nil.") - } - - var op interface{} - var err error - err = transport_tpg.RetryTimeDuration( - func() error { - op, err = w.Service.Operations.Get(w.Project, w.Op.Name).Do() - return err - }, - - transport_tpg.DefaultRequestTimeout, - ) - - return op, err -} - -func (w *SqlAdminOperationWaiter) OpName() string { - if w == nil { - return "" - } - - if w.Op == nil { - return "" - } - - return w.Op.Name -} - -func (w *SqlAdminOperationWaiter) PendingStates() []string { - return []string{"PENDING", "RUNNING"} -} - -func (w *SqlAdminOperationWaiter) TargetStates() []string { - return []string{"DONE"} -} - +// Deprecated: For backward compatibility SqlAdminOperationWaitTime is still working, +// but all new code should use SqlAdminOperationWaitTime in the sql package instead. func SqlAdminOperationWaitTime(config *transport_tpg.Config, res interface{}, project, activity, userAgent string, timeout time.Duration) error { - op := &sqladmin.Operation{} - err := tpgresource.Convert(res, op) - if err != nil { - return err - } - - w := &SqlAdminOperationWaiter{ - Service: config.NewSqlAdminClient(userAgent), - Op: op, - Project: project, - } - if err := w.SetOp(op); err != nil { - return err - } - return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) -} - -// SqlAdminOperationError wraps sqladmin.OperationError and implements the -// error interface so it can be returned. -type SqlAdminOperationError sqladmin.OperationErrors - -func (e SqlAdminOperationError) Error() string { - var buf bytes.Buffer - - for _, err := range e.Errors { - buf.WriteString(err.Message + "\n") - } - - return buf.String() + return sql.SqlAdminOperationWaitTime(config, res, project, activity, userAgent, timeout) } // Retry if Cloud SQL operation returns a 429 with a specific message for // concurrent operations. +// +// Deprecated: For backward compatibility IsSqlInternalError is still working, +// but all new code should use IsSqlInternalError in the sql package instead. func IsSqlInternalError(err error) (bool, string) { - if gerr, ok := err.(*SqlAdminOperationError); ok { - // SqlAdminOperationError is a non-interface type so we need to cast it through - // a layer of interface{}. :) - var ierr interface{} - ierr = gerr - if serr, ok := ierr.(*sqladmin.OperationErrors); ok && serr.Errors[0].Code == "INTERNAL_ERROR" { - return true, "Received an internal error, which is sometimes retryable for some SQL resources. Optimistically retrying." 
- } - - } - return false, "" + return sql.IsSqlInternalError(err) } diff --git a/mmv1/third_party/terraform/utils/stateful_mig_polling.go b/mmv1/third_party/terraform/utils/stateful_mig_polling.go index 90b1f571035b..8cb0a777b5bf 100644 --- a/mmv1/third_party/terraform/utils/stateful_mig_polling.go +++ b/mmv1/third_party/terraform/utils/stateful_mig_polling.go @@ -1,169 +1,12 @@ package google import ( - "fmt" - "strings" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" + tpgcompute "github.com/hashicorp/terraform-provider-google/google/services/compute" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) -// PerInstanceConfig needs both regular operation polling AND custom polling for deletion which is why this is not generated -func resourceComputePerInstanceConfigPollRead(d *schema.ResourceData, meta interface{}) transport_tpg.PollReadFunc { - return func() (map[string]interface{}, error) { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return nil, err - } - - url, err := tpgresource.ReplaceVars(d, config, "{{ComputeBasePath}}projects/{{project}}/zones/{{zone}}/instanceGroupManagers/{{instance_group_manager}}/listPerInstanceConfigs") - if err != nil { - return nil, err - } - - project, err := tpgresource.GetProject(d, config) - if err != nil { - return nil, err - } - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "POST", - Project: project, - RawURL: url, - UserAgent: userAgent, - }) - if err != nil { - return res, err - } - res, err = flattenNestedComputePerInstanceConfig(d, meta, res) - if err != nil { - return nil, err - } - - // Returns nil res if nested object is not found - return res, nil - } -} - -// RegionPerInstanceConfig needs both regular operation polling AND custom polling for deletion which is why this is not generated -func resourceComputeRegionPerInstanceConfigPollRead(d *schema.ResourceData, meta interface{}) transport_tpg.PollReadFunc { - return func() (map[string]interface{}, error) { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return nil, err - } - - url, err := tpgresource.ReplaceVars(d, config, "{{ComputeBasePath}}projects/{{project}}/regions/{{region}}/instanceGroupManagers/{{region_instance_group_manager}}/listPerInstanceConfigs") - if err != nil { - return nil, err - } - - project, err := tpgresource.GetProject(d, config) - if err != nil { - return nil, err - } - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "POST", - Project: project, - RawURL: url, - UserAgent: userAgent, - }) - if err != nil { - return res, err - } - res, err = flattenNestedComputeRegionPerInstanceConfig(d, meta, res) - if err != nil { - return nil, err - } - - // Returns nil res if nested object is not found - return res, nil - } -} - -// Returns an instance name in the form zones/{zone}/instances/{instance} for the managed -// instance matching the name of a PerInstanceConfig -func findInstanceName(d *schema.ResourceData, config *transport_tpg.Config) (string, error) { - url, err := tpgresource.ReplaceVars(d, config, "{{ComputeBasePath}}projects/{{project}}/regions/{{region}}/instanceGroupManagers/{{region_instance_group_manager}}/listManagedInstances") - if err != nil { - return "", err - 
} - - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return "", err - } - - project, err := tpgresource.GetProject(d, config) - if err != nil { - return "", err - } - instanceNameToFind := fmt.Sprintf("/%s", d.Get("name").(string)) - - token := "" - for paginate := true; paginate; { - urlWithToken := "" - if token != "" { - urlWithToken = fmt.Sprintf("%s?maxResults=1&pageToken=%s", url, token) - } else { - urlWithToken = fmt.Sprintf("%s?maxResults=1", url) - } - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "POST", - Project: project, - RawURL: urlWithToken, - UserAgent: userAgent, - }) - if err != nil { - return "", err - } - - managedInstances, ok := res["managedInstances"] - if !ok { - return "", fmt.Errorf("Failed to parse response for listManagedInstances for %s", d.Id()) - } - - managedInstancesArr := managedInstances.([]interface{}) - for _, managedInstanceRaw := range managedInstancesArr { - instance := managedInstanceRaw.(map[string]interface{}) - name, ok := instance["instance"] - if !ok { - return "", fmt.Errorf("Failed to read instance name for managed instance: %#v", instance) - } - if strings.HasSuffix(name.(string), instanceNameToFind) { - return name.(string), nil - } - } - - tokenRaw, paginate := res["nextPageToken"] - if paginate { - token = tokenRaw.(string) - } - } - - return "", fmt.Errorf("Failed to find managed instance with name: %s", instanceNameToFind) -} - +// Deprecated: For backward compatibility PollCheckInstanceConfigDeleted is still working, +// but all new code should use PollCheckInstanceConfigDeleted in the tpgcompute package instead. func PollCheckInstanceConfigDeleted(resp map[string]interface{}, respErr error) transport_tpg.PollResult { - if respErr != nil { - return ErrorPollResult(respErr) - } - - // Nested object 404 appears as nil response - if resp == nil { - // Config no longer exists - return SuccessPollResult() - } - - // Read status - status := resp["status"].(string) - if status == "DELETING" { - return PendingStatusPollResult("Still deleting") - } - return ErrorPollResult(fmt.Errorf("Expected PerInstanceConfig to be deleting but status is: %s", status)) + return tpgcompute.PollCheckInstanceConfigDeleted(resp, respErr) } diff --git a/mmv1/third_party/terraform/utils/tags_location_operation.go b/mmv1/third_party/terraform/utils/tags_location_operation.go index 792d48c69230..bc69e9317863 100644 --- a/mmv1/third_party/terraform/utils/tags_location_operation.go +++ b/mmv1/third_party/terraform/utils/tags_location_operation.go @@ -1,89 +1,26 @@ package google import ( - "encoding/json" - "fmt" - "regexp" - "strings" "time" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" + "github.com/hashicorp/terraform-provider-google/google/services/tags" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) -type TagsLocationOperationWaiter struct { - Config *transport_tpg.Config - UserAgent string - tpgresource.CommonOperationWaiter -} - -func (w *TagsLocationOperationWaiter) QueryOp() (interface{}, error) { - if w == nil { - return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") - } - location := GetLocationFromOpName(w.CommonOperationWaiter.Op.Name) - if location != w.CommonOperationWaiter.Op.Name { - // Found location in Op.Name, fill it in TagsLocationBasePath and rewrite URL - url := fmt.Sprintf("%s%s", strings.Replace(w.Config.TagsLocationBasePath, "{{location}}", location, 1), 
w.CommonOperationWaiter.Op.Name) - return transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: w.Config, - Method: "GET", - RawURL: url, - UserAgent: w.UserAgent, - }) - } else { - url := fmt.Sprintf("%s%s", w.Config.TagsBasePath, w.CommonOperationWaiter.Op.Name) - return transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: w.Config, - Method: "GET", - RawURL: url, - UserAgent: w.UserAgent, - }) - } -} - -func createTagsLocationWaiter(config *transport_tpg.Config, op map[string]interface{}, activity, userAgent string) (*TagsLocationOperationWaiter, error) { - w := &TagsLocationOperationWaiter{ - Config: config, - UserAgent: userAgent, - } - if err := w.CommonOperationWaiter.SetOp(op); err != nil { - return nil, err - } - return w, nil -} - +// Deprecated: For backward compatibility TagsLocationOperationWaitTimeWithResponse is still working, +// but all new code should use TagsLocationOperationWaitTimeWithResponse in the tags package instead. func TagsLocationOperationWaitTimeWithResponse(config *transport_tpg.Config, op map[string]interface{}, response *map[string]interface{}, activity, userAgent string, timeout time.Duration) error { - w, err := createTagsLocationWaiter(config, op, activity, userAgent) - if err != nil { - return err - } - if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { - return err - } - return json.Unmarshal([]byte(w.CommonOperationWaiter.Op.Response), response) + return tags.TagsLocationOperationWaitTimeWithResponse(config, op, response, activity, userAgent, timeout) } +// Deprecated: For backward compatibility TagsLocationOperationWaitTime is still working, +// but all new code should use TagsLocationOperationWaitTime in the tags package instead. func TagsLocationOperationWaitTime(config *transport_tpg.Config, op map[string]interface{}, activity, userAgent string, timeout time.Duration) error { - if val, ok := op["name"]; !ok || val == "" { - // This was a synchronous call - there is no operation to wait for. - return nil - } - w, err := createTagsLocationWaiter(config, op, activity, userAgent) - if err != nil { - // If w is nil, the op was synchronous. - return err - } - return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) + return tags.TagsLocationOperationWaitTime(config, op, activity, userAgent, timeout) } +// Deprecated: For backward compatibility GetLocationFromOpName is still working, +// but all new code should use GetLocationFromOpName in the tags package instead. 
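The location-aware waiter removed above keys its polling URL off the location embedded in the operation name. As a small standalone illustration of that parsing, this sketch reproduces the regex used by `GetLocationFromOpName` (now delegated to the tags package); the sample operation names are made up.

```go
package main

import (
	"fmt"
	"regexp"
)

// locationFromOpName mirrors the behavior of GetLocationFromOpName: names such
// as "operations/rctb.us-central1.123" yield the embedded location, anything
// else is returned unchanged (and the global endpoint is used).
func locationFromOpName(opName string) string {
	re := regexp.MustCompile(`operations/(?:rctb|rdtb)\.([a-zA-Z0-9-]*)\.([0-9]*)`)
	if res := re.FindStringSubmatch(opName); len(res) == 3 && res[1] != "" {
		return res[1]
	}
	return opName
}

func main() {
	fmt.Println(locationFromOpName("operations/rctb.us-central1.123")) // "us-central1"
	fmt.Println(locationFromOpName("operations/abcdef"))               // unchanged
}
```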
func GetLocationFromOpName(opName string) string { - re := regexp.MustCompile("operations/(?:rctb|rdtb)\\.([a-zA-Z0-9-]*)\\.([0-9]*)") - switch { - case re.MatchString(opName): - if res := re.FindStringSubmatch(opName); len(res) == 3 && res[1] != "" { - return res[1] - } - } - return opName + return tags.GetLocationFromOpName(opName) } diff --git a/mmv1/third_party/terraform/utils/vcr_utils.go b/mmv1/third_party/terraform/utils/vcr_utils.go index 51247d22bb16..1b80858f8ebd 100644 --- a/mmv1/third_party/terraform/utils/vcr_utils.go +++ b/mmv1/third_party/terraform/utils/vcr_utils.go @@ -21,6 +21,7 @@ import ( "time" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" "github.com/dnaeon/go-vcr/cassette" @@ -107,7 +108,7 @@ func readSeedFromFile(fileName string) (int64, error) { // Remove NULL characters from seed data = bytes.Trim(data, "\x00") seed := string(data) - return StringToFixed64(seed) + return tpgresource.StringToFixed64(seed) } func writeSeedToFile(seed int64, fileName string) error { diff --git a/mmv1/third_party/terraform/utils/vertex_ai_operation.go.erb b/mmv1/third_party/terraform/utils/vertex_ai_operation.go.erb index c0e02eba2df3..ba434403e123 100644 --- a/mmv1/third_party/terraform/utils/vertex_ai_operation.go.erb +++ b/mmv1/third_party/terraform/utils/vertex_ai_operation.go.erb @@ -2,77 +2,22 @@ package google import ( - "encoding/json" - "fmt" "time" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" + "github.com/hashicorp/terraform-provider-google/google/services/vertexai" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) -type VertexAIOperationWaiter struct { - Config *transport_tpg.Config - UserAgent string - Project string - tpgresource.CommonOperationWaiter -} - -func (w *VertexAIOperationWaiter) QueryOp() (interface{}, error) { - if w == nil { - return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") - } - - region := tpgresource.GetRegionFromRegionalSelfLink(w.CommonOperationWaiter.Op.Name) - - // Returns the proper get. -<% if version == 'ga' -%> - url := fmt.Sprintf("https://%s-aiplatform.googleapis.com/v1/%s", region, w.CommonOperationWaiter.Op.Name) -<% else -%> - url := fmt.Sprintf("https://%s-aiplatform.googleapis.com/v1beta1/%s", region, w.CommonOperationWaiter.Op.Name) -<% end -%> - - return transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: w.Config, - Method: "GET", - Project: w.Project, - RawURL: url, - UserAgent: w.UserAgent, - }) -} - -func createVertexAIWaiter(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string) (*VertexAIOperationWaiter, error) { - w := &VertexAIOperationWaiter{ - Config: config, - UserAgent: userAgent, - Project: project, - } - if err := w.CommonOperationWaiter.SetOp(op); err != nil { - return nil, err - } - return w, nil -} - // nolint: deadcode,unused +// +// Deprecated: For backward compatibility VertexAIOperationWaitTimeWithResponse is still working, +// but all new code should use VertexAIOperationWaitTimeWithResponse in the vertexai package instead. 
func VertexAIOperationWaitTimeWithResponse(config *transport_tpg.Config, op map[string]interface{}, response *map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { - w, err := createVertexAIWaiter(config, op, project, activity, userAgent) - if err != nil { - return err - } - if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { - return err - } - return json.Unmarshal([]byte(w.CommonOperationWaiter.Op.Response), response) + return vertexai.VertexAIOperationWaitTimeWithResponse(config, op, response, project, activity, userAgent, timeout) } +// Deprecated: For backward compatibility VertexAIOperationWaitTime is still working, +// but all new code should use VertexAIOperationWaitTime in the vertexai package instead. func VertexAIOperationWaitTime(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { - if val, ok := op["name"]; !ok || val == "" { - // This was a synchronous call - there is no operation to wait for. - return nil - } - w, err := createVertexAIWaiter(config, op, project, activity, userAgent) - if err != nil { - // If w is nil, the op was synchronous. - return err - } - return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) + return vertexai.VertexAIOperationWaitTime(config, op, project, activity, userAgent, timeout) } diff --git a/mmv1/third_party/terraform/website/docs/d/billing_account.html.markdown b/mmv1/third_party/terraform/website/docs/d/billing_account.html.markdown index 0ca04d005cea..39b0f61b01a4 100644 --- a/mmv1/third_party/terraform/website/docs/d/billing_account.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/billing_account.html.markdown @@ -32,6 +32,8 @@ The following arguments are supported: * `billing_account` (Optional) - The name of the billing account in the form `{billing_account_id}` or `billingAccounts/{billing_account_id}`. * `display_name` (Optional) - The display name of the billing account. * `open` (Optional) - `true` if the billing account is open, `false` if the billing account is closed. +* `lookup_projects` (Optional) - `true` if projects associated with the billing account should be read, `false` if this step +should be skipped. Setting `false` may be useful if the user permissions do not allow listing projects. Defaults to `true`. ~> **NOTE:** One of `billing_account` or `display_name` must be specified. @@ -41,4 +43,5 @@ The following additional attributes are exported: * `id` - The billing account ID. * `name` - The resource name of the billing account in the form `billingAccounts/{billing_account_id}`. -* `project_ids` - The IDs of any projects associated with the billing account. +* `project_ids` - The IDs of any projects associated with the billing account. `lookup_projects` must not be false +for this to be populated. diff --git a/mmv1/third_party/terraform/website/docs/d/vmwareengine_network.html.markdown b/mmv1/third_party/terraform/website/docs/d/vmwareengine_network.html.markdown new file mode 100644 index 000000000000..6870f7a4080d --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/vmwareengine_network.html.markdown @@ -0,0 +1,40 @@ +--- +subcategory: "Cloud VMware Engine" +description: |- + Get information about a VMwareEngine network. +--- + +# google\_vmwareengine\_network + +Use this data source to get details about a VMwareEngine network resource. 
+ +~> **Warning:** This data source is in beta, and should be used with the terraform-provider-google-beta provider. +See [Provider Versions](https://terraform.io/docs/providers/google/guides/provider_versions.html) for more details on beta resources. + +To get more information about VMwareEngine Network, see: +* [API documentation](https://cloud.google.com/vmware-engine/docs/reference/rest/v1/projects.locations.vmwareEngineNetworks) + +## Example Usage + +```hcl +data "google_vmwareengine_network" "my_nw" { + provider = google-beta + name = "us-central1-default" + location = "us-central1" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `name` - (Required) Name of the resource. +* `location` - (Required) Location of the resource. + +- - - + +* `project` - (Optional) The ID of the project in which the resource belongs. If it is not provided, the provider project is used. + +## Attributes Reference + +See [google_vmwareengine_network](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/vmwareengine_network#attributes-reference) resource for details of all the available attributes. diff --git a/mmv1/third_party/terraform/website/docs/r/compute_firewall_policy_rule.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_firewall_policy_rule.html.markdown index 0ad0da4e1b30..741feff67645 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_firewall_policy_rule.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_firewall_policy_rule.html.markdown @@ -27,6 +27,18 @@ For more information see the [official documentation](https://cloud.google.com/v ## Example Usage ```hcl +resource "google_network_security_address_group" "basic_global_networksecurity_address_group" { + provider = google-beta + + name = "policy" + parent = "organizations/12345" + description = "Sample global networksecurity_address_group" + location = "global" + items = ["208.80.154.224/32"] + type = "IPV4" + capacity = 100 +} + resource "google_compute_firewall_policy" "default" { parent = "organizations/12345" short_name = "my-policy" @@ -47,6 +59,10 @@ resource "google_compute_firewall_policy_rule" "default" { ports = [80, 8080] } dest_ip_ranges = ["11.100.0.1/32"] + dest_fqdns = ["google.com"] + dest_region_codes = ["US"] + dest_threat_intelligences = ["iplist-public-clouds"] + dest_address_groups = [google_network_security_address_group.basic_global_networksecurity_address_group.id] } } ``` @@ -78,18 +94,50 @@ The following arguments are supported: The `match` block supports: + +* `dest_address_groups` - + (Optional) + Address groups which should be matched against the traffic destination. Maximum number of destination address groups is 10. Destination address groups is only supported in Egress rules. + +* `dest_fqdns` - + (Optional) + Domain names that will be used to match against the resolved domain name of destination of traffic. Can only be specified if DIRECTION is egress. * `dest_ip_ranges` - (Optional) - CIDR IP address range. Maximum number of destination CIDR IP ranges allowed is 256. + CIDR IP address range. Maximum number of destination CIDR IP ranges allowed is 5000. + +* `dest_region_codes` - + (Optional) + The Unicode country codes whose IP addresses will be used to match against the source of traffic. Can only be specified if DIRECTION is egress. + +* `dest_threat_intelligences` - + (Optional) + Name of the Google Cloud Threat Intelligence list. 
* `layer4_configs` - (Required) Pairs of IP protocols and ports that the rule should match. Structure is [documented below](#nested_layer4_configs). -* `src_ip_ranges` - +* `src_address_groups` - + (Optional) + Address groups which should be matched against the traffic source. Maximum number of source address groups is 10. Source address groups is only supported in Ingress rules. + +* `src_fqdns` - + (Optional) + Domain names that will be used to match against the resolved domain name of source of traffic. Can only be specified if DIRECTION is ingress. + +* `src_ip_ranges` - + (Optional) + CIDR IP address range. Maximum number of source CIDR IP ranges allowed is 5000. + +* `src_region_codes` - + (Optional) + The Unicode country codes whose IP addresses will be used to match against the source of traffic. Can only be specified if DIRECTION is ingress. + +* `src_threat_intelligences` - (Optional) - CIDR IP address range. Maximum number of source CIDR IP ranges allowed is 256. + Name of the Google Cloud Threat Intelligence list. The `layer4_configs` block supports: diff --git a/mmv1/third_party/terraform/website/docs/r/logging_project_sink.html.markdown b/mmv1/third_party/terraform/website/docs/r/logging_project_sink.html.markdown index 52475435fab1..bf178133c8a9 100644 --- a/mmv1/third_party/terraform/website/docs/r/logging_project_sink.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/logging_project_sink.html.markdown @@ -24,7 +24,7 @@ Manages a project-level logging sink. For more information see: resource "google_logging_project_sink" "my-sink" { name = "my-pubsub-instance-sink" - # Can export to pubsub, cloud storage, or bigquery + # Can export to pubsub, cloud storage, bigquery, log bucket, or another project destination = "pubsub.googleapis.com/projects/my-project/topics/instance-activity" # Log all WARN or higher severity messages relating to instances diff --git a/mmv1/third_party/validator/bigquery_dataset_iam.go b/mmv1/third_party/validator/bigquery_dataset_iam.go index b76082b05891..324a080af798 100644 --- a/mmv1/third_party/validator/bigquery_dataset_iam.go +++ b/mmv1/third_party/validator/bigquery_dataset_iam.go @@ -3,20 +3,21 @@ package google import ( "fmt" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgiamresource" "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgresource" transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" ) -func resourceConverterBigqueryDatasetIamPolicy() ResourceConverter { - return ResourceConverter{ +func resourceConverterBigqueryDatasetIamPolicy() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "bigquery.googleapis.com/Dataset", Convert: GetBigqueryDatasetIamPolicyCaiObject, MergeCreateUpdate: MergeBigqueryDatasetIamPolicy, } } -func resourceConverterBigqueryDatasetIamBinding() ResourceConverter { - return ResourceConverter{ +func resourceConverterBigqueryDatasetIamBinding() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "bigquery.googleapis.com/Dataset", Convert: GetBigqueryDatasetIamBindingCaiObject, FetchFullResource: FetchBigqueryDatasetIamPolicy, @@ -25,8 +26,8 @@ func resourceConverterBigqueryDatasetIamBinding() ResourceConverter { } } -func resourceConverterBigqueryDatasetIamMember() ResourceConverter { - return ResourceConverter{ +func 
resourceConverterBigqueryDatasetIamMember() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "bigquery.googleapis.com/Dataset", Convert: GetBigqueryDatasetIamMemberCaiObject, FetchFullResource: FetchBigqueryDatasetIamPolicy, @@ -35,70 +36,70 @@ func resourceConverterBigqueryDatasetIamMember() ResourceConverter { } } -func GetBigqueryDatasetIamPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newBigqueryDatasetIamAsset(d, config, expandIamPolicyBindings) +func GetBigqueryDatasetIamPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newBigqueryDatasetIamAsset(d, config, tpgiamresource.ExpandIamPolicyBindings) } -func GetBigqueryDatasetIamBindingCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newBigqueryDatasetIamAsset(d, config, expandIamRoleBindings) +func GetBigqueryDatasetIamBindingCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newBigqueryDatasetIamAsset(d, config, tpgiamresource.ExpandIamRoleBindings) } -func GetBigqueryDatasetIamMemberCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newBigqueryDatasetIamAsset(d, config, expandIamMemberBindings) +func GetBigqueryDatasetIamMemberCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newBigqueryDatasetIamAsset(d, config, tpgiamresource.ExpandIamMemberBindings) } -func MergeBigqueryDatasetIamPolicy(existing, incoming Asset) Asset { +func MergeBigqueryDatasetIamPolicy(existing, incoming tpgresource.Asset) tpgresource.Asset { existing.IAMPolicy = incoming.IAMPolicy return existing } -func MergeBigqueryDatasetIamBinding(existing, incoming Asset) Asset { - return mergeIamAssets(existing, incoming, mergeAuthoritativeBindings) +func MergeBigqueryDatasetIamBinding(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeIamAssets(existing, incoming, tpgiamresource.MergeAuthoritativeBindings) } -func MergeBigqueryDatasetIamBindingDelete(existing, incoming Asset) Asset { - return mergeDeleteIamAssets(existing, incoming, mergeDeleteAuthoritativeBindings) +func MergeBigqueryDatasetIamBindingDelete(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeDeleteIamAssets(existing, incoming, tpgiamresource.MergeDeleteAuthoritativeBindings) } -func MergeBigqueryDatasetIamMember(existing, incoming Asset) Asset { - return mergeIamAssets(existing, incoming, mergeAdditiveBindings) +func MergeBigqueryDatasetIamMember(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeIamAssets(existing, incoming, tpgiamresource.MergeAdditiveBindings) } -func MergeBigqueryDatasetIamMemberDelete(existing, incoming Asset) Asset { - return mergeDeleteIamAssets(existing, incoming, mergeDeleteAdditiveBindings) +func MergeBigqueryDatasetIamMemberDelete(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeDeleteIamAssets(existing, incoming, tpgiamresource.MergeDeleteAdditiveBindings) } func newBigqueryDatasetIamAsset( d tpgresource.TerraformResourceData, config *transport_tpg.Config, - expandBindings func(d tpgresource.TerraformResourceData) ([]IAMBinding, error), -) ([]Asset, error) { + expandBindings func(d tpgresource.TerraformResourceData) 
([]tpgresource.IAMBinding, error), +) ([]tpgresource.Asset, error) { bindings, err := expandBindings(d) if err != nil { - return []Asset{}, fmt.Errorf("expanding bindings: %v", err) + return []tpgresource.Asset{}, fmt.Errorf("expanding bindings: %v", err) } - name, err := assetName(d, config, "//bigquery.googleapis.com/projects/{{project}}/datasets/{{dataset_id}}") + name, err := tpgresource.AssetName(d, config, "//bigquery.googleapis.com/projects/{{project}}/datasets/{{dataset_id}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: "bigquery.googleapis.com/Dataset", - IAMPolicy: &IAMPolicy{ + IAMPolicy: &tpgresource.IAMPolicy{ Bindings: bindings, }, }}, nil } -func FetchBigqueryDatasetIamPolicy(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (Asset, error) { +func FetchBigqueryDatasetIamPolicy(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (tpgresource.Asset, error) { // Check if the identity field returns a value if _, ok := d.GetOk("dataset_id"); !ok { - return Asset{}, ErrEmptyIdentityField + return tpgresource.Asset{}, tpgresource.ErrEmptyIdentityField } - return fetchIamPolicy( + return tpgiamresource.FetchIamPolicy( NewBigqueryDatasetIamUpdater, d, config, diff --git a/mmv1/third_party/validator/bigquery_table.go b/mmv1/third_party/validator/bigquery_table.go index b5826a9b8fe7..cc0daa162b25 100644 --- a/mmv1/third_party/validator/bigquery_table.go +++ b/mmv1/third_party/validator/bigquery_table.go @@ -9,24 +9,24 @@ import ( const BigQueryTableAssetType string = "bigquery.googleapis.com/Table" -func resourceConverterBigQueryTable() ResourceConverter { - return ResourceConverter{ +func resourceConverterBigQueryTable() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: BigQueryTableAssetType, Convert: GetBigQueryTableCaiObject, } } -func GetBigQueryTableCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - name, err := assetName(d, config, "//bigquery.googleapis.com/projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}") +func GetBigQueryTableCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + name, err := tpgresource.AssetName(d, config, "//bigquery.googleapis.com/projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } if obj, err := GetBigQueryTableApiObject(d, config); err == nil { - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: BigQueryTableAssetType, - Resource: &AssetResource{ + Resource: &tpgresource.AssetResource{ Version: "v2", DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/bigquery/v2/rest", DiscoveryName: "Table", @@ -34,7 +34,7 @@ func GetBigQueryTableCaiObject(d tpgresource.TerraformResourceData, config *tran }, }}, nil } else { - return []Asset{}, err + return []tpgresource.Asset{}, err } } diff --git a/mmv1/third_party/validator/bigtable_cluster.go b/mmv1/third_party/validator/bigtable_cluster.go index bde1bdb924d9..f27a8c969614 100644 --- a/mmv1/third_party/validator/bigtable_cluster.go +++ b/mmv1/third_party/validator/bigtable_cluster.go @@ -7,32 +7,32 @@ import ( transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" ) -func resourceConverterBigtableCluster() ResourceConverter { - return 
ResourceConverter{ +func resourceConverterBigtableCluster() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "bigtableadmin.googleapis.com/Cluster", Convert: GetBigtableClusterCaiObject, } } -func GetBigtableClusterCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { +func GetBigtableClusterCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { objs, err := GetBigtableClusterApiObjects(d, config) if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } - assets := []Asset{} + assets := []tpgresource.Asset{} for _, obj := range objs { - name, err := assetName(d, config, "//bigtable.googleapis.com/projects/{{project}}/instances/{{name}}/clusters/{{cluster_id}}") + name, err := tpgresource.AssetName(d, config, "//bigtable.googleapis.com/projects/{{project}}/instances/{{name}}/clusters/{{cluster_id}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } - asset := Asset{ + asset := tpgresource.Asset{ Name: name, Type: "bigtableadmin.googleapis.com/Cluster", - Resource: &AssetResource{ + Resource: &tpgresource.AssetResource{ Version: "v2", DiscoveryDocumentURI: "https://bigtableadmin.googleapis.com/$discovery/rest", DiscoveryName: "Cluster", diff --git a/mmv1/third_party/validator/bigtable_instance.go b/mmv1/third_party/validator/bigtable_instance.go index df96cbceea03..9e9460e76183 100644 --- a/mmv1/third_party/validator/bigtable_instance.go +++ b/mmv1/third_party/validator/bigtable_instance.go @@ -7,24 +7,24 @@ import ( transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" ) -func resourceConverterBigtableInstance() ResourceConverter { - return ResourceConverter{ +func resourceConverterBigtableInstance() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "bigtableadmin.googleapis.com/Instance", Convert: GetBigtableInstanceCaiObject, } } -func GetBigtableInstanceCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - name, err := assetName(d, config, "//bigtable.googleapis.com/projects/{{project}}/instances/{{name}}") +func GetBigtableInstanceCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + name, err := tpgresource.AssetName(d, config, "//bigtable.googleapis.com/projects/{{project}}/instances/{{name}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } if obj, err := GetBigtableInstanceApiObject(d, config); err == nil { - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: "bigtableadmin.googleapis.com/Instance", - Resource: &AssetResource{ + Resource: &tpgresource.AssetResource{ Version: "v1", DiscoveryDocumentURI: "https://bigtableadmin.googleapis.com/$discovery/rest", DiscoveryName: "Instance", @@ -32,7 +32,7 @@ func GetBigtableInstanceCaiObject(d tpgresource.TerraformResourceData, config *t }, }}, nil } else { - return []Asset{}, err + return []tpgresource.Asset{}, err } } diff --git a/mmv1/third_party/validator/cai.go b/mmv1/third_party/validator/cai.go index c76eb4ef8828..22492f4b7739 100644 --- a/mmv1/third_party/validator/cai.go +++ b/mmv1/third_party/validator/cai.go @@ -1,182 +1,59 @@ package google import ( - "fmt" - "math/rand" - "regexp" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgresource" 
transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" ) -type ConvertFunc func(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) -type GetApiObjectFunc func(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (map[string]interface{}, error) - -// FetchFullResourceFunc allows initial data for a resource to be fetched from the API and merged -// with the planned changes. This is useful for resources that are only partially managed -// by Terraform, like IAM policies managed with member/binding resources. -type FetchFullResourceFunc func(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (Asset, error) - -// MergeFunc combines multiple terraform resources into a single CAI asset. -// The incoming asset will either be an asset that was created/updated or deleted. -type MergeFunc func(existing, incoming Asset) Asset - -type ResourceConverter struct { - AssetType string - Convert ConvertFunc - FetchFullResource FetchFullResourceFunc - MergeCreateUpdate MergeFunc - MergeDelete MergeFunc -} +type ConvertFunc = tpgresource.ConvertFunc +type GetApiObjectFunc = tpgresource.GetApiObjectFunc +type FetchFullResourceFunc = tpgresource.FetchFullResourceFunc +type MergeFunc = tpgresource.MergeFunc +type ResourceConverter = tpgresource.ResourceConverter // Asset is the CAI representation of a resource. -type Asset struct { - // The name, in a peculiar format: `\\.googleapis.com/` - Name string `json:"name"` - // The type name in `google..` format. - Type string `json:"asset_type"` - Resource *AssetResource `json:"resource,omitempty"` - IAMPolicy *IAMPolicy `json:"iam_policy,omitempty"` - OrgPolicy []*OrgPolicy `json:"org_policy,omitempty"` - V2OrgPolicies []*V2OrgPolicies `json:"v2_org_policies,omitempty"` -} +type Asset = tpgresource.Asset // AssetResource is the Asset's Resource field. -type AssetResource struct { - // Api version - Version string `json:"version"` - // URI including scheme for the discovery doc - assembled from - // product name and version. - DiscoveryDocumentURI string `json:"discovery_document_uri"` - // Resource name. - DiscoveryName string `json:"discovery_name"` - // Actual resource state as per Terraform. Note that this does - // not necessarily correspond perfectly with the CAI representation - // as there are occasional deviations between CAI and API responses. - // This returns the API response values instead. - Data map[string]interface{} `json:"data,omitempty"` -} +type AssetResource = tpgresource.AssetResource -type Folder struct { - Name string `json:"name,omitempty"` - Parent string `json:"parent,omitempty"` - DisplayName string `json:"display_name,omitempty"` - State string `json:"state,omitempty"` - CreateTime *Timestamp `json:"create_time,omitempty"` +// AssetName templates an asset.name by looking up and replacing all instances +// of {{field}}. In the case where a field would resolve to an empty string, a +// generated unique string will be used: "placeholder-" + randomString(). +// This is done to preserve uniqueness of asset.name for a given asset.asset_type. 
+func AssetName(d tpgresource.TerraformResourceData, config *transport_tpg.Config, linkTmpl string) (string, error) { + return tpgresource.AssetName(d, config, linkTmpl) } -type IAMPolicy struct { - Bindings []IAMBinding `json:"bindings"` -} +type Folder = tpgresource.Folder -type IAMBinding struct { - Role string `json:"role"` - Members []string `json:"members"` -} +type IAMPolicy = tpgresource.IAMPolicy -type OrgPolicy struct { - Constraint string `json:"constraint,omitempty"` - ListPolicy *ListPolicy `json:"listPolicy"` - BooleanPolicy *BooleanPolicy `json:"booleanPolicy"` - RestoreDefault *RestoreDefault `json:"restoreDefault"` - UpdateTime *Timestamp `json:"update_time,omitempty"` -} +type IAMBinding = tpgresource.IAMBinding + +type OrgPolicy = tpgresource.OrgPolicy // V2OrgPolicies is the represtation of V2OrgPolicies -type V2OrgPolicies struct { - Name string `json:"name"` - PolicySpec *PolicySpec `json:"spec,omitempty"` -} +type V2OrgPolicies = tpgresource.V2OrgPolicies // Spec is the representation of Spec for V2OrgPolicy -type PolicySpec struct { - Etag string `json:"etag,omitempty"` - UpdateTime *Timestamp `json:"update_time,omitempty"` - PolicyRules []*PolicyRule `json:"rules,omitempty"` - InheritFromParent bool `json:"inherit_from_parent,omitempty"` - Reset bool `json:"reset,omitempty"` -} - -type PolicyRule struct { - Values *StringValues `json:"values,omitempty"` - AllowAll bool `json:"allow_all,omitempty"` - DenyAll bool `json:"deny_all,omitempty"` - Enforce bool `json:"enforce,omitempty"` - Condition *Expr `json:"condition,omitempty"` -} +type PolicySpec = tpgresource.PolicySpec -type StringValues struct { - AllowedValues []string `json:"allowed_values,omitempty"` - DeniedValues []string `json:"denied_values,omitempty"` -} +type PolicyRule = tpgresource.PolicyRule -type Expr struct { - Expression string `json:"expression,omitempty"` - Title string `json:"title,omitempty"` - Description string `json:"description,omitempty"` - Location string `json:"location,omitempty"` -} - -type Timestamp struct { - Seconds int64 `json:"seconds,omitempty"` - Nanos int64 `json:"nanos,omitempty"` -} +type StringValues = tpgresource.StringValues -type ListPolicyAllValues int32 +type Expr = tpgresource.Expr -type ListPolicy struct { - AllowedValues []string `json:"allowed_values,omitempty"` - DeniedValues []string `json:"denied_values,omitempty"` - AllValues ListPolicyAllValues `json:"all_values,omitempty"` - SuggestedValue string `json:"suggested_value,omitempty"` - InheritFromParent bool `json:"inherit_from_parent,omitempty"` -} +type Timestamp = tpgresource.Timestamp -type BooleanPolicy struct { - Enforced bool `json:"enforced,omitempty"` -} +type ListPolicyAllValues = tpgresource.ListPolicyAllValues -type RestoreDefault struct { -} +type ListPolicy = tpgresource.ListPolicy -// assetName templates an asset.name by looking up and replacing all instances -// of {{field}}. In the case where a field would resolve to an empty string, a -// generated unique string will be used: "placeholder-" + randomString(). -// This is done to preserve uniqueness of asset.name for a given asset.asset_type. 
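The bulk of cai.go becomes type aliases onto the `tpgresource` definitions. Because these declarations use `=` (aliases) rather than plain type definitions, the old and new names denote the identical type, so existing validator code keeps compiling and values flow between packages without conversions. A compact, self-contained illustration of that distinction, with invented names:

```go
package main

import "fmt"

// moved stands in for a struct that has been relocated to another package,
// e.g. tpgresource.Asset.
type moved struct{ Name string }

// Alias uses "=": Alias and moved are the same type, so values interoperate.
type Alias = moved

// Defined is a new, distinct type with the same layout; it would force
// explicit conversions at every old call site.
type Defined moved

func main() {
	var a Alias = moved{Name: "x"}     // no conversion needed
	d := Defined(moved{Name: "y"})     // conversion required for a defined type
	fmt.Println(a.Name, d.Name)
}
```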
-func assetName(d tpgresource.TerraformResourceData, config *transport_tpg.Config, linkTmpl string) (string, error) { - re := regexp.MustCompile("{{([%[:word:]]+)}}") - - // workaround for empty project - placeholderSet := false - if config.Project == "" { - config.Project = fmt.Sprintf("placeholder-%s", RandString(8)) - placeholderSet = true - } - - f, err := tpgresource.BuildReplacementFunc(re, d, config, linkTmpl, false) - if err != nil { - return "", err - } - if placeholderSet { - config.Project = "" - } - - fWithPlaceholder := func(key string) string { - val := f(key) - if val == "" { - val = fmt.Sprintf("placeholder-%s", RandString(8)) - } - return val - } - - return re.ReplaceAllStringFunc(linkTmpl, fWithPlaceholder), nil -} +type BooleanPolicy = tpgresource.BooleanPolicy +type RestoreDefault = tpgresource.RestoreDefault func RandString(n int) string { - const letterBytes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" - b := make([]byte, n) - for i := range b { - b[i] = letterBytes[rand.Intn(len(letterBytes))] - } - return string(b) + return tpgresource.RandString(n) } diff --git a/mmv1/third_party/validator/cloudfunctions_cloud_function.go b/mmv1/third_party/validator/cloudfunctions_cloud_function.go index 0e69ddced733..8e25fccb10f7 100644 --- a/mmv1/third_party/validator/cloudfunctions_cloud_function.go +++ b/mmv1/third_party/validator/cloudfunctions_cloud_function.go @@ -9,23 +9,23 @@ import ( const CloudFunctionsCloudFunctionAssetType string = "cloudfunctions.googleapis.com/CloudFunction" -func resourceConverterCloudFunctionsCloudFunction() ResourceConverter { - return ResourceConverter{ +func resourceConverterCloudFunctionsCloudFunction() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: CloudFunctionsCloudFunctionAssetType, Convert: GetCloudFunctionsCloudFunctionCaiObject, } } -func GetCloudFunctionsCloudFunctionCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - name, err := assetName(d, config, "//cloudfunctions.googleapis.com/projects/{{project}}/locations/{{region}}/functions/{{name}}") +func GetCloudFunctionsCloudFunctionCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + name, err := tpgresource.AssetName(d, config, "//cloudfunctions.googleapis.com/projects/{{project}}/locations/{{region}}/functions/{{name}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } if obj, err := GetCloudFunctionsCloudFunctionApiObject(d, config); err == nil { - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: CloudFunctionsCloudFunctionAssetType, - Resource: &AssetResource{ + Resource: &tpgresource.AssetResource{ Version: "v1", DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/cloudfunctions/v1/rest", DiscoveryName: "CloudFunction", @@ -33,7 +33,7 @@ func GetCloudFunctionsCloudFunctionCaiObject(d tpgresource.TerraformResourceData }, }}, nil } else { - return []Asset{}, err + return []tpgresource.Asset{}, err } } diff --git a/mmv1/third_party/validator/cloudfunctions_function.go b/mmv1/third_party/validator/cloudfunctions_function.go index 22cf551dfc0a..64533c64f77e 100644 --- a/mmv1/third_party/validator/cloudfunctions_function.go +++ b/mmv1/third_party/validator/cloudfunctions_function.go @@ -7,16 +7,16 @@ import ( transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" ) -func 
GetCloudFunctionsFunctionCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - name, err := assetName(d, config, "//cloudfunctions.googleapis.com/projects/{{.Provider.project}}/locations/us-central1/functions/{{name}}") +func GetCloudFunctionsFunctionCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + name, err := tpgresource.AssetName(d, config, "//cloudfunctions.googleapis.com/projects/{{.Provider.project}}/locations/us-central1/functions/{{name}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } if obj, err := GetCloudFunctionsFunctionApiObject(d, config); err == nil { - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: "cloudfunctions.googleapis.com/CloudFunction", - Resource: &AssetResource{ + Resource: &tpgresource.AssetResource{ Version: "v1", DiscoveryDocumentURI: "https://cloudfunctions.googleapis.com/$discovery/rest", DiscoveryName: "CloudFunction", @@ -24,7 +24,7 @@ func GetCloudFunctionsFunctionCaiObject(d tpgresource.TerraformResourceData, con }, }}, nil } else { - return []Asset{}, err + return []tpgresource.Asset{}, err } } diff --git a/mmv1/third_party/validator/compute_instance.go.erb b/mmv1/third_party/validator/compute_instance.go.erb index 93fc3c48aca0..0a4770a25d1e 100644 --- a/mmv1/third_party/validator/compute_instance.go.erb +++ b/mmv1/third_party/validator/compute_instance.go.erb @@ -7,7 +7,7 @@ // third_party/terraform/resources/resource_compute_instance.go // // ---------------------------------------------------------------------------- -package google +package compute import ( "errors" @@ -28,23 +28,23 @@ import ( const ComputeInstanceAssetType string = "compute.googleapis.com/Instance" -func resourceConverterComputeInstance() ResourceConverter { - return ResourceConverter{ +func ResourceConverterComputeInstance() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: ComputeInstanceAssetType, Convert: GetComputeInstanceCaiObject, } } -func GetComputeInstanceCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - name, err := assetName(d, config, "//compute.googleapis.com/projects/{{project}}/zones/{{zone}}/instances/{{name}}") +func GetComputeInstanceCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + name, err := tpgresource.AssetName(d, config, "//compute.googleapis.com/projects/{{project}}/zones/{{zone}}/instances/{{name}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } if obj, err := GetComputeInstanceApiObject(d, config); err == nil { - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: ComputeInstanceAssetType, - Resource: &AssetResource{ + Resource: &tpgresource.AssetResource{ Version: "v1", DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/compute/v1/rest", DiscoveryName: "Instance", @@ -52,7 +52,7 @@ func GetComputeInstanceCaiObject(d tpgresource.TerraformResourceData, config *tr }, }}, nil } else { - return []Asset{}, err + return []tpgresource.Asset{}, err } } @@ -67,7 +67,7 @@ func GetComputeInstanceApiObject(d tpgresource.TerraformResourceData, config *tr return nil, err } - return jsonMap(instance) + return tpgresource.JsonMap(instance) } func expandComputeInstance(project string, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (*compute.Instance, error) { @@ -310,7 +310,7 @@ func 
expandBootDisk(d tpgresource.TerraformResourceData, config *transport_tpg.C if v, ok := d.GetOk("boot_disk.0.initialize_params.0.image"); ok { imageName := v.(string) - imageUrl, err := resolveImage(config, project, imageName, userAgent) + imageUrl, err := ResolveImage(config, project, imageName, userAgent) if err != nil { return nil, fmt.Errorf("Error resolving image name '%s': %s", imageName, err) } diff --git a/mmv1/third_party/validator/compute_security_policy.go b/mmv1/third_party/validator/compute_security_policy.go index b3aced1fbad5..8df3035687b4 100644 --- a/mmv1/third_party/validator/compute_security_policy.go +++ b/mmv1/third_party/validator/compute_security_policy.go @@ -10,23 +10,23 @@ import ( const ComputeSecurityPolicyAssetType string = "compute.googleapis.com/SecurityPolicy" -func resourceConverterComputeSecurityPolicy() ResourceConverter { - return ResourceConverter{ +func resourceConverterComputeSecurityPolicy() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: ComputeSecurityPolicyAssetType, Convert: GetComputeSecurityPolicyCaiObject, } } -func GetComputeSecurityPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - name, err := assetName(d, config, "//compute.googleapis.com/projects/{{project}}/global/securityPolicies/{{name}}") +func GetComputeSecurityPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + name, err := tpgresource.AssetName(d, config, "//compute.googleapis.com/projects/{{project}}/global/securityPolicies/{{name}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } if obj, err := GetComputeSecurityPolicyApiObject(d, config); err == nil { - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: ComputeSecurityPolicyAssetType, - Resource: &AssetResource{ + Resource: &tpgresource.AssetResource{ Version: "v1", DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/compute/v1/rest", DiscoveryName: "SecurityPolicy", @@ -34,7 +34,7 @@ func GetComputeSecurityPolicyCaiObject(d tpgresource.TerraformResourceData, conf }, }}, nil } else { - return []Asset{}, err + return []tpgresource.Asset{}, err } } diff --git a/mmv1/third_party/validator/constants.go b/mmv1/third_party/validator/constants.go index 642aef4e3ca1..c1be1f782b54 100644 --- a/mmv1/third_party/validator/constants.go +++ b/mmv1/third_party/validator/constants.go @@ -1,8 +1,7 @@ package google import ( - "errors" - + "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgresource" transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" ) @@ -11,16 +10,16 @@ import ( // because of the current state of the system. // Example: The conversion requires that the resource has already been created // and is now being updated). -var ErrNoConversion = errors.New("no conversion") +var ErrNoConversion = tpgresource.ErrNoConversion // ErrEmptyIdentityField can be returned when fetching a resource is not possible // due to the identity field of that resource returning empty. 
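constants.go now just re-exports these sentinel errors from tpgresource. A minimal sketch of how calling code can distinguish them, assuming they remain plain errors.New sentinels as in the removed declarations nearby (the print message is illustrative only):

package main

import (
    "errors"
    "fmt"

    "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgresource"
)

func main() {
    // e.g. FetchFolderIamPolicy (later in this patch) returns this sentinel
    // when the "folder" identity field is not yet known.
    err := tpgresource.ErrEmptyIdentityField
    if errors.Is(err, tpgresource.ErrEmptyIdentityField) {
        fmt.Println("identity field not known yet; fetch skipped")
    }
}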
-var ErrEmptyIdentityField = errors.New("empty identity field") +var ErrEmptyIdentityField = tpgresource.ErrEmptyIdentityField // ErrResourceInaccessible can be returned when fetching an IAM resource // on a project that has not yet been created or if the service account // lacks sufficient permissions -var ErrResourceInaccessible = errors.New("resource does not exist or service account is lacking sufficient permissions") +var ErrResourceInaccessible = tpgresource.ErrResourceInaccessible // Global MutexKV // diff --git a/mmv1/third_party/validator/container.go b/mmv1/third_party/validator/container.go index 8a992ad66aed..f13d020f785c 100644 --- a/mmv1/third_party/validator/container.go +++ b/mmv1/third_party/validator/container.go @@ -18,15 +18,15 @@ import ( const ContainerClusterAssetType string = "container.googleapis.com/Cluster" const ContainerNodePoolAssetType string = "container.googleapis.com/NodePool" -func resourceConverterContainerCluster() ResourceConverter { - return ResourceConverter{ +func resourceConverterContainerCluster() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: ContainerClusterAssetType, Convert: GetContainerClusterCaiObject, } } -func resourceConverterContainerNodePool() ResourceConverter { - return ResourceConverter{ +func resourceConverterContainerNodePool() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: ContainerNodePoolAssetType, Convert: GetContainerNodePoolCaiObject, } @@ -102,16 +102,16 @@ func expandContainerNodePoolNodeConfigOauthScopes(v interface{}, d tpgresource.T return canonicalizeServiceScopesFromSet(scopesSet) } -func GetContainerClusterCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - name, err := assetName(d, config, "//container.googleapis.com/projects/{{project}}/locations/{{location}}/clusters/{{name}}") +func GetContainerClusterCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + name, err := tpgresource.AssetName(d, config, "//container.googleapis.com/projects/{{project}}/locations/{{location}}/clusters/{{name}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } if obj, err := GetContainerClusterApiObject(d, config); err == nil { - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: ContainerClusterAssetType, - Resource: &AssetResource{ + Resource: &tpgresource.AssetResource{ Version: "v1", DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/container/v1/rest", DiscoveryName: "Cluster", @@ -119,7 +119,7 @@ func GetContainerClusterCaiObject(d tpgresource.TerraformResourceData, config *t }, }}, nil } else { - return []Asset{}, err + return []tpgresource.Asset{}, err } } @@ -1178,16 +1178,16 @@ func expandContainerClusterKubectlContext(v interface{}, d tpgresource.Terraform return v, nil } -func GetContainerNodePoolCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - name, err := assetName(d, config, "//container.googleapis.com/projects/{{project}}/locations/{{location}}/clusters/{{cluster}}/nodePools/{{name}}") +func GetContainerNodePoolCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + name, err := tpgresource.AssetName(d, config, "//container.googleapis.com/projects/{{project}}/locations/{{location}}/clusters/{{cluster}}/nodePools/{{name}}") if err != nil { - return []Asset{}, err + return 
[]tpgresource.Asset{}, err } if obj, err := GetContainerNodePoolApiObject(d, config); err == nil { - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: ContainerNodePoolAssetType, - Resource: &AssetResource{ + Resource: &tpgresource.AssetResource{ Version: "v1", DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/container/v1/rest", DiscoveryName: "NodePool", @@ -1195,7 +1195,7 @@ func GetContainerNodePoolCaiObject(d tpgresource.TerraformResourceData, config * }, }}, nil } else { - return []Asset{}, err + return []tpgresource.Asset{}, err } } diff --git a/mmv1/third_party/validator/folder.go b/mmv1/third_party/validator/folder.go index ae5ffecd2881..5e8be0886bf7 100644 --- a/mmv1/third_party/validator/folder.go +++ b/mmv1/third_party/validator/folder.go @@ -7,25 +7,25 @@ import ( transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" ) -func resourceConverterFolder() ResourceConverter { - return ResourceConverter{ +func resourceConverterFolder() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "cloudresourcemanager.googleapis.com/Folder", Convert: GetFolderCaiObject, } } -func GetFolderCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - name, err := assetName(d, config, "//cloudresourcemanager.googleapis.com/folders/{{folder_id}}") +func GetFolderCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + name, err := tpgresource.AssetName(d, config, "//cloudresourcemanager.googleapis.com/folders/{{folder_id}}") if err != nil { - return []Asset{}, nil + return []tpgresource.Asset{}, nil } if obj, err := GetFolderApiObject(d, config); err == nil { - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: "cloudresourcemanager.googleapis.com/Folder", - Resource: &AssetResource{ + Resource: &tpgresource.AssetResource{ Version: "v1", DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/compute/v1/rest", DiscoveryName: "Folder", @@ -33,13 +33,13 @@ func GetFolderCaiObject(d tpgresource.TerraformResourceData, config *transport_t }, }}, nil } else { - return []Asset{}, err + return []tpgresource.Asset{}, err } } func GetFolderApiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (map[string]interface{}, error) { - folder := &Folder{ + folder := &tpgresource.Folder{ Name: d.Get("name").(string), Parent: d.Get("parent").(string), DisplayName: d.Get("display_name").(string), @@ -50,15 +50,15 @@ func GetFolderApiObject(d tpgresource.TerraformResourceData, config *transport_t folder.CreateTime = constructTime(v.(string)) } - return jsonMap(folder) + return tpgresource.JsonMap(folder) } -func constructTime(create_time string) *Timestamp { +func constructTime(create_time string) *tpgresource.Timestamp { if create_time == "" { - return &Timestamp{} + return &tpgresource.Timestamp{} } t, _ := time.Parse(time.RFC3339, create_time) - return &Timestamp{ + return &tpgresource.Timestamp{ Seconds: t.Unix(), Nanos: t.UnixNano(), } diff --git a/mmv1/third_party/validator/folder_iam.go b/mmv1/third_party/validator/folder_iam.go index de50ab023526..3f3883cd6896 100644 --- a/mmv1/third_party/validator/folder_iam.go +++ b/mmv1/third_party/validator/folder_iam.go @@ -3,20 +3,21 @@ package google import ( "fmt" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgiamresource" 
"github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgresource" transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" ) -func resourceConverterFolderIamPolicy() ResourceConverter { - return ResourceConverter{ +func resourceConverterFolderIamPolicy() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "cloudresourcemanager.googleapis.com/Folder", Convert: GetFolderIamPolicyCaiObject, MergeCreateUpdate: MergeFolderIamPolicy, } } -func resourceConverterFolderIamBinding() ResourceConverter { - return ResourceConverter{ +func resourceConverterFolderIamBinding() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "cloudresourcemanager.googleapis.com/Folder", Convert: GetFolderIamBindingCaiObject, FetchFullResource: FetchFolderIamPolicy, @@ -25,8 +26,8 @@ func resourceConverterFolderIamBinding() ResourceConverter { } } -func resourceConverterFolderIamMember() ResourceConverter { - return ResourceConverter{ +func resourceConverterFolderIamMember() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "cloudresourcemanager.googleapis.com/Folder", Convert: GetFolderIamMemberCaiObject, FetchFullResource: FetchFolderIamPolicy, @@ -35,70 +36,70 @@ func resourceConverterFolderIamMember() ResourceConverter { } } -func GetFolderIamPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newFolderIamAsset(d, config, expandIamPolicyBindings) +func GetFolderIamPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newFolderIamAsset(d, config, tpgiamresource.ExpandIamPolicyBindings) } -func GetFolderIamBindingCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newFolderIamAsset(d, config, expandIamRoleBindings) +func GetFolderIamBindingCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newFolderIamAsset(d, config, tpgiamresource.ExpandIamRoleBindings) } -func GetFolderIamMemberCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newFolderIamAsset(d, config, expandIamMemberBindings) +func GetFolderIamMemberCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newFolderIamAsset(d, config, tpgiamresource.ExpandIamMemberBindings) } -func MergeFolderIamPolicy(existing, incoming Asset) Asset { +func MergeFolderIamPolicy(existing, incoming tpgresource.Asset) tpgresource.Asset { existing.IAMPolicy = incoming.IAMPolicy return existing } -func MergeFolderIamBinding(existing, incoming Asset) Asset { - return mergeIamAssets(existing, incoming, mergeAuthoritativeBindings) +func MergeFolderIamBinding(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeIamAssets(existing, incoming, tpgiamresource.MergeAuthoritativeBindings) } -func MergeFolderIamBindingDelete(existing, incoming Asset) Asset { - return mergeDeleteIamAssets(existing, incoming, mergeDeleteAuthoritativeBindings) +func MergeFolderIamBindingDelete(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeDeleteIamAssets(existing, incoming, tpgiamresource.MergeDeleteAuthoritativeBindings) } -func MergeFolderIamMember(existing, incoming 
Asset) Asset { - return mergeIamAssets(existing, incoming, mergeAdditiveBindings) +func MergeFolderIamMember(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeIamAssets(existing, incoming, tpgiamresource.MergeAdditiveBindings) } -func MergeFolderIamMemberDelete(existing, incoming Asset) Asset { - return mergeDeleteIamAssets(existing, incoming, mergeDeleteAdditiveBindings) +func MergeFolderIamMemberDelete(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeDeleteIamAssets(existing, incoming, tpgiamresource.MergeDeleteAdditiveBindings) } func newFolderIamAsset( d tpgresource.TerraformResourceData, config *transport_tpg.Config, - expandBindings func(d tpgresource.TerraformResourceData) ([]IAMBinding, error), -) ([]Asset, error) { + expandBindings func(d tpgresource.TerraformResourceData) ([]tpgresource.IAMBinding, error), +) ([]tpgresource.Asset, error) { bindings, err := expandBindings(d) if err != nil { - return []Asset{}, fmt.Errorf("expanding bindings: %v", err) + return []tpgresource.Asset{}, fmt.Errorf("expanding bindings: %v", err) } // The "folder" argument is of the form "folders/12345" - name, err := assetName(d, config, "//cloudresourcemanager.googleapis.com/{{folder}}") + name, err := tpgresource.AssetName(d, config, "//cloudresourcemanager.googleapis.com/{{folder}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: "cloudresourcemanager.googleapis.com/Folder", - IAMPolicy: &IAMPolicy{ + IAMPolicy: &tpgresource.IAMPolicy{ Bindings: bindings, }, }}, nil } -func FetchFolderIamPolicy(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (Asset, error) { +func FetchFolderIamPolicy(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (tpgresource.Asset, error) { if _, ok := d.GetOk("folder"); !ok { - return Asset{}, ErrEmptyIdentityField + return tpgresource.Asset{}, tpgresource.ErrEmptyIdentityField } - return fetchIamPolicy( + return tpgiamresource.FetchIamPolicy( NewFolderIamUpdater, d, config, diff --git a/mmv1/third_party/validator/folder_organization_policy.go b/mmv1/third_party/validator/folder_organization_policy.go index 18df9bd5cecd..01608319a596 100644 --- a/mmv1/third_party/validator/folder_organization_policy.go +++ b/mmv1/third_party/validator/folder_organization_policy.go @@ -5,48 +5,48 @@ import ( transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" ) -func resourceConverterFolderOrgPolicy() ResourceConverter { - return ResourceConverter{ +func resourceConverterFolderOrgPolicy() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "cloudresourcemanager.googleapis.com/Folder", Convert: GetFolderOrgPolicyCaiObject, MergeCreateUpdate: MergeFolderOrgPolicy, } } -func GetFolderOrgPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - name, err := assetName(d, config, "//cloudresourcemanager.googleapis.com/{{folder}}") +func GetFolderOrgPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + name, err := tpgresource.AssetName(d, config, "//cloudresourcemanager.googleapis.com/{{folder}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } if obj, err := GetFolderOrgPolicyApiObject(d, config); err == nil { - return []Asset{{ + return 
[]tpgresource.Asset{{ Name: name, Type: "cloudresourcemanager.googleapis.com/Folder", - OrgPolicy: []*OrgPolicy{&obj}, + OrgPolicy: []*tpgresource.OrgPolicy{&obj}, }}, nil } else { - return []Asset{}, err + return []tpgresource.Asset{}, err } } -func MergeFolderOrgPolicy(existing, incoming Asset) Asset { +func MergeFolderOrgPolicy(existing, incoming tpgresource.Asset) tpgresource.Asset { existing.OrgPolicy = append(existing.OrgPolicy, incoming.OrgPolicy...) return existing } -func GetFolderOrgPolicyApiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (OrgPolicy, error) { +func GetFolderOrgPolicyApiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (tpgresource.OrgPolicy, error) { listPolicy, err := expandListOrganizationPolicy(d.Get("list_policy").([]interface{})) if err != nil { - return OrgPolicy{}, err + return tpgresource.OrgPolicy{}, err } restoreDefault, err := expandRestoreOrganizationPolicy(d.Get("restore_policy").([]interface{})) if err != nil { - return OrgPolicy{}, err + return tpgresource.OrgPolicy{}, err } - policy := OrgPolicy{ + policy := tpgresource.OrgPolicy{ Constraint: canonicalOrgPolicyConstraint(d.Get("constraint").(string)), BooleanPolicy: expandBooleanOrganizationPolicy(d.Get("boolean_policy").([]interface{})), ListPolicy: listPolicy, diff --git a/mmv1/third_party/validator/iam_helpers.go b/mmv1/third_party/validator/iam_helpers.go index a1bf54bd6c32..2ac5d987d8e9 100644 --- a/mmv1/third_party/validator/iam_helpers.go +++ b/mmv1/third_party/validator/iam_helpers.go @@ -1,199 +1,72 @@ package google import ( - "encoding/json" - "fmt" - "sort" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgiamresource" "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgresource" transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - cloudresourcemanager "google.golang.org/api/cloudresourcemanager/v1" ) -// expandIamPolicyBindings is used in google__iam_policy resources. -func expandIamPolicyBindings(d tpgresource.TerraformResourceData) ([]IAMBinding, error) { - ps := d.Get("policy_data").(string) - var bindings []IAMBinding - // policy_data is (known after apply) in terraform plan, hence an empty string - if ps == "" { - return bindings, nil - } - // The policy string is just a marshaled cloudresourcemanager.Policy. - policy := &cloudresourcemanager.Policy{} - if err := json.Unmarshal([]byte(ps), policy); err != nil { - return nil, fmt.Errorf("Could not unmarshal %s: %v", ps, err) - } - - for _, b := range policy.Bindings { - bindings = append(bindings, IAMBinding{ - Role: b.Role, - Members: b.Members, - }) - } - - return bindings, nil +// ExpandIamPolicyBindings is used in google__iam_policy resources. +func expandIamPolicyBindings(d tpgresource.TerraformResourceData) ([]tpgresource.IAMBinding, error) { + return tpgiamresource.ExpandIamPolicyBindings(d) } -// expandIamRoleBindings is used in google__iam_binding resources. -func expandIamRoleBindings(d tpgresource.TerraformResourceData) ([]IAMBinding, error) { - var members []string - for _, m := range d.Get("members").(*schema.Set).List() { - members = append(members, m.(string)) - } - return []IAMBinding{ - { - Role: d.Get("role").(string), - Members: members, - }, - }, nil +// ExpandIamRoleBindings is used in google__iam_binding resources. 
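The removed expandIamPolicyBindings body above shows where the bindings come from: the resource's policy_data string is a marshalled cloudresourcemanager.Policy. A standalone sketch of that unmarshalling step (the sample JSON is made up):

package main

import (
    "encoding/json"
    "fmt"

    cloudresourcemanager "google.golang.org/api/cloudresourcemanager/v1"
)

func main() {
    // Hypothetical policy_data value as produced by a google_iam_policy data source.
    policyData := `{"bindings":[{"role":"roles/viewer","members":["user:a@example.com"]}]}`

    policy := &cloudresourcemanager.Policy{}
    if err := json.Unmarshal([]byte(policyData), policy); err != nil {
        panic(err)
    }
    for _, b := range policy.Bindings {
        fmt.Println(b.Role, b.Members) // roles/viewer [user:a@example.com]
    }
}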
+func expandIamRoleBindings(d tpgresource.TerraformResourceData) ([]tpgresource.IAMBinding, error) { + return tpgiamresource.ExpandIamRoleBindings(d) } -// expandIamMemberBindings is used in google__iam_member resources. -func expandIamMemberBindings(d tpgresource.TerraformResourceData) ([]IAMBinding, error) { - return []IAMBinding{ - { - Role: d.Get("role").(string), - Members: []string{d.Get("member").(string)}, - }, - }, nil +// ExpandIamMemberBindings is used in google__iam_member resources. +func expandIamMemberBindings(d tpgresource.TerraformResourceData) ([]tpgresource.IAMBinding, error) { + return tpgiamresource.ExpandIamMemberBindings(d) } -// mergeIamAssets merges an existing asset with the IAM bindings of an incoming -// Asset. +// MergeIamAssets merges an existing asset with the IAM bindings of an incoming +// tpgresource.Asset. func mergeIamAssets( - existing, incoming Asset, - MergeBindings func(existing, incoming []IAMBinding) []IAMBinding, -) Asset { - if existing.IAMPolicy != nil { - existing.IAMPolicy.Bindings = MergeBindings(existing.IAMPolicy.Bindings, incoming.IAMPolicy.Bindings) - } else { - existing.IAMPolicy = incoming.IAMPolicy - } - return existing + existing, incoming tpgresource.Asset, + MergeBindings func(existing, incoming []tpgresource.IAMBinding) []tpgresource.IAMBinding, +) tpgresource.Asset { + return tpgiamresource.MergeIamAssets( + existing, incoming, + MergeBindings, + ) } // incoming is the last known state of an asset prior to deletion func mergeDeleteIamAssets( - existing, incoming Asset, - MergeBindings func(existing, incoming []IAMBinding) []IAMBinding, -) Asset { - if existing.IAMPolicy != nil { - existing.IAMPolicy.Bindings = MergeBindings(existing.IAMPolicy.Bindings, incoming.IAMPolicy.Bindings) - } - return existing + existing, incoming tpgresource.Asset, + MergeBindings func(existing, incoming []tpgresource.IAMBinding) []tpgresource.IAMBinding, +) tpgresource.Asset { + return tpgiamresource.MergeDeleteIamAssets( + existing, incoming, + MergeBindings, + ) } -// mergeAdditiveBindings adds members to bindings with the same roles and adds new +// MergeAdditiveBindings adds members to bindings with the same roles and adds new // bindings for roles that dont exist. -func mergeAdditiveBindings(existing, incoming []IAMBinding) []IAMBinding { - existingIdxs := make(map[string]int) - for i, binding := range existing { - existingIdxs[binding.Role] = i - } - - for _, binding := range incoming { - if ei, ok := existingIdxs[binding.Role]; ok { - memberExists := make(map[string]bool) - for _, m := range existing[ei].Members { - memberExists[m] = true - } - for _, m := range binding.Members { - // Only add members that don't exist. - if !memberExists[m] { - existing[ei].Members = append(existing[ei].Members, m) - } - } - } else { - existing = append(existing, binding) - } - } - - // Sort members - for i := range existing { - sort.Strings(existing[i].Members) - } - - return existing +func mergeAdditiveBindings(existing, incoming []tpgresource.IAMBinding) []tpgresource.IAMBinding { + return tpgiamresource.MergeAdditiveBindings(existing, incoming) } -// mergeDeleteAdditiveBindings eliminates listed members from roles in the +// MergeDeleteAdditiveBindings eliminates listed members from roles in the // existing list. incoming is the last known state of the bindings being deleted. 
-func mergeDeleteAdditiveBindings(existing, incoming []IAMBinding) []IAMBinding { - toDelete := make(map[string]struct{}) - for _, binding := range incoming { - for _, m := range binding.Members { - key := binding.Role + "-" + m - toDelete[key] = struct{}{} - } - } - - var newExisting []IAMBinding - for _, binding := range existing { - var newMembers []string - for _, m := range binding.Members { - key := binding.Role + "-" + m - _, delete := toDelete[key] - if !delete { - newMembers = append(newMembers, m) - } - } - if newMembers != nil { - newExisting = append(newExisting, IAMBinding{ - Role: binding.Role, - Members: newMembers, - }) - } - } - - return newExisting +func mergeDeleteAdditiveBindings(existing, incoming []tpgresource.IAMBinding) []tpgresource.IAMBinding { + return tpgiamresource.MergeDeleteAdditiveBindings(existing, incoming) } -// mergeAuthoritativeBindings clobbers members to bindings with the same roles +// MergeAuthoritativeBindings clobbers members to bindings with the same roles // and adds new bindings for roles that dont exist. -func mergeAuthoritativeBindings(existing, incoming []IAMBinding) []IAMBinding { - existingIdxs := make(map[string]int) - for i, binding := range existing { - existingIdxs[binding.Role] = i - } - - for _, binding := range incoming { - if ei, ok := existingIdxs[binding.Role]; ok { - existing[ei].Members = binding.Members - } else { - existing = append(existing, binding) - } - } - - // Sort members - for i := range existing { - sort.Strings(existing[i].Members) - } - - return existing +func mergeAuthoritativeBindings(existing, incoming []tpgresource.IAMBinding) []tpgresource.IAMBinding { + return tpgiamresource.MergeAuthoritativeBindings(existing, incoming) } -// mergeDeleteAuthoritativeBindings eliminates any bindings with matching roles +// MergeDeleteAuthoritativeBindings eliminates any bindings with matching roles // in the existing list. incoming is the last known state of the bindings being // deleted. 
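The comments above describe the merge semantics now provided by the tpgiamresource package. A small sketch, assuming the exported helpers keep the behaviour of the removed in-tree implementations (names and package path taken from the calls in this patch):

package main

import (
    "fmt"

    "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgiamresource"
    "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgresource"
)

func main() {
    existing := []tpgresource.IAMBinding{{Role: "roles/viewer", Members: []string{"user:a@example.com"}}}
    incoming := []tpgresource.IAMBinding{{Role: "roles/viewer", Members: []string{"user:b@example.com"}}}

    // Additive merge: members are unioned per role.
    merged := tpgiamresource.MergeAdditiveBindings(existing, incoming)
    fmt.Println(merged) // expected: [{roles/viewer [user:a@example.com user:b@example.com]}]

    // Delete-additive merge: the incoming role/member pairs are removed again.
    fmt.Println(tpgiamresource.MergeDeleteAdditiveBindings(merged, incoming)) // expected: [{roles/viewer [user:a@example.com]}]
}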
-func mergeDeleteAuthoritativeBindings(existing, incoming []IAMBinding) []IAMBinding { - toDelete := make(map[string]struct{}) - for _, binding := range incoming { - key := binding.Role - toDelete[key] = struct{}{} - } - - var newExisting []IAMBinding - for _, binding := range existing { - key := binding.Role - _, delete := toDelete[key] - if !delete { - newExisting = append(newExisting, binding) - } - } - - return newExisting +func mergeDeleteAuthoritativeBindings(existing, incoming []tpgresource.IAMBinding) []tpgresource.IAMBinding { + return tpgiamresource.MergeDeleteAuthoritativeBindings(existing, incoming) } func fetchIamPolicy( @@ -202,39 +75,12 @@ func fetchIamPolicy( config *transport_tpg.Config, assetNameTmpl string, assetType string, -) (Asset, error) { - updater, err := newUpdaterFunc(d, config) - if err != nil { - return Asset{}, err - } - - iamPolicy, err := updater.GetResourceIamPolicy() - if transport_tpg.IsGoogleApiErrorWithCode(err, 403) || transport_tpg.IsGoogleApiErrorWithCode(err, 404) { - return Asset{}, ErrResourceInaccessible - } - - if err != nil { - return Asset{}, err - } - - var bindings []IAMBinding - for _, b := range iamPolicy.Bindings { - bindings = append( - bindings, - IAMBinding{ - Role: b.Role, - Members: b.Members, - }, - ) - } - - name, err := assetName(d, config, assetNameTmpl) - - return Asset{ - Name: name, - Type: assetType, - IAMPolicy: &IAMPolicy{ - Bindings: bindings, - }, - }, nil +) (tpgresource.Asset, error) { + return tpgiamresource.FetchIamPolicy( + newUpdaterFunc, + d, + config, + assetNameTmpl, + assetType, + ) } diff --git a/mmv1/third_party/validator/json_map.go b/mmv1/third_party/validator/json_map.go index 3f6ee4e0f618..0ccaec521c21 100644 --- a/mmv1/third_party/validator/json_map.go +++ b/mmv1/third_party/validator/json_map.go @@ -1,22 +1,11 @@ package google import ( - "encoding/json" - "fmt" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgresource" ) -// jsonMap converts a given value to a map[string]interface{} that +// JsonMap converts a given value to a map[string]interface{} that // matches its JSON format. 
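The JsonMap helper described above is just a marshal/unmarshal round trip, as the removed body below shows; a self-contained equivalent using only the standard library:

package main

import (
    "encoding/json"
    "fmt"
)

// jsonMap mirrors the removed helper: marshal any value, then unmarshal it
// into a generic map so nested structs become map[string]interface{}.
func jsonMap(x interface{}) (map[string]interface{}, error) {
    jsn, err := json.Marshal(x)
    if err != nil {
        return nil, fmt.Errorf("marshalling: %v", err)
    }
    m := make(map[string]interface{})
    if err := json.Unmarshal(jsn, &m); err != nil {
        return nil, fmt.Errorf("unmarshalling: %v", err)
    }
    return m, nil
}

func main() {
    type disk struct {
        Name   string `json:"name"`
        SizeGb int    `json:"sizeGb"`
    }
    m, _ := jsonMap(disk{Name: "boot", SizeGb: 10})
    fmt.Println(m) // map[name:boot sizeGb:10]
}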
func jsonMap(x interface{}) (map[string]interface{}, error) { - jsn, err := json.Marshal(x) - if err != nil { - return nil, fmt.Errorf("marshalling: %v", err) - } - - m := make(map[string]interface{}) - if err := json.Unmarshal(jsn, &m); err != nil { - return nil, fmt.Errorf("unmarshalling: %v", err) - } - - return m, nil + return tpgresource.JsonMap(x) } diff --git a/mmv1/third_party/validator/kms_crypto_key_iam.go b/mmv1/third_party/validator/kms_crypto_key_iam.go index d73e73a13bab..3e9b09b638d6 100644 --- a/mmv1/third_party/validator/kms_crypto_key_iam.go +++ b/mmv1/third_party/validator/kms_crypto_key_iam.go @@ -4,20 +4,22 @@ import ( "fmt" "strings" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/services/kms" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgiamresource" "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgresource" transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" ) -func resourceConverterKmsCryptoKeyIamPolicy() ResourceConverter { - return ResourceConverter{ +func resourceConverterKmsCryptoKeyIamPolicy() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "cloudkms.googleapis.com/CryptoKey", Convert: GetKmsCryptoKeyIamPolicyCaiObject, MergeCreateUpdate: MergeKmsCryptoKeyIamPolicy, } } -func resourceConverterKmsCryptoKeyIamBinding() ResourceConverter { - return ResourceConverter{ +func resourceConverterKmsCryptoKeyIamBinding() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "cloudkms.googleapis.com/CryptoKey", Convert: GetKmsCryptoKeyIamBindingCaiObject, FetchFullResource: FetchKmsCryptoKeyIamPolicy, @@ -26,8 +28,8 @@ func resourceConverterKmsCryptoKeyIamBinding() ResourceConverter { } } -func resourceConverterKmsCryptoKeyIamMember() ResourceConverter { - return ResourceConverter{ +func resourceConverterKmsCryptoKeyIamMember() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "cloudkms.googleapis.com/CryptoKey", Convert: GetKmsCryptoKeyIamMemberCaiObject, FetchFullResource: FetchKmsCryptoKeyIamPolicy, @@ -36,75 +38,75 @@ func resourceConverterKmsCryptoKeyIamMember() ResourceConverter { } } -func GetKmsCryptoKeyIamPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newKmsCryptoKeyIamAsset(d, config, expandIamPolicyBindings) +func GetKmsCryptoKeyIamPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newKmsCryptoKeyIamAsset(d, config, tpgiamresource.ExpandIamPolicyBindings) } -func GetKmsCryptoKeyIamBindingCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newKmsCryptoKeyIamAsset(d, config, expandIamRoleBindings) +func GetKmsCryptoKeyIamBindingCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newKmsCryptoKeyIamAsset(d, config, tpgiamresource.ExpandIamRoleBindings) } -func GetKmsCryptoKeyIamMemberCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newKmsCryptoKeyIamAsset(d, config, expandIamMemberBindings) +func GetKmsCryptoKeyIamMemberCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, 
error) { + return newKmsCryptoKeyIamAsset(d, config, tpgiamresource.ExpandIamMemberBindings) } -func MergeKmsCryptoKeyIamPolicy(existing, incoming Asset) Asset { +func MergeKmsCryptoKeyIamPolicy(existing, incoming tpgresource.Asset) tpgresource.Asset { existing.IAMPolicy = incoming.IAMPolicy return existing } -func MergeKmsCryptoKeyIamBinding(existing, incoming Asset) Asset { - return mergeIamAssets(existing, incoming, mergeAuthoritativeBindings) +func MergeKmsCryptoKeyIamBinding(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeIamAssets(existing, incoming, tpgiamresource.MergeAuthoritativeBindings) } -func MergeKmsCryptoKeyIamBindingDelete(existing, incoming Asset) Asset { - return mergeDeleteIamAssets(existing, incoming, mergeDeleteAuthoritativeBindings) +func MergeKmsCryptoKeyIamBindingDelete(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeDeleteIamAssets(existing, incoming, tpgiamresource.MergeDeleteAuthoritativeBindings) } -func MergeKmsCryptoKeyIamMember(existing, incoming Asset) Asset { - return mergeIamAssets(existing, incoming, mergeAdditiveBindings) +func MergeKmsCryptoKeyIamMember(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeIamAssets(existing, incoming, tpgiamresource.MergeAdditiveBindings) } -func MergeKmsCryptoKeyIamMemberDelete(existing, incoming Asset) Asset { - return mergeDeleteIamAssets(existing, incoming, mergeDeleteAdditiveBindings) +func MergeKmsCryptoKeyIamMemberDelete(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeDeleteIamAssets(existing, incoming, tpgiamresource.MergeDeleteAdditiveBindings) } func newKmsCryptoKeyIamAsset( d tpgresource.TerraformResourceData, config *transport_tpg.Config, - expandBindings func(d tpgresource.TerraformResourceData) ([]IAMBinding, error), -) ([]Asset, error) { + expandBindings func(d tpgresource.TerraformResourceData) ([]tpgresource.IAMBinding, error), +) ([]tpgresource.Asset, error) { bindings, err := expandBindings(d) if err != nil { - return []Asset{}, fmt.Errorf("expanding bindings: %v", err) + return []tpgresource.Asset{}, fmt.Errorf("expanding bindings: %v", err) } assetNameTemplate := constructAssetNameTemplate(d) - name, err := assetName(d, config, assetNameTemplate) + name, err := tpgresource.AssetName(d, config, assetNameTemplate) if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: "cloudkms.googleapis.com/CryptoKey", - IAMPolicy: &IAMPolicy{ + IAMPolicy: &tpgresource.IAMPolicy{ Bindings: bindings, }, }}, nil } -func FetchKmsCryptoKeyIamPolicy(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (Asset, error) { +func FetchKmsCryptoKeyIamPolicy(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (tpgresource.Asset, error) { // Check if the identity field returns a value if _, ok := d.GetOk("crypto_key_id"); !ok { - return Asset{}, ErrEmptyIdentityField + return tpgresource.Asset{}, tpgresource.ErrEmptyIdentityField } assetNameTemplate := constructAssetNameTemplate(d) // We use crypto_key_id in the asset name template to be consistent with newKmsCryptoKeyIamAsset. 
- return fetchIamPolicy( - NewKmsCryptoKeyIamUpdater, + return tpgiamresource.FetchIamPolicy( + kms.NewKmsCryptoKeyIamUpdater, d, config, assetNameTemplate, // asset name diff --git a/mmv1/third_party/validator/kms_key_ring_iam.go b/mmv1/third_party/validator/kms_key_ring_iam.go index 400e1dac0ebc..bc12aab93d30 100644 --- a/mmv1/third_party/validator/kms_key_ring_iam.go +++ b/mmv1/third_party/validator/kms_key_ring_iam.go @@ -4,20 +4,22 @@ import ( "fmt" "strings" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/services/kms" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgiamresource" "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgresource" transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" ) -func resourceConverterKmsKeyRingIamPolicy() ResourceConverter { - return ResourceConverter{ +func resourceConverterKmsKeyRingIamPolicy() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "cloudkms.googleapis.com/KeyRing", Convert: GetKmsKeyRingIamPolicyCaiObject, MergeCreateUpdate: MergeKmsKeyRingIamPolicy, } } -func resourceConverterKmsKeyRingIamBinding() ResourceConverter { - return ResourceConverter{ +func resourceConverterKmsKeyRingIamBinding() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "cloudkms.googleapis.com/KeyRing", Convert: GetKmsKeyRingIamBindingCaiObject, FetchFullResource: FetchKmsKeyRingIamPolicy, @@ -26,8 +28,8 @@ func resourceConverterKmsKeyRingIamBinding() ResourceConverter { } } -func resourceConverterKmsKeyRingIamMember() ResourceConverter { - return ResourceConverter{ +func resourceConverterKmsKeyRingIamMember() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "cloudkms.googleapis.com/KeyRing", Convert: GetKmsKeyRingIamMemberCaiObject, FetchFullResource: FetchKmsKeyRingIamPolicy, @@ -36,75 +38,75 @@ func resourceConverterKmsKeyRingIamMember() ResourceConverter { } } -func GetKmsKeyRingIamPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newKmsKeyRingIamAsset(d, config, expandIamPolicyBindings) +func GetKmsKeyRingIamPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newKmsKeyRingIamAsset(d, config, tpgiamresource.ExpandIamPolicyBindings) } -func GetKmsKeyRingIamBindingCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newKmsKeyRingIamAsset(d, config, expandIamRoleBindings) +func GetKmsKeyRingIamBindingCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newKmsKeyRingIamAsset(d, config, tpgiamresource.ExpandIamRoleBindings) } -func GetKmsKeyRingIamMemberCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newKmsKeyRingIamAsset(d, config, expandIamMemberBindings) +func GetKmsKeyRingIamMemberCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newKmsKeyRingIamAsset(d, config, tpgiamresource.ExpandIamMemberBindings) } -func MergeKmsKeyRingIamPolicy(existing, incoming Asset) Asset { +func MergeKmsKeyRingIamPolicy(existing, incoming tpgresource.Asset) tpgresource.Asset { 
existing.IAMPolicy = incoming.IAMPolicy return existing } -func MergeKmsKeyRingIamBinding(existing, incoming Asset) Asset { - return mergeIamAssets(existing, incoming, mergeAuthoritativeBindings) +func MergeKmsKeyRingIamBinding(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeIamAssets(existing, incoming, tpgiamresource.MergeAuthoritativeBindings) } -func MergeKmsKeyRingIamBindingDelete(existing, incoming Asset) Asset { - return mergeDeleteIamAssets(existing, incoming, mergeDeleteAuthoritativeBindings) +func MergeKmsKeyRingIamBindingDelete(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeDeleteIamAssets(existing, incoming, tpgiamresource.MergeDeleteAuthoritativeBindings) } -func MergeKmsKeyRingIamMember(existing, incoming Asset) Asset { - return mergeIamAssets(existing, incoming, mergeAdditiveBindings) +func MergeKmsKeyRingIamMember(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeIamAssets(existing, incoming, tpgiamresource.MergeAdditiveBindings) } -func MergeKmsKeyRingIamMemberDelete(existing, incoming Asset) Asset { - return mergeDeleteIamAssets(existing, incoming, mergeDeleteAdditiveBindings) +func MergeKmsKeyRingIamMemberDelete(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeDeleteIamAssets(existing, incoming, tpgiamresource.MergeDeleteAdditiveBindings) } func newKmsKeyRingIamAsset( d tpgresource.TerraformResourceData, config *transport_tpg.Config, - expandBindings func(d tpgresource.TerraformResourceData) ([]IAMBinding, error), -) ([]Asset, error) { + expandBindings func(d tpgresource.TerraformResourceData) ([]tpgresource.IAMBinding, error), +) ([]tpgresource.Asset, error) { bindings, err := expandBindings(d) if err != nil { - return []Asset{}, fmt.Errorf("expanding bindings: %v", err) + return []tpgresource.Asset{}, fmt.Errorf("expanding bindings: %v", err) } assetNameTemplate := constructKmsKeyRingIAMAssetNameTemplate(d) - name, err := assetName(d, config, assetNameTemplate) + name, err := tpgresource.AssetName(d, config, assetNameTemplate) if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: "cloudkms.googleapis.com/KeyRing", - IAMPolicy: &IAMPolicy{ + IAMPolicy: &tpgresource.IAMPolicy{ Bindings: bindings, }, }}, nil } -func FetchKmsKeyRingIamPolicy(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (Asset, error) { +func FetchKmsKeyRingIamPolicy(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (tpgresource.Asset, error) { // Check if the identity field returns a value if _, ok := d.GetOk("key_ring_id"); !ok { - return Asset{}, ErrEmptyIdentityField + return tpgresource.Asset{}, tpgresource.ErrEmptyIdentityField } assetNameTemplate := constructKmsKeyRingIAMAssetNameTemplate(d) // We use key_ring_id in the asset name template to be consistent with newKmsKeyRingIamAsset. 
- return fetchIamPolicy( - NewKmsKeyRingIamUpdater, + return tpgiamresource.FetchIamPolicy( + kms.NewKmsKeyRingIamUpdater, d, config, assetNameTemplate, // asset name diff --git a/mmv1/third_party/validator/monitoring_slo_helper.go b/mmv1/third_party/validator/monitoring_slo_helper.go index 959fe2b08e22..736af0795866 100644 --- a/mmv1/third_party/validator/monitoring_slo_helper.go +++ b/mmv1/third_party/validator/monitoring_slo_helper.go @@ -1,4 +1,4 @@ -package google +package monitoring import ( "fmt" diff --git a/mmv1/third_party/validator/org_policy_policy.go b/mmv1/third_party/validator/org_policy_policy.go index e811d47eca17..b28ea37b7339 100644 --- a/mmv1/third_party/validator/org_policy_policy.go +++ b/mmv1/third_party/validator/org_policy_policy.go @@ -8,49 +8,49 @@ import ( transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" ) -func resourceConverterOrgPolicyPolicy() ResourceConverter { - return ResourceConverter{ +func resourceConverterOrgPolicyPolicy() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ Convert: GetV2OrgPoliciesCaiObject, MergeCreateUpdate: MergeV2OrgPolicies, } } -func GetV2OrgPoliciesCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { +func GetV2OrgPoliciesCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { assetNamePattern, assetType, err := getAssetNameAndTypeFromParent(d.Get("parent").(string)) if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } - name, err := assetName(d, config, assetNamePattern) + name, err := tpgresource.AssetName(d, config, assetNamePattern) if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } if obj, err := GetV2OrgPoliciesApiObject(d, config); err == nil { - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: assetType, - V2OrgPolicies: []*V2OrgPolicies{&obj}, + V2OrgPolicies: []*tpgresource.V2OrgPolicies{&obj}, }}, nil } else { - return []Asset{}, err + return []tpgresource.Asset{}, err } } -func GetV2OrgPoliciesApiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (V2OrgPolicies, error) { +func GetV2OrgPoliciesApiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (tpgresource.V2OrgPolicies, error) { spec, err := expandSpecV2OrgPolicies(d.Get("spec").([]interface{})) if err != nil { - return V2OrgPolicies{}, err + return tpgresource.V2OrgPolicies{}, err } - return V2OrgPolicies{ + return tpgresource.V2OrgPolicies{ Name: d.Get("name").(string), PolicySpec: spec, }, nil } -func MergeV2OrgPolicies(existing, incoming Asset) Asset { +func MergeV2OrgPolicies(existing, incoming tpgresource.Asset) tpgresource.Asset { existing.Resource = incoming.Resource return existing } @@ -68,7 +68,7 @@ func getAssetNameAndTypeFromParent(parent string) (assetName string, assetType s } } -func expandSpecV2OrgPolicies(configured []interface{}) (*PolicySpec, error) { +func expandSpecV2OrgPolicies(configured []interface{}) (*tpgresource.PolicySpec, error) { if len(configured) == 0 || configured[0] == nil { return nil, nil } @@ -77,10 +77,10 @@ func expandSpecV2OrgPolicies(configured []interface{}) (*PolicySpec, error) { policyRules, err := expandPolicyRulesSpec(specMap["rules"].([]interface{})) if err != nil { - return &PolicySpec{}, err + return &tpgresource.PolicySpec{}, err } - return &PolicySpec{ + return &tpgresource.PolicySpec{ Etag: 
specMap["etag"].(string), PolicyRules: policyRules, InheritFromParent: specMap["inherit_from_parent"].(bool), @@ -89,12 +89,12 @@ func expandSpecV2OrgPolicies(configured []interface{}) (*PolicySpec, error) { } -func expandPolicyRulesSpec(configured []interface{}) ([]*PolicyRule, error) { +func expandPolicyRulesSpec(configured []interface{}) ([]*tpgresource.PolicyRule, error) { if len(configured) == 0 || configured[0] == nil { return nil, nil } - var policyRules []*PolicyRule + var policyRules []*tpgresource.PolicyRule for i := 0; i < len(configured); i++ { policyRule, err := expandPolicyRulePolicyRules(configured[i]) if err != nil { @@ -107,34 +107,34 @@ func expandPolicyRulesSpec(configured []interface{}) ([]*PolicyRule, error) { } -func expandPolicyRulePolicyRules(configured interface{}) (*PolicyRule, error) { +func expandPolicyRulePolicyRules(configured interface{}) (*tpgresource.PolicyRule, error) { policyRuleMap := configured.(map[string]interface{}) values, err := expandValuesPolicyRule(policyRuleMap["values"].([]interface{})) if err != nil { - return &PolicyRule{}, err + return &tpgresource.PolicyRule{}, err } allowAll, err := convertStringToBool(policyRuleMap["allow_all"].(string)) if err != nil { - return &PolicyRule{}, err + return &tpgresource.PolicyRule{}, err } denyAll, err := convertStringToBool(policyRuleMap["deny_all"].(string)) if err != nil { - return &PolicyRule{}, err + return &tpgresource.PolicyRule{}, err } enforce, err := convertStringToBool(policyRuleMap["enforce"].(string)) if err != nil { - return &PolicyRule{}, err + return &tpgresource.PolicyRule{}, err } condition, err := expandConditionPolicyRule(policyRuleMap["condition"].([]interface{})) if err != nil { - return &PolicyRule{}, err + return &tpgresource.PolicyRule{}, err } - return &PolicyRule{ + return &tpgresource.PolicyRule{ Values: values, AllowAll: allowAll, DenyAll: denyAll, @@ -143,23 +143,23 @@ func expandPolicyRulePolicyRules(configured interface{}) (*PolicyRule, error) { }, nil } -func expandValuesPolicyRule(configured []interface{}) (*StringValues, error) { +func expandValuesPolicyRule(configured []interface{}) (*tpgresource.StringValues, error) { if len(configured) == 0 || configured[0] == nil { return nil, nil } valuesMap := configured[0].(map[string]interface{}) - return &StringValues{ + return &tpgresource.StringValues{ AllowedValues: convertInterfaceToStringArray(valuesMap["allowed_values"].([]interface{})), DeniedValues: convertInterfaceToStringArray(valuesMap["denied_values"].([]interface{})), }, nil } -func expandConditionPolicyRule(configured []interface{}) (*Expr, error) { +func expandConditionPolicyRule(configured []interface{}) (*tpgresource.Expr, error) { if len(configured) == 0 || configured[0] == nil { return nil, nil } conditionMap := configured[0].(map[string]interface{}) - return &Expr{ + return &tpgresource.Expr{ Expression: conditionMap["expression"].(string), Title: conditionMap["title"].(string), Description: conditionMap["description"].(string), diff --git a/mmv1/third_party/validator/organization_iam.go b/mmv1/third_party/validator/organization_iam.go index 6739612b15da..3f7e4e59f925 100644 --- a/mmv1/third_party/validator/organization_iam.go +++ b/mmv1/third_party/validator/organization_iam.go @@ -3,20 +3,21 @@ package google import ( "fmt" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgiamresource" "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgresource" 
transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" ) -func resourceConverterOrganizationIamPolicy() ResourceConverter { - return ResourceConverter{ +func resourceConverterOrganizationIamPolicy() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "cloudresourcemanager.googleapis.com/Organization", Convert: GetOrganizationIamPolicyCaiObject, MergeCreateUpdate: MergeOrganizationIamPolicy, } } -func resourceConverterOrganizationIamBinding() ResourceConverter { - return ResourceConverter{ +func resourceConverterOrganizationIamBinding() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "cloudresourcemanager.googleapis.com/Organization", Convert: GetOrganizationIamBindingCaiObject, FetchFullResource: FetchOrganizationIamPolicy, @@ -25,8 +26,8 @@ func resourceConverterOrganizationIamBinding() ResourceConverter { } } -func resourceConverterOrganizationIamMember() ResourceConverter { - return ResourceConverter{ +func resourceConverterOrganizationIamMember() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "cloudresourcemanager.googleapis.com/Organization", Convert: GetOrganizationIamMemberCaiObject, FetchFullResource: FetchOrganizationIamPolicy, @@ -35,65 +36,65 @@ func resourceConverterOrganizationIamMember() ResourceConverter { } } -func GetOrganizationIamPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newOrganizationIamAsset(d, config, expandIamPolicyBindings) +func GetOrganizationIamPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newOrganizationIamAsset(d, config, tpgiamresource.ExpandIamPolicyBindings) } -func GetOrganizationIamBindingCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newOrganizationIamAsset(d, config, expandIamRoleBindings) +func GetOrganizationIamBindingCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newOrganizationIamAsset(d, config, tpgiamresource.ExpandIamRoleBindings) } -func GetOrganizationIamMemberCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newOrganizationIamAsset(d, config, expandIamMemberBindings) +func GetOrganizationIamMemberCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newOrganizationIamAsset(d, config, tpgiamresource.ExpandIamMemberBindings) } -func MergeOrganizationIamPolicy(existing, incoming Asset) Asset { +func MergeOrganizationIamPolicy(existing, incoming tpgresource.Asset) tpgresource.Asset { existing.IAMPolicy = incoming.IAMPolicy return existing } -func MergeOrganizationIamBinding(existing, incoming Asset) Asset { - return mergeIamAssets(existing, incoming, mergeAuthoritativeBindings) +func MergeOrganizationIamBinding(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeIamAssets(existing, incoming, tpgiamresource.MergeAuthoritativeBindings) } -func MergeOrganizationIamBindingDelete(existing, incoming Asset) Asset { - return mergeDeleteIamAssets(existing, incoming, mergeDeleteAuthoritativeBindings) +func MergeOrganizationIamBindingDelete(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeDeleteIamAssets(existing, incoming, 
tpgiamresource.MergeDeleteAuthoritativeBindings) } -func MergeOrganizationIamMember(existing, incoming Asset) Asset { - return mergeIamAssets(existing, incoming, mergeAdditiveBindings) +func MergeOrganizationIamMember(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeIamAssets(existing, incoming, tpgiamresource.MergeAdditiveBindings) } -func MergeOrganizationIamMemberDelete(existing, incoming Asset) Asset { - return mergeDeleteIamAssets(existing, incoming, mergeDeleteAdditiveBindings) +func MergeOrganizationIamMemberDelete(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeDeleteIamAssets(existing, incoming, tpgiamresource.MergeDeleteAdditiveBindings) } func newOrganizationIamAsset( d tpgresource.TerraformResourceData, config *transport_tpg.Config, - expandBindings func(d tpgresource.TerraformResourceData) ([]IAMBinding, error), -) ([]Asset, error) { + expandBindings func(d tpgresource.TerraformResourceData) ([]tpgresource.IAMBinding, error), +) ([]tpgresource.Asset, error) { bindings, err := expandBindings(d) if err != nil { - return []Asset{}, fmt.Errorf("expanding bindings: %v", err) + return []tpgresource.Asset{}, fmt.Errorf("expanding bindings: %v", err) } - name, err := assetName(d, config, "//cloudresourcemanager.googleapis.com/organizations/{{org_id}}") + name, err := tpgresource.AssetName(d, config, "//cloudresourcemanager.googleapis.com/organizations/{{org_id}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: "cloudresourcemanager.googleapis.com/Organization", - IAMPolicy: &IAMPolicy{ + IAMPolicy: &tpgresource.IAMPolicy{ Bindings: bindings, }, }}, nil } -func FetchOrganizationIamPolicy(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (Asset, error) { - return fetchIamPolicy( +func FetchOrganizationIamPolicy(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (tpgresource.Asset, error) { + return tpgiamresource.FetchIamPolicy( NewOrganizationIamUpdater, d, config, diff --git a/mmv1/third_party/validator/organization_iam_custom_role.go b/mmv1/third_party/validator/organization_iam_custom_role.go index b3cab1069e45..5eb6e1aaddab 100644 --- a/mmv1/third_party/validator/organization_iam_custom_role.go +++ b/mmv1/third_party/validator/organization_iam_custom_role.go @@ -11,23 +11,23 @@ import ( const OrganizationIAMCustomRoleAssetType string = "iam.googleapis.com/Role" -func resourceConverterOrganizationIAMCustomRole() ResourceConverter { - return ResourceConverter{ +func resourceConverterOrganizationIAMCustomRole() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: OrganizationIAMCustomRoleAssetType, Convert: GetOrganizationIAMCustomRoleCaiObject, } } -func GetOrganizationIAMCustomRoleCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - name, err := assetName(d, config, "//iam.googleapis.com/organizations/{{org_id}}/roles/{{role_id}}") +func GetOrganizationIAMCustomRoleCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + name, err := tpgresource.AssetName(d, config, "//iam.googleapis.com/organizations/{{org_id}}/roles/{{role_id}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } if obj, err := GetOrganizationIAMCustomRoleApiObject(d, config); err == nil { - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: 
OrganizationIAMCustomRoleAssetType, - Resource: &AssetResource{ + Resource: &tpgresource.AssetResource{ Version: "v1", DiscoveryDocumentURI: "https://iam.googleapis.com/$discovery/rest?version=v1", DiscoveryName: "Role", @@ -35,7 +35,7 @@ func GetOrganizationIAMCustomRoleCaiObject(d tpgresource.TerraformResourceData, }, }}, nil } else { - return []Asset{}, err + return []tpgresource.Asset{}, err } } diff --git a/mmv1/third_party/validator/organization_policy.go b/mmv1/third_party/validator/organization_policy.go index e9676b608fa4..89763b8a00ad 100644 --- a/mmv1/third_party/validator/organization_policy.go +++ b/mmv1/third_party/validator/organization_policy.go @@ -5,48 +5,48 @@ import ( transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" ) -func resourceConverterOrganizationPolicy() ResourceConverter { - return ResourceConverter{ +func resourceConverterOrganizationPolicy() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "cloudresourcemanager.googleapis.com/Organization", Convert: GetOrganizationPolicyCaiObject, MergeCreateUpdate: MergeOrganizationPolicy, } } -func GetOrganizationPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - name, err := assetName(d, config, "//cloudresourcemanager.googleapis.com/organizations/{{org_id}}") +func GetOrganizationPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + name, err := tpgresource.AssetName(d, config, "//cloudresourcemanager.googleapis.com/organizations/{{org_id}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } if obj, err := GetOrganizationPolicyApiObject(d, config); err == nil { - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: "cloudresourcemanager.googleapis.com/Organization", - OrgPolicy: []*OrgPolicy{&obj}, + OrgPolicy: []*tpgresource.OrgPolicy{&obj}, }}, nil } else { - return []Asset{}, err + return []tpgresource.Asset{}, err } } -func MergeOrganizationPolicy(existing, incoming Asset) Asset { +func MergeOrganizationPolicy(existing, incoming tpgresource.Asset) tpgresource.Asset { existing.OrgPolicy = append(existing.OrgPolicy, incoming.OrgPolicy...) 
return existing } -func GetOrganizationPolicyApiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (OrgPolicy, error) { +func GetOrganizationPolicyApiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (tpgresource.OrgPolicy, error) { listPolicy, err := expandListOrganizationPolicy(d.Get("list_policy").([]interface{})) if err != nil { - return OrgPolicy{}, err + return tpgresource.OrgPolicy{}, err } restoreDefault, err := expandRestoreOrganizationPolicy(d.Get("restore_policy").([]interface{})) if err != nil { - return OrgPolicy{}, err + return tpgresource.OrgPolicy{}, err } - policy := OrgPolicy{ + policy := tpgresource.OrgPolicy{ Constraint: canonicalOrgPolicyConstraint(d.Get("constraint").(string)), BooleanPolicy: expandBooleanOrganizationPolicy(d.Get("boolean_policy").([]interface{})), ListPolicy: listPolicy, diff --git a/mmv1/third_party/validator/project.go b/mmv1/third_party/validator/project.go index 20a9c8b6cf20..0e89015476dc 100644 --- a/mmv1/third_party/validator/project.go +++ b/mmv1/third_party/validator/project.go @@ -10,22 +10,22 @@ import ( "google.golang.org/api/cloudresourcemanager/v1" ) -func resourceConverterProject() ResourceConverter { - return ResourceConverter{ +func resourceConverterProject() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "cloudresourcemanager.googleapis.com/Project", Convert: GetProjectCaiObject, MergeCreateUpdate: MergeProject, } } -func resourceConverterProjectBillingInfo() ResourceConverter { - return ResourceConverter{ +func resourceConverterProjectBillingInfo() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "cloudbilling.googleapis.com/ProjectBillingInfo", Convert: GetProjectBillingInfoCaiObject, } } -func GetProjectCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { +func GetProjectCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { // use project number if it's available; otherwise, fill in project id so that we // keep the CAI assets apart for different uncreated projects. 
var linkTmpl string @@ -34,15 +34,15 @@ func GetProjectCaiObject(d tpgresource.TerraformResourceData, config *transport_ } else { linkTmpl = "//cloudresourcemanager.googleapis.com/projects/{{project_id_or_project}}" } - name, err := assetName(d, config, linkTmpl) + name, err := tpgresource.AssetName(d, config, linkTmpl) if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } if obj, err := GetProjectApiObject(d, config); err == nil { - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: "cloudresourcemanager.googleapis.com/Project", - Resource: &AssetResource{ + Resource: &tpgresource.AssetResource{ Version: "v1", DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/compute/v1/rest", DiscoveryName: "Project", @@ -50,7 +50,7 @@ func GetProjectCaiObject(d tpgresource.TerraformResourceData, config *transport_ }, }}, nil } else { - return []Asset{}, err + return []tpgresource.Asset{}, err } } @@ -70,7 +70,7 @@ func GetProjectApiObject(d tpgresource.TerraformResourceData, config *transport_ project.Labels = tpgresource.ExpandLabels(d) } - return jsonMap(project) + return tpgresource.JsonMap(project) } func getParentResourceId(d tpgresource.TerraformResourceData, p *cloudresourcemanager.Project) error { @@ -98,7 +98,7 @@ func getParentResourceId(d tpgresource.TerraformResourceData, p *cloudresourcema return nil } -func GetProjectBillingInfoCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { +func GetProjectBillingInfoCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { // use project number if it's available; otherwise, fill in project id so that we // keep the CAI assets apart for different uncreated projects. var linkTmpl string @@ -107,16 +107,16 @@ func GetProjectBillingInfoCaiObject(d tpgresource.TerraformResourceData, config } else { linkTmpl = "//cloudbilling.googleapis.com/projects/{{project_id_or_project}}/billingInfo" } - name, err := assetName(d, config, linkTmpl) + name, err := tpgresource.AssetName(d, config, linkTmpl) if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } project := strings.Split(name, "/")[4] if obj, err := GetProjectBillingInfoApiObject(d, project); err == nil { - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: "cloudbilling.googleapis.com/ProjectBillingInfo", - Resource: &AssetResource{ + Resource: &tpgresource.AssetResource{ Version: "v1", DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/cloudbilling/v1/rest", DiscoveryName: "ProjectBillingInfo", @@ -124,7 +124,7 @@ func GetProjectBillingInfoCaiObject(d tpgresource.TerraformResourceData, config }}, }, nil } else { - return []Asset{}, err + return []tpgresource.Asset{}, err } } @@ -132,7 +132,7 @@ func GetProjectBillingInfoApiObject(d tpgresource.TerraformResourceData, project if _, ok := d.GetOk("billing_account"); !ok { // TODO: If the project already exists, we could ask the API about it's // billing info here. 
- return nil, ErrNoConversion + return nil, tpgresource.ErrNoConversion } ba := &cloudbilling.ProjectBillingInfo{ @@ -141,10 +141,10 @@ func GetProjectBillingInfoApiObject(d tpgresource.TerraformResourceData, project ProjectId: d.Get("project_id").(string), } - return jsonMap(ba) + return tpgresource.JsonMap(ba) } -func MergeProject(existing, incoming Asset) Asset { +func MergeProject(existing, incoming tpgresource.Asset) tpgresource.Asset { existing.Resource = incoming.Resource return existing } diff --git a/mmv1/third_party/validator/project_iam.go b/mmv1/third_party/validator/project_iam.go index d8214024dd99..5a0fcd6d1332 100644 --- a/mmv1/third_party/validator/project_iam.go +++ b/mmv1/third_party/validator/project_iam.go @@ -3,20 +3,21 @@ package google import ( "fmt" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgiamresource" "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgresource" transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" ) -func resourceConverterProjectIamPolicy() ResourceConverter { - return ResourceConverter{ +func resourceConverterProjectIamPolicy() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "cloudresourcemanager.googleapis.com/Project", Convert: GetProjectIamPolicyCaiObject, MergeCreateUpdate: MergeProjectIamPolicy, } } -func resourceConverterProjectIamBinding() ResourceConverter { - return ResourceConverter{ +func resourceConverterProjectIamBinding() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "cloudresourcemanager.googleapis.com/Project", Convert: GetProjectIamBindingCaiObject, FetchFullResource: FetchProjectIamPolicy, @@ -25,8 +26,8 @@ func resourceConverterProjectIamBinding() ResourceConverter { } } -func resourceConverterProjectIamMember() ResourceConverter { - return ResourceConverter{ +func resourceConverterProjectIamMember() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "cloudresourcemanager.googleapis.com/Project", Convert: GetProjectIamMemberCaiObject, FetchFullResource: FetchProjectIamPolicy, @@ -35,72 +36,72 @@ func resourceConverterProjectIamMember() ResourceConverter { } } -func GetProjectIamPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newProjectIamAsset(d, config, expandIamPolicyBindings) +func GetProjectIamPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newProjectIamAsset(d, config, tpgiamresource.ExpandIamPolicyBindings) } -func GetProjectIamBindingCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newProjectIamAsset(d, config, expandIamRoleBindings) +func GetProjectIamBindingCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newProjectIamAsset(d, config, tpgiamresource.ExpandIamRoleBindings) } -func GetProjectIamMemberCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newProjectIamAsset(d, config, expandIamMemberBindings) +func GetProjectIamMemberCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newProjectIamAsset(d, config, tpgiamresource.ExpandIamMemberBindings) } -func 
MergeProjectIamPolicy(existing, incoming Asset) Asset { +func MergeProjectIamPolicy(existing, incoming tpgresource.Asset) tpgresource.Asset { existing.IAMPolicy = incoming.IAMPolicy return existing } -func MergeProjectIamBinding(existing, incoming Asset) Asset { - return mergeIamAssets(existing, incoming, mergeAuthoritativeBindings) +func MergeProjectIamBinding(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeIamAssets(existing, incoming, tpgiamresource.MergeAuthoritativeBindings) } -func MergeProjectIamBindingDelete(existing, incoming Asset) Asset { - return mergeDeleteIamAssets(existing, incoming, mergeDeleteAuthoritativeBindings) +func MergeProjectIamBindingDelete(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeDeleteIamAssets(existing, incoming, tpgiamresource.MergeDeleteAuthoritativeBindings) } -func MergeProjectIamMember(existing, incoming Asset) Asset { - return mergeIamAssets(existing, incoming, mergeAdditiveBindings) +func MergeProjectIamMember(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeIamAssets(existing, incoming, tpgiamresource.MergeAdditiveBindings) } -func MergeProjectIamMemberDelete(existing, incoming Asset) Asset { - return mergeDeleteIamAssets(existing, incoming, mergeDeleteAdditiveBindings) +func MergeProjectIamMemberDelete(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeDeleteIamAssets(existing, incoming, tpgiamresource.MergeDeleteAdditiveBindings) } func newProjectIamAsset( d tpgresource.TerraformResourceData, config *transport_tpg.Config, - expandBindings func(d tpgresource.TerraformResourceData) ([]IAMBinding, error), -) ([]Asset, error) { + expandBindings func(d tpgresource.TerraformResourceData) ([]tpgresource.IAMBinding, error), +) ([]tpgresource.Asset, error) { bindings, err := expandBindings(d) if err != nil { - return []Asset{}, fmt.Errorf("expanding bindings: %v", err) + return []tpgresource.Asset{}, fmt.Errorf("expanding bindings: %v", err) } // Ideally we should use project_number, but since that is generated server-side, // we substitute project_id. - name, err := assetName(d, config, "//cloudresourcemanager.googleapis.com/projects/{{project}}") + name, err := tpgresource.AssetName(d, config, "//cloudresourcemanager.googleapis.com/projects/{{project}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: "cloudresourcemanager.googleapis.com/Project", - IAMPolicy: &IAMPolicy{ + IAMPolicy: &tpgresource.IAMPolicy{ Bindings: bindings, }, }}, nil } -func FetchProjectIamPolicy(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (Asset, error) { +func FetchProjectIamPolicy(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (tpgresource.Asset, error) { if _, ok := d.GetOk("project"); !ok { - return Asset{}, ErrEmptyIdentityField + return tpgresource.Asset{}, tpgresource.ErrEmptyIdentityField } // We use project_id in the asset name template to be consistent with newProjectIamAsset. 
- return fetchIamPolicy( + return tpgiamresource.FetchIamPolicy( NewProjectIamUpdater, d, config, diff --git a/mmv1/third_party/validator/project_iam_custom_role.go b/mmv1/third_party/validator/project_iam_custom_role.go index 2bace691c9ba..ffe35666897b 100644 --- a/mmv1/third_party/validator/project_iam_custom_role.go +++ b/mmv1/third_party/validator/project_iam_custom_role.go @@ -10,23 +10,23 @@ import ( const ProjectIAMCustomRoleAssetType string = "iam.googleapis.com/Role" -func resourceConverterProjectIAMCustomRole() ResourceConverter { - return ResourceConverter{ +func resourceConverterProjectIAMCustomRole() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: ProjectIAMCustomRoleAssetType, Convert: GetProjectIAMCustomRoleCaiObject, } } -func GetProjectIAMCustomRoleCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - name, err := assetName(d, config, "//iam.googleapis.com/projects/{{project}}/roles/{{role_id}}") +func GetProjectIAMCustomRoleCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + name, err := tpgresource.AssetName(d, config, "//iam.googleapis.com/projects/{{project}}/roles/{{role_id}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } if obj, err := GetProjectIAMCustomRoleApiObject(d, config); err == nil { - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: ProjectIAMCustomRoleAssetType, - Resource: &AssetResource{ + Resource: &tpgresource.AssetResource{ Version: "v1", DiscoveryDocumentURI: "https://iam.googleapis.com/$discovery/rest?version=v1", DiscoveryName: "Role", @@ -34,7 +34,7 @@ func GetProjectIAMCustomRoleCaiObject(d tpgresource.TerraformResourceData, confi }, }}, nil } else { - return []Asset{}, err + return []tpgresource.Asset{}, err } } diff --git a/mmv1/third_party/validator/project_organization_policy.go b/mmv1/third_party/validator/project_organization_policy.go index 0e5c560a97e2..13bb227f89e3 100644 --- a/mmv1/third_party/validator/project_organization_policy.go +++ b/mmv1/third_party/validator/project_organization_policy.go @@ -9,48 +9,48 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) -func resourceConverterProjectOrgPolicy() ResourceConverter { - return ResourceConverter{ +func resourceConverterProjectOrgPolicy() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "cloudresourcemanager.googleapis.com/Project", Convert: GetProjectOrgPolicyCaiObject, MergeCreateUpdate: MergeProjectOrgPolicy, } } -func GetProjectOrgPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - name, err := assetName(d, config, "//cloudresourcemanager.googleapis.com/projects/{{project}}") +func GetProjectOrgPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + name, err := tpgresource.AssetName(d, config, "//cloudresourcemanager.googleapis.com/projects/{{project}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } if obj, err := GetProjectOrgPolicyApiObject(d, config); err == nil { - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: "cloudresourcemanager.googleapis.com/Project", - OrgPolicy: []*OrgPolicy{&obj}, + OrgPolicy: []*tpgresource.OrgPolicy{&obj}, }}, nil } else { - return []Asset{}, err + return []tpgresource.Asset{}, err } } -func MergeProjectOrgPolicy(existing, incoming Asset) Asset { 
+func MergeProjectOrgPolicy(existing, incoming tpgresource.Asset) tpgresource.Asset { existing.OrgPolicy = append(existing.OrgPolicy, incoming.OrgPolicy...) return existing } -func GetProjectOrgPolicyApiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (OrgPolicy, error) { +func GetProjectOrgPolicyApiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (tpgresource.OrgPolicy, error) { listPolicy, err := expandListOrganizationPolicy(d.Get("list_policy").([]interface{})) if err != nil { - return OrgPolicy{}, err + return tpgresource.OrgPolicy{}, err } restoreDefault, err := expandRestoreOrganizationPolicy(d.Get("restore_policy").([]interface{})) if err != nil { - return OrgPolicy{}, err + return tpgresource.OrgPolicy{}, err } - policy := OrgPolicy{ + policy := tpgresource.OrgPolicy{ Constraint: canonicalOrgPolicyConstraint(d.Get("constraint").(string)), BooleanPolicy: expandBooleanOrganizationPolicy(d.Get("boolean_policy").([]interface{})), ListPolicy: listPolicy, @@ -60,7 +60,7 @@ func GetProjectOrgPolicyApiObject(d tpgresource.TerraformResourceData, config *t return policy, nil } -func expandListOrganizationPolicy(configured []interface{}) (*ListPolicy, error) { +func expandListOrganizationPolicy(configured []interface{}) (*tpgresource.ListPolicy, error) { if len(configured) == 0 || configured[0] == nil { return nil, nil } @@ -98,8 +98,8 @@ func expandListOrganizationPolicy(configured []interface{}) (*ListPolicy, error) } listPolicy := configured[0].(map[string]interface{}) - return &ListPolicy{ - AllValues: ListPolicyAllValues(allValues), + return &tpgresource.ListPolicy{ + AllValues: tpgresource.ListPolicyAllValues(allValues), AllowedValues: allowedValues, DeniedValues: deniedValues, SuggestedValue: listPolicy["suggested_value"].(string), @@ -107,7 +107,7 @@ func expandListOrganizationPolicy(configured []interface{}) (*ListPolicy, error) }, nil } -func expandRestoreOrganizationPolicy(configured []interface{}) (*RestoreDefault, error) { +func expandRestoreOrganizationPolicy(configured []interface{}) (*tpgresource.RestoreDefault, error) { if len(configured) == 0 || configured[0] == nil { return nil, nil } @@ -116,19 +116,19 @@ func expandRestoreOrganizationPolicy(configured []interface{}) (*RestoreDefault, defaultValue := restoreDefaultMap["default"].(bool) if defaultValue { - return &RestoreDefault{}, nil + return &tpgresource.RestoreDefault{}, nil } - return &RestoreDefault{}, fmt.Errorf("Invalid value for restore_policy. Expecting default = true") + return &tpgresource.RestoreDefault{}, fmt.Errorf("Invalid value for restore_policy. 
Expecting default = true") } -func expandBooleanOrganizationPolicy(configured []interface{}) *BooleanPolicy { +func expandBooleanOrganizationPolicy(configured []interface{}) *tpgresource.BooleanPolicy { if len(configured) == 0 || configured[0] == nil { return nil } booleanPolicy := configured[0].(map[string]interface{}) - return &BooleanPolicy{ + return &tpgresource.BooleanPolicy{ Enforced: booleanPolicy["enforced"].(bool), } } diff --git a/mmv1/third_party/validator/project_service.go b/mmv1/third_party/validator/project_service.go index ca6aa832c57b..ffbac397cf6b 100644 --- a/mmv1/third_party/validator/project_service.go +++ b/mmv1/third_party/validator/project_service.go @@ -10,23 +10,23 @@ import ( const ServiceUsageAssetType string = "serviceusage.googleapis.com/Service" -func resourceConverterServiceUsage() ResourceConverter { - return ResourceConverter{ +func resourceConverterServiceUsage() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: ServiceUsageAssetType, Convert: GetServiceUsageCaiObject, } } -func GetServiceUsageCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - name, err := assetName(d, config, "//serviceusage.googleapis.com/projects/{{project}}/services/{{service}}") +func GetServiceUsageCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + name, err := tpgresource.AssetName(d, config, "//serviceusage.googleapis.com/projects/{{project}}/services/{{service}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } if obj, err := GetServiceUsageApiObject(d, config); err == nil { - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: ServiceUsageAssetType, - Resource: &AssetResource{ + Resource: &tpgresource.AssetResource{ Version: "v1", DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/serviceusage/v1/rest", DiscoveryName: "Service", @@ -34,7 +34,7 @@ func GetServiceUsageCaiObject(d tpgresource.TerraformResourceData, config *trans }}, }, nil } - return []Asset{}, err + return []tpgresource.Asset{}, err } func GetServiceUsageApiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (map[string]interface{}, error) { diff --git a/mmv1/third_party/validator/pubsub_subscription_iam.go b/mmv1/third_party/validator/pubsub_subscription_iam.go index 16d46953a82b..166ccd5fee01 100644 --- a/mmv1/third_party/validator/pubsub_subscription_iam.go +++ b/mmv1/third_party/validator/pubsub_subscription_iam.go @@ -3,20 +3,21 @@ package google import ( "fmt" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgiamresource" "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgresource" transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" ) -func resourceConverterPubsubSubscriptionIamPolicy() ResourceConverter { - return ResourceConverter{ +func resourceConverterPubsubSubscriptionIamPolicy() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "pubsub.googleapis.com/Subscription", Convert: GetPubsubSubscriptionIamPolicyCaiObject, MergeCreateUpdate: MergePubsubSubscriptionIamPolicy, } } -func resourceConverterPubsubSubscriptionIamBinding() ResourceConverter { - return ResourceConverter{ +func resourceConverterPubsubSubscriptionIamBinding() tpgresource.ResourceConverter { + return 
tpgresource.ResourceConverter{ AssetType: "pubsub.googleapis.com/Subscription", Convert: GetPubsubSubscriptionIamBindingCaiObject, FetchFullResource: FetchPubsubSubscriptionIamPolicy, @@ -25,8 +26,8 @@ func resourceConverterPubsubSubscriptionIamBinding() ResourceConverter { } } -func resourceConverterPubsubSubscriptionIamMember() ResourceConverter { - return ResourceConverter{ +func resourceConverterPubsubSubscriptionIamMember() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "pubsub.googleapis.com/Subscription", Convert: GetPubsubSubscriptionIamMemberCaiObject, FetchFullResource: FetchPubsubSubscriptionIamPolicy, @@ -35,70 +36,70 @@ func resourceConverterPubsubSubscriptionIamMember() ResourceConverter { } } -func GetPubsubSubscriptionIamPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newPubsubSubscriptionIamAsset(d, config, expandIamPolicyBindings) +func GetPubsubSubscriptionIamPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newPubsubSubscriptionIamAsset(d, config, tpgiamresource.ExpandIamPolicyBindings) } -func GetPubsubSubscriptionIamBindingCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newPubsubSubscriptionIamAsset(d, config, expandIamRoleBindings) +func GetPubsubSubscriptionIamBindingCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newPubsubSubscriptionIamAsset(d, config, tpgiamresource.ExpandIamRoleBindings) } -func GetPubsubSubscriptionIamMemberCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newPubsubSubscriptionIamAsset(d, config, expandIamMemberBindings) +func GetPubsubSubscriptionIamMemberCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newPubsubSubscriptionIamAsset(d, config, tpgiamresource.ExpandIamMemberBindings) } -func MergePubsubSubscriptionIamPolicy(existing, incoming Asset) Asset { +func MergePubsubSubscriptionIamPolicy(existing, incoming tpgresource.Asset) tpgresource.Asset { existing.IAMPolicy = incoming.IAMPolicy return existing } -func MergePubsubSubscriptionIamBinding(existing, incoming Asset) Asset { - return mergeIamAssets(existing, incoming, mergeAuthoritativeBindings) +func MergePubsubSubscriptionIamBinding(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeIamAssets(existing, incoming, tpgiamresource.MergeAuthoritativeBindings) } -func MergePubsubSubscriptionIamBindingDelete(existing, incoming Asset) Asset { - return mergeDeleteIamAssets(existing, incoming, mergeDeleteAuthoritativeBindings) +func MergePubsubSubscriptionIamBindingDelete(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeDeleteIamAssets(existing, incoming, tpgiamresource.MergeDeleteAuthoritativeBindings) } -func MergePubsubSubscriptionIamMember(existing, incoming Asset) Asset { - return mergeIamAssets(existing, incoming, mergeAdditiveBindings) +func MergePubsubSubscriptionIamMember(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeIamAssets(existing, incoming, tpgiamresource.MergeAdditiveBindings) } -func MergePubsubSubscriptionIamMemberDelete(existing, incoming Asset) Asset { - return mergeDeleteIamAssets(existing, incoming, mergeDeleteAdditiveBindings) +func 
MergePubsubSubscriptionIamMemberDelete(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeDeleteIamAssets(existing, incoming, tpgiamresource.MergeDeleteAdditiveBindings) } func newPubsubSubscriptionIamAsset( d tpgresource.TerraformResourceData, config *transport_tpg.Config, - expandBindings func(d tpgresource.TerraformResourceData) ([]IAMBinding, error), -) ([]Asset, error) { + expandBindings func(d tpgresource.TerraformResourceData) ([]tpgresource.IAMBinding, error), +) ([]tpgresource.Asset, error) { bindings, err := expandBindings(d) if err != nil { - return []Asset{}, fmt.Errorf("expanding bindings: %v", err) + return []tpgresource.Asset{}, fmt.Errorf("expanding bindings: %v", err) } - name, err := assetName(d, config, "//pubsub.googleapis.com/projects/{{project}}/subscriptions/{{subscription}}") + name, err := tpgresource.AssetName(d, config, "//pubsub.googleapis.com/projects/{{project}}/subscriptions/{{subscription}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: "pubsub.googleapis.com/Subscription", - IAMPolicy: &IAMPolicy{ + IAMPolicy: &tpgresource.IAMPolicy{ Bindings: bindings, }, }}, nil } -func FetchPubsubSubscriptionIamPolicy(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (Asset, error) { +func FetchPubsubSubscriptionIamPolicy(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (tpgresource.Asset, error) { // Check if the identity field returns a value if _, ok := d.GetOk("subscription"); !ok { - return Asset{}, ErrEmptyIdentityField + return tpgresource.Asset{}, tpgresource.ErrEmptyIdentityField } - return fetchIamPolicy( + return tpgiamresource.FetchIamPolicy( NewPubsubSubscriptionIamUpdater, d, config, diff --git a/mmv1/third_party/validator/service_account.go b/mmv1/third_party/validator/service_account.go index ccb06e6da8d8..004780802fe8 100644 --- a/mmv1/third_party/validator/service_account.go +++ b/mmv1/third_party/validator/service_account.go @@ -10,23 +10,23 @@ import ( const ServiceAccountAssetType string = "iam.googleapis.com/ServiceAccount" -func resourceConverterServiceAccount() ResourceConverter { - return ResourceConverter{ +func resourceConverterServiceAccount() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: ServiceAccountAssetType, Convert: GetServiceAccountCaiObject, } } -func GetServiceAccountCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - name, err := assetName(d, config, "//iam.googleapis.com/projects/{{project}}/serviceAccounts/{{unique_id}}") +func GetServiceAccountCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + name, err := tpgresource.AssetName(d, config, "//iam.googleapis.com/projects/{{project}}/serviceAccounts/{{unique_id}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } if obj, err := GetServiceAccountApiObject(d, config); err == nil { - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: ServiceAccountAssetType, - Resource: &AssetResource{ + Resource: &tpgresource.AssetResource{ Version: "v1", DiscoveryDocumentURI: "https://iam.googleapis.com/$discovery/rest", DiscoveryName: "ServiceAccount", @@ -34,7 +34,7 @@ func GetServiceAccountCaiObject(d tpgresource.TerraformResourceData, config *tra }, }}, nil } else { - return []Asset{}, err + return []tpgresource.Asset{}, err } } diff --git 
a/mmv1/third_party/validator/spanner_database_iam.go b/mmv1/third_party/validator/spanner_database_iam.go index a4e67261ca48..409b73c2abfc 100644 --- a/mmv1/third_party/validator/spanner_database_iam.go +++ b/mmv1/third_party/validator/spanner_database_iam.go @@ -3,20 +3,21 @@ package google import ( "fmt" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgiamresource" "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgresource" transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" ) -func resourceConverterSpannerDatabaseIamPolicy() ResourceConverter { - return ResourceConverter{ +func resourceConverterSpannerDatabaseIamPolicy() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "spanner.googleapis.com/Database", Convert: GetSpannerDatabaseIamPolicyCaiObject, MergeCreateUpdate: MergeSpannerDatabaseIamPolicy, } } -func resourceConverterSpannerDatabaseIamBinding() ResourceConverter { - return ResourceConverter{ +func resourceConverterSpannerDatabaseIamBinding() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "spanner.googleapis.com/Database", Convert: GetSpannerDatabaseIamBindingCaiObject, FetchFullResource: FetchSpannerDatabaseIamPolicy, @@ -25,8 +26,8 @@ func resourceConverterSpannerDatabaseIamBinding() ResourceConverter { } } -func resourceConverterSpannerDatabaseIamMember() ResourceConverter { - return ResourceConverter{ +func resourceConverterSpannerDatabaseIamMember() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "spanner.googleapis.com/Database", Convert: GetSpannerDatabaseIamMemberCaiObject, FetchFullResource: FetchSpannerDatabaseIamPolicy, @@ -35,74 +36,74 @@ func resourceConverterSpannerDatabaseIamMember() ResourceConverter { } } -func GetSpannerDatabaseIamPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newSpannerDatabaseIamAsset(d, config, expandIamPolicyBindings) +func GetSpannerDatabaseIamPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newSpannerDatabaseIamAsset(d, config, tpgiamresource.ExpandIamPolicyBindings) } -func GetSpannerDatabaseIamBindingCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newSpannerDatabaseIamAsset(d, config, expandIamRoleBindings) +func GetSpannerDatabaseIamBindingCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newSpannerDatabaseIamAsset(d, config, tpgiamresource.ExpandIamRoleBindings) } -func GetSpannerDatabaseIamMemberCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newSpannerDatabaseIamAsset(d, config, expandIamMemberBindings) +func GetSpannerDatabaseIamMemberCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newSpannerDatabaseIamAsset(d, config, tpgiamresource.ExpandIamMemberBindings) } -func MergeSpannerDatabaseIamPolicy(existing, incoming Asset) Asset { +func MergeSpannerDatabaseIamPolicy(existing, incoming tpgresource.Asset) tpgresource.Asset { existing.IAMPolicy = incoming.IAMPolicy return existing } -func MergeSpannerDatabaseIamBinding(existing, incoming Asset) Asset { - return 
mergeIamAssets(existing, incoming, mergeAuthoritativeBindings) +func MergeSpannerDatabaseIamBinding(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeIamAssets(existing, incoming, tpgiamresource.MergeAuthoritativeBindings) } -func MergeSpannerDatabaseIamBindingDelete(existing, incoming Asset) Asset { - return mergeDeleteIamAssets(existing, incoming, mergeDeleteAuthoritativeBindings) +func MergeSpannerDatabaseIamBindingDelete(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeDeleteIamAssets(existing, incoming, tpgiamresource.MergeDeleteAuthoritativeBindings) } -func MergeSpannerDatabaseIamMember(existing, incoming Asset) Asset { - return mergeIamAssets(existing, incoming, mergeAdditiveBindings) +func MergeSpannerDatabaseIamMember(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeIamAssets(existing, incoming, tpgiamresource.MergeAdditiveBindings) } -func MergeSpannerDatabaseIamMemberDelete(existing, incoming Asset) Asset { - return mergeDeleteIamAssets(existing, incoming, mergeDeleteAdditiveBindings) +func MergeSpannerDatabaseIamMemberDelete(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeDeleteIamAssets(existing, incoming, tpgiamresource.MergeDeleteAdditiveBindings) } func newSpannerDatabaseIamAsset( d tpgresource.TerraformResourceData, config *transport_tpg.Config, - expandBindings func(d tpgresource.TerraformResourceData) ([]IAMBinding, error), -) ([]Asset, error) { + expandBindings func(d tpgresource.TerraformResourceData) ([]tpgresource.IAMBinding, error), +) ([]tpgresource.Asset, error) { bindings, err := expandBindings(d) if err != nil { - return []Asset{}, fmt.Errorf("expanding bindings: %v", err) + return []tpgresource.Asset{}, fmt.Errorf("expanding bindings: %v", err) } - name, err := assetName(d, config, "//spanner.googleapis.com/projects/{{project}}/instances/{{instance}}/databases/{{database}}") + name, err := tpgresource.AssetName(d, config, "//spanner.googleapis.com/projects/{{project}}/instances/{{instance}}/databases/{{database}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: "spanner.googleapis.com/Database", - IAMPolicy: &IAMPolicy{ + IAMPolicy: &tpgresource.IAMPolicy{ Bindings: bindings, }, }}, nil } -func FetchSpannerDatabaseIamPolicy(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (Asset, error) { +func FetchSpannerDatabaseIamPolicy(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (tpgresource.Asset, error) { // Check if the identity field returns a value if _, ok := d.GetOk("instance"); !ok { - return Asset{}, ErrEmptyIdentityField + return tpgresource.Asset{}, tpgresource.ErrEmptyIdentityField } if _, ok := d.GetOk("database"); !ok { - return Asset{}, ErrEmptyIdentityField + return tpgresource.Asset{}, tpgresource.ErrEmptyIdentityField } - return fetchIamPolicy( + return tpgiamresource.FetchIamPolicy( NewSpannerDatabaseIamUpdater, d, config, diff --git a/mmv1/third_party/validator/spanner_instance_iam.go b/mmv1/third_party/validator/spanner_instance_iam.go index 0f22417202e4..e3b4910b2a9d 100644 --- a/mmv1/third_party/validator/spanner_instance_iam.go +++ b/mmv1/third_party/validator/spanner_instance_iam.go @@ -3,20 +3,21 @@ package google import ( "fmt" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgiamresource" 
"github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgresource" transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" ) -func resourceConverterSpannerInstanceIamPolicy() ResourceConverter { - return ResourceConverter{ +func resourceConverterSpannerInstanceIamPolicy() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "spanner.googleapis.com/Instance", Convert: GetSpannerInstanceIamPolicyCaiObject, MergeCreateUpdate: MergeSpannerInstanceIamPolicy, } } -func resourceConverterSpannerInstanceIamBinding() ResourceConverter { - return ResourceConverter{ +func resourceConverterSpannerInstanceIamBinding() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "spanner.googleapis.com/Instance", Convert: GetSpannerInstanceIamBindingCaiObject, FetchFullResource: FetchSpannerInstanceIamPolicy, @@ -25,8 +26,8 @@ func resourceConverterSpannerInstanceIamBinding() ResourceConverter { } } -func resourceConverterSpannerInstanceIamMember() ResourceConverter { - return ResourceConverter{ +func resourceConverterSpannerInstanceIamMember() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: "spanner.googleapis.com/Instance", Convert: GetSpannerInstanceIamMemberCaiObject, FetchFullResource: FetchSpannerInstanceIamPolicy, @@ -35,70 +36,70 @@ func resourceConverterSpannerInstanceIamMember() ResourceConverter { } } -func GetSpannerInstanceIamPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newSpannerInstanceIamAsset(d, config, expandIamPolicyBindings) +func GetSpannerInstanceIamPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newSpannerInstanceIamAsset(d, config, tpgiamresource.ExpandIamPolicyBindings) } -func GetSpannerInstanceIamBindingCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newSpannerInstanceIamAsset(d, config, expandIamRoleBindings) +func GetSpannerInstanceIamBindingCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newSpannerInstanceIamAsset(d, config, tpgiamresource.ExpandIamRoleBindings) } -func GetSpannerInstanceIamMemberCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newSpannerInstanceIamAsset(d, config, expandIamMemberBindings) +func GetSpannerInstanceIamMemberCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newSpannerInstanceIamAsset(d, config, tpgiamresource.ExpandIamMemberBindings) } -func MergeSpannerInstanceIamPolicy(existing, incoming Asset) Asset { +func MergeSpannerInstanceIamPolicy(existing, incoming tpgresource.Asset) tpgresource.Asset { existing.IAMPolicy = incoming.IAMPolicy return existing } -func MergeSpannerInstanceIamBinding(existing, incoming Asset) Asset { - return mergeIamAssets(existing, incoming, mergeAuthoritativeBindings) +func MergeSpannerInstanceIamBinding(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeIamAssets(existing, incoming, tpgiamresource.MergeAuthoritativeBindings) } -func MergeSpannerInstanceIamBindingDelete(existing, incoming Asset) Asset { - return mergeDeleteIamAssets(existing, incoming, mergeDeleteAuthoritativeBindings) +func 
MergeSpannerInstanceIamBindingDelete(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeDeleteIamAssets(existing, incoming, tpgiamresource.MergeDeleteAuthoritativeBindings) } -func MergeSpannerInstanceIamMember(existing, incoming Asset) Asset { - return mergeIamAssets(existing, incoming, mergeAdditiveBindings) +func MergeSpannerInstanceIamMember(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeIamAssets(existing, incoming, tpgiamresource.MergeAdditiveBindings) } -func MergeSpannerInstanceIamMemberDelete(existing, incoming Asset) Asset { - return mergeDeleteIamAssets(existing, incoming, mergeDeleteAdditiveBindings) +func MergeSpannerInstanceIamMemberDelete(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeDeleteIamAssets(existing, incoming, tpgiamresource.MergeDeleteAdditiveBindings) } func newSpannerInstanceIamAsset( d tpgresource.TerraformResourceData, config *transport_tpg.Config, - expandBindings func(d tpgresource.TerraformResourceData) ([]IAMBinding, error), -) ([]Asset, error) { + expandBindings func(d tpgresource.TerraformResourceData) ([]tpgresource.IAMBinding, error), +) ([]tpgresource.Asset, error) { bindings, err := expandBindings(d) if err != nil { - return []Asset{}, fmt.Errorf("expanding bindings: %v", err) + return []tpgresource.Asset{}, fmt.Errorf("expanding bindings: %v", err) } - name, err := assetName(d, config, "//spanner.googleapis.com/projects/{{project}}/instances/{{instance}}") + name, err := tpgresource.AssetName(d, config, "//spanner.googleapis.com/projects/{{project}}/instances/{{instance}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: "spanner.googleapis.com/Instance", - IAMPolicy: &IAMPolicy{ + IAMPolicy: &tpgresource.IAMPolicy{ Bindings: bindings, }, }}, nil } -func FetchSpannerInstanceIamPolicy(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (Asset, error) { +func FetchSpannerInstanceIamPolicy(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (tpgresource.Asset, error) { // Check if the identity field returns a value if _, ok := d.GetOk("instance"); !ok { - return Asset{}, ErrEmptyIdentityField + return tpgresource.Asset{}, tpgresource.ErrEmptyIdentityField } - return fetchIamPolicy( + return tpgiamresource.FetchIamPolicy( NewSpannerInstanceIamUpdater, d, config, diff --git a/mmv1/third_party/validator/sql_database_instance.go b/mmv1/third_party/validator/sql_database_instance.go index b2787250dcbb..c9ef41ac0d71 100644 --- a/mmv1/third_party/validator/sql_database_instance.go +++ b/mmv1/third_party/validator/sql_database_instance.go @@ -21,23 +21,23 @@ import ( const SQLDatabaseInstanceAssetType string = "sqladmin.googleapis.com/Instance" -func resourceConverterSQLDatabaseInstance() ResourceConverter { - return ResourceConverter{ +func resourceConverterSQLDatabaseInstance() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: SQLDatabaseInstanceAssetType, Convert: GetSQLDatabaseInstanceCaiObject, } } -func GetSQLDatabaseInstanceCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - name, err := assetName(d, config, "//cloudsql.googleapis.com/projects/{{project}}/instances/{{name}}") +func GetSQLDatabaseInstanceCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + name, err := 
tpgresource.AssetName(d, config, "//cloudsql.googleapis.com/projects/{{project}}/instances/{{name}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } if obj, err := GetSQLDatabaseInstanceApiObject(d, config); err == nil { - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: SQLDatabaseInstanceAssetType, - Resource: &AssetResource{ + Resource: &tpgresource.AssetResource{ Version: "v1beta4", DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/sqladmin/v1beta4/rest", DiscoveryName: "DatabaseInstance", @@ -45,7 +45,7 @@ func GetSQLDatabaseInstanceCaiObject(d tpgresource.TerraformResourceData, config }, }}, nil } else { - return []Asset{}, err + return []tpgresource.Asset{}, err } } @@ -77,7 +77,7 @@ func GetSQLDatabaseInstanceApiObject(d tpgresource.TerraformResourceData, config ReplicaConfiguration: expandReplicaConfiguration(d.Get("replica_configuration").([]interface{})), } - return jsonMap(instance) + return tpgresource.JsonMap(instance) } // Detects whether a database is 1st Generation by inspecting the tier name diff --git a/mmv1/third_party/validator/storage_bucket.go b/mmv1/third_party/validator/storage_bucket.go index d351bca09819..e5c64d261a31 100644 --- a/mmv1/third_party/validator/storage_bucket.go +++ b/mmv1/third_party/validator/storage_bucket.go @@ -20,23 +20,23 @@ import ( const StorageBucketAssetType string = "storage.googleapis.com/Bucket" -func resourceConverterStorageBucket() ResourceConverter { - return ResourceConverter{ +func resourceConverterStorageBucket() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: StorageBucketAssetType, Convert: GetStorageBucketCaiObject, } } -func GetStorageBucketCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - name, err := assetName(d, config, "//storage.googleapis.com/{{name}}") +func GetStorageBucketCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + name, err := tpgresource.AssetName(d, config, "//storage.googleapis.com/{{name}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } if obj, err := GetStorageBucketApiObject(d, config); err == nil { - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: StorageBucketAssetType, - Resource: &AssetResource{ + Resource: &tpgresource.AssetResource{ Version: "v1", DiscoveryDocumentURI: "https://www.googleapis.com/discovery/v1/apis/storage/v1/rest", DiscoveryName: "Bucket", @@ -44,7 +44,7 @@ func GetStorageBucketCaiObject(d tpgresource.TerraformResourceData, config *tran }, }}, nil } else { - return []Asset{}, err + return []tpgresource.Asset{}, err } } @@ -105,7 +105,7 @@ func GetStorageBucketApiObject(d tpgresource.TerraformResourceData, config *tran } } - m, err := jsonMap(sb) + m, err := tpgresource.JsonMap(sb) if err != nil { return nil, err } diff --git a/mmv1/third_party/validator/storage_bucket_iam.go b/mmv1/third_party/validator/storage_bucket_iam.go index 0f5e03d9916e..66cc1d9b2992 100644 --- a/mmv1/third_party/validator/storage_bucket_iam.go +++ b/mmv1/third_party/validator/storage_bucket_iam.go @@ -3,6 +3,7 @@ package google import ( "fmt" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgiamresource" "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgresource" transport_tpg 
"github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" ) @@ -10,16 +11,16 @@ import ( // Provide a separate asset type constant so we don't have to worry about name conflicts between IAM and non-IAM converter files const StorageBucketIAMAssetType string = "storage.googleapis.com/Bucket" -func resourceConverterStorageBucketIamPolicy() ResourceConverter { - return ResourceConverter{ +func resourceConverterStorageBucketIamPolicy() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: StorageBucketIAMAssetType, Convert: GetStorageBucketIamPolicyCaiObject, MergeCreateUpdate: MergeStorageBucketIamPolicy, } } -func resourceConverterStorageBucketIamBinding() ResourceConverter { - return ResourceConverter{ +func resourceConverterStorageBucketIamBinding() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: StorageBucketIAMAssetType, Convert: GetStorageBucketIamBindingCaiObject, FetchFullResource: FetchStorageBucketIamPolicy, @@ -28,8 +29,8 @@ func resourceConverterStorageBucketIamBinding() ResourceConverter { } } -func resourceConverterStorageBucketIamMember() ResourceConverter { - return ResourceConverter{ +func resourceConverterStorageBucketIamMember() tpgresource.ResourceConverter { + return tpgresource.ResourceConverter{ AssetType: StorageBucketIAMAssetType, Convert: GetStorageBucketIamMemberCaiObject, FetchFullResource: FetchStorageBucketIamPolicy, @@ -38,70 +39,70 @@ func resourceConverterStorageBucketIamMember() ResourceConverter { } } -func GetStorageBucketIamPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newStorageBucketIamAsset(d, config, expandIamPolicyBindings) +func GetStorageBucketIamPolicyCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newStorageBucketIamAsset(d, config, tpgiamresource.ExpandIamPolicyBindings) } -func GetStorageBucketIamBindingCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newStorageBucketIamAsset(d, config, expandIamRoleBindings) +func GetStorageBucketIamBindingCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newStorageBucketIamAsset(d, config, tpgiamresource.ExpandIamRoleBindings) } -func GetStorageBucketIamMemberCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) { - return newStorageBucketIamAsset(d, config, expandIamMemberBindings) +func GetStorageBucketIamMemberCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]tpgresource.Asset, error) { + return newStorageBucketIamAsset(d, config, tpgiamresource.ExpandIamMemberBindings) } -func MergeStorageBucketIamPolicy(existing, incoming Asset) Asset { +func MergeStorageBucketIamPolicy(existing, incoming tpgresource.Asset) tpgresource.Asset { existing.IAMPolicy = incoming.IAMPolicy return existing } -func MergeStorageBucketIamBinding(existing, incoming Asset) Asset { - return mergeIamAssets(existing, incoming, mergeAuthoritativeBindings) +func MergeStorageBucketIamBinding(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeIamAssets(existing, incoming, tpgiamresource.MergeAuthoritativeBindings) } -func MergeStorageBucketIamBindingDelete(existing, incoming Asset) Asset { - return mergeDeleteIamAssets(existing, incoming, 
mergeDeleteAuthoritativeBindings) +func MergeStorageBucketIamBindingDelete(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeDeleteIamAssets(existing, incoming, tpgiamresource.MergeDeleteAuthoritativeBindings) } -func MergeStorageBucketIamMember(existing, incoming Asset) Asset { - return mergeIamAssets(existing, incoming, mergeAdditiveBindings) +func MergeStorageBucketIamMember(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeIamAssets(existing, incoming, tpgiamresource.MergeAdditiveBindings) } -func MergeStorageBucketIamMemberDelete(existing, incoming Asset) Asset { - return mergeDeleteIamAssets(existing, incoming, mergeDeleteAdditiveBindings) +func MergeStorageBucketIamMemberDelete(existing, incoming tpgresource.Asset) tpgresource.Asset { + return tpgiamresource.MergeDeleteIamAssets(existing, incoming, tpgiamresource.MergeDeleteAdditiveBindings) } func newStorageBucketIamAsset( d tpgresource.TerraformResourceData, config *transport_tpg.Config, - expandBindings func(d tpgresource.TerraformResourceData) ([]IAMBinding, error), -) ([]Asset, error) { + expandBindings func(d tpgresource.TerraformResourceData) ([]tpgresource.IAMBinding, error), +) ([]tpgresource.Asset, error) { bindings, err := expandBindings(d) if err != nil { - return []Asset{}, fmt.Errorf("expanding bindings: %v", err) + return []tpgresource.Asset{}, fmt.Errorf("expanding bindings: %v", err) } - name, err := assetName(d, config, "//storage.googleapis.com/{{bucket}}") + name, err := tpgresource.AssetName(d, config, "//storage.googleapis.com/{{bucket}}") if err != nil { - return []Asset{}, err + return []tpgresource.Asset{}, err } - return []Asset{{ + return []tpgresource.Asset{{ Name: name, Type: StorageBucketIAMAssetType, - IAMPolicy: &IAMPolicy{ + IAMPolicy: &tpgresource.IAMPolicy{ Bindings: bindings, }, }}, nil } -func FetchStorageBucketIamPolicy(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (Asset, error) { +func FetchStorageBucketIamPolicy(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (tpgresource.Asset, error) { // Check if the identity field returns a value if _, ok := d.GetOk("bucket"); !ok { - return Asset{}, ErrEmptyIdentityField + return tpgresource.Asset{}, tpgresource.ErrEmptyIdentityField } - return fetchIamPolicy( + return tpgiamresource.FetchIamPolicy( StorageBucketIamUpdaterProducer, d, config, diff --git a/mmv1/third_party/validator/tests/data/example_vpc_access_connector.tfplan.json b/mmv1/third_party/validator/tests/data/example_vpc_access_connector.tfplan.json index e113206f0feb..24fdb4c5ebc1 100644 --- a/mmv1/third_party/validator/tests/data/example_vpc_access_connector.tfplan.json +++ b/mmv1/third_party/validator/tests/data/example_vpc_access_connector.tfplan.json @@ -49,7 +49,8 @@ "id": true, "project": true, "self_link": true, - "state": true + "state": true, + "connected_projects": true } } } diff --git a/mmv1/third_party/validator/tpgiamresource/iam_helpers.go b/mmv1/third_party/validator/tpgiamresource/iam_helpers.go new file mode 100644 index 000000000000..827ac99d6f52 --- /dev/null +++ b/mmv1/third_party/validator/tpgiamresource/iam_helpers.go @@ -0,0 +1,239 @@ +package tpgiamresource + +import ( + "encoding/json" + "fmt" + "sort" + + "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgresource" + transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" + 
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + cloudresourcemanager "google.golang.org/api/cloudresourcemanager/v1" +) + +// ExpandIamPolicyBindings is used in google__iam_policy resources. +func ExpandIamPolicyBindings(d tpgresource.TerraformResourceData) ([]tpgresource.IAMBinding, error) { + ps := d.Get("policy_data").(string) + var bindings []tpgresource.IAMBinding + // policy_data is (known after apply) in terraform plan, hence an empty string + if ps == "" { + return bindings, nil + } + // The policy string is just a marshaled cloudresourcemanager.Policy. + policy := &cloudresourcemanager.Policy{} + if err := json.Unmarshal([]byte(ps), policy); err != nil { + return nil, fmt.Errorf("Could not unmarshal %s: %v", ps, err) + } + + for _, b := range policy.Bindings { + bindings = append(bindings, tpgresource.IAMBinding{ + Role: b.Role, + Members: b.Members, + }) + } + + return bindings, nil +} + +// ExpandIamRoleBindings is used in google__iam_binding resources. +func ExpandIamRoleBindings(d tpgresource.TerraformResourceData) ([]tpgresource.IAMBinding, error) { + var members []string + for _, m := range d.Get("members").(*schema.Set).List() { + members = append(members, m.(string)) + } + return []tpgresource.IAMBinding{ + { + Role: d.Get("role").(string), + Members: members, + }, + }, nil +} + +// ExpandIamMemberBindings is used in google__iam_member resources. +func ExpandIamMemberBindings(d tpgresource.TerraformResourceData) ([]tpgresource.IAMBinding, error) { + return []tpgresource.IAMBinding{ + { + Role: d.Get("role").(string), + Members: []string{d.Get("member").(string)}, + }, + }, nil +} + +// MergeIamAssets merges an existing asset with the IAM bindings of an incoming +// tpgresource.Asset. +func MergeIamAssets( + existing, incoming tpgresource.Asset, + MergeBindings func(existing, incoming []tpgresource.IAMBinding) []tpgresource.IAMBinding, +) tpgresource.Asset { + if existing.IAMPolicy != nil { + existing.IAMPolicy.Bindings = MergeBindings(existing.IAMPolicy.Bindings, incoming.IAMPolicy.Bindings) + } else { + existing.IAMPolicy = incoming.IAMPolicy + } + return existing +} + +// incoming is the last known state of an asset prior to deletion +func MergeDeleteIamAssets( + existing, incoming tpgresource.Asset, + MergeBindings func(existing, incoming []tpgresource.IAMBinding) []tpgresource.IAMBinding, +) tpgresource.Asset { + if existing.IAMPolicy != nil { + existing.IAMPolicy.Bindings = MergeBindings(existing.IAMPolicy.Bindings, incoming.IAMPolicy.Bindings) + } + return existing +} + +// MergeAdditiveBindings adds members to bindings with the same roles and adds new +// bindings for roles that dont exist. +func MergeAdditiveBindings(existing, incoming []tpgresource.IAMBinding) []tpgresource.IAMBinding { + existingIdxs := make(map[string]int) + for i, binding := range existing { + existingIdxs[binding.Role] = i + } + + for _, binding := range incoming { + if ei, ok := existingIdxs[binding.Role]; ok { + memberExists := make(map[string]bool) + for _, m := range existing[ei].Members { + memberExists[m] = true + } + for _, m := range binding.Members { + // Only add members that don't exist. + if !memberExists[m] { + existing[ei].Members = append(existing[ei].Members, m) + } + } + } else { + existing = append(existing, binding) + } + } + + // Sort members + for i := range existing { + sort.Strings(existing[i].Members) + } + + return existing +} + +// MergeDeleteAdditiveBindings eliminates listed members from roles in the +// existing list. 
incoming is the last known state of the bindings being deleted. +func MergeDeleteAdditiveBindings(existing, incoming []tpgresource.IAMBinding) []tpgresource.IAMBinding { + toDelete := make(map[string]struct{}) + for _, binding := range incoming { + for _, m := range binding.Members { + key := binding.Role + "-" + m + toDelete[key] = struct{}{} + } + } + + var newExisting []tpgresource.IAMBinding + for _, binding := range existing { + var newMembers []string + for _, m := range binding.Members { + key := binding.Role + "-" + m + _, delete := toDelete[key] + if !delete { + newMembers = append(newMembers, m) + } + } + if newMembers != nil { + newExisting = append(newExisting, tpgresource.IAMBinding{ + Role: binding.Role, + Members: newMembers, + }) + } + } + + return newExisting +} + +// MergeAuthoritativeBindings clobbers members to bindings with the same roles +// and adds new bindings for roles that dont exist. +func MergeAuthoritativeBindings(existing, incoming []tpgresource.IAMBinding) []tpgresource.IAMBinding { + existingIdxs := make(map[string]int) + for i, binding := range existing { + existingIdxs[binding.Role] = i + } + + for _, binding := range incoming { + if ei, ok := existingIdxs[binding.Role]; ok { + existing[ei].Members = binding.Members + } else { + existing = append(existing, binding) + } + } + + // Sort members + for i := range existing { + sort.Strings(existing[i].Members) + } + + return existing +} + +// MergeDeleteAuthoritativeBindings eliminates any bindings with matching roles +// in the existing list. incoming is the last known state of the bindings being +// deleted. +func MergeDeleteAuthoritativeBindings(existing, incoming []tpgresource.IAMBinding) []tpgresource.IAMBinding { + toDelete := make(map[string]struct{}) + for _, binding := range incoming { + key := binding.Role + toDelete[key] = struct{}{} + } + + var newExisting []tpgresource.IAMBinding + for _, binding := range existing { + key := binding.Role + _, delete := toDelete[key] + if !delete { + newExisting = append(newExisting, binding) + } + } + + return newExisting +} + +func FetchIamPolicy( + newUpdaterFunc NewResourceIamUpdaterFunc, + d tpgresource.TerraformResourceData, + config *transport_tpg.Config, + assetNameTmpl string, + assetType string, +) (tpgresource.Asset, error) { + updater, err := newUpdaterFunc(d, config) + if err != nil { + return tpgresource.Asset{}, err + } + + iamPolicy, err := updater.GetResourceIamPolicy() + if transport_tpg.IsGoogleApiErrorWithCode(err, 403) || transport_tpg.IsGoogleApiErrorWithCode(err, 404) { + return tpgresource.Asset{}, tpgresource.ErrResourceInaccessible + } + + if err != nil { + return tpgresource.Asset{}, err + } + + var bindings []tpgresource.IAMBinding + for _, b := range iamPolicy.Bindings { + bindings = append( + bindings, + tpgresource.IAMBinding{ + Role: b.Role, + Members: b.Members, + }, + ) + } + + name, err := tpgresource.AssetName(d, config, assetNameTmpl) + + return tpgresource.Asset{ + Name: name, + Type: assetType, + IAMPolicy: &tpgresource.IAMPolicy{ + Bindings: bindings, + }, + }, nil +} diff --git a/mmv1/third_party/validator/iam_helpers_test.go b/mmv1/third_party/validator/tpgiamresource/iam_helpers_test.go similarity index 61% rename from mmv1/third_party/validator/iam_helpers_test.go rename to mmv1/third_party/validator/tpgiamresource/iam_helpers_test.go index 2bba3a7c8422..f4faa2901f59 100644 --- a/mmv1/third_party/validator/iam_helpers_test.go +++ b/mmv1/third_party/validator/tpgiamresource/iam_helpers_test.go @@ -1,8 +1,9 @@ -package 
google +package tpgiamresource import ( "testing" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgresource" "github.com/stretchr/testify/assert" ) @@ -10,35 +11,35 @@ func TestMergeBindings(t *testing.T) { cases := []struct { name string // Inputs - existing []IAMBinding - incoming []IAMBinding + existing []tpgresource.IAMBinding + incoming []tpgresource.IAMBinding // Expected outputs - expectedAdditive []IAMBinding - expectedAuthoritative []IAMBinding + expectedAdditive []tpgresource.IAMBinding + expectedAuthoritative []tpgresource.IAMBinding }{ { name: "EmptyAddEmpty", - existing: []IAMBinding{}, - incoming: []IAMBinding{}, - expectedAdditive: []IAMBinding{}, - expectedAuthoritative: []IAMBinding{}, + existing: []tpgresource.IAMBinding{}, + incoming: []tpgresource.IAMBinding{}, + expectedAdditive: []tpgresource.IAMBinding{}, + expectedAuthoritative: []tpgresource.IAMBinding{}, }, { name: "EmptyAddOne", - existing: []IAMBinding{}, - incoming: []IAMBinding{ + existing: []tpgresource.IAMBinding{}, + incoming: []tpgresource.IAMBinding{ { Role: "role-a", Members: []string{"member-a"}, }, }, - expectedAdditive: []IAMBinding{ + expectedAdditive: []tpgresource.IAMBinding{ { Role: "role-a", Members: []string{"member-a"}, }, }, - expectedAuthoritative: []IAMBinding{ + expectedAuthoritative: []tpgresource.IAMBinding{ { Role: "role-a", Members: []string{"member-a"}, @@ -47,20 +48,20 @@ func TestMergeBindings(t *testing.T) { }, { name: "OneAddEmpty", - existing: []IAMBinding{ + existing: []tpgresource.IAMBinding{ { Role: "role-a", Members: []string{"member-a"}, }, }, - incoming: []IAMBinding{}, - expectedAdditive: []IAMBinding{ + incoming: []tpgresource.IAMBinding{}, + expectedAdditive: []tpgresource.IAMBinding{ { Role: "role-a", Members: []string{"member-a"}, }, }, - expectedAuthoritative: []IAMBinding{ + expectedAuthoritative: []tpgresource.IAMBinding{ { Role: "role-a", Members: []string{"member-a"}, @@ -69,25 +70,25 @@ func TestMergeBindings(t *testing.T) { }, { name: "OneAddOne", - existing: []IAMBinding{ + existing: []tpgresource.IAMBinding{ { Role: "role-a", Members: []string{"member-a"}, }, }, - incoming: []IAMBinding{ + incoming: []tpgresource.IAMBinding{ { Role: "role-a", Members: []string{"member-b"}, }, }, - expectedAdditive: []IAMBinding{ + expectedAdditive: []tpgresource.IAMBinding{ { Role: "role-a", Members: []string{"member-a", "member-b"}, }, }, - expectedAuthoritative: []IAMBinding{ + expectedAuthoritative: []tpgresource.IAMBinding{ { Role: "role-a", Members: []string{"member-b"}, @@ -96,7 +97,7 @@ func TestMergeBindings(t *testing.T) { }, { name: "GrandFinale", - existing: []IAMBinding{ + existing: []tpgresource.IAMBinding{ { Role: "role-a", Members: []string{"member-a", "member-b"}, @@ -110,7 +111,7 @@ func TestMergeBindings(t *testing.T) { Members: []string{"member-c"}, }, }, - incoming: []IAMBinding{ + incoming: []tpgresource.IAMBinding{ { Role: "role-a", Members: []string{"member-a", "member-b", "member-c"}, @@ -120,7 +121,7 @@ func TestMergeBindings(t *testing.T) { Members: []string{"member-b", "member-c"}, }, }, - expectedAdditive: []IAMBinding{ + expectedAdditive: []tpgresource.IAMBinding{ { Role: "role-a", Members: []string{"member-a", "member-b", "member-c"}, @@ -134,7 +135,7 @@ func TestMergeBindings(t *testing.T) { Members: []string{"member-c"}, }, }, - expectedAuthoritative: []IAMBinding{ + expectedAuthoritative: []tpgresource.IAMBinding{ { Role: "role-a", Members: []string{"member-a", "member-b", "member-c"}, @@ 
-151,16 +152,16 @@ func TestMergeBindings(t *testing.T) { }, } for _, c := range cases { - t.Run(c.name+"/mergeAdditiveBindings", func(t *testing.T) { + t.Run(c.name+"/MergeAdditiveBindings", func(t *testing.T) { assert.EqualValues(t, c.expectedAdditive, - mergeAdditiveBindings(c.existing, c.incoming), + MergeAdditiveBindings(c.existing, c.incoming), ) }) - t.Run(c.name+"/mergeAuthoritativeBindings", func(t *testing.T) { + t.Run(c.name+"/MergeAuthoritativeBindings", func(t *testing.T) { assert.EqualValues(t, c.expectedAuthoritative, - mergeAuthoritativeBindings(c.existing, c.incoming), + MergeAuthoritativeBindings(c.existing, c.incoming), ) }) } @@ -170,23 +171,23 @@ func TestMergeDeleteBindings(t *testing.T) { cases := []struct { name string // Inputs - existing []IAMBinding - incoming []IAMBinding + existing []tpgresource.IAMBinding + incoming []tpgresource.IAMBinding // Expected outputs - expectedDeleteAdditive []IAMBinding - expectedDeleteAuthoritative []IAMBinding + expectedDeleteAdditive []tpgresource.IAMBinding + expectedDeleteAuthoritative []tpgresource.IAMBinding }{ { name: "EmptyDeleteEmpty", - existing: []IAMBinding{}, - incoming: []IAMBinding{}, + existing: []tpgresource.IAMBinding{}, + incoming: []tpgresource.IAMBinding{}, expectedDeleteAdditive: nil, expectedDeleteAuthoritative: nil, }, { name: "EmptyDeleteOne", - existing: []IAMBinding{}, - incoming: []IAMBinding{ + existing: []tpgresource.IAMBinding{}, + incoming: []tpgresource.IAMBinding{ { Role: "role-a", Members: []string{"member-a"}, @@ -197,20 +198,20 @@ func TestMergeDeleteBindings(t *testing.T) { }, { name: "OneDeleteEmpty", - existing: []IAMBinding{ + existing: []tpgresource.IAMBinding{ { Role: "role-a", Members: []string{"member-a"}, }, }, - incoming: []IAMBinding{}, - expectedDeleteAdditive: []IAMBinding{ + incoming: []tpgresource.IAMBinding{}, + expectedDeleteAdditive: []tpgresource.IAMBinding{ { Role: "role-a", Members: []string{"member-a"}, }, }, - expectedDeleteAuthoritative: []IAMBinding{ + expectedDeleteAuthoritative: []tpgresource.IAMBinding{ { Role: "role-a", Members: []string{"member-a"}, @@ -219,19 +220,19 @@ func TestMergeDeleteBindings(t *testing.T) { }, { name: "OneDeleteOne", - existing: []IAMBinding{ + existing: []tpgresource.IAMBinding{ { Role: "role-a", Members: []string{"member-a", "member-b"}, }, }, - incoming: []IAMBinding{ + incoming: []tpgresource.IAMBinding{ { Role: "role-a", Members: []string{"member-b"}, }, }, - expectedDeleteAdditive: []IAMBinding{ + expectedDeleteAdditive: []tpgresource.IAMBinding{ { Role: "role-a", Members: []string{"member-a"}, @@ -241,7 +242,7 @@ func TestMergeDeleteBindings(t *testing.T) { }, { name: "GrandFinale", - existing: []IAMBinding{ + existing: []tpgresource.IAMBinding{ { Role: "role-a", Members: []string{"member-a", "member-b"}, @@ -255,7 +256,7 @@ func TestMergeDeleteBindings(t *testing.T) { Members: []string{"member-c"}, }, }, - incoming: []IAMBinding{ + incoming: []tpgresource.IAMBinding{ { Role: "role-a", Members: []string{"member-a", "member-b", "member-c"}, @@ -265,7 +266,7 @@ func TestMergeDeleteBindings(t *testing.T) { Members: []string{"member-b", "member-c"}, }, }, - expectedDeleteAdditive: []IAMBinding{ + expectedDeleteAdditive: []tpgresource.IAMBinding{ { Role: "role-b", Members: []string{"member-d"}, @@ -275,7 +276,7 @@ func TestMergeDeleteBindings(t *testing.T) { Members: []string{"member-c"}, }, }, - expectedDeleteAuthoritative: []IAMBinding{ + expectedDeleteAuthoritative: []tpgresource.IAMBinding{ { Role: "role-c", Members: 
[]string{"member-c"}, @@ -284,16 +285,16 @@ func TestMergeDeleteBindings(t *testing.T) { }, } for _, c := range cases { - t.Run(c.name+"/mergeDeleteAdditiveBindings", func(t *testing.T) { + t.Run(c.name+"/MergeDeleteAdditiveBindings", func(t *testing.T) { assert.EqualValues(t, c.expectedDeleteAdditive, - mergeDeleteAdditiveBindings(c.existing, c.incoming), + MergeDeleteAdditiveBindings(c.existing, c.incoming), ) }) - t.Run(c.name+"/mergeDeleteAuthoritativeBindings", func(t *testing.T) { + t.Run(c.name+"/MergeDeleteAuthoritativeBindings", func(t *testing.T) { assert.EqualValues(t, c.expectedDeleteAuthoritative, - mergeDeleteAuthoritativeBindings(c.existing, c.incoming), + MergeDeleteAuthoritativeBindings(c.existing, c.incoming), ) }) } diff --git a/mmv1/third_party/validator/tpgresource/cai.go b/mmv1/third_party/validator/tpgresource/cai.go new file mode 100644 index 000000000000..0dfaa6a9daaa --- /dev/null +++ b/mmv1/third_party/validator/tpgresource/cai.go @@ -0,0 +1,181 @@ +package tpgresource + +import ( + "fmt" + "math/rand" + "regexp" + + transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" +) + +type ConvertFunc func(d TerraformResourceData, config *transport_tpg.Config) ([]Asset, error) +type GetApiObjectFunc func(d TerraformResourceData, config *transport_tpg.Config) (map[string]interface{}, error) + +// FetchFullResourceFunc allows initial data for a resource to be fetched from the API and merged +// with the planned changes. This is useful for resources that are only partially managed +// by Terraform, like IAM policies managed with member/binding resources. +type FetchFullResourceFunc func(d TerraformResourceData, config *transport_tpg.Config) (Asset, error) + +// MergeFunc combines multiple terraform resources into a single CAI asset. +// The incoming asset will either be an asset that was created/updated or deleted. +type MergeFunc func(existing, incoming Asset) Asset + +type ResourceConverter struct { + AssetType string + Convert ConvertFunc + FetchFullResource FetchFullResourceFunc + MergeCreateUpdate MergeFunc + MergeDelete MergeFunc +} + +// Asset is the CAI representation of a resource. +type Asset struct { + // The name, in a peculiar format: `\\.googleapis.com/` + Name string `json:"name"` + // The type name in `google..` format. + Type string `json:"asset_type"` + Resource *AssetResource `json:"resource,omitempty"` + IAMPolicy *IAMPolicy `json:"iam_policy,omitempty"` + OrgPolicy []*OrgPolicy `json:"org_policy,omitempty"` + V2OrgPolicies []*V2OrgPolicies `json:"v2_org_policies,omitempty"` +} + +// AssetResource is the Asset's Resource field. +type AssetResource struct { + // Api version + Version string `json:"version"` + // URI including scheme for the discovery doc - assembled from + // product name and version. + DiscoveryDocumentURI string `json:"discovery_document_uri"` + // Resource name. + DiscoveryName string `json:"discovery_name"` + // Actual resource state as per Terraform. Note that this does + // not necessarily correspond perfectly with the CAI representation + // as there are occasional deviations between CAI and API responses. + // This returns the API response values instead. 
+ Data map[string]interface{} `json:"data,omitempty"` +} + +type Folder struct { + Name string `json:"name,omitempty"` + Parent string `json:"parent,omitempty"` + DisplayName string `json:"display_name,omitempty"` + State string `json:"state,omitempty"` + CreateTime *Timestamp `json:"create_time,omitempty"` +} + +type IAMPolicy struct { + Bindings []IAMBinding `json:"bindings"` +} + +type IAMBinding struct { + Role string `json:"role"` + Members []string `json:"members"` +} + +type OrgPolicy struct { + Constraint string `json:"constraint,omitempty"` + ListPolicy *ListPolicy `json:"listPolicy"` + BooleanPolicy *BooleanPolicy `json:"booleanPolicy"` + RestoreDefault *RestoreDefault `json:"restoreDefault"` + UpdateTime *Timestamp `json:"update_time,omitempty"` +} + +// V2OrgPolicies is the represtation of V2OrgPolicies +type V2OrgPolicies struct { + Name string `json:"name"` + PolicySpec *PolicySpec `json:"spec,omitempty"` +} + +// Spec is the representation of Spec for V2OrgPolicy +type PolicySpec struct { + Etag string `json:"etag,omitempty"` + UpdateTime *Timestamp `json:"update_time,omitempty"` + PolicyRules []*PolicyRule `json:"rules,omitempty"` + InheritFromParent bool `json:"inherit_from_parent,omitempty"` + Reset bool `json:"reset,omitempty"` +} + +type PolicyRule struct { + Values *StringValues `json:"values,omitempty"` + AllowAll bool `json:"allow_all,omitempty"` + DenyAll bool `json:"deny_all,omitempty"` + Enforce bool `json:"enforce,omitempty"` + Condition *Expr `json:"condition,omitempty"` +} + +type StringValues struct { + AllowedValues []string `json:"allowed_values,omitempty"` + DeniedValues []string `json:"denied_values,omitempty"` +} + +type Expr struct { + Expression string `json:"expression,omitempty"` + Title string `json:"title,omitempty"` + Description string `json:"description,omitempty"` + Location string `json:"location,omitempty"` +} + +type Timestamp struct { + Seconds int64 `json:"seconds,omitempty"` + Nanos int64 `json:"nanos,omitempty"` +} + +type ListPolicyAllValues int32 + +type ListPolicy struct { + AllowedValues []string `json:"allowed_values,omitempty"` + DeniedValues []string `json:"denied_values,omitempty"` + AllValues ListPolicyAllValues `json:"all_values,omitempty"` + SuggestedValue string `json:"suggested_value,omitempty"` + InheritFromParent bool `json:"inherit_from_parent,omitempty"` +} + +type BooleanPolicy struct { + Enforced bool `json:"enforced,omitempty"` +} + +type RestoreDefault struct { +} + +// AssetName templates an asset.name by looking up and replacing all instances +// of {{field}}. In the case where a field would resolve to an empty string, a +// generated unique string will be used: "placeholder-" + randomString(). +// This is done to preserve uniqueness of asset.name for a given asset.asset_type. 
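+// As an illustrative sketch (bucket value assumed): with the storage IAM template
+// "//storage.googleapis.com/{{bucket}}" used elsewhere in this change, a resolved
+// bucket of "my-bucket" yields "//storage.googleapis.com/my-bucket", while an
+// unresolved (empty) bucket yields something like
+// "//storage.googleapis.com/placeholder-a1b2c3d4".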
+func AssetName(d TerraformResourceData, config *transport_tpg.Config, linkTmpl string) (string, error) { + re := regexp.MustCompile("{{([%[:word:]]+)}}") + + // workaround for empty project + placeholderSet := false + if config.Project == "" { + config.Project = fmt.Sprintf("placeholder-%s", RandString(8)) + placeholderSet = true + } + + f, err := BuildReplacementFunc(re, d, config, linkTmpl, false) + if err != nil { + return "", err + } + if placeholderSet { + config.Project = "" + } + + fWithPlaceholder := func(key string) string { + val := f(key) + if val == "" { + val = fmt.Sprintf("placeholder-%s", RandString(8)) + } + return val + } + + return re.ReplaceAllStringFunc(linkTmpl, fWithPlaceholder), nil +} + +func RandString(n int) string { + const letterBytes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" + b := make([]byte, n) + for i := range b { + b[i] = letterBytes[rand.Intn(len(letterBytes))] + } + return string(b) +} diff --git a/mmv1/third_party/validator/cai_test.go b/mmv1/third_party/validator/tpgresource/cai_test.go similarity index 85% rename from mmv1/third_party/validator/cai_test.go rename to mmv1/third_party/validator/tpgresource/cai_test.go index 45d729c70da4..540ab6b2665c 100644 --- a/mmv1/third_party/validator/cai_test.go +++ b/mmv1/third_party/validator/tpgresource/cai_test.go @@ -1,10 +1,9 @@ -package google +package tpgresource import ( "regexp" "testing" - "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/tpgresource" transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" ) @@ -13,7 +12,7 @@ func TestAssetName(t *testing.T) { name string template string expectedPattern string - data tpgresource.TerraformResourceData + data TerraformResourceData }{ { name: "PresentValues", @@ -42,7 +41,7 @@ func TestAssetName(t *testing.T) { t.Run(c.name, func(t *testing.T) { r := regexp.MustCompile(c.expectedPattern) - output, err := assetName(c.data, &transport_tpg.Config{}, c.template) + output, err := AssetName(c.data, &transport_tpg.Config{}, c.template) if err != nil { t.Fatal(err) } @@ -70,7 +69,7 @@ func TestRandString(t *testing.T) { type mockTerraformResourceData struct { m map[string]interface{} - tpgresource.TerraformResourceData + TerraformResourceData } func (d *mockTerraformResourceData) GetOkExists(k string) (interface{}, bool) { diff --git a/mmv1/third_party/validator/tpgresource/constants.go b/mmv1/third_party/validator/tpgresource/constants.go new file mode 100644 index 000000000000..69c3c50b0ee1 --- /dev/null +++ b/mmv1/third_party/validator/tpgresource/constants.go @@ -0,0 +1,29 @@ +package tpgresource + +import ( + "errors" + + transport_tpg "github.com/GoogleCloudPlatform/terraform-google-conversion/v2/tfplan2cai/converters/google/resources/transport" +) + +// ErrNoConversion can be returned if a conversion is unable to be returned. + +// because of the current state of the system. +// Example: The conversion requires that the resource has already been created +// and is now being updated). +var ErrNoConversion = errors.New("no conversion") + +// ErrEmptyIdentityField can be returned when fetching a resource is not possible +// due to the identity field of that resource returning empty. 
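+// For example, FetchStorageBucketIamPolicy earlier in this change returns this
+// error when the "bucket" attribute is not yet known, the intent being that
+// callers skip fetching for that resource rather than fail the whole conversion
+// (a reading of the intended usage, not a guarantee about every caller).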
+var ErrEmptyIdentityField = errors.New("empty identity field") + +// ErrResourceInaccessible can be returned when fetching an IAM resource +// on a project that has not yet been created or if the service account +// lacks sufficient permissions +var ErrResourceInaccessible = errors.New("resource does not exist or service account is lacking sufficient permissions") + +// Global MutexKV +// +// Deprecated: For backward compatibility mutexKV is still working, +// but all new code should use MutexStore in the transport_tpg package instead. +var mutexKV = transport_tpg.MutexStore diff --git a/mmv1/third_party/validator/tpgresource/json_map.go b/mmv1/third_party/validator/tpgresource/json_map.go new file mode 100644 index 000000000000..d34956463b16 --- /dev/null +++ b/mmv1/third_party/validator/tpgresource/json_map.go @@ -0,0 +1,22 @@ +package tpgresource + +import ( + "encoding/json" + "fmt" +) + +// JsonMap converts a given value to a map[string]interface{} that +// matches its JSON format. +func JsonMap(x interface{}) (map[string]interface{}, error) { + jsn, err := json.Marshal(x) + if err != nil { + return nil, fmt.Errorf("marshalling: %v", err) + } + + m := make(map[string]interface{}) + if err := json.Unmarshal(jsn, &m); err != nil { + return nil, fmt.Errorf("unmarshalling: %v", err) + } + + return m, nil +} diff --git a/tools/missing-test-detector/reader.go b/tools/missing-test-detector/reader.go index 86d4c3fc0f31..024e4b1ee43a 100644 --- a/tools/missing-test-detector/reader.go +++ b/tools/missing-test-detector/reader.go @@ -7,6 +7,7 @@ import ( "go/token" "os" "path/filepath" + "regexp" "strconv" "strings" @@ -204,7 +205,9 @@ func readConfigBasicLit(configBasicLit *ast.BasicLit) (Step, error) { return nil, err } else { // Remove template variables because they interfere with hcl parsing. - configStr = strings.ReplaceAll(configStr, "%", "") + pattern := regexp.MustCompile("%{[^{}]*}") + // Replace with a value that can be parsed outside quotation marks. 
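+ // For example, a test config line such as `field_seven = %{bool}` becomes
+ // `field_seven = true`, which parses as plain HCL, whereas stripping only the
+ // "%" (the previous behavior) left `field_seven = {bool}` behind, which cannot
+ // be parsed outside of quotation marks.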
+ configStr = pattern.ReplaceAllString(configStr, "true") parser := hclparse.NewParser() file, diagnostics := parser.ParseHCL([]byte(configStr), "config.hcl") if diagnostics.HasErrors() { diff --git a/tools/missing-test-detector/reader_test.go b/tools/missing-test-detector/reader_test.go index 8465051b6305..7ec8aee789ce 100644 --- a/tools/missing-test-detector/reader_test.go +++ b/tools/missing-test-detector/reader_test.go @@ -37,6 +37,7 @@ func TestReadCoveredResourceTestFile(t *testing.T) { "field_six": "\"value-three\"", }, }, + "field_seven": "true", }); !reflect.DeepEqual(coveredResource, expectedResource) { t.Errorf("found wrong fields in covered resource config: %#v, expected %#v", coveredResource, expectedResource) } diff --git a/tools/missing-test-detector/testdata/covered_resource_test.go b/tools/missing-test-detector/testdata/covered_resource_test.go index 588db3f134f7..6bfcfc8db8cd 100644 --- a/tools/missing-test-detector/testdata/covered_resource_test.go +++ b/tools/missing-test-detector/testdata/covered_resource_test.go @@ -28,6 +28,7 @@ resource "covered_resource" "resource" { field_six = "value-three" } } + field_seven = %{bool} } `, context) } diff --git a/tpgtools/go.mod b/tpgtools/go.mod index c1dff65959cf..36404b1e5ac7 100644 --- a/tpgtools/go.mod +++ b/tpgtools/go.mod @@ -4,7 +4,7 @@ go 1.19 require ( bitbucket.org/creachadair/stringset v0.0.9 - github.com/GoogleCloudPlatform/declarative-resource-client-library v1.40.0 + github.com/GoogleCloudPlatform/declarative-resource-client-library v1.42.0 github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b github.com/hashicorp/errwrap v1.0.0 github.com/hashicorp/hcl v1.0.0 diff --git a/tpgtools/go.sum b/tpgtools/go.sum index f22f203365df..a8d4fd23fc34 100644 --- a/tpgtools/go.sum +++ b/tpgtools/go.sum @@ -35,10 +35,8 @@ cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9 dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.39.0 h1:js9wqpbWMsjRUWCezgiw+bmZ4Jke9nkcQJLjdnQZwpc= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.39.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.40.0 h1:ONa3+D4jOZd+B0JAFlAGf6kxY8ymjpX4L7UhyZ3Z1XA= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.40.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.42.0 h1:ClnwLCqnr8/exvPWhBLJOj16oa8bvw8Fhu45wCjvQbU= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.42.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/agext/levenshtein v1.2.2 h1:0S/Yg6LYmFJ5stwQeRp6EeOcCbj7xiqQSdNelsXvaqE= github.com/agext/levenshtein v1.2.2/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= diff --git a/tpgtools/handwritten.go b/tpgtools/handwritten.go index 48b6f99cb858..ec89143ea7ae 100644 --- a/tpgtools/handwritten.go +++ b/tpgtools/handwritten.go @@ -40,6 +40,13 @@ func copyHandwrittenFiles(inPath string, outPath string) { } } + // Log warning about unexpected outPath values before 
adding copyright headers + // Matches equivalent in MMv1, see below: + // https://github.com/hashicorp/magic-modules/blob/48ce1004bafd4b4ef1be7565eaa6727adabd0670/mmv1/provider/core.rb#L202-L206 + if !isOutputFolderExpected(outPath) { + glog.Infof("Unexpected output folder (%s) detected when deciding to add HashiCorp copyright headers. Watch out for unexpected changes to copied files", outPath) + } + fs, err := ioutil.ReadDir(inPath) if err != nil { glog.Fatal(err) @@ -64,6 +71,14 @@ func copyHandwrittenFiles(inPath string, outPath string) { continue } + // Add HashiCorp copyright header only if generating TPG/TPGB + if strings.HasSuffix(outPath, "/terraform-provider-google") || strings.HasSuffix(outPath, "/terraform-provider-google-beta") { + if strings.HasSuffix(f.Name(), ".go") { + copyrightHeader := []byte("// Copyright (c) HashiCorp, Inc.\n// SPDX-License-Identifier: MPL-2.0\n") + b = append(copyrightHeader, b...) + } + } + // Format file if ending in .go if strings.HasSuffix(f.Name(), ".go") { b, err = formatSource(bytes.NewBuffer(b)) @@ -74,9 +89,30 @@ func copyHandwrittenFiles(inPath string, outPath string) { } // Write copied file. - err = ioutil.WriteFile(path.Join(outPath, terraformResourceDirectory, f.Name()), b, 0644) + err = ioutil.WriteFile(path.Join(outPath, terraformResourceDirectory, "tpgdclresource", f.Name()), b, 0644) if err != nil { glog.Exit(err) } } } + +// isOutputFolderExpected returns a boolean indicating if the output folder is present in an allow list. +// Intention is to warn users about unexpected diffs if they have renamed their cloned copy of downstream repos, +// as this affects detecting which downstream they're building and whether to add copyright headers. +// Written to match `expected_output_folder?` method in MMv1, see below: +// https://github.com/hashicorp/magic-modules/blob/48ce1004bafd4b4ef1be7565eaa6727adabd0670/mmv1/provider/core.rb#L266-L282 +func isOutputFolderExpected(outPath string) bool { + pathComponents := strings.Split(outPath, "/") + folderName := pathComponents[len(pathComponents)-1] // last element + + switch folderName { + case "terraform-provider-google", + "terraform-provider-google-beta", + "terraform-next", + "terraform-google-conversion", + "tfplan2cai": + return true + default: + return false + } +} diff --git a/tpgtools/handwritten/dcl.go b/tpgtools/handwritten/dcl.go index 4ec72490b7f8..ad5c5f2ce219 100644 --- a/tpgtools/handwritten/dcl.go +++ b/tpgtools/handwritten/dcl.go @@ -1,4 +1,4 @@ -package google +package tpgdclresource import ( dcl "github.com/GoogleCloudPlatform/declarative-resource-client-library/dcl" diff --git a/tpgtools/handwritten/expanders.go b/tpgtools/handwritten/expanders.go index 3f47248811a9..b73a1594d975 100644 --- a/tpgtools/handwritten/expanders.go +++ b/tpgtools/handwritten/expanders.go @@ -1,12 +1,13 @@ -package google +package tpgdclresource import ( "strings" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" ) -func expandStringArray(v interface{}) []string { +func ExpandStringArray(v interface{}) []string { arr, ok := v.([]string) if ok { @@ -14,10 +15,10 @@ func expandStringArray(v interface{}) []string { } if arr, ok := v.(*schema.Set); ok { - return convertStringSet(arr) + return tpgresource.ConvertStringSet(arr) } - arr = convertStringArr(v.([]interface{})) + arr = tpgresource.ConvertStringArr(v.([]interface{})) if arr == nil { // Send empty array specifically instead of nil return make([]string, 0) @@ -25,7 
+26,7 @@ func expandStringArray(v interface{}) []string { return arr } -func expandIntegerArray(v interface{}) []int64 { +func ExpandIntegerArray(v interface{}) []int64 { arr, ok := v.([]int64) if ok { @@ -52,7 +53,7 @@ func convertIntegerArr(v []interface{}) []int64 { } // Returns the DCL representation of a three-state boolean value represented by a string in terraform. -func expandEnumBool(v interface{}) *bool { +func ExpandEnumBool(v interface{}) *bool { s, ok := v.(string) if !ok { return nil diff --git a/tpgtools/handwritten/expanders_test.go b/tpgtools/handwritten/expanders_test.go index e3170fe2b6d2..4d5ff02ba871 100644 --- a/tpgtools/handwritten/expanders_test.go +++ b/tpgtools/handwritten/expanders_test.go @@ -1,4 +1,4 @@ -package google +package tpgdclresource import ( "reflect" @@ -66,7 +66,7 @@ func TestExpandEnumBool(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() - if got, want := expandEnumBool(tc.input), tc.exp; !reflect.DeepEqual(got, want) { + if got, want := ExpandEnumBool(tc.input), tc.exp; !reflect.DeepEqual(got, want) { t.Errorf("expected %v to be %v", got, want) } }) diff --git a/tpgtools/handwritten/flatteners.go b/tpgtools/handwritten/flatteners.go index 707d823b112e..b2bffefe9045 100644 --- a/tpgtools/handwritten/flatteners.go +++ b/tpgtools/handwritten/flatteners.go @@ -1,7 +1,7 @@ -package google +package tpgdclresource // Returns the terraform representation of a three-state boolean value represented by a pointer to bool in DCL. -func flattenEnumBool(v interface{}) string { +func FlattenEnumBool(v interface{}) string { b, ok := v.(*bool) if !ok || b == nil { return "" diff --git a/tpgtools/handwritten/orgpolicy_utils.go b/tpgtools/handwritten/orgpolicy_utils.go index f39af61946a8..24e0d70283bf 100644 --- a/tpgtools/handwritten/orgpolicy_utils.go +++ b/tpgtools/handwritten/orgpolicy_utils.go @@ -1,4 +1,4 @@ -package google +package tpgdclresource import ( "fmt" @@ -10,7 +10,7 @@ import ( // OrgPolicyPolicy has a custom import method because the parent field needs to allow an additional forward slash // to represent the type of parent (e.g. projects/{project_id}). 
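// For example (an assumed import ID, not taken from the tests), an ID such as
// "projects/my-project-id/policies/iam.disableServiceAccountKeyCreation" splits
// into a parent of "projects/my-project-id" and a name of
// "iam.disableServiceAccountKeyCreation" under the pattern below.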
-func resourceOrgPolicyPolicyCustomImport(d *schema.ResourceData, meta interface{}) error { +func ResourceOrgPolicyPolicyCustomImport(d *schema.ResourceData, meta interface{}) error { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ "^(?P<parent>[^/]+/?[^/]*)/policies/(?P<name>[^/]+)", diff --git a/tpgtools/handwritten/tpgtools_utils.go b/tpgtools/handwritten/tpgtools_utils.go index cdcb287d27b1..68fa33e1d0e7 100644 --- a/tpgtools/handwritten/tpgtools_utils.go +++ b/tpgtools/handwritten/tpgtools_utils.go @@ -1,4 +1,4 @@ -package google +package tpgdclresource import ( "fmt" @@ -9,11 +9,11 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) -func oldValue(old, new interface{}) interface{} { +func OldValue(old, new interface{}) interface{} { return old } -func handleNotFoundDCLError(err error, d *schema.ResourceData, resourceName string) error { +func HandleNotFoundDCLError(err error, d *schema.ResourceData, resourceName string) error { if dcl.IsNotFound(err) { log.Printf("[WARN] Removing %s because it's gone", resourceName) // The resource doesn't exist anymore diff --git a/tpgtools/ignored_handwritten/disk_utils.go b/tpgtools/ignored_handwritten/disk_utils.go index c43260a4704a..aa6b29971e09 100644 --- a/tpgtools/ignored_handwritten/disk_utils.go +++ b/tpgtools/ignored_handwritten/disk_utils.go @@ -5,6 +5,7 @@ import ( "strings" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + tpgcompute "github.com/hashicorp/terraform-provider-google/google/services/compute" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) @@ -108,7 +109,7 @@ func DiskImageDiffSuppress(_, old, new string, _ *schema.ResourceData) bool { func diskImageProjectNameEquals(project1, project2 string) bool { // Convert short project name to full name // For instance, centos => centos-cloud - fullProjectName, ok := imageMap[project2] + fullProjectName, ok := tpgcompute.ImageMap[project2] if ok { project2 = fullProjectName } @@ -228,7 +229,7 @@ func expandComputeDiskSourceImage(v interface{}, d tpgresource.TerraformResource return nil } - f, err := resolveImage(config, project, v.(string)) + f, err := tpgcompute.ResolveImage(config, project, v.(string)) if err != nil { return nil } @@ -299,7 +300,7 @@ func flattenComputeDiskImage(v interface{}, d *schema.ResourceData, meta interfa return nil } - f, err := resolveImage(config, project, *vptr) + f, err := tpgcompute.ResolveImage(config, project, *vptr) if err != nil { return nil } diff --git a/tpgtools/main.go b/tpgtools/main.go index 16d2e72bc177..1c6dd8e03661 100644 --- a/tpgtools/main.go +++ b/tpgtools/main.go @@ -123,6 +123,12 @@ func main() { return } + // Copy DCL helper files into the folder tpgdclresource to make it easier to remove these files later.
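+ // For instance (paths assumed for illustration), an output path ending in
+ // .../terraform-provider-google combined with a terraformResourceDirectory of
+ // "google" results in .../terraform-provider-google/google/tpgdclresource.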
+ dirPath := path.Join(*oPath, terraformResourceDirectory, "tpgdclresource") + if err := os.MkdirAll(dirPath, os.ModePerm); err != nil { + glog.Error(fmt.Errorf("error creating Terraform tpgdclresource directory %v: %v", dirPath, err)) + } + copyHandwrittenFiles(*cPath, *oPath) } @@ -360,6 +366,14 @@ func loadOverrides(packagePath Filepath, fileName string) Overrides { return overrides } +func getParentDir(res *Resource) string { + servicePath := path.Join(*oPath, terraformResourceDirectory, "services", string(res.Package())) + if err := os.MkdirAll(servicePath, os.ModePerm); err != nil { + glog.Error(fmt.Errorf("error creating Terraform the service directory %v: %v", servicePath, err)) + } + return servicePath +} + func generateResourceFile(res *Resource) { // Generate resource file tmplInput := ResourceInput{ @@ -391,7 +405,8 @@ func generateResourceFile(res *Resource) { fmt.Printf("%v", string(formatted)) } else { outname := fmt.Sprintf("resource_%s_%s.go", res.ProductName(), res.Name()) - err := ioutil.WriteFile(path.Join(*oPath, terraformResourceDirectory, outname), formatted, 0644) + parentDir := getParentDir(res) + err = ioutil.WriteFile(path.Join(parentDir, outname), formatted, 0644) if err != nil { glog.Exit(err) } diff --git a/tpgtools/overrides/cloudbuild/beta/worker_pool.yaml b/tpgtools/overrides/cloudbuild/beta/worker_pool.yaml index e077a3db6f77..4deb00daaee1 100644 --- a/tpgtools/overrides/cloudbuild/beta/worker_pool.yaml +++ b/tpgtools/overrides/cloudbuild/beta/worker_pool.yaml @@ -1,6 +1,6 @@ - type: DIFF_SUPPRESS_FUNC field: network_config.peered_network details: - diffsuppressfunc: compareResourceNames + diffsuppressfunc: tpgresource.CompareResourceNames - type: EXCLUDE field: etag diff --git a/tpgtools/overrides/cloudbuild/worker_pool.yaml b/tpgtools/overrides/cloudbuild/worker_pool.yaml index e077a3db6f77..4deb00daaee1 100644 --- a/tpgtools/overrides/cloudbuild/worker_pool.yaml +++ b/tpgtools/overrides/cloudbuild/worker_pool.yaml @@ -1,6 +1,6 @@ - type: DIFF_SUPPRESS_FUNC field: network_config.peered_network details: - diffsuppressfunc: compareResourceNames + diffsuppressfunc: tpgresource.CompareResourceNames - type: EXCLUDE field: etag diff --git a/tpgtools/overrides/compute/beta/firewall_policy_rule.yaml b/tpgtools/overrides/compute/beta/firewall_policy_rule.yaml index 09206fdc5ec8..e743d253be12 100644 --- a/tpgtools/overrides/compute/beta/firewall_policy_rule.yaml +++ b/tpgtools/overrides/compute/beta/firewall_policy_rule.yaml @@ -1,4 +1,4 @@ - type: DIFF_SUPPRESS_FUNC field: target_resources details: - diffsuppressfunc: compareSelfLinkOrResourceName + diffsuppressfunc: tpgresource.CompareSelfLinkOrResourceName diff --git a/tpgtools/overrides/compute/firewall_policy_rule.yaml b/tpgtools/overrides/compute/firewall_policy_rule.yaml index 09206fdc5ec8..e743d253be12 100644 --- a/tpgtools/overrides/compute/firewall_policy_rule.yaml +++ b/tpgtools/overrides/compute/firewall_policy_rule.yaml @@ -1,4 +1,4 @@ - type: DIFF_SUPPRESS_FUNC field: target_resources details: - diffsuppressfunc: compareSelfLinkOrResourceName + diffsuppressfunc: tpgresource.CompareSelfLinkOrResourceName diff --git a/tpgtools/overrides/orgpolicy/beta/policy.yaml b/tpgtools/overrides/orgpolicy/beta/policy.yaml index 495fab27c0c2..c27653ff9807 100644 --- a/tpgtools/overrides/orgpolicy/beta/policy.yaml +++ b/tpgtools/overrides/orgpolicy/beta/policy.yaml @@ -1,6 +1,6 @@ - type: CUSTOM_IMPORT_FUNCTION details: - function: resourceOrgPolicyPolicyCustomImport + function: 
tpgdclresource.ResourceOrgPolicyPolicyCustomImport - type: ENUM_BOOL field: spec.rules.allow_all - type: ENUM_BOOL diff --git a/tpgtools/overrides/orgpolicy/policy.yaml b/tpgtools/overrides/orgpolicy/policy.yaml index 495fab27c0c2..c27653ff9807 100644 --- a/tpgtools/overrides/orgpolicy/policy.yaml +++ b/tpgtools/overrides/orgpolicy/policy.yaml @@ -1,6 +1,6 @@ - type: CUSTOM_IMPORT_FUNCTION details: - function: resourceOrgPolicyPolicyCustomImport + function: tpgdclresource.ResourceOrgPolicyPolicyCustomImport - type: ENUM_BOOL field: spec.rules.allow_all - type: ENUM_BOOL diff --git a/tpgtools/property.go b/tpgtools/property.go index 19f9df439b61..0ee26f2af299 100644 --- a/tpgtools/property.go +++ b/tpgtools/property.go @@ -222,7 +222,7 @@ func (p Property) DefaultStateGetter() string { } func (p Property) ChangeStateGetter() string { - return buildGetter(p, fmt.Sprintf("oldValue(d.GetChange(%q))", p.Name())) + return buildGetter(p, fmt.Sprintf("tpgdclresource.OldValue(d.GetChange(%q))", p.Name())) } // Builds a Getter for constructing a shallow @@ -256,7 +256,7 @@ func buildGetter(p Property, rawGetter string) string { return fmt.Sprintf("%s.%sEnumRef(%s.(string))", p.resource.Package(), p.ObjectType(), rawGetter) } if p.EnumBool { - return fmt.Sprintf("expandEnumBool(%s.(string))", rawGetter) + return fmt.Sprintf("tpgdclresource.ExpandEnumBool(%s.(string))", rawGetter) } if p.Computed { return fmt.Sprintf("dcl.StringOrNil(%s.(string))", rawGetter) @@ -279,11 +279,11 @@ func buildGetter(p Property, rawGetter string) string { return fmt.Sprintf("expand%s%sArray(%s)", p.resource.PathType(), p.PackagePath(), rawGetter) } if p.Type.typ.Items != nil && p.Type.typ.Items.Type == "string" { - return fmt.Sprintf("expandStringArray(%s)", rawGetter) + return fmt.Sprintf("tpgdclresource.ExpandStringArray(%s)", rawGetter) } if p.Type.typ.Items != nil && p.Type.typ.Items.Type == "integer" { - return fmt.Sprintf("expandIntegerArray(%s)", rawGetter) + return fmt.Sprintf("tpgdclresource.ExpandIntegerArray(%s)", rawGetter) } if p.Type.typ.Items != nil && len(p.Properties) > 0 { @@ -354,7 +354,7 @@ func (p Property) flattenGetterWithParent(parent string) string { fallthrough case SchemaTypeMap: if p.EnumBool { - return fmt.Sprintf("flattenEnumBool(%s.%s)", parent, p.PackageName) + return fmt.Sprintf("tpgdclresource.FlattenEnumBool(%s.%s)", parent, p.PackageName) } return fmt.Sprintf("%s.%s", parent, p.PackageName) case SchemaTypeList, SchemaTypeSet: @@ -396,7 +396,7 @@ func (p Property) DefaultDiffSuppress() *string { case SchemaTypeString: // Field is reference to another resource if _, ok := p.typ.Extension["x-dcl-references"]; ok { - dsf := "compareSelfLinkOrResourceName" + dsf := "tpgresource.CompareSelfLinkOrResourceName" return &dsf } } @@ -629,7 +629,7 @@ func createPropertiesFromSchema(schema *openapi.Schema, typeFetcher *TypeFetcher i := Type{typ: v.Items} e := fmt.Sprintf("&schema.Schema{Type: schema.%s}", i.String()) if _, ok := v.Extension["x-dcl-references"]; ok { - e = fmt.Sprintf("&schema.Schema{Type: schema.%s, DiffSuppressFunc: compareSelfLinkOrResourceName, }", i.String()) + e = fmt.Sprintf("&schema.Schema{Type: schema.%s, DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, }", i.String()) } p.Elem = &e p.ElemIsBasicType = true @@ -697,7 +697,7 @@ func createPropertiesFromSchema(schema *openapi.Schema, typeFetcher *TypeFetcher if p.customName != "" { propertyName = p.customName } - ig := fmt.Sprintf("get%s(d, config)", renderSnakeAsTitle(miscellaneousNameSnakeCase(propertyName))) + 
ig := fmt.Sprintf("tpgresource.Get%s(d, config)", renderSnakeAsTitle(miscellaneousNameSnakeCase(propertyName))) if cigOk { ig = fmt.Sprintf("%s(d, config)", cig.Function) } @@ -837,9 +837,9 @@ func createPropertiesFromSchema(schema *openapi.Schema, typeFetcher *TypeFetcher } else { parent = "obj" } - enumBoolSS := fmt.Sprintf("d.Set(%q, flattenEnumBool(%s.%s))", p.Name(), parent, p.PackageName) + enumBoolSS := fmt.Sprintf("d.Set(%q, tpgdclresource.FlattenEnumBool(%s.%s))", p.Name(), parent, p.PackageName) p.StateSetter = &enumBoolSS - enumBoolSG := fmt.Sprintf("expandEnumBool(d.Get(%q))", p.Name()) + enumBoolSG := fmt.Sprintf("tpgdclresource.ExpandEnumBool(d.Get(%q))", p.Name()) p.StateGetter = &enumBoolSG } diff --git a/tpgtools/templates/provider_dcl_client_creation.go.tmpl b/tpgtools/templates/provider_dcl_client_creation.go.tmpl index 601641205fb4..4a64338743fd 100644 --- a/tpgtools/templates/provider_dcl_client_creation.go.tmpl +++ b/tpgtools/templates/provider_dcl_client_creation.go.tmpl @@ -11,6 +11,9 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */}} +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + // ---------------------------------------------------------------------------- // // *** AUTO GENERATED CODE *** Type: DCL *** diff --git a/tpgtools/templates/provider_dcl_endpoints.go.tmpl b/tpgtools/templates/provider_dcl_endpoints.go.tmpl index 46788c8793a8..23caf7c6aac6 100644 --- a/tpgtools/templates/provider_dcl_endpoints.go.tmpl +++ b/tpgtools/templates/provider_dcl_endpoints.go.tmpl @@ -11,6 +11,9 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */}} +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + // ---------------------------------------------------------------------------- // // *** AUTO GENERATED CODE *** Type: DCL *** diff --git a/tpgtools/templates/provider_dcl_resources.go.tmpl b/tpgtools/templates/provider_dcl_resources.go.tmpl index 8402f1e8e582..7b6f3520bf37 100644 --- a/tpgtools/templates/provider_dcl_resources.go.tmpl +++ b/tpgtools/templates/provider_dcl_resources.go.tmpl @@ -11,6 +11,9 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */}} +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + // ---------------------------------------------------------------------------- // // *** AUTO GENERATED CODE *** Type: DCL *** @@ -30,12 +33,17 @@ package google import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + {{- range $res := . }} + {{- if not $res.SkipInProvider }} + "github.com/hashicorp/terraform-provider-google/google/services/{{$res.Package}}" + {{- end }} + {{- end }} ) var dclResources = map[string]*schema.Resource{ {{- range $res := . 
}} {{- if not $res.SkipInProvider }} - "{{$res.TerraformName}}": Resource{{$res.PathType}}(), + "{{$res.TerraformName}}": {{$res.Package}}.Resource{{$res.PathType}}(), {{- end }} {{- end }} } diff --git a/tpgtools/templates/resource.go.tmpl b/tpgtools/templates/resource.go.tmpl index 5fda987f0d47..532dd4c4c75b 100644 --- a/tpgtools/templates/resource.go.tmpl +++ b/tpgtools/templates/resource.go.tmpl @@ -11,6 +11,9 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */}} +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + // ---------------------------------------------------------------------------- // // *** AUTO GENERATED CODE *** Type: DCL *** @@ -26,7 +29,7 @@ // // ---------------------------------------------------------------------------- -package google +package {{$.Package}} import( "context" @@ -48,6 +51,7 @@ import( dcl "github.com/GoogleCloudPlatform/declarative-resource-client-library/dcl" {{$.Package}} "github.com/GoogleCloudPlatform/declarative-resource-client-library/services/google/{{$.DCLPackage}}" + "github.com/hashicorp/terraform-provider-google/google/tpgdclresource" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) @@ -255,10 +259,10 @@ should be converted to use the DCL's ID method, so normalization can be uniform. {{- if $.CustomCreateDirectiveFunction }} directive := {{ $.CustomCreateDirectiveFunction }}(obj) {{- else if $.HasCreate }} - directive := CreateDirective + directive := tpgdclresource.CreateDirective {{- else }} {{/* Resource has no create method, so we skip the BlockModification parameter. */}} - directive := UpdateDirective + directive := tpgdclresource.UpdateDirective {{- end }} userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) if err != nil { @@ -380,7 +384,7 @@ func resource{{$.PathType}}Read(d *schema.ResourceData, meta interface{}) error res, err := client.Get{{$.DCLTitle}}(context.Background(), obj) if err != nil { resourceName := fmt.Sprintf("{{$.PathType}} %q", d.Id()) - return handleNotFoundDCLError(err, d, resourceName) + return tpgdclresource.HandleNotFoundDCLError(err, d, resourceName) } {{ range $v := .Properties -}} @@ -435,7 +439,7 @@ func resource{{$.PathType}}Update(d *schema.ResourceData, meta interface{}) erro defer transport_tpg.MutexStore.Unlock(lockName) {{ end }} - directive := UpdateDirective + directive := tpgdclresource.UpdateDirective {{- if $.StateHint }} directive = append(directive, dcl.WithStateHint(old)) {{- end }} diff --git a/tpgtools/templates/serialization.go.tmpl b/tpgtools/templates/serialization.go.tmpl index f6ed2536a299..7936e1a0119c 100644 --- a/tpgtools/templates/serialization.go.tmpl +++ b/tpgtools/templates/serialization.go.tmpl @@ -11,6 +11,9 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */}} +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + // ---------------------------------------------------------------------------- // // *** AUTO GENERATED CODE *** Type: DCL *** diff --git a/tpgtools/templates/sweeper.go.tmpl b/tpgtools/templates/sweeper.go.tmpl index e03ee17bc198..a4bbede86161 100644 --- a/tpgtools/templates/sweeper.go.tmpl +++ b/tpgtools/templates/sweeper.go.tmpl @@ -11,6 +11,9 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */}} +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + // ---------------------------------------------------------------------------- // // *** AUTO GENERATED CODE *** Type: DCL *** diff --git a/tpgtools/templates/test_file.go.tmpl b/tpgtools/templates/test_file.go.tmpl index 48430836c8ea..f26f29b70502 100644 --- a/tpgtools/templates/test_file.go.tmpl +++ b/tpgtools/templates/test_file.go.tmpl @@ -11,6 +11,9 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */}} +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + // ---------------------------------------------------------------------------- // // *** AUTO GENERATED CODE *** Type: DCL ***