diff --git a/.github/workflows/dataproc.yaml b/.github/workflows/dataproc.yaml
new file mode 100644
index 0000000000..5097747472
--- /dev/null
+++ b/.github/workflows/dataproc.yaml
@@ -0,0 +1,68 @@
+name: dataproc
+on:
+  push:
+    branches:
+      - main
+    paths:
+      - 'dataproc/**'
+  pull_request:
+    paths:
+      - 'dataproc/**'
+  pull_request_target:
+    types: [labeled]
+    paths:
+      - 'dataproc/**'
+  schedule:
+    - cron: '0 0 * * 0'
+jobs:
+  test:
+    if: ${{ github.event.action != 'labeled' || github.event.label.name == 'actions:force-run' }}
+    runs-on: ubuntu-latest
+    timeout-minutes: 60
+    permissions:
+      contents: 'write'
+      pull-requests: 'write'
+      id-token: 'write'
+    steps:
+      - uses: actions/checkout@v3.1.0
+        with:
+          ref: ${{github.event.pull_request.head.sha}}
+      - uses: 'google-github-actions/auth@v1.0.0'
+        with:
+          workload_identity_provider: 'projects/1046198160504/locations/global/workloadIdentityPools/github-actions-pool/providers/github-actions-provider'
+          service_account: 'kokoro-system-test@long-door-651.iam.gserviceaccount.com'
+          create_credentials_file: 'true'
+          access_token_lifetime: 600s
+      - uses: actions/setup-node@v3.5.1
+        with:
+          node-version: 16
+      - run: npm install
+        working-directory: dataproc
+      - run: npm test
+        working-directory: dataproc
+        env:
+          MOCHA_REPORTER_SUITENAME: dataproc
+          MOCHA_REPORTER_OUTPUT: dataproc_sponge_log.xml
+          MOCHA_REPORTER: xunit
+      - if: ${{ github.event.action == 'labeled' && github.event.label.name == 'actions:force-run' }}
+        uses: actions/github-script@v6
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          script: |
+            try {
+              await github.rest.issues.removeLabel({
+                name: 'actions:force-run',
+                owner: 'GoogleCloudPlatform',
+                repo: 'nodejs-docs-samples',
+                issue_number: context.payload.pull_request.number
+              });
+            } catch (e) {
+              if (!e.message.includes('Label does not exist')) {
+                throw e;
+              }
+            }
+      - if: ${{ github.event_name == 'schedule'}}
+        run: |
+          curl https://github.com/googleapis/repo-automation-bots/releases/download/flakybot-1.1.0/flakybot -o flakybot -s -L
+          chmod +x ./flakybot
+          ./flakybot --repo GoogleCloudPlatform/nodejs-docs-samples --commit_hash ${{github.sha}} --build_url https://github.com/${{github.repository}}/actions/runs/${{github.run_id}}
diff --git a/.github/workflows/workflows.json b/.github/workflows/workflows.json
index a97accdaa4..48f7845f7d 100644
--- a/.github/workflows/workflows.json
+++ b/.github/workflows/workflows.json
@@ -30,6 +30,7 @@
   "datacatalog/cloud-client",
   "datacatalog/snippets",
   "datalabeling",
+  "dataproc",
   "datastore/functions",
   "datacatalog/quickstart",
   "dialogflow",
diff --git a/dataproc/.eslintrc.yml b/dataproc/.eslintrc.yml
new file mode 100644
index 0000000000..282535f55f
--- /dev/null
+++ b/dataproc/.eslintrc.yml
@@ -0,0 +1,3 @@
+---
+rules:
+  no-console: off
diff --git a/dataproc/createCluster.js b/dataproc/createCluster.js
new file mode 100644
index 0000000000..af12ce5976
--- /dev/null
+++ b/dataproc/createCluster.js
@@ -0,0 +1,74 @@
+// Copyright 2019 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Create a Dataproc cluster with the Node.js Client Library.
+
+// sample-metadata:
+//   title: Create Cluster
+//   usage: node createCluster.js
+
+/*eslint no-warning-comments: [0, { "terms": ["todo", "fixme"], "location": "anywhere" }]*/
+
+function main(
+  projectId = 'YOUR_PROJECT_ID',
+  region = 'YOUR_CLUSTER_REGION',
+  clusterName = 'YOUR_CLUSTER_NAME'
+) {
+  // [START dataproc_create_cluster]
+  const dataproc = require('@google-cloud/dataproc');
+
+  // TODO(developer): Uncomment and set the following variables
+  // projectId = 'YOUR_PROJECT_ID'
+  // region = 'YOUR_CLUSTER_REGION'
+  // clusterName = 'YOUR_CLUSTER_NAME'
+
+  // Create a client with the endpoint set to the desired cluster region
+  const client = new dataproc.v1.ClusterControllerClient({
+    apiEndpoint: `${region}-dataproc.googleapis.com`,
+    projectId: projectId,
+  });
+
+  async function createCluster() {
+    // Create the cluster config
+    const request = {
+      projectId: projectId,
+      region: region,
+      cluster: {
+        clusterName: clusterName,
+        config: {
+          masterConfig: {
+            numInstances: 1,
+            machineTypeUri: 'n1-standard-2',
+          },
+          workerConfig: {
+            numInstances: 2,
+            machineTypeUri: 'n1-standard-2',
+          },
+        },
+      },
+    };
+
+    // Create the cluster
+    const [operation] = await client.createCluster(request);
+    const [response] = await operation.promise();
+
+    // Output a success message
+    console.log(`Cluster created successfully: ${response.clusterName}`);
+    // [END dataproc_create_cluster]
+  }
+
+  createCluster();
+}
+
+main(...process.argv.slice(2));
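Reviewer note: unlike instantiateInlineWorkflowTemplate.js below, createCluster.js never awaits or catches the promise returned by `createCluster()`, so a failed run surfaces as an unhandled rejection rather than a non-zero exit. A minimal sketch of the catch-and-exit pattern the other sample uses, applied to a toy entry point (the throwing `main()` is a stand-in for illustration, not code from this PR):

```js
// Sketch: the catch-and-exit pattern from instantiateInlineWorkflowTemplate.js.
// The simulated failure below is hypothetical, purely to make the sketch runnable.
async function main() {
  throw new Error('simulated sample failure');
}

main(...process.argv.slice(2)).catch(err => {
  console.error(err);
  process.exitCode = 1; // non-zero exit so CI marks the sample run as failed
});
```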
diff --git a/dataproc/instantiateInlineWorkflowTemplate.js b/dataproc/instantiateInlineWorkflowTemplate.js
new file mode 100644
index 0000000000..f66bb3d833
--- /dev/null
+++ b/dataproc/instantiateInlineWorkflowTemplate.js
@@ -0,0 +1,97 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This sample instantiates an inline workflow template using the client
+// library for Dataproc.
+
+// sample-metadata:
+//   title: Instantiate an inline workflow template
+//   usage: node instantiateInlineWorkflowTemplate.js
+
+/*eslint no-warning-comments: [0, { "terms": ["todo", "fixme"], "location": "anywhere" }]*/
+
+async function main(projectId = 'YOUR_PROJECT_ID', region = 'YOUR_REGION') {
+  // [START dataproc_instantiate_inline_workflow_template]
+  const dataproc = require('@google-cloud/dataproc');
+
+  // TODO(developer): Uncomment and set the following variables
+  // projectId = 'YOUR_PROJECT_ID'
+  // region = 'YOUR_REGION'
+
+  // Create a client with the endpoint set to the desired region
+  const client = new dataproc.v1.WorkflowTemplateServiceClient({
+    apiEndpoint: `${region}-dataproc.googleapis.com`,
+    projectId: projectId,
+  });
+
+  async function instantiateInlineWorkflowTemplate() {
+    // Create the formatted parent.
+    const parent = client.regionPath(projectId, region);
+
+    // Create the template
+    const template = {
+      jobs: [
+        {
+          hadoopJob: {
+            mainJarFileUri:
+              'file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar',
+            args: ['teragen', '1000', 'hdfs:///gen/'],
+          },
+          stepId: 'teragen',
+        },
+        {
+          hadoopJob: {
+            mainJarFileUri:
+              'file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar',
+            args: ['terasort', 'hdfs:///gen/', 'hdfs:///sort/'],
+          },
+          stepId: 'terasort',
+          prerequisiteStepIds: ['teragen'],
+        },
+      ],
+      placement: {
+        managedCluster: {
+          clusterName: 'my-managed-cluster',
+          config: {
+            gceClusterConfig: {
+              // Leave 'zoneUri' empty for 'Auto Zone Placement'
+              // zoneUri: ''
+              zoneUri: 'us-central1-a',
+            },
+          },
+        },
+      },
+    };
+
+    const request = {
+      parent: parent,
+      template: template,
+    };
+
+    // Submit the request to instantiate the workflow from an inline template.
+    const [operation] = await client.instantiateInlineWorkflowTemplate(request);
+    await operation.promise();
+
+    // Output a success message
+    console.log('Workflow ran successfully.');
+    // [END dataproc_instantiate_inline_workflow_template]
+  }
+
+  instantiateInlineWorkflowTemplate();
+}
+
+main(...process.argv.slice(2)).catch(err => {
+  console.error(err);
+  process.exitCode = 1;
+});
diff --git a/dataproc/package.json b/dataproc/package.json
new file mode 100644
index 0000000000..a72bc1a72d
--- /dev/null
+++ b/dataproc/package.json
@@ -0,0 +1,25 @@
+{
+  "name": "nodejs-docs-samples-dataproc",
+  "license": "Apache-2.0",
+  "author": "Google Inc.",
+  "files": [
+    "*.js"
+  ],
+  "engines": {
+    "node": ">=12.0.0"
+  },
+  "repository": "googleapis/nodejs-dataproc",
+  "private": true,
+  "scripts": {
+    "test": "mocha system-test --timeout 600000"
+  },
+  "dependencies": {
+    "@google-cloud/dataproc": "^4.1.1",
+    "@google-cloud/storage": "^6.0.0"
+  },
+  "devDependencies": {
+    "chai": "^4.2.0",
+    "mocha": "^8.0.0",
+    "uuid": "^9.0.0"
+  }
+}
diff --git a/dataproc/quickstart.js b/dataproc/quickstart.js
new file mode 100644
index 0000000000..01c7e314b0
--- /dev/null
+++ b/dataproc/quickstart.js
@@ -0,0 +1,128 @@
+// Copyright 2017 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// sample-metadata:
+//   title: Quickstart
+//   usage: node quickstart.js
+
+// [START dataproc_quickstart]
+// This quickstart sample walks a user through creating a Dataproc
+// cluster, submitting a PySpark job from Google Cloud Storage to the
+// cluster, reading the output of the job and deleting the cluster, all
+// using the Node.js client library.
+
+'use strict';
+
+function main(projectId, region, clusterName, jobFilePath) {
+  const dataproc = require('@google-cloud/dataproc');
+  const {Storage} = require('@google-cloud/storage');
+
+  // Create a cluster client with the endpoint set to the desired cluster region
+  const clusterClient = new dataproc.v1.ClusterControllerClient({
+    apiEndpoint: `${region}-dataproc.googleapis.com`,
+    projectId: projectId,
+  });
+
+  // Create a job client with the endpoint set to the desired cluster region
+  const jobClient = new dataproc.v1.JobControllerClient({
+    apiEndpoint: `${region}-dataproc.googleapis.com`,
+    projectId: projectId,
+  });
+
+  async function quickstart() {
+    // Create the cluster config
+    const cluster = {
+      projectId: projectId,
+      region: region,
+      cluster: {
+        clusterName: clusterName,
+        config: {
+          masterConfig: {
+            numInstances: 1,
+            machineTypeUri: 'n1-standard-2',
+          },
+          workerConfig: {
+            numInstances: 2,
+            machineTypeUri: 'n1-standard-2',
+          },
+        },
+      },
+    };
+
+    // Create the cluster
+    const [operation] = await clusterClient.createCluster(cluster);
+    const [response] = await operation.promise();
+
+    // Output a success message
+    console.log(`Cluster created successfully: ${response.clusterName}`);
+
+    const job = {
+      projectId: projectId,
+      region: region,
+      job: {
+        placement: {
+          clusterName: clusterName,
+        },
+        pysparkJob: {
+          mainPythonFileUri: jobFilePath,
+        },
+      },
+    };
+
+    const [jobOperation] = await jobClient.submitJobAsOperation(job);
+    const [jobResponse] = await jobOperation.promise();
+
+    const matches =
+      jobResponse.driverOutputResourceUri.match('gs://(.*?)/(.*)');
+
+    const storage = new Storage();
+
+    const output = await storage
+      .bucket(matches[1])
+      .file(`${matches[2]}.000000000`)
+      .download();
+
+    // Output a success message.
+    console.log(`Job finished successfully: ${output}`);
+
+    // Delete the cluster once the job has terminated.
+    const deleteClusterReq = {
+      projectId: projectId,
+      region: region,
+      clusterName: clusterName,
+    };
+
+    const [deleteOperation] = await clusterClient.deleteCluster(
+      deleteClusterReq
+    );
+    await deleteOperation.promise();
+
+    // Output a success message
+    console.log(`Cluster ${clusterName} successfully deleted.`);
+  }
+
+  quickstart();
+}
+
+const args = process.argv.slice(2);
+
+if (args.length !== 4) {
+  console.error(
+    'Incorrect number of parameters provided. Please make sure a ' +
+      'PROJECT_ID, REGION, CLUSTER_NAME and JOB_FILE_PATH are provided, in this order.'
+  );
+  process.exit(1);
+}
+
+main(...args);
+// [END dataproc_quickstart]
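Reviewer note: quickstart.js above and submitJob.js below both locate the job's driver output by splitting `driverOutputResourceUri` into a bucket and an object prefix, then downloading the first output chunk (the object with the `.000000000` suffix). A standalone sketch of that parsing step, using a made-up URI:

```js
// Standalone sketch; the URI is hypothetical, the parsing matches the samples.
const uri =
  'gs://my-staging-bucket/google-cloud-dataproc-metainfo/1234/jobs/job-42/driveroutput';
const matches = uri.match('gs://(.*?)/(.*)');
const bucketName = matches[1]; // 'my-staging-bucket'
const objectName = `${matches[2]}.000000000`; // first chunk of the driver output
console.log(bucketName, objectName);
```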
diff --git a/dataproc/submitJob.js b/dataproc/submitJob.js
new file mode 100644
index 0000000000..1fd265ea98
--- /dev/null
+++ b/dataproc/submitJob.js
@@ -0,0 +1,82 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Submit a Spark job to a Dataproc cluster with the Node.js Client Library.
+
+// sample-metadata:
+//   title: Submit Job
+//   usage: node submitJob.js
+
+/*eslint no-warning-comments: [0, { "terms": ["todo", "fixme"], "location": "anywhere" }]*/
+
+function main(
+  projectId = 'YOUR_PROJECT_ID',
+  region = 'YOUR_CLUSTER_REGION',
+  clusterName = 'YOUR_CLUSTER_NAME'
+) {
+  // [START dataproc_submit_job]
+  const dataproc = require('@google-cloud/dataproc');
+  const {Storage} = require('@google-cloud/storage');
+
+  // TODO(developer): Uncomment and set the following variables
+  // projectId = 'YOUR_PROJECT_ID'
+  // region = 'YOUR_CLUSTER_REGION'
+  // clusterName = 'YOUR_CLUSTER_NAME'
+
+  // Create a client with the endpoint set to the desired cluster region
+  const jobClient = new dataproc.v1.JobControllerClient({
+    apiEndpoint: `${region}-dataproc.googleapis.com`,
+    projectId: projectId,
+  });
+
+  async function submitJob() {
+    const job = {
+      projectId: projectId,
+      region: region,
+      job: {
+        placement: {
+          clusterName: clusterName,
+        },
+        sparkJob: {
+          mainClass: 'org.apache.spark.examples.SparkPi',
+          jarFileUris: [
+            'file:///usr/lib/spark/examples/jars/spark-examples.jar',
+          ],
+          args: ['1000'],
+        },
+      },
+    };
+
+    const [jobOperation] = await jobClient.submitJobAsOperation(job);
+    const [jobResponse] = await jobOperation.promise();
+
+    const matches =
+      jobResponse.driverOutputResourceUri.match('gs://(.*?)/(.*)');
+
+    const storage = new Storage();
+
+    const output = await storage
+      .bucket(matches[1])
+      .file(`${matches[2]}.000000000`)
+      .download();
+
+    // Output a success message.
+    console.log(`Job finished successfully: ${output}`);
+    // [END dataproc_submit_job]
+  }
+
+  submitJob();
+}
+
+main(...process.argv.slice(2));
diff --git a/dataproc/system-test/createCluster.test.js b/dataproc/system-test/createCluster.test.js
new file mode 100644
index 0000000000..6d6aad852a
--- /dev/null
+++ b/dataproc/system-test/createCluster.test.js
@@ -0,0 +1,52 @@
+// Copyright 2019 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+const {assert} = require('chai');
+const {describe, it, after} = require('mocha');
+const cp = require('child_process');
+const {v4} = require('uuid');
+
+const region = 'us-central1';
+const clusterName = `node-cc-test-${v4()}`;
+
+const dataproc = require('@google-cloud/dataproc');
+const clusterClient = new dataproc.v1.ClusterControllerClient({
+  apiEndpoint: `${region}-dataproc.googleapis.com`,
+});
+
+const projectId = process.env.GCLOUD_PROJECT;
+
+const execSync = cmd =>
+  cp.execSync(cmd, {
+    encoding: 'utf-8',
+  });
+
+describe('create a dataproc cluster', () => {
+  it('should create a dataproc cluster', async () => {
+    const stdout = execSync(
+      `node createCluster.js "${projectId}" "${region}" "${clusterName}"`
+    );
+    assert.match(stdout, new RegExp(`${clusterName}`));
+  });
+
+  after(async () => {
+    await clusterClient.deleteCluster({
+      projectId: projectId,
+      region: region,
+      clusterName: clusterName,
+    });
+  });
+});
diff --git a/dataproc/system-test/instantiateInlineWorkflowTemplate.test.js b/dataproc/system-test/instantiateInlineWorkflowTemplate.test.js
new file mode 100644
index 0000000000..f1669b2d50
--- /dev/null
+++ b/dataproc/system-test/instantiateInlineWorkflowTemplate.test.js
@@ -0,0 +1,40 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+const {assert} = require('chai');
+const {describe, it} = require('mocha');
+const cp = require('child_process');
+
+const projectId = process.env.GCLOUD_PROJECT;
+const region = 'us-central1';
+
+const execSync = cmd =>
+  cp.execSync(cmd, {
+    encoding: 'utf-8',
+  });
+
+const {delay} = require('./util');
+
+describe('instantiate an inline workflow template', () => {
+  it('should instantiate an inline workflow template', async function () {
+    this.retries(4);
+    await delay(this.test);
+    const stdout = execSync(
+      `node instantiateInlineWorkflowTemplate.js "${projectId}" "${region}"`
+    );
+    assert.match(stdout, /successfully/);
+  });
+});
diff --git a/dataproc/system-test/quickstart.test.js b/dataproc/system-test/quickstart.test.js
new file mode 100644
index 0000000000..56c1b29963
--- /dev/null
+++ b/dataproc/system-test/quickstart.test.js
@@ -0,0 +1,92 @@
+// Copyright 2017 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+const {assert} = require('chai');
+const {describe, it, beforeEach, afterEach} = require('mocha');
+const cp = require('child_process');
+const {v4} = require('uuid');
+
+const dataproc = require('@google-cloud/dataproc');
+const {Storage} = require('@google-cloud/storage');
+
+const myUuid = v4();
+const region = 'us-central1';
+const clusterName = `node-qs-test-${myUuid}`;
+const bucketName = `node-dataproc-qs-test-${myUuid}`;
+const projectId = process.env.GCLOUD_PROJECT;
+const jobFileName = 'sum.py';
+const jobFilePath = `gs://${bucketName}/${jobFileName}`;
+const sortCode =
+  'import pyspark\n' +
+  'sc = pyspark.SparkContext()\n' +
+  'rdd = sc.parallelize((1,2,3,4,5))\n' +
+  'sum = rdd.reduce(lambda x, y: x + y)\n';
+
+const clusterClient = new dataproc.v1.ClusterControllerClient({
+  apiEndpoint: `${region}-dataproc.googleapis.com`,
+});
+
+const storage = new Storage();
+
+const execSync = cmd =>
+  cp.execSync(cmd, {
+    encoding: 'utf-8',
+  });
+
+const {delay} = require('./util');
+
+describe('execute the quickstart', () => {
+  beforeEach(async () => {
+    const [bucket] = await storage.createBucket(bucketName);
+    await bucket.file(jobFileName).save(sortCode);
+  });
+
+  it('should execute the quickstart', async function () {
+    this.retries(4);
+    await delay(this.test);
+    const stdout = execSync(
+      `node quickstart.js "${projectId}" "${region}" "${clusterName}" "${jobFilePath}"`
+    );
+    assert.match(stdout, /Cluster created successfully/);
+    assert.match(stdout, /Job finished successfully/);
+    assert.match(stdout, /successfully deleted/);
+  });
+
+  afterEach(async () => {
+    try {
+      await storage.bucket(bucketName).file(jobFileName).delete();
+      await storage.bucket(bucketName).delete();
+
+      const [clusters] = await clusterClient.listClusters({
+        projectId: projectId,
+        region: region,
+      });
+
+      for (const cluster of clusters) {
+        if (cluster.clusterName === clusterName) {
+          await clusterClient.deleteCluster({
+            projectId: projectId,
+            region: region,
+            clusterName: clusterName,
+          });
+          break;
+        }
+      }
+    } catch (err) {
+      console.error('Cannot clean up resources:', err);
+    }
+  });
+});
diff --git a/dataproc/system-test/submitJob.test.js b/dataproc/system-test/submitJob.test.js
new file mode 100644
index 0000000000..e62c29e1b5
--- /dev/null
+++ b/dataproc/system-test/submitJob.test.js
@@ -0,0 +1,73 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+const {assert} = require('chai');
+const {describe, it, before, after} = require('mocha');
+const cp = require('child_process');
+const {v4} = require('uuid');
+
+const projectId = process.env.GCLOUD_PROJECT;
+const region = 'us-central1';
+const clusterName = `node-sj-test-${v4()}`;
+const cluster = {
+  projectId: projectId,
+  region: region,
+  cluster: {
+    clusterName: clusterName,
+    config: {
+      masterConfig: {
+        numInstances: 1,
+        machineTypeUri: 'n1-standard-2',
+      },
+      workerConfig: {
+        numInstances: 2,
+        machineTypeUri: 'n1-standard-2',
+      },
+    },
+  },
+};
+
+const dataproc = require('@google-cloud/dataproc');
+const clusterClient = new dataproc.v1.ClusterControllerClient({
+  apiEndpoint: `${region}-dataproc.googleapis.com`,
+});
+
+const execSync = cmd =>
+  cp.execSync(cmd, {
+    encoding: 'utf-8',
+  });
+
+describe('submit a Spark job to a Dataproc cluster', () => {
+  before(async () => {
+    const [operation] = await clusterClient.createCluster(cluster);
+    await operation.promise();
+  });
+
+  it('should submit a job to a dataproc cluster', async () => {
+    const stdout = execSync(
+      `node submitJob.js "${projectId}" "${region}" "${clusterName}"`
+    );
+    assert.match(stdout, new RegExp('Job finished successfully'));
+  });
+
+  after(async () => {
+    await clusterClient.deleteCluster({
+      projectId: projectId,
+      region: region,
+      clusterName: clusterName,
+    });
+  });
+});
diff --git a/dataproc/system-test/util.js b/dataproc/system-test/util.js
new file mode 100644
index 0000000000..97b804a43c
--- /dev/null
+++ b/dataproc/system-test/util.js
@@ -0,0 +1,28 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// These system tests frequently run into concurrency and quota issues, for
+// which retrying with a backoff is a good strategy:
+module.exports = {
+  async delay(test) {
+    const retries = test.currentRetry();
+    if (retries === 0) return; // no retry on the first failure.
+    // see: https://cloud.google.com/storage/docs/exponential-backoff:
+    const ms = Math.pow(2, retries) * 1000 + Math.random() * 2000;
+    return new Promise(done => {
+      console.info(`retrying "${test.title}" in ${ms}ms`);
+      setTimeout(done, ms);
+    });
+  },
+};
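Reviewer note: with the tests' `this.retries(4)`, the `delay()` helper above produces waits of roughly 2-4s, 4-6s, 8-10s, and 16-18s for retries 1 through 4. A quick sketch of that schedule, computed with the same formula as util.js:

```js
// Backoff schedule produced by util.js delay() for retry counts 1-4.
for (let retry = 1; retry <= 4; retry++) {
  const ms = Math.pow(2, retry) * 1000 + Math.random() * 2000;
  console.log(`retry ${retry}: wait ~${Math.round(ms)} ms`);
}
```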