Merge pull request #2997 from GoogleCloudPlatform/nodejs-storage-transfer-migration

migrate code from googleapis/nodejs-storage-transfer
kweinmeister authored Jan 26, 2023
2 parents 10b9c65 + ade3183 commit ceca4ae
Showing 32 changed files with 2,587 additions and 2 deletions.
82 changes: 82 additions & 0 deletions .github/workflows/storagetransfer.yaml
@@ -0,0 +1,82 @@
name: storagetransfer
on:
push:
branches:
- main
paths:
- 'storagetransfer/**'
- '.github/workflows/storagetransfer.yaml'
pull_request:
paths:
- 'storagetransfer/**'
- '.github/workflows/storagetransfer.yaml'
pull_request_target:
types: [labeled]
paths:
- 'storagetransfer/**'
- '.github/workflows/storagetransfer.yaml'
schedule:
- cron: '0 0 * * 0'
jobs:
test:
if: ${{ github.event.action != 'labeled' || github.event.label.name == 'actions:force-run' }}
runs-on: ubuntu-latest
timeout-minutes: 60
permissions:
contents: 'write'
pull-requests: 'write'
id-token: 'write'
steps:
- uses: actions/checkout@v3
with:
ref: ${{github.event.pull_request.head.sha}}
- uses: 'google-github-actions/auth@v1'
with:
workload_identity_provider: 'projects/1046198160504/locations/global/workloadIdentityPools/github-actions-pool/providers/github-actions-provider'
service_account: '[email protected]'
create_credentials_file: 'true'
access_token_lifetime: 600s
- id: secrets
uses: "google-github-actions/get-secretmanager-secrets@v1"
with:
secrets: |-
sts_aws_secret:nodejs-docs-samples-tests/nodejs-docs-samples-storagetransfer-aws
sts_azure_secret:nodejs-docs-samples-tests/nodejs-docs-samples-storagetransfer-azure
- uses: actions/setup-node@v3
with:
node-version: 16
- run: npm install
working-directory: storagetransfer
- run: npm test
working-directory: storagetransfer
env:
AWS_ACCESS_KEY_ID: ${{ fromJSON(steps.secrets.outputs.sts_aws_secret).AccessKeyId }}
AWS_SECRET_ACCESS_KEY: ${{ fromJSON(steps.secrets.outputs.sts_aws_secret).SecretAccessKey }}
AZURE_STORAGE_ACCOUNT: ${{ fromJSON(steps.secrets.outputs.sts_azure_secret).StorageAccount }}
AZURE_CONNECTION_STRING: ${{ fromJSON(steps.secrets.outputs.sts_azure_secret).ConnectionString }}
AZURE_SAS_TOKEN: ${{ fromJSON(steps.secrets.outputs.sts_azure_secret).SAS }}
MOCHA_REPORTER_SUITENAME: storagetransfer
MOCHA_REPORTER_OUTPUT: storagetransfer_sponge_log.xml
MOCHA_REPORTER: xunit
- if: ${{ github.event.action == 'labeled' && github.event.label.name == 'actions:force-run' }}
uses: actions/github-script@v6
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
try {
await github.rest.issues.removeLabel({
name: 'actions:force-run',
owner: 'GoogleCloudPlatform',
repo: 'nodejs-docs-samples',
issue_number: context.payload.pull_request.number
});
} catch (e) {
if (!e.message.includes('Label does not exist')) {
throw e;
}
}
- if: ${{ github.event_name == 'schedule' && always() }}
run: |
curl https://github.com/googleapis/repo-automation-bots/releases/download/flakybot-1.1.0/flakybot -o flakybot -s -L
chmod +x ./flakybot
./flakybot --repo GoogleCloudPlatform/nodejs-docs-samples --commit_hash ${{github.sha}} --build_url https://github.com/${{github.repository}}/actions/runs/${{github.run_id}}
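
Note: the fromJSON() calls in the env block above suggest that each Secret Manager secret is stored as a single JSON blob, with one key per credential. A minimal Node sketch of the equivalent parsing; the exact payload shape is an assumption inferred from the key names referenced above, and all values are placeholders:

// Sketch: hypothetical secret payloads mirroring the keys the workflow
// extracts with fromJSON(). The shape is inferred, not documented here.
const stsAwsSecret = JSON.parse(
  '{"AccessKeyId": "AKIA...", "SecretAccessKey": "placeholder"}'
);
const stsAzureSecret = JSON.parse(
  '{"StorageAccount": "placeholder", "ConnectionString": "placeholder", "SAS": "placeholder"}'
);

// The workflow exposes each field to `npm test` as an environment variable.
process.env.AWS_ACCESS_KEY_ID = stsAwsSecret.AccessKeyId;
process.env.AWS_SECRET_ACCESS_KEY = stsAwsSecret.SecretAccessKey;
process.env.AZURE_STORAGE_ACCOUNT = stsAzureSecret.StorageAccount;
process.env.AZURE_CONNECTION_STRING = stsAzureSecret.ConnectionString;
process.env.AZURE_SAS_TOKEN = stsAzureSecret.SAS;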
1 change: 1 addition & 0 deletions .github/workflows/workflows.json
@@ -82,6 +82,7 @@
"service-directory/snippets",
"secret-manager",
"speech",
"storagetransfer",
"talent",
"texttospeech",
"translate",
5 changes: 3 additions & 2 deletions CODEOWNERS
@@ -47,8 +47,9 @@ functions/memorystore @GoogleCloudPlatform/nodejs-samples-reviewers
functions/spanner @jsimonweb @GoogleCloudPlatform/nodejs-samples-reviewers

# SoDa teams
-/cloud-sql/**/*.js @GoogleCloudPlatform/infra-db-dpes @GoogleCloudPlatform/nodejs-samples-reviewers
-/datastore/**/*.js @GoogleCloudPlatform/cloud-native-db-dpes @GoogleCloudPlatform/nodejs-samples-reviewers
+cloud-sql @GoogleCloudPlatform/infra-db-dpes @GoogleCloudPlatform/nodejs-samples-reviewers
+datastore @GoogleCloudPlatform/cloud-native-db-dpes @GoogleCloudPlatform/nodejs-samples-reviewers
+storagetransfer @GoogleCloudPlatform/cloud-native-db-dpes @GoogleCloudPlatform/nodejs-samples-reviewers

# One-offs
composer @leahecole @sofisl @GoogleCloudPlatform/nodejs-samples-reviewers
110 changes: 110 additions & 0 deletions storagetransfer/aws-request.js
@@ -0,0 +1,110 @@
/**
* Copyright 2022 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

'use strict';

async function main(
projectId,
description,
awsSourceBucket,
gcsSinkBucket,
awsAccessKeyId = process.env.AWS_ACCESS_KEY_ID,
awsSecretAccessKey = process.env.AWS_SECRET_ACCESS_KEY
) {
// [START storagetransfer_transfer_from_aws]

// Imports the Google Cloud client library
const {
StorageTransferServiceClient,
} = require('@google-cloud/storage-transfer');

/**
* TODO(developer): Uncomment the following lines before running the sample.
*/
// The ID of the Google Cloud Platform Project that owns the job
// projectId = 'my-project-id'

// A useful description for your transfer job
// description = 'My transfer job'

// AWS S3 source bucket name
// awsSourceBucket = 'my-s3-source-bucket'

// AWS Access Key ID
// awsAccessKeyId = 'AKIA...'

// AWS Secret Access Key
// awsSecretAccessKey = 'HEAoMK2.../...ku8'

// Google Cloud Storage destination bucket name
// gcsSinkBucket = 'my-gcs-destination-bucket'

// Creates a client
const client = new StorageTransferServiceClient();

/**
* Creates a one-time transfer job from Amazon S3 to Google Cloud Storage.
*/
async function transferFromS3() {
// Setting the start date and the end date to the same day creates a
// one-time transfer.
const now = new Date();
const oneTimeSchedule = {
day: now.getDate(),
month: now.getMonth() + 1,
year: now.getFullYear(),
};

// Runs the request and creates the job
const [transferJob] = await client.createTransferJob({
transferJob: {
projectId,
description,
status: 'ENABLED',
schedule: {
scheduleStartDate: oneTimeSchedule,
scheduleEndDate: oneTimeSchedule,
},
transferSpec: {
awsS3DataSource: {
bucketName: awsSourceBucket,
awsAccessKey: {
accessKeyId: awsAccessKeyId,
secretAccessKey: awsSecretAccessKey,
},
},
gcsDataSink: {
bucketName: gcsSinkBucket,
},
},
},
});

console.log(
`Created and ran a transfer job from '${awsSourceBucket}' to '${gcsSinkBucket}' with name ${transferJob.name}`
);
}

transferFromS3();
// [END storagetransfer_transfer_from_aws]
}

main(...process.argv.slice(2));

process.on('unhandledRejection', err => {
console.error(err);
process.exitCode = 1;
});
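
A hedged usage sketch for the sample above: the argument order follows main's signature, the AWS keys fall back to the environment, and every name below is a placeholder.

// Hypothetical invocation of aws-request.js from a sibling script.
const {execFileSync} = require('child_process');

execFileSync(
  'node',
  [
    'aws-request.js',
    'my-project-id', // projectId
    'My transfer job', // description
    'my-s3-source-bucket', // awsSourceBucket
    'my-gcs-destination-bucket', // gcsSinkBucket
  ],
  // AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are read from process.env.
  {stdio: 'inherit'}
);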
132 changes: 132 additions & 0 deletions storagetransfer/aws-s3-compatible-source-request.js
@@ -0,0 +1,132 @@
/**
* Copyright 2022 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

'use strict';

const {protos} = require('@google-cloud/storage-transfer');
const {AuthMethod, NetworkProtocol, RequestModel} =
protos.google.storagetransfer.v1.S3CompatibleMetadata;

async function main(
projectId = 'my-project',
sourceAgentPoolName = 'projects/my-project/agentPools/transfer_service_default',
sourceBucketName = 'my-bucket-name',
sourcePath = 'path/to/data/',
gcsSinkBucket = 'my-sink-bucket',
gcsPath = 'path/to/data/',
region = 'us-east-1',
endpoint = 'us-east-1.example.com',
protocol = NetworkProtocol.NETWORK_PROTOCOL_HTTPS,
requestModel = RequestModel.REQUEST_MODEL_VIRTUAL_HOSTED_STYLE,
authMethod = AuthMethod.AUTH_METHOD_AWS_SIGNATURE_V4
) {
// [START storagetransfer_transfer_from_s3_compatible_source]

// Imports the Google Cloud client library
const storageTransfer = require('@google-cloud/storage-transfer');

/**
* TODO(developer): Uncomment the following lines before running the sample.
*/
// Useful enums for AWS S3-Compatible Transfers
// const {AuthMethod, NetworkProtocol, RequestModel} = storageTransfer.protos.google.storagetransfer.v1.S3CompatibleMetadata;

// Your project id
// const projectId = 'my-project';

// The agent pool associated with the S3-compatible data source. Defaults to the project's default agent pool
// const sourceAgentPoolName = 'projects/my-project/agentPools/transfer_service_default';

// The S3-compatible bucket name to transfer data from
// const sourceBucketName = "my-bucket-name";

// The S3-compatible path (object prefix) to transfer data from
// const sourcePath = "path/to/data/";

// The ID of the GCS bucket to transfer data to
// const gcsSinkBucket = "my-sink-bucket";

// The GCS path (object prefix) to transfer data to
// const gcsPath = "path/to/data/";

// The S3 region of the source bucket
// const region = 'us-east-1';

// The S3-compatible endpoint
// const endpoint = "us-east-1.example.com";

// The S3-compatible network protocol
// const protocol = NetworkProtocol.NETWORK_PROTOCOL_HTTPS;

// The S3-compatible request model
// const requestModel = RequestModel.REQUEST_MODEL_VIRTUAL_HOSTED_STYLE;

// The S3-compatible auth method
// const authMethod = AuthMethod.AUTH_METHOD_AWS_SIGNATURE_V4;

// Creates a client
const client = new storageTransfer.StorageTransferServiceClient();

/**
* Creates a transfer from an AWS S3-compatible source to GCS
*/
async function transferFromS3CompatibleSource() {
// Runs the request and creates the job
const [transferJob] = await client.createTransferJob({
transferJob: {
projectId,
transferSpec: {
sourceAgentPoolName,
awsS3CompatibleDataSource: {
region,
s3Metadata: {
authMethod,
protocol,
requestModel,
},
endpoint,
bucketName: sourceBucketName,
path: sourcePath,
},
gcsDataSink: {
bucketName: gcsSinkBucket,
path: gcsPath,
},
},
status: 'ENABLED',
},
});

await client.runTransferJob({
jobName: transferJob.name,
projectId,
});

console.log(
`Created and ran a transfer job from '${sourceBucketName}' to '${gcsSinkBucket}' with name ${transferJob.name}`
);
}

transferFromS3CompatibleSource();
// [END storagetransfer_transfer_from_s3_compatible_source]
}

main(...process.argv.slice(2));

process.on('unhandledRejection', err => {
console.error(err.message);
process.exitCode = 1;
});
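
Unlike aws-request.js above, this sample defines no schedule, so it starts the job explicitly with client.runTransferJob(). A hedged sketch of blocking until that run finishes, assuming runTransferJob returns a long-running operation with the usual .promise() surface of @google-cloud clients (names are placeholders):

// Sketch only: wait for a transfer run to reach a terminal state.
// `client` and `transferJob` are the objects created in the sample above.
async function runAndWait(client, transferJob, projectId) {
  const [operation] = await client.runTransferJob({
    jobName: transferJob.name,
    projectId,
  });
  // The returned long-running operation resolves when the run finishes.
  await operation.promise();
  console.log(`Run of ${transferJob.name} completed`);
}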