From a58d06de6c7eded0105b893841be7e600de50b60 Mon Sep 17 00:00:00 2001
From: Shun Fan
Date: Thu, 17 Sep 2015 20:57:28 -0700
Subject: [PATCH] Update samples to be run from command line without modification

Switch to dictionary for JSON string
Update description strings
---
 storage/transfer_service/README.md           |  2 +-
 storage/transfer_service/aws_request.py      | 94 +++++++++++++-------
 storage/transfer_service/nearline_request.py | 84 +++++++++++------
 storage/transfer_service/transfer_check.py   | 37 ++++----
 4 files changed, 142 insertions(+), 75 deletions(-)

diff --git a/storage/transfer_service/README.md b/storage/transfer_service/README.md
index 00a92d73b062..14e593b4cb6c 100644
--- a/storage/transfer_service/README.md
+++ b/storage/transfer_service/README.md
@@ -11,7 +11,7 @@ This app creates two types of transfers using the Transfer Service tool.
 1. Select Add credentials > Service account > JSON key.
 1. Set the environment variable GOOGLE_APPLICATION_CREDENTIALS to point to your JSON key.
 1. Add the Storage Transfer service account as an editor of your project
-   storage-transfer-5031963314028297433@partnercontent.gserviceaccount.com
+   storage-transfer-@partnercontent.gserviceaccount.com
 1. Set up gcloud for application default credentials.
 1. `gcloud components update`
 1. `gcloud auth login`
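Note on the README's credentials step: GOOGLE_APPLICATION_CREDENTIALS must point at the JSON key downloaded from the Developers Console. A minimal shell sketch, with a hypothetical key path:

    export GOOGLE_APPLICATION_CREDENTIALS=$HOME/keys/transfer-sample-key.json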
diff --git a/storage/transfer_service/aws_request.py b/storage/transfer_service/aws_request.py
index 808a5754b956..9b5b0622551b 100644
--- a/storage/transfer_service/aws_request.py
+++ b/storage/transfer_service/aws_request.py
@@ -12,6 +12,8 @@
 # limitations under the License.
 #
 # [START all]
+import argparse
+import datetime
 import json
 import logging
 
@@ -22,54 +24,84 @@
 logging.basicConfig(level=logging.DEBUG)
 
 
-def main():
-    """Create a one-off transfer from Amazon S3 to GCS."""
+# [START main]
+def main(description, project_id, day, month, year, hours, minutes,
+         source_bucket, access_key, secret_access_key, sink_bucket):
+    """Create a one-off transfer from Amazon S3 to Google Cloud Storage."""
     credentials = GoogleCredentials.get_application_default()
     storagetransfer = discovery.build(
         'storagetransfer', 'v1', credentials=credentials)
 
     # Edit this template with desired parameters.
     # Specify times below using US Pacific Time Zone.
-    transfer_job = '''
-    {
-        "description": "YOUR DESCRIPTION",
-        "status": "ENABLED",
-        "projectId": "YOUR_PROJECT_ID",
-        "schedule": {
-            "scheduleStartDate": {
-                "day": 1,
-                "month": 1,
-                "year": 2015
+    transfer_job = {
+        'description': description,
+        'status': 'ENABLED',
+        'projectId': project_id,
+        'schedule': {
+            'scheduleStartDate': {
+                'day': day,
+                'month': month,
+                'year': year
             },
-            "scheduleEndDate": {
-                "day": 1,
-                "month": 1,
-                "year": 2015
+            'scheduleEndDate': {
+                'day': day,
+                'month': month,
+                'year': year
             },
-            "startTimeOfDay": {
-                "hours": 0,
-                "minutes": 0
+            'startTimeOfDay': {
+                'hours': hours,
+                'minutes': minutes
             }
         },
-        "transferSpec": {
-            "awsS3DataSource": {
-                "bucketName": "YOUR_SOURCE_BUCKET",
-                "awsAccessKey": {
-                    "accessKeyId": "YOUR_ACCESS_KEY_ID",
-                    "secretAccessKey": "YOUR_SECRET_ACCESS_KEY"
+        'transferSpec': {
+            'awsS3DataSource': {
+                'bucketName': source_bucket,
+                'awsAccessKey': {
+                    'accessKeyId': access_key,
+                    'secretAccessKey': secret_access_key
                 }
             },
-            "gcsDataSink": {
-                "bucketName": "YOUR_SINK_BUCKET"
+            'gcsDataSink': {
+                'bucketName': sink_bucket
             }
         }
     }
-    '''
 
-    result = storagetransfer.transferJobs().create(body=json.loads(
-        transfer_job)).execute()
+    logging.info('Requesting transferJob: %s', transfer_job)
+    result = storagetransfer.transferJobs().create(body=transfer_job).execute()
     logging.info('Returned transferJob: %s', json.dumps(result, indent=4))
+# [END main]
 
 if __name__ == '__main__':
-    main()
+    parser = argparse.ArgumentParser(
+        description='Create a one-off transfer from Amazon S3 to Google Cloud '
+        'Storage.')
+    parser.add_argument('description', help='Transfer description.')
+    parser.add_argument('project_id', help='Your Google Cloud project ID.')
+    parser.add_argument('date', help='Date YYYY/MM/DD.')
+    parser.add_argument('time', help='Time (24hr) HH:MM.')
+    parser.add_argument('source_bucket', help='Source bucket name.')
+    parser.add_argument('access_key', help='Your AWS access key ID.')
+    parser.add_argument('secret_access_key', help='Your AWS secret access '
+                        'key.')
+    parser.add_argument('sink_bucket', help='Sink bucket name.')
+
+    args = parser.parse_args()
+    date = datetime.datetime.strptime(args.date, '%Y/%m/%d')
+    time = datetime.datetime.strptime(args.time, '%H:%M')
+
+    main(
+        args.description,
+        args.project_id,
+        date.day,
+        date.month,
+        date.year,
+        time.hour,
+        time.minute,
+        args.source_bucket,
+        args.access_key,
+        args.secret_access_key,
+        args.sink_bucket)
+
 # [END all]
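With the argparse interface above, the S3 sample can be run without editing the source. A hypothetical invocation (the project ID, bucket names, and AWS keys below are placeholders, not values from this repo):

    python aws_request.py "My S3 transfer" my-project-id 2015/10/01 09:00 \
        my-s3-bucket MY_AWS_ACCESS_KEY_ID MY_AWS_SECRET_ACCESS_KEY my-gcs-bucket

The single date argument fills both scheduleStartDate and scheduleEndDate, so the job runs once; the time argument fills startTimeOfDay.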
diff --git a/storage/transfer_service/nearline_request.py b/storage/transfer_service/nearline_request.py
index 44e108df0aa4..8339a6f8036b 100644
--- a/storage/transfer_service/nearline_request.py
+++ b/storage/transfer_service/nearline_request.py
@@ -12,6 +12,8 @@
 # limitations under the License.
 #
 # [START all]
+import argparse
+import datetime
 import json
 import logging
 
@@ -22,50 +24,76 @@
 logging.basicConfig(level=logging.DEBUG)
 
 
-def main():
-    """Transfer from standard Cloud Storage to Cloud Storage Nearline."""
+# [START main]
+def main(description, project_id, day, month, year, hours, minutes,
+         source_bucket, sink_bucket):
+    """Create a transfer from Google Cloud Storage Standard to the Nearline
+    storage class."""
     credentials = GoogleCredentials.get_application_default()
     storagetransfer = discovery.build(
         'storagetransfer', 'v1', credentials=credentials)
 
     # Edit this template with desired parameters.
     # Specify times below using US Pacific Time Zone.
-    transfer_job = '''
-    {
-        "description": "YOUR DESCRIPTION",
-        "status": "ENABLED",
-        "projectId": "YOUR_PROJECT_ID",
-        "schedule": {
-            "scheduleStartDate": {
-                "day": 1,
-                "month": 1,
-                "year": 2015
+    transfer_job = {
+        'description': description,
+        'status': 'ENABLED',
+        'projectId': project_id,
+        'schedule': {
+            'scheduleStartDate': {
+                'day': day,
+                'month': month,
+                'year': year
            },
-            "startTimeOfDay": {
-                "hours": 1,
-                "minutes": 1
+            'startTimeOfDay': {
+                'hours': hours,
+                'minutes': minutes
            }
        },
-        "transferSpec": {
-            "gcsDataSource": {
-                "bucketName": "YOUR_SOURCE_BUCKET"
+        'transferSpec': {
+            'gcsDataSource': {
+                'bucketName': source_bucket
            },
-            "gcsDataSink": {
-                "bucketName": "YOUR_SINK_BUCKET"
+            'gcsDataSink': {
+                'bucketName': sink_bucket
            },
-            "objectConditions": {
-                "minTimeElapsedSinceLastModification": "2592000s"
+            'objectConditions': {
+                'minTimeElapsedSinceLastModification': '2592000s'
            },
-            "transferOptions": {
-                "deleteObjectsFromSourceAfterTransfer": true
+            'transferOptions': {
+                'deleteObjectsFromSourceAfterTransfer': True
            }
        }
    }
-    '''
-
-    result = storagetransfer.transferJobs().create(body=json.loads(
-        transfer_job)).execute()
+
+    result = storagetransfer.transferJobs().create(body=transfer_job).execute()
     logging.info('Returned transferJob: %s', json.dumps(result, indent=4))
+# [END main]
 
 if __name__ == '__main__':
-    main()
+    parser = argparse.ArgumentParser(
+        description='Create a transfer from Google Cloud Storage Standard '
+        'to the Nearline storage class.')
+    parser.add_argument('description', help='Transfer description.')
+    parser.add_argument('project_id', help='Your Google Cloud project ID.')
+    parser.add_argument('date', help='Date YYYY/MM/DD.')
+    parser.add_argument('time', help='Time (24hr) HH:MM.')
+    parser.add_argument('source_bucket', help='Source bucket name.')
+    parser.add_argument('sink_bucket', help='Sink bucket name.')
+
+    args = parser.parse_args()
+    date = datetime.datetime.strptime(args.date, '%Y/%m/%d')
+    time = datetime.datetime.strptime(args.time, '%H:%M')
+
+    main(
+        args.description,
+        args.project_id,
+        date.day,
+        date.month,
+        date.year,
+        time.hour,
+        time.minute,
+        args.source_bucket,
+        args.sink_bucket)
+
 # [END all]
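A hypothetical invocation of the Nearline sample (placeholder project and bucket names). Note that the job sets deleteObjectsFromSourceAfterTransfer, and minTimeElapsedSinceLastModification of 2592000s (30 days) limits the transfer to objects unmodified for at least that long:

    python nearline_request.py "Nearline transfer" my-project-id 2015/10/01 09:00 \
        my-standard-bucket my-nearline-bucket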
diff --git a/storage/transfer_service/transfer_check.py b/storage/transfer_service/transfer_check.py
index 18a87d676438..e6ac67cfb154 100644
--- a/storage/transfer_service/transfer_check.py
+++ b/storage/transfer_service/transfer_check.py
@@ -12,6 +12,7 @@
 # limitations under the License.
 #
 # [START all]
+import argparse
 import json
 import logging
 
@@ -21,31 +22,37 @@
 logging.basicConfig(level=logging.DEBUG)
 
-# Edit these values with desired parameters.
-PROJECT_ID = 'YOUR_PROJECT_ID'
-JOB_NAME = 'YOUR_JOB_NAME'
-
-def check_operation(storagetransfer, project_id, job_name):
+
+# [START main]
+def main(project_id, job_name):
     """Review the transfer operations associated with a transfer job."""
+    credentials = GoogleCredentials.get_application_default()
+    storagetransfer = discovery.build(
+        'storagetransfer', 'v1', credentials=credentials)
+
     filterString = (
         '{{"project_id": "{project_id}", '
         '"job_names": ["{job_name}"]}}'
     ).format(project_id=project_id, job_name=job_name)
-    return storagetransfer.transferOperations().list(
+
+    result = storagetransfer.transferOperations().list(
         name="transferOperations", filter=filterString).execute()
-
-
-def main():
-    credentials = GoogleCredentials.get_application_default()
-    storagetransfer = discovery.build(
-        'storagetransfer', 'v1', credentials=credentials)
-
-    result = check_operation(storagetransfer, PROJECT_ID, JOB_NAME)
     logging.info('Result of transferOperations/list: %s',
                  json.dumps(result, indent=4, sort_keys=True))
+# [END main]
 
 if __name__ == '__main__':
-    main()
+    parser = argparse.ArgumentParser(
+        description='Review the transfer operations associated with a '
+        'transfer job.')
+    parser.add_argument('project_id', help='Your Google Cloud project ID.')
+    parser.add_argument('job_name', help='Your job name.')
+
+    args = parser.parse_args()
+
+    main(
+        args.project_id,
+        args.job_name)
+
 # [END all]
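The filter passed to transferOperations().list() is itself a JSON string, which is why the sample doubles the braces inside str.format(). A minimal alternative sketch that delegates the escaping to json.dumps, assuming the same two filter fields (build_transfer_filter is an illustrative helper, not part of the sample):

    import json

    def build_transfer_filter(project_id, job_name):
        # Serializes to an equivalent JSON string, e.g.
        # {"project_id": "my-project-id", "job_names": ["transferJobs/123"]}
        return json.dumps({'project_id': project_id, 'job_names': [job_name]})

A hypothetical check invocation, passing the job name returned by transferJobs().create():

    python transfer_check.py my-project-id transferJobs/1234567890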