Skip to content

Commit

Permalink
Merge pull request #1659 from api3dao/aws-sdk-update
Browse files Browse the repository at this point in the history
Update AWS SDK
  • Loading branch information
amarthadan authored Mar 15, 2023
2 parents 947a279 + 1ae85d7 commit 0040a9b
Show file tree
Hide file tree
Showing 14 changed files with 2,505 additions and 675 deletions.
6 changes: 6 additions & 0 deletions .changeset/hot-timers-act.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
---
'@api3/airnode-deployer': minor
'@api3/airnode-node': minor
---

AWS SDK update
6 changes: 5 additions & 1 deletion packages/airnode-deployer/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -29,9 +29,10 @@
"@api3/airnode-utilities": "^0.10.0",
"@api3/airnode-validator": "^0.10.0",
"@api3/promise-utils": "^0.3.0",
"@aws-sdk/client-s3": "^3.272.0",
"@aws-sdk/signature-v4-crt": "^3.272.0",
"@google-cloud/storage": "^6.9.4",
"adm-zip": "^0.5.10",
"aws-sdk": "^2.1333.0",
"chalk": "^4.1.2",
"cli-table3": "^0.6.3",
"compare-versions": "^5.0.3",
Expand All @@ -45,12 +46,15 @@
"zod": "^3.20.6"
},
"devDependencies": {
"@aws-sdk/util-stream-node": "^3.272.0",
"@google-cloud/functions-framework": "^3.1.3",
"@types/adm-zip": "^0.5.0",
"@types/aws-lambda": "^8.10.111",
"@types/lodash": "^4.14.191",
"@types/node": "^18.15.0",
"@types/yargs": "^17.0.22",
"aws-sdk-client-mock": "^2.0.1",
"aws-sdk-client-mock-jest": "^2.0.1",
"copyfiles": "^2.4.1",
"esbuild-loader": "^3.0.1",
"jest": "^29.5.0",
Expand Down
357 changes: 213 additions & 144 deletions packages/airnode-deployer/src/infrastructure/aws.test.ts

Large diffs are not rendered by default.

172 changes: 103 additions & 69 deletions packages/airnode-deployer/src/infrastructure/aws.ts
Original file line number Diff line number Diff line change
@@ -1,28 +1,47 @@
import * as fs from 'fs';
import AWS from 'aws-sdk';
import {
S3Client,
ListBucketsCommand,
GetBucketLocationCommand,
CreateBucketCommand,
CreateBucketCommandInput,
PutBucketEncryptionCommand,
PutPublicAccessBlockCommand,
ListObjectsV2Command,
ListObjectsV2CommandInput,
PutObjectCommand,
GetObjectCommand,
CopyObjectCommand,
DeleteObjectsCommand,
DeleteBucketCommand,
} from '@aws-sdk/client-s3';
import concat from 'lodash/concat';
import compact from 'lodash/compact';
import isNil from 'lodash/isNil';
import { AwsCloudProvider } from '@api3/airnode-node';
import { go } from '@api3/promise-utils';
import * as logger from '../utils/logger';
import {
BUCKET_NAME_REGEX,
Bucket,
Directory,
FileSystemType,
generateBucketName,
translatePathsToDirectoryStructure,
} from '../utils/infrastructure';

const initializeS3Service = () => {
return new AWS.S3();
const DEFAULT_AWS_REGION = 'us-east-1';

const initializeS3Service = (region: string) => {
return new S3Client({ region });
};

export const getAirnodeBucket = async () => {
const s3 = initializeS3Service();
  // We're using a default region here because we don't know where the bucket is stored at this point.
const s3 = initializeS3Service(DEFAULT_AWS_REGION);

logger.debug('Listing S3 buckets');
const goBuckets = await go(() => s3.listBuckets().promise());
const bucketsCommand = new ListBucketsCommand({});
const goBuckets = await go(() => s3.send(bucketsCommand));
if (!goBuckets.success) {
throw new Error(`Failed to list S3 buckets: ${goBuckets.error}`);
}
Expand All @@ -38,20 +57,21 @@ export const getAirnodeBucket = async () => {
return null;
}

const goBucketLocation = await go(() => s3.getBucketLocation({ Bucket: bucketName }).promise());
const bucketLocationCommand = new GetBucketLocationCommand({ Bucket: bucketName });
const goBucketLocation = await go(() => s3.send(bucketLocationCommand));
if (!goBucketLocation.success) {
throw new Error(`Failed to get location for bucket '${bucketName}': ${goBucketLocation.error}`);
}

let region = goBucketLocation.data.LocationConstraint;
// The `EU` option is listed as a possible one in the documentation
// https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#getBucketLocation-property
if (isNil(region) || region === 'EU') {
// https://docs.aws.amazon.com/AmazonS3/latest/API/API_GetBucketLocation.html#API_GetBucketLocation_ResponseElements
if (region === 'EU') {
throw new Error(`Unknown bucket region '${region}'`);
}
// The documentation says that for buckets in the `us-east-1` region the value of `LocationConstraint` is null but it is actually an empty string...
// https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#getBucketLocation-property
if (region === '') {
// The documentation says that for buckets in the `us-east-1` region the value of `LocationConstraint` is null but it is actually undefined
// https://docs.aws.amazon.com/AmazonS3/latest/API/API_GetBucketLocation.html#API_GetBucketLocation_ResponseElements
if (region === undefined) {
region = 'us-east-1';
}

Expand All @@ -62,54 +82,50 @@ export const getAirnodeBucket = async () => {
};

export const createAirnodeBucket = async (cloudProvider: AwsCloudProvider) => {
const s3 = initializeS3Service();
  // If there's no Airnode bucket already available, we create it in the region where the Airnode resources will be deployed
const s3 = initializeS3Service(cloudProvider.region);
const bucketName = generateBucketName();

let createParams: AWS.S3.CreateBucketRequest = { Bucket: bucketName };
let createParams: CreateBucketCommandInput = { Bucket: bucketName };
// If the region is `us-east-1` the configuration must be empty...
// https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#createBucket-property
// https://docs.aws.amazon.com/AmazonS3/latest/API/API_CreateBucket.html#API_CreateBucket_RequestBody
if (cloudProvider.region !== 'us-east-1') {
createParams = { ...createParams, CreateBucketConfiguration: { LocationConstraint: cloudProvider.region } };
}

logger.debug(`Creating S3 bucket '${bucketName}' in '${cloudProvider.region}'`);
const goCreate = await go(() => s3.createBucket(createParams).promise());
const createCommand = new CreateBucketCommand(createParams);
const goCreate = await go(() => s3.send(createCommand));
if (!goCreate.success) {
throw new Error(`Failed to create an S3 bucket: ${goCreate.error}`);
}

// Enable bucket encryption
logger.debug(`Setting encryption for S3 bucket '${bucketName}'`);
const goPutEncryption = await go(() =>
s3
.putBucketEncryption({
Bucket: bucketName,
ServerSideEncryptionConfiguration: {
Rules: [{ ApplyServerSideEncryptionByDefault: { SSEAlgorithm: 'AES256' }, BucketKeyEnabled: true }],
},
})
.promise()
);
const putEncryptionCommand = new PutBucketEncryptionCommand({
Bucket: bucketName,
ServerSideEncryptionConfiguration: {
Rules: [{ ApplyServerSideEncryptionByDefault: { SSEAlgorithm: 'AES256' }, BucketKeyEnabled: true }],
},
});
const goPutEncryption = await go(() => s3.send(putEncryptionCommand));
if (!goPutEncryption.success) {
throw new Error(`Failed to enable encryption for bucket '${bucketName}': ${goPutEncryption.error}`);
}

// Blocking public access to the bucket
// https://docs.aws.amazon.com/AmazonS3/latest/userguide/access-control-block-public-access.html
logger.debug(`Setting public access block for S3 bucket '${bucketName}'`);
const goPutPublicAccessBlock = await go(() =>
s3
.putPublicAccessBlock({
Bucket: bucketName,
PublicAccessBlockConfiguration: {
BlockPublicAcls: true,
BlockPublicPolicy: true,
IgnorePublicAcls: true,
RestrictPublicBuckets: true,
},
})
.promise()
);
const putPublicAccessBlockCommand = new PutPublicAccessBlockCommand({
Bucket: bucketName,
PublicAccessBlockConfiguration: {
BlockPublicAcls: true,
BlockPublicPolicy: true,
IgnorePublicAcls: true,
RestrictPublicBuckets: true,
},
});
const goPutPublicAccessBlock = await go(() => s3.send(putPublicAccessBlockCommand));
if (!goPutPublicAccessBlock.success) {
throw new Error(
`Failed to setup a public access block for bucket '${bucketName}': ${goPutPublicAccessBlock.error}`
Expand All @@ -122,18 +138,20 @@ export const createAirnodeBucket = async (cloudProvider: AwsCloudProvider) => {
};
};

export const getBucketDirectoryStructure = async (bucketName: string) => {
const s3 = initializeS3Service();
export const getBucketDirectoryStructure = async (bucket: Bucket) => {
const { name: bucketName, region: bucketRegion } = bucket;
const s3 = initializeS3Service(bucketRegion);

let paths: string[] = [];
let truncated = true;
let listParams: AWS.S3.ListObjectsV2Request = {
let listParams: ListObjectsV2CommandInput = {
Bucket: bucketName,
};

while (truncated) {
logger.debug(`Listing objects for S3 bucket '${bucketName}'`);
const goList = await go(() => s3.listObjectsV2(listParams).promise());
const listCommand = new ListObjectsV2Command(listParams);
const goList = await go(() => s3.send(listCommand));
if (!goList.success) {
throw new Error(`Failed to list content of bucket '${bucketName}': ${goList.error}`);
}
Expand All @@ -148,42 +166,55 @@ export const getBucketDirectoryStructure = async (bucketName: string) => {
return translatePathsToDirectoryStructure(paths);
};

export const storeFileToBucket = async (bucketName: string, bucketFilePath: string, filePath: string) => {
const s3 = initializeS3Service();
export const storeFileToBucket = async (bucket: Bucket, bucketFilePath: string, filePath: string) => {
const { name: bucketName, region: bucketRegion } = bucket;
const s3 = initializeS3Service(bucketRegion);

logger.debug(`Storing file '${filePath}' as '${bucketFilePath}' to S3 bucket '${bucketName}'`);
const goPut = await go(() =>
s3
.putObject({ Bucket: bucketName, Key: bucketFilePath, Body: fs.readFileSync(filePath, { encoding: 'utf-8' }) })
.promise()
);
const putCommand = new PutObjectCommand({
Bucket: bucketName,
Key: bucketFilePath,
Body: fs.readFileSync(filePath, { encoding: 'utf-8' }),
});
const goPut = await go(() => s3.send(putCommand));
if (!goPut.success) {
throw new Error(`Failed to store file '${filePath}' to S3 bucket '${bucketName}': ${goPut.error}`);
}
};

export const getFileFromBucket = async (bucketName: string, filePath: string) => {
const s3 = initializeS3Service();
export const getFileFromBucket = async (bucket: Bucket, filePath: string) => {
const { name: bucketName, region: bucketRegion } = bucket;
const s3 = initializeS3Service(bucketRegion);

logger.debug(`Fetching file '${filePath}' from S3 bucket '${bucketName}'`);
const goGet = await go(() => s3.getObject({ Bucket: bucketName, Key: filePath }).promise());
const getCommand = new GetObjectCommand({ Bucket: bucketName, Key: filePath });
const goGet = await go(() => s3.send(getCommand));
if (!goGet.success) {
throw new Error(`Failed to fetch file '${filePath}' from S3 bucket '${bucketName}': ${goGet.error}`);
}
if (!goGet.data.Body) {
throw new Error(`The response for file '${filePath}' from S3 bucket '${bucketName}' contained an empty body`);
}

return goGet.data.Body.toString('utf-8');
const goFileContent = await go(() => goGet.data.Body!.transformToString('utf-8'));
if (!goFileContent.success) {
throw new Error(`The response for file '${filePath}' from S3 bucket '${bucketName}' is not parsable`);
}

return goFileContent.data;
};

export const copyFileInBucket = async (bucketName: string, fromFilePath: string, toFilePath: string) => {
const s3 = initializeS3Service();
export const copyFileInBucket = async (bucket: Bucket, fromFilePath: string, toFilePath: string) => {
const { name: bucketName, region: bucketRegion } = bucket;
const s3 = initializeS3Service(bucketRegion);

logger.debug(`Copying file '${fromFilePath}' to file '${toFilePath}' within S3 bucket '${bucketName}'`);
const goCopy = await go(() =>
s3.copyObject({ Bucket: bucketName, CopySource: `/${bucketName}/${fromFilePath}`, Key: toFilePath }).promise()
);
const copyCommand = new CopyObjectCommand({
Bucket: bucketName,
CopySource: `/${bucketName}/${fromFilePath}`,
Key: toFilePath,
});
const goCopy = await go(() => s3.send(copyCommand));
if (!goCopy.success) {
throw new Error(
`Failed to copy file '${fromFilePath}' to file '${toFilePath}' within S3 bucket '${bucketName}': ${goCopy.error}`
Expand All @@ -202,26 +233,29 @@ const gatherBucketKeys = (directory: Directory): string[] => [
),
];

export const deleteBucketDirectory = async (bucketName: string, directory: Directory) => {
const s3 = initializeS3Service();
export const deleteBucketDirectory = async (bucket: Bucket, directory: Directory) => {
const { name: bucketName, region: bucketRegion } = bucket;
const s3 = initializeS3Service(bucketRegion);

const bucketKeys = gatherBucketKeys(directory);
logger.debug(`Deleting files from S3 bucket '${bucketName}': ${JSON.stringify(bucketKeys)}`);
const goDelete = await go(() =>
s3
.deleteObjects({ Bucket: bucketName, Delete: { Objects: bucketKeys.map((bucketKey) => ({ Key: bucketKey })) } })
.promise()
);
const deleteCommand = new DeleteObjectsCommand({
Bucket: bucketName,
Delete: { Objects: bucketKeys.map((bucketKey) => ({ Key: bucketKey })) },
});
const goDelete = await go(() => s3.send(deleteCommand));
if (!goDelete.success) {
throw new Error(`Failed to delete bucket directory '${directory.bucketKey}' and its content: ${goDelete.error}`);
}
};

export const deleteBucket = async (bucketName: string) => {
const s3 = initializeS3Service();
export const deleteBucket = async (bucket: Bucket) => {
const { name: bucketName, region: bucketRegion } = bucket;
const s3 = initializeS3Service(bucketRegion);

logger.debug(`Deleting S3 bucket '${bucketName}'`);
const goDelete = await go(() => s3.deleteBucket({ Bucket: bucketName }).promise());
const deleteCommand = new DeleteBucketCommand({ Bucket: bucketName });
const goDelete = await go(() => s3.send(deleteCommand));
if (!goDelete.success) {
throw new Error(`Failed to delete S3 bucket '${bucketName}': ${goDelete.error}`);
}
Expand Down
Loading

0 comments on commit 0040a9b

Please sign in to comment.