fix(release-notes): upgrade to AWS SDK v3 (#1460)
Refactor various release-notes lambda functions to use AWS SDK v3. Tests
updated accordingly.

----

*By submitting this pull request, I confirm that my contribution is made
under the terms of the Apache-2.0 license*
mrgrain authored Sep 13, 2024
1 parent d9f6f67 commit 25e7f72
Showing 5 changed files with 94 additions and 103 deletions.
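For context, the call-site change repeated across these files swaps the SDK v2 `new aws.S3().getObject(...).promise()` style for commands sent through a shared v3 client. A minimal sketch of that pattern (the `s3` instance and `getObjectBytes` helper below are illustrative, not code from this commit):

```ts
import { GetObjectCommand, S3Client } from '@aws-sdk/client-s3';

// A single client instance is created once and reused for every call.
const s3 = new S3Client({});

// SDK v2 (before): await new AWS.S3().getObject({ Bucket, Key }).promise()
// SDK v3 (after): send a command object through the client.
async function getObjectBytes(bucket: string, key: string): Promise<Uint8Array> {
  const { Body } = await s3.send(
    new GetObjectCommand({ Bucket: bucket, Key: key })
  );
  // In v3 the response Body is a stream rather than a Buffer,
  // so it must be collected explicitly.
  return Body!.transformToByteArray();
}
```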
@@ -1,10 +1,15 @@
import {
GetObjectCommand,
PutObjectCommand,
S3Client,
} from '@aws-sdk/client-s3';
import { metricScope, MetricsLogger, Unit } from 'aws-embedded-metrics';
import * as AWS from 'aws-sdk';
import { AWSError } from 'aws-sdk';
import * as AWSMock from 'aws-sdk-mock';
import { mockClient } from 'aws-sdk-client-mock';
import 'aws-sdk-client-mock-jest';
import * as constants from '../../../backend/release-notes/constants';
import { generateReleaseNotes } from '../../../backend/release-notes/shared/github-changelog-fetcher.lambda-shared';
import { extractObjects } from '../../../backend/shared/tarball.lambda-shared';
import { stringToStream } from '../../streams';

jest.mock('aws-embedded-metrics');
jest.mock('../../../backend/shared/tarball.lambda-shared');
@@ -14,7 +19,8 @@ jest.mock(

const MOCK_BUCKET_NAME = 'package-data-bucket';
const PACKAGE_TGZ = 'data/@aws-cdk/aws-amplify/v1.144.0/package.tgz';
const FAKE_TAR_GZ = Buffer.from('fake-tarball-content[gzipped]');
const FAKE_TAR_DATA = 'fake-tarball-content[gzipped]';
const FAKE_TAR_GZ = Buffer.from(FAKE_TAR_DATA);
const MOCK_TARBALL_URI = `s3://${MOCK_BUCKET_NAME}.test-bermuda-2.s3.amazonaws.com/${PACKAGE_TGZ}`;
const MOCK_RELEASE_NOTES_KEY =
'data/@aws-cdk/aws-amplify/v1.144.0/release-notes.md';
@@ -45,6 +51,8 @@ const MOCKED_RELEASE_NOTES = `
* Some other bug
`;

const mockS3 = mockClient(S3Client);

const mockPutMetric = jest
.fn()
.mockName('MetricsLogger.putMetric') as jest.MockedFunction<
@@ -58,8 +66,6 @@ const mockSetNamespace = jest
>;

let handler: any;
let s3GetObjSpy: jest.Mock;
let s3PutObjSpy: jest.Mock;
const extractObjectMock = <jest.MockedFunction<typeof extractObjects>>(
extractObjects
);
@@ -97,9 +103,9 @@ beforeEach(async () => {
jest.resetAllMocks();
process.env.BUCKET_NAME = MOCK_BUCKET_NAME;

AWSMock.setSDKInstance(AWS);
s3GetObjSpy = setupPkgTarS3GetObjectMock();
s3PutObjSpy = setupReleaseNotesS3PutObjectMock();
mockS3.reset();
setupPkgTarS3GetObjectMock();
setupReleaseNotesS3PutObjectMock();

extractObjectMock.mockResolvedValue({
packageJson: Buffer.from(JSON.stringify(MOCKED_PACKAGE_JSON)),
@@ -110,12 +116,8 @@ afterEach(async () => {
afterEach(async () => {
// clean up the env vars
process.env.BUCKET_NAME = undefined;

AWSMock.restore();
});

type Response<T> = (err: AWS.AWSError | null, data?: T) => void;

test('happy case', async () => {
await expect(
handler({ tarballUri: MOCK_TARBALL_URI }, {} as any)
@@ -125,8 +127,8 @@ test('happy case', async () => {
packageJson: { path: 'package/package.json', required: true },
});

expect(s3GetObjSpy).toHaveBeenCalledTimes(1);
expect(s3PutObjSpy).toHaveBeenCalledTimes(1);
expect(mockS3).toHaveReceivedCommandTimes(GetObjectCommand, 1);
expect(mockS3).toHaveReceivedCommandTimes(PutObjectCommand, 1);

expect(generateReleaseNotes).toHaveBeenCalledTimes(1);
expect(generateReleaseNotes).toHaveBeenCalledWith(
@@ -160,8 +162,8 @@ test('When repository uses git@github.com url', async () => {
packageJson: { path: 'package/package.json', required: true },
});

expect(s3GetObjSpy).toHaveBeenCalledTimes(1);
expect(s3PutObjSpy).toHaveBeenCalledTimes(1);
expect(mockS3).toHaveReceivedCommandTimes(GetObjectCommand, 1);
expect(mockS3).toHaveReceivedCommandTimes(PutObjectCommand, 1);

expect(generateReleaseNotes).toHaveBeenCalledTimes(1);
expect(generateReleaseNotes).toHaveBeenCalledWith(
@@ -187,8 +189,8 @@ test('When repository info is missing sends "UnSupportedRepo"', async () => {
).resolves.toEqual({ error: 'UnSupportedRepo' });
expect(generateReleaseNotes).not.toHaveBeenCalled();

expect(s3GetObjSpy).toHaveBeenCalledTimes(1);
expect(s3PutObjSpy).not.toHaveBeenCalled();
expect(mockS3).toHaveReceivedCommandTimes(GetObjectCommand, 1);
expect(mockS3).not.toHaveReceivedCommand(PutObjectCommand);

const PKG_JSON_WITH_GITLAB_REPO = {
...MOCKED_PACKAGE_JSON,
@@ -202,8 +204,8 @@ test('When repository info is missing sends "UnSupportedRepo"', async () => {
handler({ tarballUri: MOCK_TARBALL_URI }, {} as any)
).resolves.toEqual({ error: 'UnSupportedRepo' });
expect(generateReleaseNotes).not.toHaveBeenCalled();
expect(s3GetObjSpy).toHaveBeenCalledTimes(2);
expect(s3PutObjSpy).not.toHaveBeenCalled();
expect(mockS3).toHaveReceivedCommandTimes(GetObjectCommand, 2);
expect(mockS3).not.toHaveReceivedCommand(PutObjectCommand);
expect(mockPutMetric).toHaveBeenCalledWith(
constants.UnSupportedRepo,
1,
@@ -231,8 +233,8 @@ test('sends RequestQuotaExhausted error when GitHub sends error code 403', async
packageJson: { path: 'package/package.json', required: true },
});

expect(s3GetObjSpy).toHaveBeenCalledTimes(1);
expect(s3PutObjSpy).not.toHaveBeenCalled();
expect(mockS3).toHaveReceivedCommandTimes(GetObjectCommand, 1);
expect(mockS3).not.toHaveReceivedCommand(PutObjectCommand);
expect(mockPutMetric).toHaveBeenCalledWith(
constants.RequestQuotaExhausted,
1,
@@ -258,8 +260,8 @@ test('sends InvalidCredentials error when GitHub sends error code 401', async ()
packageJson: { path: 'package/package.json', required: true },
});

expect(s3GetObjSpy).toHaveBeenCalledTimes(1);
expect(s3PutObjSpy).not.toHaveBeenCalled();
expect(mockS3).toHaveReceivedCommandTimes(GetObjectCommand, 1);
expect(mockS3).not.toHaveReceivedCommand(PutObjectCommand);
expect(mockPutMetric).toHaveBeenCalledWith(
constants.InvalidCredentials,
1,
@@ -283,8 +285,8 @@ test('When GH does not have any release notes', async () => {
packageJson: { path: 'package/package.json', required: true },
});

expect(s3GetObjSpy).toHaveBeenCalledTimes(1);
expect(s3PutObjSpy).not.toHaveBeenCalled();
expect(mockS3).toHaveReceivedCommandTimes(GetObjectCommand, 1);
expect(mockS3).not.toHaveReceivedCommand(PutObjectCommand);
});

test('when tarball is invalid send InvalidTarball error', async () => {
@@ -301,8 +303,8 @@ test('when tarball is invalid send InvalidTarball error', async () => {
packageJson: { path: 'package/package.json', required: true },
});

expect(s3GetObjSpy).toHaveBeenCalledTimes(1);
expect(s3PutObjSpy).not.toHaveBeenCalled();
expect(mockS3).toHaveReceivedCommandTimes(GetObjectCommand, 1);
expect(mockS3).not.toHaveReceivedCommand(PutObjectCommand);
expect(mockPutMetric).toHaveBeenCalledWith(
constants.InvalidTarball,
1,
@@ -335,8 +337,8 @@ test('throw error when package.json is not valid', async () => {
packageJson: { path: 'package/package.json', required: true },
});

expect(s3GetObjSpy).toHaveBeenCalledTimes(1);
expect(s3PutObjSpy).not.toHaveBeenCalled();
expect(mockS3).toHaveReceivedCommandTimes(GetObjectCommand, 1);
expect(mockS3).not.toHaveReceivedCommand(PutObjectCommand);
expect(mockPutMetric).toHaveBeenCalledWith(
constants.InvalidPackageJson,
1,
@@ -346,38 +348,21 @@

// Helper functions
const setupPkgTarS3GetObjectMock = () => {
const spy = jest.fn().mockResolvedValue({ Body: FAKE_TAR_GZ });
AWSMock.mock(
'S3',
'getObject',
(req: AWS.S3.GetObjectRequest, cb: Response<AWS.S3.GetObjectOutput>) => {
try {
expect(req.Bucket).toBe(MOCK_BUCKET_NAME);
expect(req.Key).toBe(PACKAGE_TGZ);
} catch (e: any) {
return cb(e);
}
return cb(null, spy());
}
);
return spy;
mockS3
.on(GetObjectCommand, {
Bucket: MOCK_BUCKET_NAME,
Key: PACKAGE_TGZ,
})
.callsFake(() => ({ Body: stringToStream(FAKE_TAR_DATA) }));
};

function setupReleaseNotesS3PutObjectMock() {
const spy = jest.fn();
AWSMock.mock(
'S3',
'putObject',
(req: AWS.S3.PutObjectRequest, cb: Response<AWS.S3.PutObjectOutput>) => {
try {
expect(req.Bucket).toBe(MOCK_BUCKET_NAME);
expect(req.Key).toBe(MOCK_RELEASE_NOTES_KEY);
expect(req.Body).toBe(MOCKED_RELEASE_NOTES);
} catch (e) {
return cb(e as AWSError);
}
return cb(null, spy(req));
}
);
return spy;
mockS3
.on(PutObjectCommand, {
Bucket: MOCK_BUCKET_NAME,
Key: MOCK_RELEASE_NOTES_KEY,
Body: MOCKED_RELEASE_NOTES,
ContentType: 'text/markdown',
})
.resolves({});
}
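Aside: the rewritten test setup above replaces `aws-sdk-mock` with `aws-sdk-client-mock` and the matchers from `aws-sdk-client-mock-jest`. A condensed sketch of the same pattern in isolation (the bucket, key, and the `sdkStreamMixin` stream helper are illustrative; the repo's own `stringToStream` helper plays the same role as the mixin here):

```ts
import { Readable } from 'stream';
import { GetObjectCommand, S3Client } from '@aws-sdk/client-s3';
import { mockClient } from 'aws-sdk-client-mock';
import 'aws-sdk-client-mock-jest';
import { sdkStreamMixin } from '@smithy/util-stream';

const s3Mock = mockClient(S3Client);

beforeEach(() => s3Mock.reset());

test('returns the stubbed body for a matching request', async () => {
  // Only calls whose input matches this Bucket/Key pair get the stubbed response.
  s3Mock
    .on(GetObjectCommand, { Bucket: 'my-bucket', Key: 'some/key' })
    .resolves({ Body: sdkStreamMixin(Readable.from(['hello'])) as any });

  const s3 = new S3Client({});
  const response = await s3.send(
    new GetObjectCommand({ Bucket: 'my-bucket', Key: 'some/key' })
  );

  expect(await response.Body!.transformToString()).toBe('hello');
  expect(s3Mock).toHaveReceivedCommandTimes(GetObjectCommand, 1);
});
```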
6 changes: 3 additions & 3 deletions src/__tests__/devapp/__snapshots__/snapshot.test.ts.snap

Some generated files are not rendered by default.

17 changes: 9 additions & 8 deletions src/backend/release-notes/generate-release-notes.lambda.ts
@@ -1,8 +1,9 @@
import { GetObjectCommand, PutObjectCommand } from '@aws-sdk/client-s3';
import { metricScope, Unit } from 'aws-embedded-metrics';
import type { Context } from 'aws-lambda';
import * as aws from 'aws-sdk';
import * as constants from './constants';
import { generateReleaseNotes } from './shared/github-changelog-fetcher.lambda-shared';
import { S3_CLIENT } from '../shared/aws.lambda-shared';
import { requireEnv } from '../shared/env.lambda-shared';
import { extractObjects } from '../shared/tarball.lambda-shared';

@@ -33,20 +34,20 @@ export const handler = metricScope(
metrics.putMetric(constants.UnSupportedTarballUrl, 1, Unit.Count);
return { error: 'UnSupportedTarballUrl' };
}
const tarball = await new aws.S3()
.getObject({
const tarball = await S3_CLIENT.send(
new GetObjectCommand({
// Note: we drop anything after the first `.` in the host, as we only care about the bucket name.
Bucket: tarballUri.host.split('.')[0],
// Note: the pathname part is absolute, so we strip the leading `/`.
Key: tarballUri.pathname.replace(/^\//, ''),
VersionId: tarballUri.searchParams.get('versionId') ?? undefined,
})
.promise();
);
let packageJson: Buffer;

try {
({ packageJson } = await extractObjects(
Buffer.from(tarball.Body! as any),
Buffer.from(await tarball.Body!.transformToByteArray()),
{
packageJson: { path: 'package/package.json', required: true },
}
@@ -91,8 +92,8 @@ export const handler = metricScope(
console.log(
`storing release notes to s3://${BUCKET_NAME}${releaseNotesPath}`
);
await new aws.S3()
.putObject({
await S3_CLIENT.send(
new PutObjectCommand({
Bucket: BUCKET_NAME,
Key: releaseNotesPath,
Body: releaseNotes,
@@ -103,7 +104,7 @@
'Lambda-Run-Id': context.awsRequestId,
},
})
.promise();
);
} else {
console.log('No release notes found');
metrics.putMetric(constants.PackageWithChangeLog, 0, Unit.Count);
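The handler now sends commands through `S3_CLIENT` imported from `../shared/aws.lambda-shared` rather than constructing `new aws.S3()` for each call. That shared module is not part of this diff; a plausible minimal shape, assuming it only exports singleton v3 clients so Lambda invocations reuse connections, could look like:

```ts
// Hypothetical sketch of src/backend/shared/aws.lambda-shared.ts (not shown in this diff).
import { S3Client } from '@aws-sdk/client-s3';
import { SFNClient } from '@aws-sdk/client-sfn';
import { SQSClient } from '@aws-sdk/client-sqs';

// Module-level singletons are created once per Lambda container and reused.
export const S3_CLIENT = new S3Client({});
export const SFN_CLIENT = new SFNClient({});
export const SQS_CLIENT = new SQSClient({});
```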
21 changes: 12 additions & 9 deletions src/backend/release-notes/get-messages-from-worker-queue.lambda.ts
@@ -1,7 +1,10 @@
import { ListExecutionsCommand } from '@aws-sdk/client-sfn';
import type { Message } from '@aws-sdk/client-sqs';
import { ReceiveMessageCommand } from '@aws-sdk/client-sqs';
import { metricScope, Unit } from 'aws-embedded-metrics';
import { StepFunctions, SQS } from 'aws-sdk';
import * as constants from './constants';
import { getServiceLimits } from './shared/github-changelog-fetcher.lambda-shared';
import { SFN_CLIENT, SQS_CLIENT } from '../shared/aws.lambda-shared';
import { requireEnv } from '../shared/env.lambda-shared';

// Each of the release note fetch task can involve making multiple Github
@@ -19,7 +22,7 @@ type ServiceLimit = {
type ExecutionResult = {
error?: string;
status?: string;
messages?: SQS.Message[];
messages?: Message[];
};

/**
@@ -86,15 +89,15 @@ export const handler = async (): Promise<ExecutionResult | ServiceLimit> => {
const sfnArn = requireEnv('STEP_FUNCTION_ARN');

// Ensure only one instance of step function is running
const activities = await new StepFunctions()
.listExecutions({
const activities = await SFN_CLIENT.send(
new ListExecutionsCommand({
stateMachineArn: sfnArn,
maxResults: 1,
statusFilter: 'RUNNING',
})
.promise();
);

if (activities.executions.length > 1) {
if (activities.executions && activities.executions.length > 1) {
return { error: 'MaxConcurrentExecutionError' };
}
if (serviceLimit.remaining <= MAX_GH_REQUEST_PER_PACKAGE) {
@@ -106,15 +109,15 @@ export const handler = async (): Promise<ExecutionResult | ServiceLimit> => {
};
}

const messages = await new SQS()
.receiveMessage({
const messages = await SQS_CLIENT.send(
new ReceiveMessageCommand({
QueueUrl: process.env.SQS_QUEUE_URL!,
MaxNumberOfMessages: Math.min(
Math.floor(serviceLimit.remaining / MAX_GH_REQUEST_PER_PACKAGE),
10
),
})
.promise();
);

if (messages.Messages?.length) {
return {
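One detail worth noting in this file: SDK v3 output types mark most result fields as optional, which is why the new code guards `activities.executions` before reading its length and why `messages.Messages?.length` is optional-chained. A small sketch of the guarded pattern, using an illustrative `hasRunningExecution` helper rather than the handler's own logic:

```ts
import { ListExecutionsCommand, SFNClient } from '@aws-sdk/client-sfn';

const sfn = new SFNClient({});

async function hasRunningExecution(stateMachineArn: string): Promise<boolean> {
  const { executions } = await sfn.send(
    new ListExecutionsCommand({
      stateMachineArn,
      maxResults: 1,
      statusFilter: 'RUNNING',
    })
  );
  // `executions` may be undefined in the v3 typings, so default to an empty list.
  return (executions ?? []).length > 0;
}
```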