diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml index 1fcb6ba4f..df8c3a4dc 100644 --- a/.github/sync-repo-settings.yaml +++ b/.github/sync-repo-settings.yaml @@ -14,9 +14,9 @@ branchProtectionRules: - "ci/kokoro: System test" - docs - lint - - test (10) - test (12) - test (14) + - test (16) - cla/google - windows - OwlBot Post Processor diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 25251dbb7..f447b84ab 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [10, 12, 14] + node: [12, 14, 16] steps: - uses: actions/checkout@v3 - uses: actions/setup-node@v3 diff --git a/.kokoro/continuous/node10/common.cfg b/.kokoro/continuous/node10/common.cfg deleted file mode 100644 index 160faa90e..000000000 --- a/.kokoro/continuous/node10/common.cfg +++ /dev/null @@ -1,34 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Bring in codecov.io token into the build as $KOKORO_KEYSTORE_DIR/73713_dpebot_codecov_token -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "dpebot_codecov_token" - } - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "nodejs-storage/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:10-user" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-storage/.kokoro/test.sh" -} diff --git a/.kokoro/continuous/node10/docs.cfg b/.kokoro/continuous/node10/docs.cfg deleted file mode 100644 index dd810b799..000000000 --- a/.kokoro/continuous/node10/docs.cfg +++ /dev/null @@ -1,4 +0,0 @@ -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/nodejs-storage/.kokoro/docs.sh" -} diff --git a/.kokoro/continuous/node10/test.cfg b/.kokoro/continuous/node10/test.cfg deleted file mode 100644 index 609c0cf0a..000000000 --- a/.kokoro/continuous/node10/test.cfg +++ /dev/null @@ -1,9 +0,0 @@ -# Bring in codecov.io token into the build as $KOKORO_KEYSTORE_DIR/73713_dpebot_codecov_token -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "dpebot_codecov_token" - } - } -} diff --git a/.kokoro/continuous/node8/common.cfg b/.kokoro/continuous/node8/common.cfg deleted file mode 100644 index 6a5bb734e..000000000 --- a/.kokoro/continuous/node8/common.cfg +++ /dev/null @@ -1,24 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "nodejs-storage/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: {
-  key: "TRAMPOLINE_IMAGE"
-  value: "gcr.io/cloud-devrel-kokoro-resources/node:8-user"
-}
-env_vars: {
-  key: "TRAMPOLINE_BUILD_FILE"
-  value: "github/nodejs-storage/.kokoro/test.sh"
-}
diff --git a/.kokoro/continuous/node8/test.cfg b/.kokoro/continuous/node8/test.cfg
deleted file mode 100644
index e69de29bb..000000000
diff --git a/.kokoro/presubmit/node10/common.cfg b/.kokoro/presubmit/node10/common.cfg
deleted file mode 100644
index 160faa90e..000000000
--- a/.kokoro/presubmit/node10/common.cfg
+++ /dev/null
@@ -1,34 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
-  define_artifacts {
-    regex: "**/*sponge_log.xml"
-  }
-}
-
-# Bring in codecov.io token into the build as $KOKORO_KEYSTORE_DIR/73713_dpebot_codecov_token
-before_action {
-  fetch_keystore {
-    keystore_resource {
-      keystore_config_id: 73713
-      keyname: "dpebot_codecov_token"
-    }
-  }
-}
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Use the trampoline script to run in docker.
-build_file: "nodejs-storage/.kokoro/trampoline_v2.sh"
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
-  key: "TRAMPOLINE_IMAGE"
-  value: "gcr.io/cloud-devrel-kokoro-resources/node:10-user"
-}
-env_vars: {
-  key: "TRAMPOLINE_BUILD_FILE"
-  value: "github/nodejs-storage/.kokoro/test.sh"
-}
diff --git a/.kokoro/presubmit/node10/docs.cfg b/.kokoro/presubmit/node10/docs.cfg
deleted file mode 100644
index dd810b799..000000000
--- a/.kokoro/presubmit/node10/docs.cfg
+++ /dev/null
@@ -1,4 +0,0 @@
-env_vars: {
-  key: "TRAMPOLINE_BUILD_FILE"
-  value: "github/nodejs-storage/.kokoro/docs.sh"
-}
diff --git a/.kokoro/presubmit/node10/lint.cfg b/.kokoro/presubmit/node10/lint.cfg
deleted file mode 100644
index 72c4ad241..000000000
--- a/.kokoro/presubmit/node10/lint.cfg
+++ /dev/null
@@ -1,4 +0,0 @@
-env_vars: {
-  key: "TRAMPOLINE_BUILD_FILE"
-  value: "github/nodejs-storage/.kokoro/lint.sh"
-}
diff --git a/.kokoro/presubmit/node10/test.cfg b/.kokoro/presubmit/node10/test.cfg
deleted file mode 100644
index e69de29bb..000000000
diff --git a/.kokoro/presubmit/node8/common.cfg b/.kokoro/presubmit/node8/common.cfg
deleted file mode 100644
index 6a5bb734e..000000000
--- a/.kokoro/presubmit/node8/common.cfg
+++ /dev/null
@@ -1,24 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
-  define_artifacts {
-    regex: "**/*sponge_log.xml"
-  }
-}
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Use the trampoline script to run in docker.
-build_file: "nodejs-storage/.kokoro/trampoline.sh"
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
-  key: "TRAMPOLINE_IMAGE"
-  value: "gcr.io/cloud-devrel-kokoro-resources/node:8-user"
-}
-env_vars: {
-  key: "TRAMPOLINE_BUILD_FILE"
-  value: "github/nodejs-storage/.kokoro/test.sh"
-}
diff --git a/.kokoro/presubmit/node8/test.cfg b/.kokoro/presubmit/node8/test.cfg
deleted file mode 100644
index e69de29bb..000000000
diff --git a/internal-tooling/README.md b/internal-tooling/README.md
new file mode 100644
index 000000000..9a40bb4c9
--- /dev/null
+++ b/internal-tooling/README.md
@@ -0,0 +1,43 @@
+# nodejs-storage benchmarking
+
+**This is not a supported Google product**
+
+This benchmarking script is intended for use by Storage client library maintainers to benchmark various workloads and collect metrics in order to improve performance of the library.
Currently the benchmarking runs a Write-1-Read-3 workload and measures throughput.
+
+## Run example:
+This runs 10K iterations of Write-1-Read-3 on 5KiB to 2GiB files, and generates output to a CSV file:
+```bash
+$ cd nodejs-storage
+$ npm install
+$ cd build/internal-tooling
+$ node performanceTest.js --iterations 10000
+```
+
+## CLI parameters
+
+| Parameter | Description | Possible values | Default |
+| --------- | ----------- | --------------- |:-------:|
+| --iterations | number of iterations to run | any positive integer | `100` |
+| --numthreads | number of threads to run | any positive integer | `1` |
+| --bucket | bucket to upload/download to/from | any string bucket name | `nodejs-perf-metrics` |
+| --small | number of bytes for lower bound file size | any positive integer | `5120` |
+| --large | number of bytes for upper bound file size | any positive integer | `2.147e9` |
+| --projectid | project ID to use | any string project ID | `undefined` |
+
+
+## Workload definition and CSV headers
+
+For each invocation of the benchmark, write a new object of random size between `small` and `large`. After the successful write, download the object in full three times. For each of the 4 operations record the following fields:
+
+| Field | Description |
+| ----- | ----------- |
+| Op | the name of the operation (WRITE, READ[{0,1,2}]) |
+| ObjectSize | the number of bytes of the object |
+| LibBufferSize | configured to use the library default of 100 MiB |
+| Crc32cEnabled | whether crc32c was computed for the operation |
+| MD5Enabled | whether MD5 was computed for the operation |
+| ApiName | defaults to JSON |
+| ElapsedTimeUs | the elapsed time in microseconds the operation took |
+| Status | completion state of the operation [OK, FAIL] |
+| AppBufferSize | N/A |
+| CpuTimeUs | N/A |
\ No newline at end of file
diff --git a/internal-tooling/performPerformanceTest.ts b/internal-tooling/performPerformanceTest.ts
new file mode 100644
index 000000000..946339daa
--- /dev/null
+++ b/internal-tooling/performPerformanceTest.ts
@@ -0,0 +1,223 @@
+/*!
+ * Copyright 2022 Google LLC. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import yargs from 'yargs';
+import * as uuid from 'uuid';
+import {execSync} from 'child_process';
+import {unlinkSync} from 'fs';
+import {Storage} from '../src';
+import {performance} from 'perf_hooks';
+// eslint-disable-next-line node/no-unsupported-features/node-builtins
+import {parentPort} from 'worker_threads';
+import path = require('path');
+
+const TEST_NAME_STRING = 'nodejs-perf-metrics';
+const DEFAULT_NUMBER_OF_WRITES = 1;
+const DEFAULT_NUMBER_OF_READS = 3;
+const DEFAULT_BUCKET_NAME = 'nodejs-perf-metrics';
+const DEFAULT_SMALL_FILE_SIZE_BYTES = 5120;
+const DEFAULT_LARGE_FILE_SIZE_BYTES = 2.147e9;
+const BLOCK_SIZE_IN_BYTES = 1024;
+const NODE_DEFAULT_HIGHWATER_MARK_BYTES = 16384;
+
+export interface TestResult {
+  op: string;
+  objectSize: number;
+  appBufferSize: number;
+  libBufferSize: number;
+  crc32Enabled: boolean;
+  md5Enabled: boolean;
+  apiName: 'JSON' | 'XML';
+  elapsedTimeUs: number;
+  cpuTimeUs: number;
+  status: '[OK]';
+}
+
+/**
+ * Create a uniformly distributed random integer between the inclusive min and max provided.
+ *
+ * @param {number} minInclusive lower bound (inclusive) of the range of random integer to return.
+ * @param {number} maxInclusive upper bound (inclusive) of the range of random integer to return.
+ * @returns {number} returns a random integer between minInclusive and maxInclusive
+ */
+const randomInteger = (minInclusive: number, maxInclusive: number) => {
+  // Utilizing Math.random will generate uniformly distributed random numbers.
+  return (
+    Math.floor(Math.random() * (maxInclusive - minInclusive + 1)) + minInclusive
+  );
+};
+
+const argv = yargs(process.argv.slice(2))
+  .options({
+    bucket: {type: 'string', default: DEFAULT_BUCKET_NAME},
+    small: {type: 'number', default: DEFAULT_SMALL_FILE_SIZE_BYTES},
+    large: {type: 'number', default: DEFAULT_LARGE_FILE_SIZE_BYTES},
+    projectid: {type: 'string'},
+  })
+  .parseSync();
+
+/**
+ * Main entry point. This function performs a test iteration and posts the message back
+ * to the parent thread.
+ */
+async function main() {
+  const results = await performWriteReadTest();
+  parentPort?.postMessage(results);
+}
+
+/**
+ * Performs an iteration of the Write 1 / Read 3 performance measuring test.
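+ * Uploads one object of random size (between `small` and `large` bytes), then
+ * downloads it in full three times; the validation mode (none, crc32c, or md5)
+ * is chosen at random for the iteration.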
+ *
+ * @returns {Promise<TestResult[]>} Promise that resolves to an array of test results for the iteration.
+ */
+async function performWriteReadTest(): Promise<TestResult[]> {
+  const results: TestResult[] = [];
+  const fileName = generateRandomFileName();
+  const sizeInBytes = generateRandomFile(fileName);
+  const checkType = randomInteger(0, 2);
+
+  const stg = new Storage({
+    projectId: argv.projectid,
+  });
+
+  let bucket = stg.bucket(argv.bucket);
+  if (!(await bucket.exists())[0]) {
+    await bucket.create();
+  }
+
+  for (let j = 0; j < DEFAULT_NUMBER_OF_WRITES; j++) {
+    let start = 0;
+    let end = 0;
+
+    const iterationResult: TestResult = {
+      op: 'WRITE',
+      objectSize: sizeInBytes,
+      appBufferSize: BLOCK_SIZE_IN_BYTES,
+      libBufferSize: NODE_DEFAULT_HIGHWATER_MARK_BYTES,
+      crc32Enabled: false,
+      md5Enabled: false,
+      apiName: 'JSON',
+      elapsedTimeUs: 0,
+      cpuTimeUs: -1,
+      status: '[OK]',
+    };
+
+    bucket = stg.bucket(argv.bucket, {
+      preconditionOpts: {
+        ifGenerationMatch: 0,
+      },
+    });
+
+    if (checkType === 0) {
+      start = performance.now();
+      await bucket.upload(`${__dirname}/${fileName}`, {validation: false});
+      end = performance.now();
+    } else if (checkType === 1) {
+      iterationResult.crc32Enabled = true;
+      start = performance.now();
+      await bucket.upload(`${__dirname}/${fileName}`, {validation: 'crc32c'});
+      end = performance.now();
+    } else {
+      iterationResult.md5Enabled = true;
+      start = performance.now();
+      await bucket.upload(`${__dirname}/${fileName}`, {validation: 'md5'});
+      end = performance.now();
+    }
+
+    iterationResult.elapsedTimeUs = Math.round((end - start) * 1000);
+    results.push(iterationResult);
+  }
+
+  bucket = stg.bucket(argv.bucket);
+  for (let j = 0; j < DEFAULT_NUMBER_OF_READS; j++) {
+    let start = 0;
+    let end = 0;
+    const file = bucket.file(`${fileName}`);
+    const iterationResult: TestResult = {
+      op: `READ[${j}]`,
+      objectSize: sizeInBytes,
+      appBufferSize: BLOCK_SIZE_IN_BYTES,
+      libBufferSize: NODE_DEFAULT_HIGHWATER_MARK_BYTES,
+      crc32Enabled: false,
+      md5Enabled: false,
+      apiName: 'JSON',
+      elapsedTimeUs: 0,
+      cpuTimeUs: -1,
+      status: '[OK]',
+    };
+
+    const destinationFileName = generateRandomFileName();
+    const destination = path.join(__dirname, destinationFileName);
+    if (checkType === 0) {
+      start = performance.now();
+      await file.download({validation: false, destination});
+      end = performance.now();
+    } else if (checkType === 1) {
+      iterationResult.crc32Enabled = true;
+      start = performance.now();
+      await file.download({validation: 'crc32c', destination});
+      end = performance.now();
+    } else {
+      iterationResult.md5Enabled = true;
+      start = performance.now();
+      await file.download({validation: 'md5', destination});
+      end = performance.now();
+    }
+    cleanupFile(destinationFileName);
+    iterationResult.elapsedTimeUs = Math.round((end - start) * 1000);
+    results.push(iterationResult);
+  }
+
+  cleanupFile(fileName);
+
+  return results;
+}
+
+/**
+ * Creates a file with a size between the small (default 5120 bytes) and large (2.147e9 bytes) parameters.
+ * The file is filled with random data.
+ *
+ * @param {string} fileName name of the file to generate.
+ * @returns {number} the size of the file generated.
+ */
+function generateRandomFile(fileName: string) {
+  const fileSizeBytes = randomInteger(argv.small, argv.large);
+  const numberNeeded = Math.ceil(fileSizeBytes / BLOCK_SIZE_IN_BYTES);
+  const cmd = `dd if=/dev/urandom of=${__dirname}/${fileName} bs=${BLOCK_SIZE_IN_BYTES} count=${numberNeeded} status=none iflag=fullblock`;
+  execSync(cmd);
+
+  return fileSizeBytes;
+}
+
+/**
+ * Creates a random file name by appending a UUID to the TEST_NAME_STRING.
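+ * For example: `nodejs-perf-metrics.<uuid-v4>`.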
+ * + * @returns {string} random file name that was generated. + */ +function generateRandomFileName(): string { + return `${TEST_NAME_STRING}.${uuid.v4()}`; +} + +/** + * Deletes the file specified by the fileName parameter. + * + * @param {string} fileName name of the file to delete. + */ +function cleanupFile(fileName: string) { + unlinkSync(`${__dirname}/${fileName}`); +} + +main(); diff --git a/internal-tooling/performanceTest.ts b/internal-tooling/performanceTest.ts new file mode 100644 index 000000000..55c2b6baf --- /dev/null +++ b/internal-tooling/performanceTest.ts @@ -0,0 +1,99 @@ +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import {appendFile} from 'fs/promises'; +// eslint-disable-next-line node/no-unsupported-features/node-builtins +import {Worker} from 'worker_threads'; +import yargs = require('yargs'); +import {TestResult} from './performPerformanceTest'; +import {existsSync} from 'fs'; +import {writeFile} from 'fs/promises'; + +const DEFAULT_ITERATIONS = 100; +const DEFAULT_THREADS = 1; +const CSV_HEADERS = + 'Op,ObjectSize,AppBufferSize,LibBufferSize,Crc32cEnabled,MD5Enabled,ApiName,ElapsedTimeUs,CpuTimeUs,Status\n'; +const START_TIME = Date.now(); + +const argv = yargs(process.argv.slice(2)) + .options({ + iterations: {type: 'number', default: DEFAULT_ITERATIONS}, + numthreads: {type: 'number', default: DEFAULT_THREADS}, + }) + .parseSync(); + +let iterationsRemaining = argv.iterations; + +/** + * Main entry point for performing a Write 1 Read 3 performance measurement test. + * This function will create the number of threads supplied in the numthreads argument or + * default to 1 if no argument is provided. The test will be run for the number of iterations + * specified by the iterations parameter or 100 if not specified. + */ +function main() { + let numThreads = argv.numthreads; + if (numThreads > iterationsRemaining) { + console.log( + `${numThreads} is greater than number of iterations (${iterationsRemaining}). Using ${iterationsRemaining} threads instead.` + ); + numThreads = iterationsRemaining; + } + for (let i = 0; i < numThreads; i++) { + createWorker(); + } +} + +/** + * Creates a new worker thread and performs a test iteration in that worker. + * When the worker passes back the results, they are appended to the results file. + */ +function createWorker() { + iterationsRemaining--; + console.log( + `Starting new iteration. Current iterations remaining: ${iterationsRemaining}` + ); + const w = new Worker(__dirname + '/performPerformanceTest.js', { + argv: process.argv.slice(2), + }); + w.on('message', data => { + console.log('Successfully completed iteration.'); + appendResultToCSV(data); + if (iterationsRemaining > 0) { + createWorker(); + } + }); + w.on('error', () => { + console.log('An error occurred.'); + }); +} + +/** + * Appends the test results to the CSV file. 
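+ * Writes the CSV_HEADERS row first if the file does not exist yet; each result
+ * is then appended as one comma-separated row.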
+ * + * @param {TestResult[]} results + */ +async function appendResultToCSV(results: TestResult[]) { + const fileName = `nodejs-perf-metrics-${START_TIME}-${argv.iterations}.csv`; + + if (!existsSync(fileName)) { + await writeFile(fileName, CSV_HEADERS); + } + const csv = results.map(result => Object.values(result)); + const csvString = csv.join('\n'); + await appendFile(fileName, `${csvString}\n`); +} + +main(); diff --git a/package.json b/package.json index 420fb9882..98a70e427 100644 --- a/package.json +++ b/package.json @@ -5,7 +5,7 @@ "license": "Apache-2.0", "author": "Google Inc.", "engines": { - "node": ">=10" + "node": ">=12" }, "repository": "googleapis/nodejs-storage", "main": "./build/src/index.js", @@ -52,26 +52,24 @@ "dependencies": { "@google-cloud/paginator": "^3.0.7", "@google-cloud/projectify": "^2.0.0", - "@google-cloud/promisify": "^2.0.0", + "@google-cloud/promisify": "^3.0.0", "abort-controller": "^3.0.0", "arrify": "^2.0.0", "async-retry": "^1.3.3", "compressible": "^2.0.12", - "configstore": "^5.0.0", + "date-and-time": "^2.0.0", "duplexify": "^4.0.0", "ent": "^2.2.0", "extend": "^3.0.2", - "gaxios": "^4.0.0", - "google-auth-library": "^7.14.1", - "hash-stream-validation": "^0.2.2", + "gaxios": "^5.0.0", + "google-auth-library": "^8.0.1", "mime": "^3.0.0", "mime-types": "^2.0.8", "p-limit": "^3.0.1", "pumpify": "^2.0.0", - "retry-request": "^4.2.2", + "retry-request": "^5.0.0", "stream-events": "^1.0.4", - "teeny-request": "^7.1.3", - "xdg-basedir": "^4.0.0", + "teeny-request": "^8.0.0", "uuid": "^8.0.0" }, "devDependencies": { @@ -80,15 +78,14 @@ "@grpc/proto-loader": "^0.6.0", "@types/async-retry": "^1.4.3", "@types/compressible": "^2.0.0", - "@types/configstore": "^5.0.0", "@types/date-and-time": "^0.13.0", "@types/ent": "^2.2.1", "@types/extend": "^3.0.0", "@types/mime": "^2.0.0", "@types/mime-types": "^2.1.0", - "@types/mocha": "^8.0.0", + "@types/mocha": "^9.1.1", "@types/mockery": "^1.4.29", - "@types/node": "^16.0.0", + "@types/node": "^17.0.30", "@types/node-fetch": "^2.1.3", "@types/proxyquire": "^1.3.28", "@types/pumpify": "^1.4.1", @@ -97,6 +94,7 @@ "@types/tmp": "0.2.3", "@types/uuid": "^8.0.0", "@types/xdg-basedir": "^2.0.0", + "@types/yargs": "^17.0.10", "c8": "^7.0.0", "form-data": "^4.0.0", "gts": "^3.1.0", @@ -104,14 +102,14 @@ "jsdoc-fresh": "^1.0.1", "jsdoc-region-tag": "^1.0.2", "linkinator": "^2.0.0", - "mocha": "^8.4.0", + "mocha": "^9.2.2", "mockery": "^2.1.0", "nock": "~13.2.0", "node-fetch": "^2.6.7", "proxyquire": "^2.1.3", "sinon": "^14.0.0", "tmp": "^0.2.0", - "typescript": "~3.9.10", - "yargs": "^16.0.0" + "typescript": "^4.6.4", + "yargs": "^17.3.1" } } diff --git a/samples/package.json b/samples/package.json index 6d5dfeafa..c650d86f3 100644 --- a/samples/package.json +++ b/samples/package.json @@ -4,7 +4,7 @@ "license": "Apache-2.0", "author": "Google Inc.", "engines": { - "node": ">=10" + "node": ">=12" }, "repository": "googleapis/nodejs-storage", "private": true, diff --git a/src/bucket.ts b/src/bucket.ts index 45c287e2d..2ba1c41d1 100644 --- a/src/bucket.ts +++ b/src/bucket.ts @@ -63,6 +63,7 @@ import { Query, } from './signer'; import {Readable} from 'stream'; +import {CRC32CValidatorGenerator} from './crc32c'; interface SourceObject { name: string; @@ -116,11 +117,6 @@ export interface EnableLoggingOptions { export interface GetFilesOptions { autoPaginate?: boolean; delimiter?: string; - /** - * @deprecated dirrectory is deprecated - * @internal - * */ - directory?: string; endOffset?: string; includeTrailingDelimiter?: 
boolean; prefix?: string; @@ -365,8 +361,6 @@ export interface UploadOptions destination?: string | File; encryptionKey?: string | Buffer; kmsKeyName?: string; - resumable?: boolean; - timeout?: number; // eslint-disable-next-line @typescript-eslint/no-explicit-any onUploadProgress?: (progressEvent: any) => void; } @@ -396,15 +390,6 @@ export enum BucketExceptionMessages { SUPPLY_NOTIFICATION_ID = 'You must supply a notification ID.', } -/** - * The size of a file (in bytes) must be greater than this number to - * automatically trigger a resumable upload. - * - * @const {number} - * @private - */ -const RESUMABLE_THRESHOLD = 5000000; - /** * Get and set IAM policies for your bucket. * @@ -639,6 +624,7 @@ class Bucket extends ServiceObject { acl: Acl; iam: Iam; + crc32cGenerator: CRC32CValidatorGenerator; // eslint-disable-next-line @typescript-eslint/no-unused-vars getFilesStream(query?: GetFilesOptions): Readable { @@ -1051,6 +1037,9 @@ class Bucket extends ServiceObject { pathPrefix: '/defaultObjectAcl', }); + this.crc32cGenerator = + options.crc32cGenerator || this.storage.crc32cGenerator; + this.iam = new Iam(this); this.getFilesStream = paginator.streamify('getFiles'); @@ -2203,7 +2192,7 @@ class Bucket extends ServiceObject { }, }); } catch (e) { - callback!(e); + callback!(e as Error); return; } finally { this.storage.retryOptions.autoRetry = this.instanceRetryValue; @@ -2342,8 +2331,6 @@ class Bucket extends ServiceObject { * names, aside from the prefix, contain delimiter will have their name * truncated after the delimiter, returned in `apiResponse.prefixes`. * Duplicate prefixes are omitted. - * @deprecated @property {string} [directory] Filter results based on a directory name, or - * more technically, a "prefix". Assumes delimeter to be '/'. Deprecated. Use prefix instead. * @property {string} [endOffset] Filter results to objects whose names are * lexicographically before endOffset. If startOffset is also set, the objects * listed have names between startOffset (inclusive) and endOffset (exclusive). @@ -2382,8 +2369,6 @@ class Bucket extends ServiceObject { * names, aside from the prefix, contain delimiter will have their name * truncated after the delimiter, returned in `apiResponse.prefixes`. * Duplicate prefixes are omitted. - * @deprecated @param {string} [query.directory] Filter results based on a directory name, or - * more technically, a "prefix". Assumes delimeter to be '/'. Deprecated. Use query.prefix instead. * @param {string} [query.endOffset] Filter results to objects whose names are * lexicographically before endOffset. If startOffset is also set, the objects * listed have names between startOffset (inclusive) and endOffset (exclusive). @@ -2519,11 +2504,6 @@ class Bucket extends ServiceObject { } query = Object.assign({}, query); - if (query.directory) { - query.prefix = `${query.directory}/`.replace(/\/*$/, '/'); - delete query.directory; - } - this.request( { uri: '/o', @@ -3711,8 +3691,8 @@ class Bucket extends ServiceObject { * `options.predefinedAcl = 'private'`) * @property {boolean} [public] Make the uploaded file public. (Alias for * `options.predefinedAcl = 'publicRead'`) - * @property {boolean} [resumable] Force a resumable upload. (default: - * true for files larger than 5 MB). + * @property {boolean} [resumable=true] Resumable uploads are automatically + * enabled and must be shut off explicitly by setting to false. * @property {number} [timeout=60000] Set the HTTP request timeout in * milliseconds. 
This option is not available for resumable uploads. * Default: `60000` @@ -3741,14 +3721,7 @@ class Bucket extends ServiceObject { * Upload a file to the bucket. This is a convenience method that wraps * {@link File#createWriteStream}. * - * You can specify whether or not an upload is resumable by setting - * `options.resumable`. *Resumable uploads are enabled by default if your - * input file is larger than 5 MB.* - * - * For faster crc32c computation, you must manually install - * {@link https://www.npmjs.com/package/fast-crc32c| `fast-crc32c`}: - * - * $ npm install --save fast-crc32c + * Resumable uploads are enabled by default * * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#uploads| Upload Options (Simple or Resumable)} * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation} @@ -3796,8 +3769,8 @@ class Bucket extends ServiceObject { * `options.predefinedAcl = 'private'`) * @param {boolean} [options.public] Make the uploaded file public. (Alias for * `options.predefinedAcl = 'publicRead'`) - * @param {boolean} [options.resumable] Force a resumable upload. (default: - * true for files larger than 5 MB). + * @param {boolean} [options.resumable=true] Resumable uploads are automatically + * enabled and must be shut off explicitly by setting to false. * @param {number} [options.timeout=60000] Set the HTTP request timeout in * milliseconds. This option is not available for resumable uploads. * Default: `60000` @@ -3838,7 +3811,6 @@ class Bucket extends ServiceObject { * //- * const options = { * destination: 'new-image.png', - * resumable: true, * validation: 'crc32c', * metadata: { * metadata: { @@ -4041,24 +4013,7 @@ class Bucket extends ServiceObject { }); } - if (options.resumable !== null && typeof options.resumable === 'boolean') { - upload(maxRetries); - } else { - // Determine if the upload should be resumable if it's over the threshold. - fs.stat(pathString, (err, fd) => { - if (err) { - callback!(err); - return; - } - - if (fd.size <= RESUMABLE_THRESHOLD) { - // Only disable resumable uploads so createWriteStream still attempts them and falls back to simple upload. - options.resumable = false; - } - - upload(maxRetries); - }); - } + upload(maxRetries); } makeAllFilesPublicPrivate_( @@ -4136,7 +4091,7 @@ class Bucket extends ServiceObject { if (!options.force) { throw e; } - errors.push(e); + errors.push(e as Error); } }; diff --git a/src/channel.ts b/src/channel.ts index 8d62463b7..f0dff46dd 100644 --- a/src/channel.ts +++ b/src/channel.ts @@ -56,11 +56,8 @@ class Channel extends ServiceObject { super(config); - // TODO: remove type cast to any once ServiceObject's type declaration has - // been fixed. https://github.com/googleapis/nodejs-common/issues/176 - const metadata = this.metadata; - metadata.id = id; - metadata.resourceId = resourceId; + this.metadata.id = id; + this.metadata.resourceId = resourceId; } stop(): Promise; diff --git a/src/crc32c.ts b/src/crc32c.ts new file mode 100644 index 000000000..f8d6c10aa --- /dev/null +++ b/src/crc32c.ts @@ -0,0 +1,317 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/** + * Ported from {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc#L16-L59 github.com/google/crc32c} + */ +const CRC32C_EXTENSIONS = [ + 0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c, + 0x26a1e7e8, 0xd4ca64eb, 0x8ad958cf, 0x78b2dbcc, 0x6be22838, 0x9989ab3b, + 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c, + 0xf165b798, 0x030e349b, 0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384, + 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57, 0x89d76c54, 0x5d1d08bf, 0xaf768bbc, + 0xbc267848, 0x4e4dfb4b, 0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a, + 0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35, 0xaa64d611, 0x580f5512, + 0x4b5fa6e6, 0xb93425e5, 0x6dfe410e, 0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa, + 0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45, 0xf779deae, 0x05125dad, + 0x1642ae59, 0xe4292d5a, 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a, + 0x7da08661, 0x8fcb0562, 0x9c9bf696, 0x6ef07595, 0x417b1dbc, 0xb3109ebf, + 0xa0406d4b, 0x522bee48, 0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957, + 0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 0x0c38d26c, 0xfe53516f, + 0xed03a29b, 0x1f682198, 0x5125dad3, 0xa34e59d0, 0xb01eaa24, 0x42752927, + 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 0xdbfc821c, 0x2997011f, + 0x3ac7f2eb, 0xc8ac71e8, 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7, + 0x61c69362, 0x93ad1061, 0x80fde395, 0x72966096, 0xa65c047d, 0x5437877e, + 0x4767748a, 0xb50cf789, 0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859, + 0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46, 0x7198540d, 0x83f3d70e, + 0x90a324fa, 0x62c8a7f9, 0xb602c312, 0x44694011, 0x5739b3e5, 0xa55230e6, + 0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36, 0x3cdb9bdd, 0xceb018de, + 0xdde0eb2a, 0x2f8b6829, 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c, + 0x456cac67, 0xb7072f64, 0xa457dc90, 0x563c5f93, 0x082f63b7, 0xfa44e0b4, + 0xe9141340, 0x1b7f9043, 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c, + 0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3, 0x55326b08, 0xa759e80b, + 0xb4091bff, 0x466298fc, 0x1871a4d8, 0xea1a27db, 0xf94ad42f, 0x0b21572c, + 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 0xa24bb5a6, 0x502036a5, + 0x4370c551, 0xb11b4652, 0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d, + 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e, 0x3bc21e9d, 0xef087a76, 0x1d63f975, + 0x0e330a81, 0xfc588982, 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d, + 0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622, 0x38cc2a06, 0xcaa7a905, + 0xd9f75af1, 0x2b9cd9f2, 0xff56bd19, 0x0d3d3e1a, 0x1e6dcdee, 0xec064eed, + 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530, 0x0417b1db, 0xf67c32d8, + 0xe52cc12c, 0x1747422f, 0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff, + 0x8ecee914, 0x7ca56a17, 0x6ff599e3, 0x9d9e1ae0, 0xd3d3e1ab, 0x21b862a8, + 0x32e8915c, 0xc083125f, 0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540, + 0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90, 0x9e902e7b, 0x6cfbad78, + 0x7fab5e8c, 0x8dc0dd8f, 0xe330a81a, 0x115b2b19, 0x020bd8ed, 0xf0605bee, + 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1, 0x69e9f0d5, 0x9b8273d6, + 0x88d28022, 0x7ab90321, 0xae7367ca, 
0x5c18e4c9, 0x4f48173d, 0xbd23943e, + 0xf36e6f75, 0x0105ec76, 0x12551f82, 0xe03e9c81, 0x34f4f86a, 0xc69f7b69, + 0xd5cf889d, 0x27a40b9e, 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e, + 0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351, +] as const; + +const CRC32C_EXTENSION_TABLE = new Int32Array(CRC32C_EXTENSIONS); + +/** An interface for CRC32C hashing and validation */ +interface CRC32CValidator { + /** + * A method returning the CRC32C as a base64-encoded string. + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + toString: () => string; + /** + * A method validating a base64-encoded CRC32C string. + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + */ + validate: (o: string) => boolean; + /** + * A method for passing `Buffer`s for CRC32C generation. + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + */ + update: (b: Buffer) => void; +} + +/** A function that generates a CRC32C Validator */ +interface CRC32CValidatorGenerator { + /** Should return a new, ready-to-use `CRC32CValidator` */ + (): CRC32CValidator; +} + +const CRC32C_DEFAULT_VALIDATOR_GENERATOR: CRC32CValidatorGenerator = () => + new CRC32C(); + +const CRC32C_EXCEPTION_MESSAGES = { + INVALID_INIT_BASE64_RANGE: (l: number) => + `base64-encoded data expected to equal 4 bytes, not ${l}`, + INVALID_INIT_BUFFER_LENGTH: (l: number) => + `Buffer expected to equal 4 bytes, not ${l}`, + INVALID_INIT_INTEGER: (l: number) => + `Number expected to be a safe, unsigned 32-bit integer, not ${l}`, +} as const; + +class CRC32C implements CRC32CValidator { + /** Current CRC32C value */ + #crc32c = 0; + + /** + * Constructs a new `CRC32C` object. + * + * Reconstruction is recommended via the `CRC32C.from` static method. + * + * @param initialValue An initial CRC32C value - a signed 32-bit integer. + */ + constructor(initialValue = 0) { + this.#crc32c = initialValue; + } + + /** + * Calculates a CRC32C from a provided buffer. + * + * Implementation inspired from: + * - {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc github.com/google/crc32c} + * - {@link https://github.com/googleapis/python-crc32c/blob/a595e758c08df445a99c3bf132ee8e80a3ec4308/src/google_crc32c/python.py github.com/googleapis/python-crc32c} + * - {@link https://github.com/googleapis/java-storage/pull/1376/files github.com/googleapis/java-storage} + * + * @param data The `Buffer` to generate the CRC32C from + */ + update(data: Buffer) { + let current = this.#crc32c ^ 0xffffffff; + + for (const d of data) { + const tablePoly = CRC32C.CRC32C_EXTENSION_TABLE[(d ^ current) & 0xff]; + current = tablePoly ^ (current >>> 8); + } + + this.#crc32c = current ^ 0xffffffff; + } + + /** + * Validates a provided input to the current CRC32C value. 
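+ * Numbers are compared against the raw CRC32C value, strings against the
+ * base64 form, Buffers byte-for-byte, and other validators via `toString()`.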
+   *
+   * @param input A Buffer, `CRC32C`-compatible object, base64-encoded data (string), or signed 32-bit integer
+   */
+  validate(input: Buffer | CRC32CValidator | string | number): boolean {
+    if (typeof input === 'number') {
+      return input === this.#crc32c;
+    } else if (typeof input === 'string') {
+      return input === this.toString();
+    } else if (Buffer.isBuffer(input)) {
+      return Buffer.compare(input, this.toBuffer()) === 0;
+    } else {
+      // `CRC32C`-like object
+      return input.toString() === this.toString();
+    }
+  }
+
+  /**
+   * Returns a `Buffer` representation of the CRC32C value
+   */
+  toBuffer(): Buffer {
+    const buffer = Buffer.alloc(4);
+    buffer.writeInt32BE(this.#crc32c);
+
+    return buffer;
+  }
+
+  /**
+   * Returns a JSON-compatible, base64-encoded representation of the CRC32C value.
+   *
+   * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify `JSON#stringify`}
+   */
+  toJSON(): string {
+    return this.toString();
+  }
+
+  /**
+   * Returns a base64-encoded representation of the CRC32C value.
+   *
+   * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/toString `Object#toString`}
+   */
+  toString(): string {
+    return this.toBuffer().toString('base64');
+  }
+
+  /**
+   * Returns the `number` representation of the CRC32C value as a signed 32-bit integer
+   *
+   * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf `Object#valueOf`}
+   */
+  valueOf(): number {
+    return this.#crc32c;
+  }
+
+  static readonly CRC32C_EXTENSIONS = CRC32C_EXTENSIONS;
+  static readonly CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE;
+
+  /**
+   * Generates a `CRC32C` from a compatible buffer format.
+   *
+   * @param value 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`
+   */
+  private static fromBuffer(
+    value: ArrayBuffer | ArrayBufferView | Buffer
+  ): CRC32C {
+    let buffer: Buffer;
+
+    if (Buffer.isBuffer(value)) {
+      buffer = value;
+    } else if ('buffer' in value) {
+      // `ArrayBufferView`
+      buffer = Buffer.from(value.buffer);
+    } else {
+      // `ArrayBuffer`
+      buffer = Buffer.from(value);
+    }
+
+    if (buffer.byteLength !== 4) {
+      throw new RangeError(
+        CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BUFFER_LENGTH(buffer.byteLength)
+      );
+    }
+
+    return new CRC32C(buffer.readInt32BE());
+  }
+
+  /**
+   * Generates a `CRC32C` from 4-byte base64-encoded data (string).
+   *
+   * @param value 4-byte base64-encoded data (string)
+   */
+  private static fromString(value: string): CRC32C {
+    const buffer = Buffer.from(value, 'base64');
+
+    if (buffer.byteLength !== 4) {
+      throw new RangeError(
+        CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BASE64_RANGE(buffer.byteLength)
+      );
+    }
+
+    return this.fromBuffer(buffer);
+  }
+
+  /**
+   * Generates a `CRC32C` from a safe, unsigned 32-bit integer.
+   *
+   * @param value an unsigned 32-bit integer
+   */
+  private static fromNumber(value: number): CRC32C {
+    if (!Number.isSafeInteger(value) || value > 2 ** 32 || value < -(2 ** 32)) {
+      throw new RangeError(
+        CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_INTEGER(value)
+      );
+    }
+
+    return new CRC32C(value);
+  }
+
+  /**
+   * Generates a `CRC32C` from a variety of compatible types.
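+   * Delegates to `fromNumber`, `fromString`, or `fromBuffer` based on the input type.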
+ * + * @param value A number, 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`, or 4-byte base64-encoded data (string) + */ + static from( + value: ArrayBuffer | ArrayBufferView | CRC32CValidator | string | number + ): CRC32C { + if (typeof value === 'number') { + return this.fromNumber(value); + } else if (typeof value === 'string') { + return this.fromString(value); + } else if ('byteLength' in value) { + // `ArrayBuffer` | `Buffer` | `ArrayBufferView` + return this.fromBuffer(value); + } else { + // `CRC32CValidator`/`CRC32C`-like + return this.fromString(value.toString()); + } + } +} + +export { + CRC32C, + CRC32C_DEFAULT_VALIDATOR_GENERATOR, + CRC32C_EXCEPTION_MESSAGES, + CRC32C_EXTENSIONS, + CRC32C_EXTENSION_TABLE, + CRC32CValidator, + CRC32CValidatorGenerator, +}; diff --git a/src/file.ts b/src/file.ts index 83a892fc2..329b43b37 100644 --- a/src/file.ts +++ b/src/file.ts @@ -27,16 +27,12 @@ import compressible = require('compressible'); import * as crypto from 'crypto'; import * as extend from 'extend'; import * as fs from 'fs'; -// eslint-disable-next-line @typescript-eslint/no-var-requires -const hashStreamValidation = require('hash-stream-validation'); import * as mime from 'mime'; -import * as os from 'os'; // eslint-disable-next-line @typescript-eslint/no-var-requires const pumpify = require('pumpify'); -import * as resumableUpload from './gcs-resumable-upload'; +import * as resumableUpload from './resumable-upload'; import {Duplex, Writable, Readable, PassThrough} from 'stream'; import * as streamEvents from 'stream-events'; -import * as xdgBasedir from 'xdg-basedir'; import * as zlib from 'zlib'; import * as http from 'http'; @@ -70,6 +66,9 @@ import { unicodeJSONStringify, formatAsUTCISO, } from './util'; +import {CRC32CValidatorGenerator} from './crc32c'; +import {HashStreamValidator} from './hash-stream-validator'; + import retry = require('async-retry'); export type GetExpirationDateResponse = [Date]; @@ -87,13 +86,13 @@ export interface PolicyDocument { signature: string; } -export type GetSignedPolicyResponse = [PolicyDocument]; +export type GenerateSignedPostPolicyV2Response = [PolicyDocument]; -export interface GetSignedPolicyCallback { +export interface GenerateSignedPostPolicyV2Callback { (err: Error | null, policy?: PolicyDocument): void; } -export interface GetSignedPolicyOptions { +export interface GenerateSignedPostPolicyV2Options { equals?: string[] | string[][]; expires: string | number | Date; startsWith?: string[] | string[][]; @@ -103,12 +102,6 @@ export interface GetSignedPolicyOptions { contentLengthRange?: {min?: number; max?: number}; } -export type GenerateSignedPostPolicyV2Options = GetSignedPolicyOptions; - -export type GenerateSignedPostPolicyV2Response = GetSignedPolicyResponse; - -export type GenerateSignedPostPolicyV2Callback = GetSignedPolicyCallback; - export interface PolicyFields { [key: string]: string; } @@ -199,7 +192,6 @@ export type PredefinedAcl = export interface CreateResumableUploadOptions { chunkSize?: number; - configPath?: string; metadata?: Metadata; origin?: string; offset?: number; @@ -284,16 +276,6 @@ export enum ActionToHTTPMethod { resumable = 'POST', } -/** - * Custom error type for errors related to creating a resumable upload. 
- * - * @private - */ -class ResumableUploadError extends Error { - name = 'ResumableUploadError'; - additionalInfo?: string; -} - /** * @const {string} * @private @@ -307,11 +289,12 @@ export const STORAGE_POST_POLICY_BASE_URL = 'https://storage.googleapis.com'; const GS_URL_REGEXP = /^gs:\/\/([a-z0-9_.-]+)\/(.+)$/; export interface FileOptions { + crc32cGenerator?: CRC32CValidatorGenerator; encryptionKey?: string | Buffer; generation?: number | string; kmsKeyName?: string; - userProject?: string; preconditionOpts?: PreconditionOptions; + userProject?: string; } export interface CopyOptions { @@ -420,13 +403,13 @@ export enum FileExceptionMessages { STARTS_WITH_TWO_ELEMENTS = 'StartsWith condition must be an array of 2 elements.', CONTENT_LENGTH_RANGE_MIN_MAX = 'ContentLengthRange must have numeric min & max fields.', DOWNLOAD_MISMATCH = 'The downloaded data did not match the data from the server. To be sure the content is the same, you should download the file again.', - UPLOAD_MISMATCH_DELETE_FAIL = `The uploaded data did not match the data from the server. - As a precaution, we attempted to delete the file, but it was not successful. - To be sure the content is the same, you should try removing the file manually, - then uploading the file again. + UPLOAD_MISMATCH_DELETE_FAIL = `The uploaded data did not match the data from the server. + As a precaution, we attempted to delete the file, but it was not successful. + To be sure the content is the same, you should try removing the file manually, + then uploading the file again. \n\nThe delete attempt failed with this message:\n\n `, - UPLOAD_MISMATCH = `The uploaded data did not match the data from the server. - As a precaution, the file has been deleted. + UPLOAD_MISMATCH = `The uploaded data did not match the data from the server. + As a precaution, the file has been deleted. To be sure the content is the same, you should try uploading the file again.`, } @@ -438,7 +421,7 @@ export enum FileExceptionMessages { */ class File extends ServiceObject { acl: Acl; - + crc32cGenerator: CRC32CValidatorGenerator; bucket: Bucket; storage: Storage; kmsKeyName?: string; @@ -893,6 +876,9 @@ class File extends ServiceObject { pathPrefix: '/acl', }); + this.crc32cGenerator = + options.crc32cGenerator || this.bucket.crc32cGenerator; + this.instanceRetryValue = this.storage?.retryOptions?.autoRetry; this.instancePreconditionOpts = options?.preconditionOpts; } @@ -1228,11 +1214,6 @@ class File extends ServiceObject { * code "CONTENT_DOWNLOAD_MISMATCH". If you receive this error, the best * recourse is to try downloading the file again. * - * For faster crc32c computation, you must manually install - * {@link https://www.npmjs.com/package/fast-crc32c| `fast-crc32c`}: - * - * $ npm install --save fast-crc32c - * * NOTE: Readable streams will emit the `end` event when the file is fully * downloaded. * @@ -1296,8 +1277,7 @@ class File extends ServiceObject { typeof options.start === 'number' || typeof options.end === 'number'; const tailRequest = options.end! < 0; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - let validateStream: any; // Created later, if necessary. 
+ let validateStream: HashStreamValidator | undefined = undefined; const throughStream = streamEvents(new PassThrough()); @@ -1306,12 +1286,10 @@ class File extends ServiceObject { let md5 = false; if (typeof options.validation === 'string') { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (options as any).validation = ( - options.validation as string - ).toLowerCase(); - crc32c = options.validation === 'crc32c'; - md5 = options.validation === 'md5'; + const value = options.validation.toLowerCase().trim(); + + crc32c = value === 'crc32c'; + md5 = value === 'md5'; } else if (options.validation === false) { crc32c = false; } @@ -1425,7 +1403,12 @@ class File extends ServiceObject { }); } - validateStream = hashStreamValidation({crc32c, md5}); + validateStream = new HashStreamValidator({ + crc32c, + md5, + crc32cGenerator: this.crc32cGenerator, + }); + throughStreams.push(validateStream); } @@ -1479,7 +1462,7 @@ class File extends ServiceObject { try { await this.getMetadata({userProject: options.userProject}); } catch (e) { - throughStream.destroy(e); + throughStream.destroy(e as Error); return; } if (this.metadata.contentEncoding === 'gzip') { @@ -1493,15 +1476,14 @@ class File extends ServiceObject { // the best. let failed = crc32c || md5; - if (crc32c && hashes.crc32c) { - // We must remove the first four bytes from the returned checksum. - // http://stackoverflow.com/questions/25096737/ - // base64-encoding-of-crc32c-long-value - failed = !validateStream.test('crc32c', hashes.crc32c.substr(4)); - } + if (validateStream) { + if (crc32c && hashes.crc32c) { + failed = !validateStream.test('crc32c', hashes.crc32c); + } - if (md5 && hashes.md5) { - failed = !validateStream.test('md5', hashes.md5); + if (md5 && hashes.md5) { + failed = !validateStream.test('md5', hashes.md5); + } } if (md5 && !hashes.md5) { @@ -1548,8 +1530,6 @@ class File extends ServiceObject { */ /** * @typedef {object} CreateResumableUploadOptions - * @property {string} [configPath] A full JSON file path to use with - * `gcs-resumable-upload`. This maps to the {@link https://github.com/yeoman/configstore/tree/0df1ec950d952b1f0dfb39ce22af8e505dffc71a#configpath| configstore option by the same name}. * @property {object} [metadata] Metadata to set on the file. * @property {number} [offset] The starting byte of the upload stream for resuming an interrupted upload. * @property {string} [origin] Origin header to set for the upload. @@ -1579,8 +1559,8 @@ class File extends ServiceObject { * `options.predefinedAcl = 'publicRead'`) * @property {string} [userProject] The ID of the project which will be * billed for the request. - * @property {string} [chunkSize] Create a separate request per chunk. Should - * be a multiple of 256 KiB (2^18). + * @property {string} [chunkSize] Create a separate request per chunk. This + * value is in bytes and should be a multiple of 256 KiB (2^18). * {@link https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload| We recommend using at least 8 MiB for the chunk size.} */ /** @@ -1651,7 +1631,6 @@ class File extends ServiceObject { authClient: this.storage.authClient, apiEndpoint: this.storage.apiEndpoint, bucket: this.bucket.name, - configPath: options.configPath, customRequestOptions: this.getRequestInterceptors().reduce( (reqOpts, interceptorFn) => interceptorFn(reqOpts), {} @@ -1677,9 +1656,6 @@ class File extends ServiceObject { /** * @typedef {object} CreateWriteStreamOptions Configuration options for File#createWriteStream(). 
- * @property {string} [configPath] **This only applies to resumable - * uploads.** A full JSON file path to use with `gcs-resumable-upload`. - * This maps to the {@link https://github.com/yeoman/configstore/tree/0df1ec950d952b1f0dfb39ce22af8e505dffc71a#configpath| configstore option by the same name}. * @property {string} [contentType] Alias for * `options.metadata.contentType`. If set to `auto`, the file name is used * to determine the contentType. @@ -1747,12 +1723,6 @@ class File extends ServiceObject { * Resumable uploads are automatically enabled and must be shut off explicitly * by setting `options.resumable` to `false`. * - * Resumable uploads require write access to the $HOME directory. Through - * {@link https://www.npmjs.com/package/configstore| `config-store`}, some metadata - * is stored. By default, if the directory is not writable, we will fall back - * to a simple upload. However, if you explicitly request a resumable upload, - * and we cannot write to the config directory, we will return a - * `ResumableUploadError`. * *

* There is some overhead when using a resumable upload that can cause @@ -1761,11 +1731,6 @@ class File extends ServiceObject { * resumable feature is disabled. *

* - * For faster crc32c computation, you must manually install - * {@link https://www.npmjs.com/package/fast-crc32c| `fast-crc32c`}: - * - * $ npm install --save fast-crc32c - * * NOTE: Writable streams will emit the `finish` event when the file is fully * uploaded. * @@ -1877,9 +1842,10 @@ class File extends ServiceObject { // Collect data as it comes in to store in a hash. This is compared to the // checksum value on the returned metadata from the API. - const validateStream = hashStreamValidation({ + const validateStream = new HashStreamValidator({ crc32c, md5, + crc32cGenerator: this.crc32cGenerator, }); const fileWriteStream = duplexify(); @@ -1902,71 +1868,7 @@ class File extends ServiceObject { this.startSimpleUpload_(fileWriteStream, options); return; } - - if (options.configPath) { - this.startResumableUpload_(fileWriteStream, options); - return; - } - - // The logic below attempts to mimic the resumable upload library, - // gcs-resumable-upload. That library requires a writable configuration - // directory in order to work. If we wait for that library to discover any - // issues, we've already started a resumable upload which is difficult to back - // out of. We want to catch any errors first, so we can choose a simple, non- - // resumable upload instead. - - // Same as configstore (used by gcs-resumable-upload): - // https://github.com/yeoman/configstore/blob/f09f067e50e6a636cfc648a6fc36a522062bd49d/index.js#L11 - const configDir = xdgBasedir.config || os.tmpdir(); - - fs.access(configDir, fs.constants.W_OK, accessErr => { - if (!accessErr) { - // A configuration directory exists, and it's writable. gcs-resumable-upload - // should have everything it needs to work. - this.startResumableUpload_(fileWriteStream, options); - return; - } - - // The configuration directory is either not writable, or it doesn't exist. - // gcs-resumable-upload will attempt to create it for the user, but we'll try - // it now to confirm that it won't have any issues. That way, if we catch the - // issue before we start the resumable upload, we can instead start a simple - // upload. - fs.mkdir(configDir, {mode: 0o0700}, err => { - if (!err) { - // We successfully created a configuration directory that - // gcs-resumable-upload will use. - this.startResumableUpload_(fileWriteStream, options); - return; - } - - if (options.resumable) { - // The user wanted a resumable upload, but we couldn't create a - // configuration directory, which means gcs-resumable-upload will fail. - - // Determine if the issue is that the directory does not exist or - // if the directory exists, but is not writable. - const error = new ResumableUploadError( - [ - 'A resumable upload could not be performed. The directory,', - `${configDir}, is not writable. You may try another upload,`, - 'this time setting `options.resumable` to `false`.', - ].join(' ') - ); - fs.access(configDir, fs.constants.R_OK, noReadErr => { - if (noReadErr) { - error.additionalInfo = 'The directory does not exist.'; - } else { - error.additionalInfo = 'The directory is read-only.'; - } - stream.destroy(error); - }); - } else { - // The user didn't care, resumable or not. Fall back to simple upload. 
- this.startSimpleUpload_(fileWriteStream, options); - } - }); - }); + this.startResumableUpload_(fileWriteStream, options); }); fileWriteStream.on('response', stream.emit.bind(stream, 'response')); @@ -1991,10 +1893,7 @@ class File extends ServiceObject { let failed = crc32c || md5; if (crc32c && metadata.crc32c) { - // We must remove the first four bytes from the returned checksum. - // http://stackoverflow.com/questions/25096737/ - // base64-encoding-of-crc32c-long-value - failed = !validateStream.test('crc32c', metadata.crc32c.substr(4)); + failed = !validateStream.test('crc32c', metadata.crc32c); } if (md5 && metadata.md5Hash) { @@ -2033,50 +1932,6 @@ class File extends ServiceObject { return stream as Writable; } - /** - * Delete failed resumable upload file cache. - * - * Resumable file upload cache the config file to restart upload in case of - * failure. In certain scenarios, the resumable upload will not works and - * upload file cache needs to be deleted to upload the same file. - * - * Following are some of the scenarios. - * - * Resumable file upload failed even though the file is successfully saved - * on the google storage and need to clean up a resumable file cache to - * update the same file. - * - * Resumable file upload failed due to pre-condition - * (i.e generation number is not matched) and want to upload a same - * file with the new generation number. - * - * @example - * ``` - * const {Storage} = require('@google-cloud/storage'); - * const storage = new Storage(); - * const myBucket = storage.bucket('my-bucket'); - * - * const file = myBucket.file('my-file', { generation: 0 }); - * const contents = 'This is the contents of the file.'; - * - * file.save(contents, function(err) { - * if (err) { - * file.deleteResumableCache(); - * } - * }); - * - * ``` - */ - deleteResumableCache() { - const uploadStream = resumableUpload.upload({ - bucket: this.bucket.name, - file: this.name, - generation: this.generation, - retryOptions: this.storage.retryOptions, - }); - uploadStream.deleteConfig(); - } - download(options?: DownloadOptions): Promise; download(options: DownloadOptions, callback: DownloadCallback): void; download(callback: DownloadCallback): void; @@ -2169,7 +2024,7 @@ class File extends ServiceObject { if (destination) { fileStream.on('error', callback).once('data', data => { - // We know that the file exists the server + // We know that the file exists the server - now we can truncate/write to a file const writable = fs.createWriteStream(destination); writable.write(data); fileStream.pipe(writable).on('error', callback).on('finish', callback); @@ -2310,123 +2165,6 @@ class File extends ServiceObject { ); } - getSignedPolicy( - options: GetSignedPolicyOptions - ): Promise; - getSignedPolicy( - options: GetSignedPolicyOptions, - callback: GetSignedPolicyCallback - ): void; - getSignedPolicy(callback: GetSignedPolicyCallback): void; - /** - * @typedef {array} GetSignedPolicyResponse - * @property {object} 0 The document policy. - */ - /** - * @callback GetSignedPolicyCallback - * @param {?Error} err Request error, if any. - * @param {object} policy The document policy. - */ - /** - * Get a v2 signed policy document to allow a user to upload data with a POST - * request. - * - * In Google Cloud Platform environments, such as Cloud Functions and App - * Engine, you usually don't provide a `keyFilename` or `credentials` during - * instantiation. 
In those environments, we call the
-   * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API}
-   * to create a signed policy. That API requires either the
-   * `https://www.googleapis.com/auth/iam` or
-   * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are
-   * enabled.
-   *
-   * See {@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument| Policy Document Reference}
-   *
-   * @deprecated `getSignedPolicy()` is deprecated in favor of
-   * `generateSignedPostPolicyV2()` and `generateSignedPostPolicyV4()`.
-   * Currently, this method is an alias to `getSignedPolicyV2()`,
-   * and will be removed in a future major release.
-   * We recommend signing new policies using v4.
-   * @internal
-   *
-   * @throws {Error} If an expiration timestamp from the past is given.
-   * @throws {Error} If options.equals has an array with less or more than two
-   *     members.
-   * @throws {Error} If options.startsWith has an array with less or more than two
-   *     members.
-   *
-   * @param {object} options Configuration options.
-   * @param {array|array[]} [options.equals] Array of request parameters and
-   *     their expected value (e.g. [['$<field>', '<value>']]). Values are
-   *     translated into equality constraints in the conditions field of the
-   *     policy document (e.g. ['eq', '$<field>', '<value>']). If only one
-   *     equality condition is to be specified, options.equals can be a one-
-   *     dimensional array (e.g. ['$<field>', '<value>']).
-   * @param {*} options.expires - A timestamp when this policy will expire. Any
-   *     value given is passed to `new Date()`.
-   * @param {array|array[]} [options.startsWith] Array of request parameters and
-   *     their expected prefixes (e.g. [['$<field>', '<value>']]). Values are
-   *     translated into starts-with constraints in the conditions field of the
-   *     policy document (e.g. ['starts-with', '$<field>', '<value>']). If only
-   *     one prefix condition is to be specified, options.startsWith can be a
-   *     one- dimensional array (e.g. ['$<field>', '<value>']).
-   * @param {string} [options.acl] ACL for the object from possibly predefined
-   *     ACLs.
-   * @param {string} [options.successRedirect] The URL to which the user client
-   *     is redirected if the upload is successful.
-   * @param {string} [options.successStatus] - The status of the Google Storage
-   *     response if the upload is successful (must be string).
-   * @param {object} [options.contentLengthRange]
-   * @param {number} [options.contentLengthRange.min] Minimum value for the
-   *     request's content length.
-   * @param {number} [options.contentLengthRange.max] Maximum value for the
-   *     request's content length.
-   * @param {GetSignedPolicyCallback} [callback] Callback function.
-   * @returns {Promise<GetSignedPolicyResponse>}
-   *
-   * @example
-   * ```
-   * const {Storage} = require('@google-cloud/storage');
-   * const storage = new Storage();
-   * const myBucket = storage.bucket('my-bucket');
-   *
-   * const file = myBucket.file('my-file');
-   * const options = {
-   *   equals: ['$Content-Type', 'image/jpeg'],
-   *   expires: '10-25-2022',
-   *   contentLengthRange: {
-   *     min: 0,
-   *     max: 1024
-   *   }
-   * };
-   *
-   * file.getSignedPolicy(options, function(err, policy) {
-   *   // policy.string: the policy document in plain text.
-   *   // policy.base64: the policy document in base64.
-   *   // policy.signature: the policy signature in base64.
-   * });
-   *
-   * //-
-   * // If the callback is omitted, we'll return a Promise.
-   * //-
-   * file.getSignedPolicy(options).then(function(data) {
-   *   const policy = data[0];
-   * });
-   * ```
-   */
-  getSignedPolicy(
-    optionsOrCallback?: GetSignedPolicyOptions | GetSignedPolicyCallback,
-    cb?: GetSignedPolicyCallback
-  ): void | Promise<GetSignedPolicyResponse> {
-    const args = normalize<GetSignedPolicyOptions>(
-      optionsOrCallback,
-      cb
-    );
-    const options = args.options;
-    const callback = args.callback;
-    this.generateSignedPostPolicyV2(options, callback);
-  }
-
  generateSignedPostPolicyV2(
    options: GenerateSignedPostPolicyV2Options
  ): Promise<GenerateSignedPostPolicyV2Response>;
@@ -2820,7 +2558,7 @@ class File extends ServiceObject {
        fields,
      };
    } catch (err) {
-      throw new SigningError(err.message);
+      throw new SigningError((err as Error).message);
    }
  };

@@ -3936,9 +3674,7 @@ class File extends ServiceObject {
  }

  /**
-   * This creates a gcs-resumable-upload upload stream.
-   *
-   * See {@link https://github.com/googleapis/gcs-resumable-upload| gcs-resumable-upload}
+   * This creates a resumable-upload upload stream.
   *
   * @param {Duplexify} stream - Duplexify stream of data to pipe to the file.
   * @param {object=} options - Configuration object.
@@ -3970,7 +3706,6 @@ class File extends ServiceObject {
      authClient: this.storage.authClient,
      apiEndpoint: this.storage.apiEndpoint,
      bucket: this.bucket.name,
-      configPath: options.configPath,
      customRequestOptions: this.getRequestInterceptors().reduce(
        (reqOpts, interceptorFn) => interceptorFn(reqOpts),
        {}
diff --git a/src/hash-stream-validator.ts b/src/hash-stream-validator.ts
new file mode 100644
index 000000000..db4a527f4
--- /dev/null
+++ b/src/hash-stream-validator.ts
@@ -0,0 +1,96 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
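+
+// HashStreamValidator is a Transform stream that computes CRC32C and/or MD5
+// digests of the data flowing through it, so the result can be compared with
+// the server-reported hashes via `test()`. A minimal usage sketch (the file
+// names and the `serverCrc32c` value, read from the object's metadata, are
+// illustrative, not part of this module):
+//
+//   import {pipeline} from 'stream';
+//   import {createReadStream, createWriteStream} from 'fs';
+//
+//   const validator = new HashStreamValidator({crc32c: true});
+//   pipeline(
+//     createReadStream('object.bin'),
+//     validator,
+//     createWriteStream('copy.bin'),
+//     err => {
+//       if (!err) console.log(validator.test('crc32c', serverCrc32c));
+//     }
+//   );
+//
+// Note the MD5 digest is only finalized in `_flush`, so `test('md5', ...)` is
+// meaningful only after the stream has ended.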
+
+import {createHash, Hash} from 'crypto';
+import {Transform} from 'stream';
+
+import {
+  CRC32CValidatorGenerator,
+  CRC32C_DEFAULT_VALIDATOR_GENERATOR,
+  CRC32CValidator,
+} from './crc32c';
+
+interface HashStreamValidatorOptions {
+  crc32c: boolean;
+  md5: boolean;
+  crc32cGenerator: CRC32CValidatorGenerator;
+}
+
+class HashStreamValidator extends Transform {
+  readonly crc32cEnabled: boolean;
+  readonly md5Enabled: boolean;
+
+  #crc32cHash?: CRC32CValidator = undefined;
+  #md5Hash?: Hash = undefined;
+
+  #md5Digest = '';
+
+  constructor(options: Partial<HashStreamValidatorOptions> = {}) {
+    super();
+
+    this.crc32cEnabled = !!options.crc32c;
+    this.md5Enabled = !!options.md5;
+
+    if (this.crc32cEnabled) {
+      const crc32cGenerator =
+        options.crc32cGenerator || CRC32C_DEFAULT_VALIDATOR_GENERATOR;
+
+      this.#crc32cHash = crc32cGenerator();
+    }
+
+    if (this.md5Enabled) {
+      this.#md5Hash = createHash('md5');
+    }
+  }
+
+  _flush(callback: () => void) {
+    if (this.#md5Hash) {
+      this.#md5Digest = this.#md5Hash.digest('base64');
+    }
+
+    callback();
+  }
+
+  _transform(
+    chunk: Buffer,
+    encoding: BufferEncoding,
+    callback: (e?: Error) => void
+  ) {
+    this.push(chunk, encoding);
+
+    try {
+      if (this.#crc32cHash) this.#crc32cHash.update(chunk);
+      if (this.#md5Hash) this.#md5Hash.update(chunk);
+      callback();
+    } catch (e) {
+      callback(e as Error);
+    }
+  }
+
+  test(hash: 'crc32c' | 'md5', sum: Buffer | string): boolean {
+    const check = Buffer.isBuffer(sum) ? sum.toString('base64') : sum;
+
+    if (hash === 'crc32c' && this.#crc32cHash) {
+      return this.#crc32cHash.validate(check);
+    }
+
+    if (hash === 'md5' && this.#md5Hash) {
+      return this.#md5Digest === check;
+    }
+
+    return false;
+  }
+}
+
+export {HashStreamValidator, HashStreamValidatorOptions};
diff --git a/src/index.ts b/src/index.ts
index 37bc83061..0f8210aa9 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -135,6 +135,7 @@ export {
  UploadOptions,
  UploadResponse,
} from './bucket';
+export * from './crc32c';
export {Channel, StopCallback} from './channel';
export {
  CopyCallback,
@@ -165,9 +166,6 @@ export {
  GetFileMetadataResponse,
  GetFileOptions,
  GetFileResponse,
-  GetSignedPolicyCallback,
-  GetSignedPolicyOptions,
-  GetSignedPolicyResponse,
  GenerateSignedPostPolicyV2Callback,
  GenerateSignedPostPolicyV2Options,
  GenerateSignedPostPolicyV2Response,
@@ -199,6 +197,7 @@ export {
  SetStorageClassResponse,
  SignedPostPolicyV4Output,
} from './file';
+export * from './hash-stream-validator';
export {
  HmacKey,
  HmacKeyMetadata,
diff --git a/src/nodejs-common/util.ts b/src/nodejs-common/util.ts
index 3008f187b..720ff3ad2 100644
--- a/src/nodejs-common/util.ts
+++ b/src/nodejs-common/util.ts
@@ -775,25 +775,14 @@ export class Util {
    callback: BodyResponseCallback
  ): void | Abortable {
    let autoRetryValue = AUTO_RETRY_DEFAULT;
-    if (
-      config.autoRetry !== undefined &&
-      config.retryOptions?.autoRetry !== undefined
-    ) {
-      throw new ApiError(
-        'autoRetry is deprecated. Use retryOptions.autoRetry instead.'
-      );
-    } else if (config.autoRetry !== undefined) {
+    if (config.autoRetry !== undefined) {
      autoRetryValue = config.autoRetry;
    } else if (config.retryOptions?.autoRetry !== undefined) {
      autoRetryValue = config.retryOptions.autoRetry;
    }

    let maxRetryValue = MAX_RETRY_DEFAULT;
-    if (config.maxRetries && config.retryOptions?.maxRetries) {
-      throw new ApiError(
-        'maxRetries is deprecated. Use retryOptions.maxRetries instead.'
- ); - } else if (config.maxRetries) { + if (config.maxRetries) { maxRetryValue = config.maxRetries; } else if (config.retryOptions?.maxRetries) { maxRetryValue = config.retryOptions.maxRetries; diff --git a/src/gcs-resumable-upload.ts b/src/resumable-upload.ts similarity index 90% rename from src/gcs-resumable-upload.ts rename to src/resumable-upload.ts index 60279b57a..5a28f7419 100644 --- a/src/gcs-resumable-upload.ts +++ b/src/resumable-upload.ts @@ -13,7 +13,6 @@ // limitations under the License. import AbortController from 'abort-controller'; -import * as ConfigStore from 'configstore'; import {createHash} from 'crypto'; import * as extend from 'extend'; import { @@ -30,7 +29,6 @@ import {RetryOptions, PreconditionOptions} from './storage'; import * as uuid from 'uuid'; const NOT_FOUND_STATUS_CODE = 404; -const TERMINATED_UPLOAD_STATUS_CODE = 410; const RESUMABLE_INCOMPLETE_STATUS_CODE = 308; const DEFAULT_API_ENDPOINT_REGEX = /.*\.googleapis\.com/; const packageJson = require('../../package.json'); @@ -101,12 +99,6 @@ export interface UploadConfig { ) => Promise> | GaxiosPromise; }; - /** - * Where the gcs-resumable-upload configuration file should be stored on your - * system. This maps to the configstore option by the same name. - */ - configPath?: string; - /** * Create a separate request per chunk. * @@ -257,7 +249,6 @@ export class Upload extends Writable { uri?: string; userProject?: string; encryption?: Encryption; - configStore: ConfigStore; uriProvidedManually: boolean; numBytesWritten = 0; numRetries = 0; @@ -337,14 +328,9 @@ export class Upload extends Writable { if (cfg.private) this.predefinedAcl = 'private'; if (cfg.public) this.predefinedAcl = 'publicRead'; - const configPath = cfg.configPath; - this.configStore = new ConfigStore('gcs-resumable-upload', null, { - configPath, - }); - const autoRetry = cfg.retryOptions.autoRetry; this.uriProvidedManually = !!cfg.uri; - this.uri = cfg.uri || this.get('uri'); + this.uri = cfg.uri; this.numBytesWritten = 0; this.numRetries = 0; // counter for number of retries currently executed if (!autoRetry) { @@ -362,11 +348,10 @@ export class Upload extends Writable { if (this.uri) { this.continueUploading(); } else { - this.createURI((err, uri) => { + this.createURI(err => { if (err) { return this.destroy(err); } - this.set({uri}); this.startUploading(); return; }); @@ -642,15 +627,6 @@ export class Upload extends Writable { this.offset = 0; } - // Check if we're uploading the expected object - if (this.numBytesWritten === 0) { - const isSameObject = await this.ensureUploadingSameObject(); - if (!isSameObject) { - // `ensureUploadingSameObject` will restart the upload. - return; - } - } - // Check if the offset (server) is too far behind the current stream if (this.offset < this.numBytesWritten) { const delta = this.numBytesWritten - this.offset; @@ -813,7 +789,6 @@ export class Upload extends Writable { resp.data.size = Number(resp.data.size); } this.emit('metadata', resp.data); - this.deleteConfig(); // Allow the object (Upload) to continue naturally so the user's // "finish" event fires. @@ -821,49 +796,6 @@ export class Upload extends Writable { } } - /** - * Check if this is the same content uploaded previously. This caches a - * slice of the first chunk, then compares it with the first byte of - * incoming data. 
- * - * @returns if the request is ok to continue as-is - */ - private async ensureUploadingSameObject() { - // A queue for the upstream data - const upstreamQueue = this.upstreamIterator( - 16, - true // we just want one chunk for this validation - ); - - const upstreamChunk = await upstreamQueue.next(); - const chunk = upstreamChunk.value - ? upstreamChunk.value.chunk - : Buffer.alloc(0); - - // Put the original chunk back into the buffer as we just wanted to 'peek' - // at the stream for validation. - this.unshiftChunkBuffer(chunk); - - let cachedFirstChunk = this.get('firstChunk'); - const firstChunk = chunk.valueOf(); - - if (!cachedFirstChunk) { - // This is a new upload. Cache the first chunk. - this.set({uri: this.uri, firstChunk}); - } else { - // this continues an upload in progress. check if the bytes are the same - cachedFirstChunk = Buffer.from(cachedFirstChunk); - const nextChunk = Buffer.from(firstChunk); - if (Buffer.compare(cachedFirstChunk, nextChunk) !== 0) { - // this data is not the same. start a new upload - this.restart(); - return false; - } - } - - return true; - } - private async getAndSetOffset() { const opts: GaxiosOptions = { method: 'PUT', @@ -888,26 +820,6 @@ export class Upload extends Writable { this.offset = 0; } catch (e) { const err = e as GaxiosError; - const resp = err.response; - // we don't return a 404 to the user if they provided the resumable - // URI. if we're just using the configstore file to tell us that this - // file exists, and it turns out that it doesn't (the 404), that's - // probably stale config data. - if ( - resp && - resp.status === NOT_FOUND_STATUS_CODE && - !this.uriProvidedManually - ) { - this.restart(); - return; - } - - // this resumable upload is unrecoverable (bad data or service error). - if (resp && resp.status === TERMINATED_UPLOAD_STATUS_CODE) { - this.restart(); - return; - } - this.destroy(err); } } @@ -983,31 +895,15 @@ export class Upload extends Writable { } this.lastChunkSent = Buffer.alloc(0); - this.deleteConfig(); - this.createURI((err, uri) => { + this.createURI(err => { if (err) { return this.destroy(err); } - this.set({uri}); this.startUploading(); return; }); } - private get(prop: string) { - const store = this.configStore.get(this.cacheKey); - return store && store[prop]; - } - - // eslint-disable-next-line @typescript-eslint/no-explicit-any - private set(props: any) { - this.configStore.set(this.cacheKey, props); - } - - deleteConfig() { - this.configStore.delete(this.cacheKey); - } - /** * @return {bool} is the request good? 
*/ diff --git a/src/signer.ts b/src/signer.ts index 8af898730..416970a20 100644 --- a/src/signer.ts +++ b/src/signer.ts @@ -213,8 +213,9 @@ export class URLSigner { Signature: signature, } as V2SignedUrlQuery; } catch (err) { - const signingErr = new SigningError(err.message); - signingErr.stack = err.stack; + const error = err as Error; + const signingErr = new SigningError(error.message); + signingErr.stack = error.stack; throw signingErr; } }; @@ -318,8 +319,9 @@ export class URLSigner { }); return signedQuery; } catch (err) { - const signingErr = new SigningError(err.message); - signingErr.stack = err.stack; + const error = err as Error; + const signingErr = new SigningError(error.message); + signingErr.stack = error.stack; throw signingErr; } }; diff --git a/src/storage.ts b/src/storage.ts index 2298544b6..233fdeee6 100644 --- a/src/storage.ts +++ b/src/storage.ts @@ -23,6 +23,10 @@ import {Channel} from './channel'; import {File} from './file'; import {normalize} from './util'; import {HmacKey, HmacKeyMetadata, HmacKeyOptions} from './hmacKey'; +import { + CRC32CValidatorGenerator, + CRC32C_DEFAULT_VALIDATOR_GENERATOR, +} from './crc32c'; export interface GetServiceAccountOptions { userProject?: string; @@ -68,33 +72,20 @@ export interface PreconditionOptions { } export interface StorageOptions extends ServiceOptions { - retryOptions?: RetryOptions; - /** - * @deprecated Use retryOptions instead. - * @internal - */ - autoRetry?: boolean; - /** - * @deprecated Use retryOptions instead. - * @internal - */ - maxRetries?: number; - /** - * **This option is deprecated.** - * @todo Remove in next major release. - */ - promise?: typeof Promise; /** * The API endpoint of the service used to make requests. * Defaults to `storage.googleapis.com`. */ apiEndpoint?: string; + crc32cGenerator?: CRC32CValidatorGenerator; + retryOptions?: RetryOptions; } export interface BucketOptions { + crc32cGenerator?: CRC32CValidatorGenerator; kmsKeyName?: string; - userProject?: string; preconditionOpts?: PreconditionOptions; + userProject?: string; } export interface Cors { @@ -199,8 +190,6 @@ export enum ExceptionMessages { } export enum StorageExceptionMessages { - AUTO_RETRY_DEPRECATED = 'autoRetry is deprecated. Use retryOptions.autoRetry instead.', - MAX_RETRIES_DEPRECATED = 'maxRetries is deprecated. Use retryOptions.maxRetries instead.', BUCKET_NAME_REQUIRED = 'A bucket name is needed to use Cloud Storage.', BUCKET_NAME_REQUIRED_CREATE = 'A name is required to create a bucket.', HMAC_SERVICE_ACCOUNT = 'The first argument must be a service account email to create an HMAC key.', @@ -474,6 +463,8 @@ export class Storage extends Service { */ acl: typeof Storage.acl; + crc32cGenerator: CRC32CValidatorGenerator; + getBucketsStream(): Readable { // placeholder body, overwritten in constructor return new Readable(); @@ -593,32 +584,16 @@ export class Storage extends Service { // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. 
const baseUrl = EMULATOR_HOST || `${options.apiEndpoint}/storage/v1`; - let autoRetryValue = AUTO_RETRY_DEFAULT; - if ( - options.autoRetry !== undefined && - options.retryOptions?.autoRetry !== undefined - ) { - throw new ApiError(StorageExceptionMessages.AUTO_RETRY_DEPRECATED); - } else if (options.autoRetry !== undefined) { - autoRetryValue = options.autoRetry; - } else if (options.retryOptions?.autoRetry !== undefined) { - autoRetryValue = options.retryOptions.autoRetry; - } - - let maxRetryValue = MAX_RETRY_DEFAULT; - if (options.maxRetries && options.retryOptions?.maxRetries) { - throw new ApiError(StorageExceptionMessages.MAX_RETRIES_DEPRECATED); - } else if (options.maxRetries) { - maxRetryValue = options.maxRetries; - } else if (options.retryOptions?.maxRetries) { - maxRetryValue = options.retryOptions.maxRetries; - } - const config = { apiEndpoint: options.apiEndpoint!, retryOptions: { - autoRetry: autoRetryValue, - maxRetries: maxRetryValue, + autoRetry: + options.retryOptions?.autoRetry !== undefined + ? options.retryOptions?.autoRetry + : AUTO_RETRY_DEFAULT, + maxRetries: options.retryOptions?.maxRetries + ? options.retryOptions?.maxRetries + : MAX_RETRY_DEFAULT, retryDelayMultiplier: options.retryOptions?.retryDelayMultiplier ? options.retryOptions?.retryDelayMultiplier : RETRY_DELAY_MULTIPLIER_DEFAULT, @@ -657,6 +632,8 @@ export class Storage extends Service { * @see Storage.acl */ this.acl = Storage.acl; + this.crc32cGenerator = + options.crc32cGenerator || CRC32C_DEFAULT_VALIDATOR_GENERATOR; this.retryOptions = config.retryOptions; diff --git a/system-test/kitchen.ts b/system-test/kitchen.ts index 3a0182c0d..8d687ecc2 100644 --- a/system-test/kitchen.ts +++ b/system-test/kitchen.ts @@ -20,7 +20,7 @@ import * as tmp from 'tmp'; import * as crypto from 'crypto'; import * as os from 'os'; import {Readable} from 'stream'; -import {createURI, ErrorWithCode, upload} from '../src/gcs-resumable-upload'; +import {createURI, ErrorWithCode, upload} from '../src/resumable-upload'; import { RETRY_DELAY_MULTIPLIER_DEFAULT, TOTAL_TIMEOUT_DEFAULT, @@ -54,13 +54,13 @@ async function delay(title: string, retries: number, done: Function) { setTimeout(done(), ms); } -describe('gcs-resumable-upload', () => { +describe('resumable-upload', () => { beforeEach(() => { upload({ bucket: bucketName, file: filePath, retryOptions: retryOptions, - }).deleteConfig(); + }); }); it('should work', done => { @@ -178,39 +178,4 @@ describe('gcs-resumable-upload', () => { done(); }); }); - - it('should set custom config file', done => { - const uploadOptions = { - bucket: bucketName, - file: filePath, - metadata: {contentType: 'image/jpg'}, - retryOptions: retryOptions, - configPath: path.join( - os.tmpdir(), - `test-gcs-resumable-${Date.now()}.json` - ), - }; - let uploadSucceeded = false; - - fs.createReadStream(filePath) - .on('error', done) - .pipe(upload(uploadOptions)) - .on('error', done) - .on('response', resp => { - uploadSucceeded = resp.status === 200; - }) - .on('finish', () => { - assert.strictEqual(uploadSucceeded, true); - - const configData = JSON.parse( - fs.readFileSync(uploadOptions.configPath, 'utf8') - ); - const keyName = `${uploadOptions.bucket}/${uploadOptions.file}`.replace( - path.extname(filePath), - '' - ); - assert.ok(Object.keys(configData).includes(keyName)); - done(); - }); - }); }); diff --git a/system-test/storage.ts b/system-test/storage.ts index 365bbf177..94b733c8d 100644 --- a/system-test/storage.ts +++ b/system-test/storage.ts @@ -32,6 +32,7 @@ import { Notification, 
DeleteBucketCallback, GetFileCallback, + CRC32C, } from '../src'; import * as nock from 'nock'; import {Transform} from 'stream'; @@ -44,6 +45,14 @@ import {PubSub} from '@google-cloud/pubsub'; import {LifecycleRule} from '../src/bucket'; import {IdempotencyStrategy} from '../src/storage'; +class HTTPError extends Error { + code: number; + constructor(message: string, code: number) { + super(message); + this.code = code; + } +} + // When set to true, skips all tests that is not compatible for // running inside VPCSC. const RUNNING_IN_VPCSC = !!process.env['GOOGLE_CLOUD_TESTS_IN_VPCSC']; @@ -198,7 +207,9 @@ describe('storage', () => { /Could not load the default credentials/, /does not have storage\.objects\.create access/, ]; - assert(allowedErrorMessages.some(msg => msg.test(e.message))); + assert( + allowedErrorMessages.some(msg => msg.test((e as Error).message)) + ); } }); }); @@ -2217,7 +2228,7 @@ describe('storage', () => { const file = FILES[filesKey]; const hash = crypto.createHash('md5'); - return new Promise(resolve => + return new Promise(resolve => fs .createReadStream(file.path) .on('data', hash.update.bind(hash)) @@ -2493,7 +2504,6 @@ describe('storage', () => { fs.stat(FILES.big.path, (err, metadata) => { assert.ifError(err); - // Use a random name to force an empty ConfigStore cache. const file = bucket.file(generateName()); const fileSize = metadata.size; upload({interrupt: true}, err => { @@ -3282,31 +3292,6 @@ describe('storage', () => { }); }); - it('should get files from a directory', done => { - //Note: Directory is deprecated. - bucket.getFiles({directory: DIRECTORY_NAME}, (err, files) => { - assert.ifError(err); - assert.strictEqual(files!.length, 3); - done(); - }); - }); - - it('should get files from a directory as a stream', done => { - //Note: Directory is deprecated. - let numFilesEmitted = 0; - - bucket - .getFilesStream({directory: DIRECTORY_NAME}) - .on('error', done) - .on('data', () => { - numFilesEmitted++; - }) - .on('end', () => { - assert.strictEqual(numFilesEmitted, 3); - done(); - }); - }); - it('should paginate the list', done => { const query = { maxResults: NEW_FILES.length - 1, @@ -3864,6 +3849,36 @@ describe('storage', () => { }); }); + describe('CRC32C', () => { + const KNOWN_INPUT_TO_CRC32C = { + /** empty string (i.e. 
nothing to 'update') */ + '': 'AAAAAA==', + /** known case #1 - validated from actual GCS object upload + metadata retrieval */ + data: 'rth90Q==', + /** known case #2 - validated from actual GCS object upload + metadata retrieval */ + 'some text\n': 'DkjKuA==', + /** arbitrary large string */ + ['a'.repeat(2 ** 16)]: 'TpXtPw==', + } as const; + + it('should generate the appropriate hashes', async () => { + const file = bucket.file('crc32c-test-file'); + + for (const [input, expected] of Object.entries(KNOWN_INPUT_TO_CRC32C)) { + const buffer = Buffer.from(input); + const crc32c = new CRC32C(); + + await file.save(buffer); + crc32c.update(buffer); + + const [metadata] = await file.getMetadata(); + + assert.equal(metadata.crc32c, expected); + assert(crc32c.validate(metadata.crc32c)); + } + }); + }); + async function deleteBucketAsync(bucket: Bucket, options?: {}) { // After files are deleted, eventual consistency may require a bit of a // delay to ensure that the bucket recognizes that the files don't exist @@ -3969,7 +3984,8 @@ describe('storage', () => { return false; } } catch (error) { - if (error.code === 404) { + const err = error as HTTPError; + if (err.code === 404) { return false; } else { throw error; diff --git a/test/bucket.ts b/test/bucket.ts index 1129b2d33..fedd5bfe8 100644 --- a/test/bucket.ts +++ b/test/bucket.ts @@ -30,7 +30,7 @@ import * as path from 'path'; import * as proxyquire from 'proxyquire'; import * as stream from 'stream'; -import {Bucket, Channel, Notification} from '../src'; +import {Bucket, Channel, Notification, CRC32C} from '../src'; import { CreateWriteStreamOptions, File, @@ -197,6 +197,7 @@ describe('Bucket', () => { }, idempotencyStrategy: IdempotencyStrategy.RetryConditional, }, + crc32cGenerator: () => new CRC32C(), }; const BUCKET_NAME = 'test-bucket'; @@ -429,6 +430,17 @@ describe('Bucket', () => { assert.strictEqual(bucket.userProject, fakeUserProject); }); + + it('should accept a `crc32cGenerator`', () => { + const crc32cGenerator = () => {}; + + const bucket = new Bucket(STORAGE, 'bucket-name', {crc32cGenerator}); + assert.strictEqual(bucket.crc32cGenerator, crc32cGenerator); + }); + + it("should use storage's `crc32cGenerator` by default", () => { + assert.strictEqual(bucket.crc32cGenerator, STORAGE.crc32cGenerator); + }); }); describe('addLifecycleRule', () => { @@ -1713,27 +1725,6 @@ describe('Bucket', () => { bucket.getFiles({maxResults: 5, pageToken: token}, util.noop); }); - it('should allow setting a directory', done => { - //Note: Directory is deprecated. - const directory = 'directory-name'; - bucket.request = (reqOpts: DecorateRequestOptions) => { - assert.strictEqual(reqOpts.qs.prefix, `${directory}/`); - assert.strictEqual(reqOpts.qs.directory, undefined); - done(); - }; - bucket.getFiles({directory}, assert.ifError); - }); - - it('should strip excess slashes from a directory', done => { - //Note: Directory is deprecated. 
- const directory = 'directory-name///'; - bucket.request = (reqOpts: DecorateRequestOptions) => { - assert.strictEqual(reqOpts.qs.prefix, 'directory-name/'); - done(); - }; - bucket.getFiles({directory}, assert.ifError); - }); - it('should return nextQuery if more results exist', () => { const token = 'next-page-token'; bucket.request = ( @@ -2100,9 +2091,12 @@ describe('Bucket', () => { }); it('should error if action is undefined', () => { - delete SIGNED_URL_CONFIG.action; + const urlConfig = { + ...SIGNED_URL_CONFIG, + } as Partial; + delete urlConfig.action; assert.throws(() => { - bucket.getSignedUrl(SIGNED_URL_CONFIG, () => {}), + bucket.getSignedUrl(urlConfig, () => {}), ExceptionMessages.INVALID_ACTION; }); }); @@ -2755,9 +2749,9 @@ describe('Bucket', () => { }; }); - it('should force a resumable upload', done => { + it('should respect setting a resumable upload to false', done => { const fakeFile = new FakeFile(bucket, 'file-name'); - const options = {destination: fakeFile, resumable: true}; + const options = {destination: fakeFile, resumable: false}; fakeFile.createWriteStream = (options_: CreateWriteStreamOptions) => { const ws = new stream.Writable(); ws.write = () => true; @@ -2770,40 +2764,6 @@ describe('Bucket', () => { bucket.upload(filepath, options, assert.ifError); }); - it('should not pass resumable option to createWriteStream when file size is greater than minimum resumable threshold', done => { - const fakeFile = new FakeFile(bucket, 'file-name'); - const options = {destination: fakeFile}; - fsStatOverride = (path: string, callback: Function) => { - // Set size greater than threshold - callback(null, {size: 5000001}); - }; - fakeFile.createWriteStream = (options_: CreateWriteStreamOptions) => { - const ws = new stream.Writable(); - ws.write = () => true; - setImmediate(() => { - assert.strictEqual(typeof options_.resumable, 'undefined'); - done(); - }); - return ws; - }; - bucket.upload(filepath, options, assert.ifError); - }); - - it('should prevent resumable when file size is less than minimum resumable threshold', done => { - const fakeFile = new FakeFile(bucket, 'file-name'); - const options = {destination: fakeFile}; - fakeFile.createWriteStream = (options_: CreateWriteStreamOptions) => { - const ws = new stream.Writable(); - ws.write = () => true; - setImmediate(() => { - assert.strictEqual(options_.resumable, false); - done(); - }); - return ws; - }; - bucket.upload(filepath, options, assert.ifError); - }); - it('should not retry a nonretryable error code', done => { const fakeFile = new FakeFile(bucket, 'file-name'); const options = {destination: fakeFile, resumable: true}; diff --git a/test/crc32c.ts b/test/crc32c.ts new file mode 100644 index 000000000..6ff951e40 --- /dev/null +++ b/test/crc32c.ts @@ -0,0 +1,501 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
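+
+// The cases below are known-answer tests: each key of `KNOWN_INPUT_TO_CRC32C`
+// maps an input string to the base64-encoded, big-endian CRC32C of its bytes.
+// Any vector can be reproduced with the class under test itself, e.g.:
+//
+//   const crc32c = new CRC32C();
+//   crc32c.update(Buffer.from('data'));
+//   crc32c.toString(); // 'rth90Q=='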
+
+import {
+  CRC32C,
+  CRC32CValidator,
+  CRC32C_EXTENSIONS,
+  CRC32C_EXTENSION_TABLE,
+  CRC32C_EXCEPTION_MESSAGES,
+} from '../src';
+import * as assert from 'assert';
+
+const KNOWN_INPUT_TO_CRC32C = {
+  /** empty string (i.e. nothing to 'update') */
+  '': 'AAAAAA==',
+  /** known case #1 - validated from actual GCS object upload + metadata retrieval */
+  data: 'rth90Q==',
+  /** known case #2 - validated from actual GCS object upload + metadata retrieval */
+  'some text\n': 'DkjKuA==',
+  /** arbitrary large string */
+  ['a'.repeat(2 ** 16)]: 'TpXtPw==',
+} as const;
+
+describe('CRC32C', () => {
+  describe('instance', () => {
+    describe('#constructor', () => {
+      it('should initialize the value to `0`', () => {
+        const crc32c = new CRC32C();
+
+        assert.equal(crc32c.valueOf(), 0);
+      });
+
+      it('should accept an `initialValue`', () => {
+        const initialValue = 123;
+
+        const crc32c = new CRC32C(initialValue);
+
+        assert.equal(crc32c.valueOf(), initialValue);
+      });
+    });
+
+    describe('#update', () => {
+      it('should produce the correct calculation given the input (single buffer)', () => {
+        for (const [input, expected] of Object.entries(KNOWN_INPUT_TO_CRC32C)) {
+          const crc32c = new CRC32C();
+
+          const buffer = Buffer.from(input);
+
+          crc32c.update(buffer);
+
+          const result = crc32c.toString();
+
+          assert.equal(
+            result,
+            expected,
+            `Expected '${input}' to produce \`${expected}\` - not \`${result}\``
+          );
+        }
+      });
+
+      it('should produce the correct calculation given the input (multiple buffers)', () => {
+        for (const [input, expected] of Object.entries(KNOWN_INPUT_TO_CRC32C)) {
+          const crc32c = new CRC32C();
+
+          for (const char of input) {
+            const buffer = Buffer.from(char);
+
+            crc32c.update(buffer);
+          }
+
+          const result = crc32c.toString();
+
+          assert.equal(
+            result,
+            expected,
+            `Expected '${input}' to produce \`${expected}\` - not \`${result}\``
+          );
+        }
+      });
+
+      it('should not mutate a provided buffer', () => {
+        const crc32c = new CRC32C();
+
+        const value = 'abc';
+        const buffer = Buffer.from(value);
+
+        crc32c.update(buffer);
+
+        assert.equal(buffer.toString(), value);
+      });
+    });
+
+    describe('#validate', () => {
+      it('should validate a provided `number`', () => {
+        for (const [input, expected] of Object.entries(KNOWN_INPUT_TO_CRC32C)) {
+          const crc32c = new CRC32C();
+          const expectedNumber = Buffer.from(expected, 'base64').readInt32BE();
+
+          const wrongNumber = expectedNumber + 1;
+
+          crc32c.update(Buffer.from(input));
+
+          assert.equal(crc32c.validate(wrongNumber), false);
+          assert.equal(crc32c.validate(expectedNumber), true);
+        }
+      });
+
+      it('should validate a provided `string`', () => {
+        for (const [input, expected] of Object.entries(KNOWN_INPUT_TO_CRC32C)) {
+          const crc32c = new CRC32C();
+          const expectedString = expected;
+
+          // Want to test against a string generated in a valid way
+          const crc32cForIncorrectString = new CRC32C();
+          const wrongStringInput = Buffer.from(input + ' ');
+          crc32cForIncorrectString.update(wrongStringInput);
+          const wrongString = crc32cForIncorrectString.toString();
+
+          crc32c.update(Buffer.from(input));
+
+          assert.equal(crc32c.validate(wrongString), false);
+          assert.equal(crc32c.validate(expectedString), true);
+        }
+      });
+
+      it('should validate a provided `Buffer`', () => {
+        for (const [input, expected] of Object.entries(KNOWN_INPUT_TO_CRC32C)) {
+          const crc32c = new CRC32C();
+          const expectedBuffer = Buffer.from(expected, 'base64');
+
+          // Want to test against a `Buffer` generated in a valid way
+          const crc32cForIncorrectString = new CRC32C();
+          const wrongBufferInput = Buffer.from(input + ' ');
+          crc32cForIncorrectString.update(wrongBufferInput);
+          const wrongBuffer = crc32cForIncorrectString.toBuffer();
+
+          crc32c.update(Buffer.from(input));
+
+          assert.equal(crc32c.validate(wrongBuffer), false);
+          assert.equal(crc32c.validate(expectedBuffer), true);
+        }
+      });
+
+      it('should validate a provided `CRC32C`', () => {
+        for (const [input] of Object.entries(KNOWN_INPUT_TO_CRC32C)) {
+          const crc32c = new CRC32C();
+          const crc32cExpected = new CRC32C();
+          const wrongCRC32C = new CRC32C();
+
+          const wrongBufferInput = Buffer.from(input + ' ');
+
+          crc32c.update(Buffer.from(input));
+          crc32cExpected.update(Buffer.from(input));
+          wrongCRC32C.update(wrongBufferInput);
+
+          assert.equal(crc32c.validate(wrongCRC32C), false);
+          assert.equal(crc32c.validate(crc32cExpected), true);
+        }
+      });
+
+      it('should validate a provided generic `CRC32CValidator`', () => {
+        for (const [input, expected] of Object.entries(KNOWN_INPUT_TO_CRC32C)) {
+          const crc32c = new CRC32C();
+          const crc32cExpectedValidator: CRC32CValidator = {
+            toString: () => expected,
+            update: () => {},
+            validate: () => false,
+          };
+          const wrongCRC32CValidator: CRC32CValidator = {
+            toString: () => {
+              const crc32c = new CRC32C();
+              // Want to test against a `Buffer` generated in a valid way
+              const wrongBufferInput = Buffer.from(input + ' ');
+
+              crc32c.update(wrongBufferInput);
+
+              return crc32c.toString();
+            },
+            update: () => {},
+            validate: () => false,
+          };
+
+          crc32c.update(Buffer.from(input));
+
+          assert.equal(crc32c.validate(wrongCRC32CValidator), false);
+          assert.equal(crc32c.validate(crc32cExpectedValidator), true);
+        }
+      });
+    });
+
+    describe('#toBuffer', () => {
+      it('should return a valid 4-byte buffer', () => {
+        // At least one of our inputs should produce a negative 32-bit number - to prove we're not using unsigned integers
+        // This ensures that, internally, we're handling unsigned integers accurately
+        let atLeastOneWasSigned = false;
+
+        for (const [input] of Object.entries(KNOWN_INPUT_TO_CRC32C)) {
+          const crc32c = new CRC32C();
+          crc32c.update(Buffer.from(input));
+
+          const value = crc32c.valueOf();
+
+          if (value < 0) {
+            // this is a negative number, and is thus definitely signed
+            atLeastOneWasSigned = true;
+          }
+
+          const buffer = Buffer.alloc(4);
+          buffer.writeInt32BE(value);
+
+          assert.equal(crc32c.toBuffer().byteLength, 4);
+          assert.equal(Buffer.compare(crc32c.toBuffer(), buffer), 0);
+        }
+
+        assert(atLeastOneWasSigned);
+      });
+    });
+
+    describe('#toJSON', () => {
+      it('should return the expected JSON', () => {
+        for (const [input, expected] of Object.entries(KNOWN_INPUT_TO_CRC32C)) {
+          const crc32c = new CRC32C();
+          crc32c.update(Buffer.from(input));
+
+          const stringified = JSON.stringify({crc32c});
+
+          assert.equal(crc32c.toJSON(), expected);
+          assert.deepStrictEqual(JSON.parse(stringified), {crc32c: expected});
+        }
+      });
+    });
+
+    describe('#toString', () => {
+      it('should return the expected string', () => {
+        for (const [input, expected] of Object.entries(KNOWN_INPUT_TO_CRC32C)) {
+          const crc32c = new CRC32C();
+          crc32c.update(Buffer.from(input));
+
+          const stringified = `${crc32c}`;
+
+          assert.equal(crc32c.toString(), expected);
+          assert.equal(stringified, expected);
+        }
+      });
+    });
+
+    describe('#valueOf', () => {
+      it('should return the expected number', () => {
+        for (const [input, expected] of Object.entries(KNOWN_INPUT_TO_CRC32C)) {
+          const crc32c = new CRC32C();
+          crc32c.update(Buffer.from(input));
+
+          const expectedNumber = Buffer.from(expected,
'base64').readInt32BE(); + + assert.equal(crc32c.valueOf(), expectedNumber); + assert.equal(+crc32c, expectedNumber); + + // All `CRC32C` values should be safe integers + assert(Number.isSafeInteger(+crc32c)); + } + }); + }); + }); + + describe('static', () => { + describe('.CRC32C_EXTENSIONS', () => { + it('should be the same as the exported object', () => { + assert.equal(CRC32C.CRC32C_EXTENSIONS, CRC32C_EXTENSIONS); + }); + }); + + describe('.CRC32C_EXTENSION_TABLE', () => { + it('should be the same as the exported object', () => { + assert.equal(CRC32C.CRC32C_EXTENSION_TABLE, CRC32C_EXTENSION_TABLE); + }); + }); + + describe('.from', () => { + describe('`ArrayBuffer`', () => { + it('should generate from `ArrayBuffer`', () => { + for (const [, expected] of Object.entries(KNOWN_INPUT_TO_CRC32C)) { + const value = Buffer.from(expected, 'base64').readInt32BE(); + + const arrayBufferView = new Int32Array(1); + const dataView = new DataView(arrayBufferView.buffer); + dataView.setInt32(0, value, false); + + // Pass an `ArrayBuffer` + const crc32c = CRC32C.from(arrayBufferView.buffer); + + assert.equal(crc32c.valueOf(), dataView.getInt32(0, false)); + + // should not update source object + crc32c.update(Buffer.from(' ')); + + assert.notEqual(crc32c.valueOf(), dataView.getInt32(0, false)); + } + }); + + it('should raise a `RangeError` on invalid buffers', () => { + for (let i = 0; i < 8; i++) { + // `Int32Array` with length of 1 are valid + if (i === 1) continue; + + const arrayBufferView = new Int32Array(i); + + const errorMessage = + CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BUFFER_LENGTH(i * 4); + const expectedError = RangeError(errorMessage); + + assert.throws( + () => CRC32C.from(arrayBufferView.buffer), + expectedError + ); + } + }); + }); + + describe('`ArrayBufferView`', () => { + it('should generate from `ArrayBufferView`', () => { + for (const [, expected] of Object.entries(KNOWN_INPUT_TO_CRC32C)) { + const value = Buffer.from(expected, 'base64').readInt32BE(); + + const arrayBufferView = new Int32Array(1); + const dataView = new DataView(arrayBufferView.buffer); + dataView.setInt32(0, value, false); + + // Pass an `ArrayBufferView` + const crc32c = CRC32C.from(arrayBufferView); + + assert.equal(crc32c.valueOf(), dataView.getInt32(0, false)); + + // should not update source object + crc32c.update(Buffer.from(' ')); + + assert.notEqual(crc32c.valueOf(), dataView.getInt32(0, false)); + } + }); + + it('should raise a `RangeError` on invalid buffers', () => { + for (let i = 0; i < 8; i++) { + // `Int32Array` with length of 1 are valid + if (i === 1) continue; + + const arrayBufferView = new Int32Array(i); + + const errorMessage = + CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BUFFER_LENGTH(i * 4); + const expectedError = RangeError(errorMessage); + + assert.throws(() => CRC32C.from(arrayBufferView), expectedError); + } + }); + }); + + describe('`Buffer`', () => { + it('should generate from `Buffer`', () => { + for (const [, expected] of Object.entries(KNOWN_INPUT_TO_CRC32C)) { + const buffer = Buffer.from(expected, 'base64'); + + const crc32c = CRC32C.from(buffer); + + assert.equal(Buffer.compare(crc32c.toBuffer(), buffer), 0); + + // should not update source object + crc32c.update(Buffer.from(' ')); + + assert.notEqual(Buffer.compare(crc32c.toBuffer(), buffer), 0); + } + }); + + it('should raise a `RangeError` on invalid buffers', () => { + for (let i = 0; i < 8; i++) { + // Buffers with length of 4 are valid + if (i === 4) continue; + + const buffer = Buffer.alloc(i); + + const errorMessage = + 
CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BUFFER_LENGTH(i);
+          const expectedError = RangeError(errorMessage);
+
+          assert.throws(() => CRC32C.from(buffer), expectedError);
+        }
+      });
+    });
+
+    describe('`CRC32C`', () => {
+      it('should generate from `CRC32C`', () => {
+        for (const [, expected] of Object.entries(KNOWN_INPUT_TO_CRC32C)) {
+          const baseCRC32C = CRC32C.from(expected);
+          const crc32c = CRC32C.from(baseCRC32C);
+
+          assert.equal(crc32c.valueOf(), baseCRC32C.valueOf());
+
+          // should not update source object
+          crc32c.update(Buffer.from(' '));
+
+          assert.notEqual(crc32c.valueOf(), baseCRC32C.valueOf());
+        }
+      });
+    });
+
+    describe('`CRC32CValidator`', () => {
+      it('should generate from `CRC32CValidator`', () => {
+        for (const [, expected] of Object.entries(KNOWN_INPUT_TO_CRC32C)) {
+          const baseCRC32C: CRC32CValidator = {
+            toString: () => expected,
+            update: () => {},
+            validate: () => false,
+          };
+          const crc32c = CRC32C.from(baseCRC32C);
+
+          assert.equal(crc32c.toString(), baseCRC32C.toString());
+
+          // should not update source object
+          crc32c.update(Buffer.from(' '));
+
+          assert.notEqual(crc32c.toString(), baseCRC32C.toString());
+        }
+      });
+    });
+
+    describe('`string`', () => {
+      it('should generate from base64-encoded data', () => {
+        for (const [, expected] of Object.entries(KNOWN_INPUT_TO_CRC32C)) {
+          const crc32c = CRC32C.from(expected);
+
+          assert.equal(crc32c.toString(), expected);
+        }
+      });
+
+      it('should raise a `RangeError` on invalid strings', () => {
+        for (let i = 0; i < 8; i++) {
+          // Buffers with length of 4 are valid
+          if (i === 4) continue;
+
+          const string = Buffer.alloc(i).toString('base64');
+
+          const errorMessage =
+            CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BASE64_RANGE(i);
+          const expectedError = RangeError(errorMessage);
+
+          assert.throws(() => CRC32C.from(string), expectedError);
+        }
+      });
+    });
+
+    describe('`number`', () => {
+      it('should generate from `number`', () => {
+        // At least one of our inputs should produce a negative 32-bit number - to prove we're not using unsigned integers
+        // This ensures that, internally, we're handling unsigned integers accurately
+        let atLeastOneWasSigned = false;
+
+        for (const [, expected] of Object.entries(KNOWN_INPUT_TO_CRC32C)) {
+          const number = Buffer.from(expected, 'base64').readInt32BE();
+
+          const crc32c = CRC32C.from(number);
+
+          if (number < 0) {
+            // this is a negative number, and is thus definitely signed
+            atLeastOneWasSigned = true;
+          }
+
+          assert.equal(crc32c.valueOf(), number);
+        }
+
+        assert(atLeastOneWasSigned);
+      });
+
+      it('should raise a `RangeError` on invalid integers', () => {
+        const INVALID_SET = [
+          NaN, // not a safe integer
+          0.5, // not an integer
+          2 ** 32 + 1, // too high - out of valid range
+          -(2 ** 32) - 1, // too low - out of valid range
+        ];
+
+        for (const number of INVALID_SET) {
+          const errorMessage =
+            CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_INTEGER(number);
+          const expectedError = RangeError(errorMessage);
+
+          assert.throws(() => CRC32C.from(number), expectedError);
+        }
+      });
+    });
+  });
+});
diff --git a/test/file.ts b/test/file.ts
index 01e935604..171b88020 100644
--- a/test/file.ts
+++ b/test/file.ts
@@ -28,10 +28,8 @@ import * as crypto from 'crypto';
import * as duplexify from 'duplexify';
import * as extend from 'extend';
import * as fs from 'fs';
-import * as os from 'os';
-import * as path from 'path';
import * as proxyquire from 'proxyquire';
-import * as resumableUpload from '../src/gcs-resumable-upload';
+import * as resumableUpload from '../src/resumable-upload';
import * as
sinon from 'sinon'; import * as tmp from 'tmp'; import * as zlib from 'zlib'; @@ -46,7 +44,7 @@ import { SetFileMetadataOptions, GetSignedUrlConfig, GenerateSignedPostPolicyV2Options, - GenerateSignedPostPolicyV2Callback, + CRC32C, } from '../src'; import { SignedPostPolicyV4Output, @@ -66,10 +64,6 @@ class HTTPError extends Error { } } -class ResumableUploadError extends Error { - additionalInfo?: string; -} - let promisified = false; let makeWritableStreamOverride: Function | null; let handleRespOverride: Function | null; @@ -119,13 +113,6 @@ const fakeZlib = extend(true, {}, zlib, { }, }); -let hashStreamValidationOverride: Function | null; -// eslint-disable-next-line @typescript-eslint/no-var-requires -const hashStreamValidation = require('hash-stream-validation'); -function fakeHashStreamValidation(...args: Array<{}>) { - return (hashStreamValidationOverride || hashStreamValidation)(...args); -} - // eslint-disable-next-line @typescript-eslint/no-var-requires const osCached = extend(true, {}, require('os')); const fakeOs = extend(true, {}, osCached); @@ -169,19 +156,6 @@ class FakeServiceObject extends ServiceObject { } } -// eslint-disable-next-line @typescript-eslint/no-explicit-any -let xdgConfigOverride: any; -// eslint-disable-next-line @typescript-eslint/no-var-requires -const xdgBasedirCached = require('xdg-basedir'); -const fakeXdgBasedir = extend(true, {}, xdgBasedirCached); -Object.defineProperty(fakeXdgBasedir, 'config', { - get() { - return xdgConfigOverride === false - ? false - : xdgConfigOverride || xdgBasedirCached.config; - }, -}); - const fakeSigner = { URLSigner: () => {}, }; @@ -204,6 +178,19 @@ describe('File', () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any let BUCKET: any; + const DATA = 'test data'; + // crc32c hash of 'test data' + const CRC32C_HASH = 'M3m0yg=='; + // md5 hash of 'test data' + const MD5_HASH = '63M6AMDJ0zbmVpGjerVCkw=='; + // crc32c hash of `zlib.gzipSync(Buffer.from(DATA), {level: 9})` + const GZIPPED_DATA = Buffer.from( + 'H4sIAAAAAAACEytJLS5RSEksSQQAsq4I0wkAAAA=', + 'base64' + ); + //crc32c hash of `GZIPPED_DATA` + const CRC32C_HASH_GZIP = '64jygg=='; + before(() => { File = proxyquire('../src/file.js', { './nodejs-common': { @@ -212,11 +199,9 @@ describe('File', () => { }, '@google-cloud/promisify': fakePromisify, fs: fakeFs, - '../src/gcs-resumable-upload': fakeResumableUpload, - 'hash-stream-validation': fakeHashStreamValidation, + '../src/resumable-upload': fakeResumableUpload, os: fakeOs, './signer': fakeSigner, - 'xdg-basedir': fakeXdgBasedir, zlib: fakeZlib, }).File; }); @@ -224,7 +209,6 @@ describe('File', () => { beforeEach(() => { extend(true, fakeFs, fsCached); extend(true, fakeOs, osCached); - xdgConfigOverride = null; // eslint-disable-next-line @typescript-eslint/no-explicit-any FakeServiceObject.prototype.request = util.noop as any; @@ -267,7 +251,6 @@ describe('File', () => { createGunzipOverride = null; handleRespOverride = null; - hashStreamValidationOverride = null; makeWritableStreamOverride = null; resumableUploadOverride = null; }); @@ -455,6 +438,17 @@ describe('File', () => { new File(BUCKET, FILE_NAME, {encryptionKey: key}); }); + it('should accept a `crc32cGenerator`', () => { + const crc32cGenerator = () => {}; + + const file = new File(BUCKET, 'name', {crc32cGenerator}); + assert.strictEqual(file.crc32cGenerator, crc32cGenerator); + }); + + it("should use the bucket's `crc32cGenerator` by default", () => { + assert.strictEqual(file.crc32cGenerator, BUCKET.crc32cGenerator); + }); + 
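+  // `crc32cGenerator` is a zero-argument factory returning a `CRC32CValidator`;
+  // it cascades Storage -> Bucket -> File, so a custom validator supplied once
+  // on `Storage` is inherited everywhere below it. A sketch of an override
+  // (usage illustrative):
+  //
+  //   const storage = new Storage({crc32cGenerator: () => new CRC32C()});
+  //   const file = storage.bucket('my-bucket').file('my-file');
+  //   // file.crc32cGenerator === storage.crc32cGenerator
+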
describe('userProject', () => { const USER_PROJECT = 'grapce-spaceship-123'; @@ -876,9 +870,6 @@ describe('File', () => { }); describe('createReadStream', () => { - const CRC32C_HASH = 'crc32c-hash'; - const MD5_HASH = 'md5-hash'; - function getFakeRequest(data?: {}) { let requestOptions: DecorateRequestOptions | undefined; @@ -1257,14 +1248,7 @@ describe('File', () => { }); describe('compression', () => { - const DATA = 'test data'; - const GZIPPED_DATA = zlib.gzipSync(DATA); - beforeEach(() => { - hashStreamValidationOverride = () => - Object.assign(new PassThrough(), { - test: () => true, - }); handleRespOverride = ( err: Error, res: {}, @@ -1277,39 +1261,39 @@ describe('File', () => { return { headers: { 'content-encoding': 'gzip', - 'x-goog-hash': `crc32c=${CRC32C_HASH},md5=${MD5_HASH}`, + 'x-goog-hash': `crc32c=${CRC32C_HASH_GZIP},md5=${MD5_HASH}`, }, }; }, }); callback(null, null, rawResponseStream); - setImmediate(() => { - rawResponseStream.end(GZIPPED_DATA); - }); + + rawResponseStream.end(GZIPPED_DATA); }; file.requestStream = getFakeSuccessfulRequest(GZIPPED_DATA); }); - it('should gunzip the response', done => { - file - .createReadStream() - .once('error', done) - .on('data', (data: {}) => { - assert.strictEqual(data.toString(), DATA); - done(); - }) - .resume(); + it('should gunzip the response', async () => { + const collection: Buffer[] = []; + + for await (const data of file.createReadStream()) { + collection.push(data); + } + + assert.equal(Buffer.concat(collection).toString(), DATA); }); - it('should not gunzip the response if "decompress: false" is passed', done => { - file - .createReadStream({decompress: false}) - .once('error', done) - .on('data', (data: {}) => { - assert.strictEqual(data, GZIPPED_DATA); - done(); - }) - .resume(); + it('should not gunzip the response if "decompress: false" is passed', async () => { + const collection: Buffer[] = []; + + for await (const data of file.createReadStream({decompress: false})) { + collection.push(data); + } + + assert.equal( + Buffer.compare(Buffer.concat(collection), GZIPPED_DATA), + 0 + ); }); it('should emit errors from the gunzip stream', done => { @@ -1354,17 +1338,15 @@ describe('File', () => { }); describe('validation', () => { - const data = 'test'; - let fakeValidationStream: Stream & {test: Function}; + let responseCRC32C = CRC32C_HASH; + let responseMD5 = MD5_HASH; beforeEach(() => { - file.getMetadata = () => Promise.resolve({}); - fakeValidationStream = Object.assign(new PassThrough(), { - test: () => true, - }); - hashStreamValidationOverride = () => { - return fakeValidationStream; - }; + responseCRC32C = CRC32C_HASH; + responseMD5 = MD5_HASH; + + file.getMetadata = async () => ({}); + handleRespOverride = ( err: Error, res: {}, @@ -1376,75 +1358,72 @@ describe('File', () => { toJSON() { return { headers: { - 'x-goog-hash': `crc32c=${CRC32C_HASH},md5=${MD5_HASH}`, + 'x-goog-hash': `crc32c=${responseCRC32C},md5=${responseMD5}`, }, }; }, }); callback(null, null, rawResponseStream); setImmediate(() => { - rawResponseStream.end(data); + rawResponseStream.end(DATA); }); }; - file.requestStream = getFakeSuccessfulRequest(data); + file.requestStream = getFakeSuccessfulRequest(DATA); }); + function setFileValidationToError(e: Error = new Error('test-error')) { + // Simulating broken CRC32C instance - used by the validation stream + file.crc32cGenerator = () => { + class C extends CRC32C { + update() { + throw e; + } + } + + return new C(); + }; + } + describe('server decompression', () => { it('should skip 
validation if file was stored compressed', done => { + file.metadata.crc32c = '.invalid.'; file.metadata.contentEncoding = 'gzip'; - const validateStub = sinon.stub().returns(true); - fakeValidationStream.test = validateStub; - file .createReadStream({validation: 'crc32c'}) .on('error', done) - .on('end', () => { - assert(validateStub.notCalled); - done(); - }) + .on('end', done) .resume(); }); }); it('should emit errors from the validation stream', done => { - const error = new Error('Error.'); + const expectedError = new Error('test error'); - hashStreamValidationOverride = () => { - setImmediate(() => { - fakeValidationStream.emit('error', error); - }); - return fakeValidationStream; - }; - - file.requestStream = getFakeSuccessfulRequest(data); + file.requestStream = getFakeSuccessfulRequest(DATA); + setFileValidationToError(expectedError); file .createReadStream() .on('error', (err: Error) => { - assert.strictEqual(err, error); + assert(err === expectedError); + done(); }) .resume(); }); it('should not handle both error and end events', done => { - const error = new Error('Error.'); + const expectedError = new Error('test error'); - hashStreamValidationOverride = () => { - setImmediate(() => { - fakeValidationStream.emit('error', error); - }); - return fakeValidationStream; - }; - - file.requestStream = getFakeSuccessfulRequest(data); + file.requestStream = getFakeSuccessfulRequest(DATA); + setFileValidationToError(expectedError); file .createReadStream() .on('error', (err: Error) => { - assert.strictEqual(err, error); - fakeValidationStream.emit('end'); + assert(err === expectedError); + setImmediate(done); }) .on('end', () => { @@ -1461,10 +1440,12 @@ describe('File', () => { file.getMetadata = (options: GetFileMetadataOptions) => { assert.strictEqual(options.userProject, fakeOptions.userProject); setImmediate(done); - return Promise.resolve({}); + return Promise.resolve({ + crc32c: CRC32C_HASH, + }); }; - file.requestStream = getFakeSuccessfulRequest('data'); + file.requestStream = getFakeSuccessfulRequest(DATA); file.createReadStream(fakeOptions).on('error', done).resume(); }); @@ -1475,7 +1456,7 @@ describe('File', () => { return Promise.reject(error); }; - file.requestStream = getFakeSuccessfulRequest('data'); + file.requestStream = getFakeSuccessfulRequest(DATA); file .createReadStream() @@ -1487,13 +1468,8 @@ describe('File', () => { }); it('should validate with crc32c', done => { - file.requestStream = getFakeSuccessfulRequest(data); - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (fakeValidationStream as any).test = (algo: string, value: string) => { - assert.strictEqual(algo, 'crc32c'); - assert.strictEqual(value, CRC32C_HASH.substr(4)); - return true; - }; + file.requestStream = getFakeSuccessfulRequest(DATA); + file .createReadStream({validation: 'crc32c'}) .on('error', done) @@ -1503,8 +1479,9 @@ describe('File', () => { it('should emit an error if crc32c validation fails', done => { file.requestStream = getFakeSuccessfulRequest('bad-data'); - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (fakeValidationStream as any).test = () => false; + + responseCRC32C = 'bad-crc32c'; + file .createReadStream({validation: 'crc32c'}) .on('error', (err: ApiError) => { @@ -1515,13 +1492,8 @@ describe('File', () => { }); it('should validate with md5', done => { - file.requestStream = getFakeSuccessfulRequest(data); - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (fakeValidationStream as any).test = (algo: string, value: string) => { 
-        assert.strictEqual(algo, 'md5');
-        assert.strictEqual(value, MD5_HASH);
-        return true;
-      };
+      file.requestStream = getFakeSuccessfulRequest(DATA);
+
      file
        .createReadStream({validation: 'md5'})
        .on('error', done)
@@ -1531,11 +1503,9 @@
      it('should emit an error if md5 validation fails', done => {
        file.requestStream = getFakeSuccessfulRequest('bad-data');
-        // eslint-disable-next-line @typescript-eslint/no-explicit-any
-        (fakeValidationStream as any).test = (algo: string) => {
-          assert.strictEqual(algo, 'md5');
-          return false;
-        };
+
+        responseMD5 = 'bad-md5';
+
        file
          .createReadStream({validation: 'md5'})
          .on('error', (err: ApiError) => {
@@ -1547,11 +1517,9 @@
      it('should default to crc32c validation', done => {
        file.requestStream = getFakeSuccessfulRequest('bad-data');
-        // eslint-disable-next-line @typescript-eslint/no-explicit-any
-        (fakeValidationStream as any).test = (algo: string) => {
-          assert.strictEqual(algo, 'crc32c');
-          return false;
-        };
+
+        responseCRC32C = 'bad-crc32c';
+
        file
          .createReadStream()
          .on('error', (err: ApiError) => {
@@ -1562,9 +1530,9 @@
      });

      it('should ignore a data mismatch if validation: false', done => {
-        file.requestStream = getFakeSuccessfulRequest(data);
+        file.requestStream = getFakeSuccessfulRequest(DATA);
-        // eslint-disable-next-line @typescript-eslint/no-explicit-any
-        (fakeValidationStream as any).test = () => false;
        file
          .createReadStream({validation: false})
          .resume()
@@ -1591,30 +1559,21 @@
          });
          callback(null, null, rawResponseStream);
          setImmediate(() => {
-            rawResponseStream.end(data);
+            rawResponseStream.end(DATA);
          });
        };
-        file.requestStream = getFakeSuccessfulRequest(data);
-        // eslint-disable-next-line @typescript-eslint/no-explicit-any
-        (fakeValidationStream as any).test = (algo: string, value: string) => {
-          assert.strictEqual(algo, 'crc32c');
-          assert.strictEqual(value, CRC32C_HASH.substr(4));
-          return true;
-        };
+        file.requestStream = getFakeSuccessfulRequest(DATA);

        file.createReadStream().on('error', done).on('end', done).resume();
      });

      describe('destroying the through stream', () => {
-        beforeEach(() => {
-          // eslint-disable-next-line @typescript-eslint/no-explicit-any
-          (fakeValidationStream as any).test = () => false;
-        });
-
        it('should destroy after failed validation', done => {
          file.requestStream = getFakeSuccessfulRequest('bad-data');

+          responseMD5 = 'bad-md5';
+
          const readStream = file.createReadStream({validation: 'md5'});
          readStream.destroy = (err: ApiError) => {
            assert.strictEqual(err.code, 'CONTENT_DOWNLOAD_MISMATCH');
@@ -1720,7 +1679,7 @@
      });

      it('should end the through stream', done => {
-        file.requestStream = getFakeSuccessfulRequest('body');
+        file.requestStream = getFakeSuccessfulRequest(DATA);

        const readStream = file.createReadStream({start: 100});
        readStream.end = done;
@@ -1772,7 +1731,6 @@

    it('should create a resumable upload URI', done => {
      const options = {
-        configPath: '/Users/user/.config/here',
        metadata: {
          contentType: 'application/json',
        },
@@ -1807,7 +1765,6 @@
          assert.strictEqual(opts.authClient, storage.authClient);
          assert.strictEqual(opts.apiEndpoint, storage.apiEndpoint);
          assert.strictEqual(opts.bucket, bucket.name);
-          assert.strictEqual(opts.configPath, options.configPath);
          assert.strictEqual(opts.file, file.name);
          assert.strictEqual(opts.generation, file.generation);
assert.strictEqual(opts.key, file.encryptionKey); @@ -1857,7 +1814,6 @@ describe('File', () => { }, }); const options = { - configPath: '/Users/user/.config/here', metadata: { contentType: 'application/json', }, @@ -1888,7 +1844,6 @@ describe('File', () => { assert.strictEqual(opts.authClient, storage.authClient); assert.strictEqual(opts.apiEndpoint, storage.apiEndpoint); assert.strictEqual(opts.bucket, bucket.name); - assert.strictEqual(opts.configPath, options.configPath); assert.strictEqual(opts.file, file.name); assert.strictEqual(opts.generation, file.generation); assert.strictEqual(opts.key, file.encryptionKey); @@ -2026,20 +1981,6 @@ describe('File', () => { writable.write('data'); }); - it('should start a resumable upload if configPath is provided', done => { - const options = { - metadata: METADATA, - configPath: '/config/path.json', - }; - const writable = file.createWriteStream(options); - - file.startResumableUpload_ = () => { - done(); - }; - - writable.write('data'); - }); - it('should start a resumable upload if specified', done => { const options = { metadata: METADATA, @@ -2055,126 +1996,6 @@ describe('File', () => { writable.write('data'); }); - it('should check if xdg-basedir is writable', done => { - const fakeDir = 'fake-xdg-dir'; - - xdgConfigOverride = fakeDir; - - Object.assign(fakeFs, { - access(dir: {}) { - assert.strictEqual(dir, fakeDir); - done(); - }, - }); - - file.createWriteStream({resumable: true}).write('data'); - }); - - it('should fall back to checking tmpdir', done => { - const fakeDir = 'fake-tmp-dir'; - - xdgConfigOverride = false; - - fakeOs.tmpdir = () => { - return fakeDir; - }; - - Object.assign(fakeFs, { - access(dir: {}) { - assert.strictEqual(dir, fakeDir); - done(); - }, - }); - - file.createWriteStream({resumable: true}).write('data'); - }); - - describe('config directory does not exist', () => { - const CONFIG_DIR = path.join(os.tmpdir(), `/fake-xdg-dir/${Date.now()}`); - - beforeEach(() => { - xdgConfigOverride = CONFIG_DIR; - fakeFs.access = fsCached.access; - }); - - it('should attempt to create the config directory', done => { - Object.assign(fakeFs, { - mkdir(dir: string, options: {}) { - assert.strictEqual(dir, CONFIG_DIR); - assert.deepStrictEqual(options, {mode: 0o0700}); - done(); - }, - }); - - const writable = file.createWriteStream({resumable: true}); - writable.write('data'); - }); - - it('should start a resumable upload if config directory created successfully', done => { - Object.assign(fakeFs, { - mkdir(dir: string, options: {}, callback: Function) { - callback(); - }, - }); - - file.startResumableUpload_ = () => { - // If no error is thrown here, we know the request completed successfully. - done(); - }; - - file.createWriteStream().write('data'); - }); - - it('should return error if resumable was requested, but a config directory could not be created', done => { - Object.assign(fakeFs, { - mkdir(dir: string, options: {}, callback: Function) { - callback(new Error()); - }, - }); - - const writable = file.createWriteStream({resumable: true}); - - writable.on('error', (err: ResumableUploadError) => { - assert.strictEqual(err.name, 'ResumableUploadError'); - assert.strictEqual( - err.message, - [ - 'A resumable upload could not be performed. The directory,', - `${CONFIG_DIR}, is not writable. You may try another upload,`, - 'this time setting `options.resumable` to `false`.', - ].join(' ') - ); - assert.strictEqual( - err.additionalInfo, - 'The directory does not exist.' 
-          );
-
-          done();
-        });
-
-        writable.write('data');
-      });
-
-      it('should fallback to a simple upload if the config directory could not be created', done => {
-        const options = {
-          metadata: METADATA,
-          customValue: true,
-        };
-
-        Object.assign(fakeFs, {
-          mkdir(dir: string, options: {}, callback: Function) {
-            callback(new Error());
-          },
-        });
-
-        file.startSimpleUpload_ = () => {
-          done();
-        };
-
-        file.createWriteStream(options).write('data');
-      });
-    });
-
    it('should default to a resumable upload', done => {
      const writable = file.createWriteStream({
        metadata: METADATA,
@@ -2370,7 +2191,7 @@ describe('File', () => {
      const data = 'test';

      const fakeMetadata = {
-        crc32c: {crc32c: '####wA=='},
+        crc32c: {crc32c: 'hqBywA=='},
        md5: {md5Hash: 'CY9rzUYh03PK3k6DJie09g=='},
      };
@@ -2580,30 +2401,6 @@ describe('File', () => {
    });
  });

-  describe('deleteResumableCache', () => {
-    it('should delete resumable file upload cache', done => {
-      file.generation = 123;
-
-      resumableUploadOverride = {
-        // eslint-disable-next-line @typescript-eslint/no-explicit-any
-        upload(opts: any) {
-          assert.strictEqual(opts.bucket, file.bucket.name);
-          assert.strictEqual(opts.file, file.name);
-          assert.strictEqual(opts.generation, file.generation);
-          assert.strictEqual(opts.retryOptions, file.storage.retryOptions);
-          assert.strictEqual(opts.params, file.preconditionOpts);
-
-          return {
-            deleteConfig: () => {
-              done();
-            },
-          };
-        },
-      };
-      file.deleteResumableCache();
-    });
-  });
-
  describe('download', () => {
    let fileReadStream: Readable;
@@ -2880,26 +2677,6 @@ describe('File', () => {
    });
  });

-  describe('getSignedPolicy', () => {
-    it('should alias to generateSignedPostPolicyV2', done => {
-      const options = {
-        expires: Date.now() + 2000,
-      };
-      const callback = () => {};
-
-      file.generateSignedPostPolicyV2 = (
-        argOpts: GenerateSignedPostPolicyV2Options,
-        argCb: GenerateSignedPostPolicyV2Callback
-      ) => {
-        assert.strictEqual(argOpts, options);
-        assert.strictEqual(argCb, callback);
-        done();
-      };
-
-      file.getSignedPolicy(options, callback);
-    });
-  });
-
  describe('generateSignedPostPolicyV2', () => {
    let CONFIG: GenerateSignedPostPolicyV2Options;
@@ -3723,9 +3500,10 @@ describe('File', () => {
    });

    it('should error if action is undefined', () => {
-      delete SIGNED_URL_CONFIG.action;
+      const urlConfig = {...SIGNED_URL_CONFIG} as Partial<GetSignedUrlConfig>;
+      delete urlConfig.action;
      assert.throws(() => {
-        file.getSignedUrl(SIGNED_URL_CONFIG, () => {}),
+        file.getSignedUrl(urlConfig, () => {}),
          ExceptionMessages.INVALID_ACTION;
      });
    });
@@ -4468,7 +4246,7 @@ describe('File', () => {
        await file.save(DATA, options);
        throw Error('unreachable');
      } catch (e) {
-        assert.strictEqual(e.message, 'first error');
+        assert.strictEqual((e as Error).message, 'first error');
      }
    });
@@ -4782,7 +4560,6 @@ describe('File', () => {
  describe('starting', () => {
    it('should start a resumable upload', done => {
      const options = {
-        configPath: '/Users/user/.config/here',
        metadata: {},
        offset: 1234,
        public: true,
@@ -4825,7 +4602,6 @@ describe('File', () => {
      assert.strictEqual(opts.authClient, authClient);
      assert.strictEqual(opts.apiEndpoint, storage.apiEndpoint);
      assert.strictEqual(opts.bucket, bucket.name);
-      assert.strictEqual(opts.configPath, options.configPath);
      assert.deepStrictEqual(opts.customRequestOptions, {
        headers: {
          a: 'b',
diff --git a/test/index.ts b/test/index.ts
index 782fb9e61..8283b41e6 100644
--- a/test/index.ts
+++ b/test/index.ts
@@ -26,7 +26,7 @@ import * as assert from 'assert';
 import {describe, it, before, beforeEach, after, afterEach} from 'mocha';
 import * as proxyquire from 'proxyquire';
 // eslint-disable-next-line @typescript-eslint/no-unused-vars
-import {Bucket} from '../src';
+import {Bucket, CRC32C_DEFAULT_VALIDATOR_GENERATOR} from '../src';
 import {GetFilesOptions} from '../src/bucket';
 import sinon = require('sinon');
 import {HmacKey} from '../src/hmacKey';
@@ -188,16 +188,6 @@ describe('Storage', () => {
    assert.strictEqual(calledWith.useAuthWithCustomEndpoint, true);
  });

-  it('should propagate autoRetry', () => {
-    const autoRetry = false;
-    const storage = new Storage({
-      projectId: PROJECT_ID,
-      autoRetry,
-    });
-    const calledWith = storage.calledWith_[0];
-    assert.strictEqual(calledWith.retryOptions.autoRetry, autoRetry);
-  });
-
  it('should propagate autoRetry in retryOptions', () => {
    const autoRetry = false;
    const storage = new Storage({
@@ -208,18 +198,6 @@ describe('Storage', () => {
    assert.strictEqual(calledWith.retryOptions.autoRetry, autoRetry);
  });

-  it('should throw if autoRetry is defined twice', () => {
-    const autoRetry = 10;
-    assert.throws(() => {
-      new Storage({
-        projectId: PROJECT_ID,
-        retryOptions: {autoRetry},
-        autoRetry,
-      }),
-        StorageExceptionMessages.AUTO_RETRY_DEPRECATED;
-    });
-  });
-
  it('should propagate retryDelayMultiplier', () => {
    const retryDelayMultiplier = 4;
    const storage = new Storage({
@@ -279,16 +257,6 @@ describe('Storage', () => {
    );
  });

-  it('should propagate maxRetries', () => {
-    const maxRetries = 10;
-    const storage = new Storage({
-      projectId: PROJECT_ID,
-      maxRetries,
-    });
-    const calledWith = storage.calledWith_[0];
-    assert.strictEqual(calledWith.retryOptions.maxRetries, maxRetries);
-  });
-
  it('should propagate maxRetries in retryOptions', () => {
    const maxRetries = 1;
    const storage = new Storage({
@@ -299,18 +267,6 @@ describe('Storage', () => {
    assert.strictEqual(calledWith.retryOptions.maxRetries, maxRetries);
  });

-  it('should throw if maxRetries is defined twice', () => {
-    const maxRetries = 10;
-    assert.throws(() => {
-      new Storage({
-        projectId: PROJECT_ID,
-        retryOptions: {maxRetries},
-        maxRetries,
-      }),
-        StorageExceptionMessages.MAX_RETRIES_DEPRECATED;
-    });
-  });
-
  it('should set retryFunction', () => {
    const storage = new Storage({
      projectId: PROJECT_ID,
@@ -442,6 +398,20 @@ describe('Storage', () => {
    assert.strictEqual(calledWith.apiEndpoint, 'https://some.fake.endpoint');
  });

+  it('should accept a `crc32cGenerator`', () => {
+    const crc32cGenerator = () => {};
+
+    const storage = new Storage({crc32cGenerator});
+    assert.strictEqual(storage.crc32cGenerator, crc32cGenerator);
+  });
+
+  it('should use `CRC32C_DEFAULT_VALIDATOR_GENERATOR` by default', () => {
+    assert.strictEqual(
+      storage.crc32cGenerator,
+      CRC32C_DEFAULT_VALIDATOR_GENERATOR
+    );
+  });
+
  describe('STORAGE_EMULATOR_HOST', () => {
    // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead.
    const EMULATOR_HOST = 'https://internal.benchmark.com/path';
diff --git a/test/nodejs-common/util.ts b/test/nodejs-common/util.ts
index 113cb853c..c5a802c06 100644
--- a/test/nodejs-common/util.ts
+++ b/test/nodejs-common/util.ts
@@ -1331,20 +1331,6 @@ describe('common/util', () => {
      };
    }

-    const retryOptionsTwoMaxRetries = {
-      retryOptions: {
-        maxRetries: 7,
-      },
-      maxRetries: 7,
-    };
-
-    const retryOptionsTwoAutoRetry = {
-      retryOptions: {
-        autoRetry: false,
-      },
-      autoRetry: false,
-    };
-
    const retryOptionsConfig = {
      retryOptions: {
        autoRetry: false,
@@ -1550,20 +1536,6 @@ describe('common/util', () => {
      util.makeRequest(reqOpts, retryOptionsConfig, assert.ifError);
    });

-    it('should throw if autoRetry is specified twice', done => {
-      assert.throws(() => {
-        util.makeRequest(reqOpts, retryOptionsTwoAutoRetry, util.noop);
-      }, /autoRetry is deprecated. Use retryOptions.autoRetry instead\./);
-      done();
-    });
-
-    it('should throw if maxRetries is specified twice', done => {
-      assert.throws(() => {
-        util.makeRequest(reqOpts, retryOptionsTwoMaxRetries, util.noop);
-      }, /maxRetries is deprecated. Use retryOptions.maxRetries instead\./);
-      done();
-    });
-
    it('should allow request options to control retry setting', done => {
      retryRequestOverride = testCustomRetryRequestConfig(done);
      const reqOptsWithRetrySettings = extend(
diff --git a/test/gcs-resumable-upload.ts b/test/resumable-upload.ts
similarity index 90%
rename from test/gcs-resumable-upload.ts
rename to test/resumable-upload.ts
index 40fbe3dfc..abff21397 100644
--- a/test/gcs-resumable-upload.ts
+++ b/test/resumable-upload.ts
@@ -33,7 +33,7 @@ import {
  ApiError,
  CreateUriCallback,
  PROTOCOL_REGEX,
-} from '../src/gcs-resumable-upload';
+} from '../src/resumable-upload';
 import {GaxiosOptions, GaxiosError, GaxiosResponse} from 'gaxios';

 nock.disableNetConnect();
@@ -46,23 +46,6 @@ class AbortController {
  }
 }

-let configData = {} as {[index: string]: {}};
-class ConfigStore {
-  constructor(packageName: string, defaults: object, config: object) {
-    this.set('packageName', packageName);
-    this.set('config', config);
-  }
-  delete(key: string) {
-    delete configData[key];
-  }
-  get(key: string) {
-    return configData[key];
-  }
-  set(key: string, value: {}) {
-    configData[key] = value;
-  }
-}
-
 const RESUMABLE_INCOMPLETE_STATUS_CODE = 308;
 /** 256 KiB */
 const CHUNK_SIZE_MULTIPLE = 2 ** 18;
@@ -81,7 +64,7 @@ function mockAuthorizeRequest(
    .reply(code, data);
 }

-describe('gcs-resumable-upload', () => {
+describe('resumable-upload', () => {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  let upload: any;
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
@@ -111,13 +94,11 @@
  before(() => {
    mockery.registerMock('abort-controller', {default: AbortController});
-    mockery.registerMock('configstore', ConfigStore);
    mockery.enable({useCleanCache: true, warnOnUnregistered: false});
-    upload = require('../src/gcs-resumable-upload').upload;
+    upload = require('../src/resumable-upload').upload;
  });

  beforeEach(() => {
-    configData = {};
    REQ_OPTS = {url: 'http://fake.local'};
    up = upload({
      bucket: BUCKET,
@@ -318,22 +299,6 @@
      assert.strictEqual(up.predefinedAcl, 'private');
    });

-    it('should create a ConfigStore instance', () => {
-      assert.strictEqual(configData.packageName, 'gcs-resumable-upload');
-    });
-
-    it('should set the configPath', () => {
-      const configPath = '/custom/config/path';
-      // eslint-disable-next-line @typescript-eslint/no-unused-vars
-      const up = upload({
-        bucket: BUCKET,
-        file: FILE,
-        configPath,
-        retryOptions: RETRY_OPTIONS,
-      });
-      assert.deepStrictEqual(configData.config, {configPath});
-    });
-
    it('should set numBytesWritten to 0', () => {
      assert.strictEqual(up.numBytesWritten, 0);
    });
@@ -361,7 +326,7 @@
      assert.strictEqual(up.contentLength, '*');
    });

-    it('should localize the uri or get one from config', () => {
+    it('should localize the uri', () => {
      const uri = 'http://www.blah.com/';
      const upWithUri = upload({
        bucket: BUCKET,
@@ -371,15 +336,6 @@
      });
      assert.strictEqual(upWithUri.uriProvidedManually, true);
      assert.strictEqual(upWithUri.uri, uri);
-
-      configData[`${BUCKET}/${FILE}`] = {uri: 'fake-uri'};
-      const up = upload({
-        bucket: BUCKET,
-        file: FILE,
-        retryOptions: RETRY_OPTIONS,
-      });
-      assert.strictEqual(up.uriProvidedManually, false);
-      assert.strictEqual(up.uri, 'fake-uri');
    });

    it('should not have `chunkSize` by default', () => {
@@ -480,18 +436,6 @@
      };
      up.emit('writing');
    });
-
-    it('should save the uri to config on first write event', done => {
-      const uri = 'http://newly-created-uri';
-      up.createURI = (callback: CreateUriCallback) => {
-        callback(null, uri);
-      };
-      up.set = (props: {}) => {
-        assert.deepStrictEqual(props, {uri});
-        done();
-      };
-      up.emit('writing');
-    });
  });
 });
@@ -1292,12 +1236,6 @@
      up.responseHandler(RESP);
    });

-    it('should delete the config', done => {
-      const RESP = {data: '', status: 200};
-      up.deleteConfig = done;
-      up.responseHandler(RESP);
-    });
-
    it('should continue with multi-chunk upload when incomplete', done => {
      const lastByteReceived = 9;
@@ -1387,89 +1325,25 @@
    });
  });

-  describe('#ensureUploadingSameObject', () => {
-    let chunk = Buffer.alloc(0);
-
-    beforeEach(() => {
-      chunk = crypto.randomBytes(512);
-      up.upstreamChunkBuffer = chunk;
-    });
-
-    it('should not alter the chunk buffer', async () => {
-      await up.ensureUploadingSameObject();
-
-      assert.equal(Buffer.compare(up.upstreamChunkBuffer, chunk), 0);
-    });
-
-    describe('first write', () => {
-      it('should get the first chunk', async () => {
-        let calledGet = false;
-        up.get = (prop: string) => {
-          assert.strictEqual(prop, 'firstChunk');
-          calledGet = true;
-        };
-
-        const result = await up.ensureUploadingSameObject();
-
-        assert(result);
-        assert(calledGet);
-      });
-
-      describe('new upload', () => {
-        it('should save the uri and first chunk (16 bytes) if its not cached', done => {
-          const URI = 'uri';
-          up.uri = URI;
-          up.get = () => {};
-          up.set = (props: {uri?: string; firstChunk: Buffer}) => {
-            const firstChunk = chunk.slice(0, 16);
-            assert.deepStrictEqual(props.uri, URI);
-            assert.strictEqual(Buffer.compare(props.firstChunk, firstChunk), 0);
-            done();
-          };
-          up.ensureUploadingSameObject();
-        });
-      });
-
-      describe('continued upload', () => {
-        beforeEach(() => {
-          up.restart = () => {};
-        });
-
-        it('should not `#restart` and return `true` if cache is the same', async () => {
-          up.upstreamChunkBuffer = Buffer.alloc(512, 'a');
-          up.get = (param: string) => {
-            return param === 'firstChunk' ? Buffer.alloc(16, 'a') : undefined;
-          };
-
-          let calledRestart = false;
-          up.restart = () => {
-            calledRestart = true;
-          };
-
-          const result = await up.ensureUploadingSameObject();
-
-          assert(result);
-          assert.equal(calledRestart, false);
-        });
-
-        it('should `#restart` and return `false` if different', async () => {
-          up.upstreamChunkBuffer = Buffer.alloc(512, 'a');
-          up.get = (param: string) => {
-            return param === 'firstChunk' ? Buffer.alloc(16, 'b') : undefined;
-          };
-
-          let calledRestart = false;
-          up.restart = () => {
-            calledRestart = true;
-          };
-
-          const result = await up.ensureUploadingSameObject();
+  it('currentInvocationId.offset should be different after success', async () => {
+    const beforeCallInvocationId = up.currentInvocationId.offset;
+    up.makeRequest = () => {
+      return {};
+    };
+    await up.getAndSetOffset();
+    assert.notEqual(beforeCallInvocationId, up.currentInvocationId.offset);
+  });

-          assert(calledRestart);
-          assert.equal(result, false);
-        });
-      });
-    });
+  it('currentInvocationId.offset should be the same on error', async done => {
+    const beforeCallInvocationId = up.currentInvocationId.offset;
+    up.destroy = () => {
+      assert.equal(beforeCallInvocationId, up.currentInvocationId.offset);
+      done();
+    };
+    up.makeRequest = () => {
+      throw new Error() as GaxiosError;
+    };
+    await up.getAndSetOffset();
  });

  describe('#getAndSetOffset', () => {
@@ -1494,71 +1368,6 @@
      up.getAndSetOffset();
    });

-    it('currentInvocationId.offset should be different after success', async () => {
-      const beforeCallInvocationId = up.currentInvocationId.offset;
-      up.makeRequest = () => {
-        return {};
-      };
-      await up.getAndSetOffset();
-      assert.notEqual(beforeCallInvocationId, up.currentInvocationId.offset);
-    });
-
-    it('currentInvocationId.offset should be the same on error', async done => {
-      const beforeCallInvocationId = up.currentInvocationId.offset;
-      up.destroy = () => {
-        assert.equal(beforeCallInvocationId, up.currentInvocationId.offset);
-        done();
-      };
-      up.makeRequest = () => {
-        throw new Error() as GaxiosError;
-      };
-      await up.getAndSetOffset();
-    });
-
-    describe('restart on 404', () => {
-      const RESP = {status: 404} as GaxiosResponse;
-      const ERROR = new Error(':(') as GaxiosError;
-      ERROR.response = RESP;
-
-      beforeEach(() => {
-        up.makeRequest = async () => {
-          throw ERROR;
-        };
-      });
-
-      it('should restart the upload', done => {
-        up.restart = done;
-        up.getAndSetOffset();
-      });
-
-      it('should not restart if URI provided manually', done => {
-        up.uriProvidedManually = true;
-        up.restart = done; // will cause test to fail
-        up.on('error', (err: Error) => {
-          assert.strictEqual(err, ERROR);
-          done();
-        });
-        up.getAndSetOffset();
-      });
-    });
-
-    describe('restart on 410', () => {
-      const ERROR = new Error(':(') as GaxiosError;
-      const RESP = {status: 410} as GaxiosResponse;
-      ERROR.response = RESP;
-
-      beforeEach(() => {
-        up.makeRequest = async () => {
-          throw ERROR;
-        };
-      });
-
-      it('should restart the upload', done => {
-        up.restart = done;
-        up.getAndSetOffset();
-      });
-    });
-
    it('should set the offset from the range', async () => {
      up.makeRequest = async () => RESP;
      await up.getAndSetOffset();
@@ -1888,11 +1697,6 @@
      up.restart();
    });

-    it('should delete the config', done => {
-      up.deleteConfig = done;
-      up.restart();
-    });
-
    describe('starting a new upload', () => {
      it('should create a new URI', done => {
        up.createURI = () => {
          done();
        };
        up.restart();
      });
@@ -1917,21 +1721,6 @@
        up.restart();
      });

-      it('should save the uri to config when restarting', done => {
-        const uri = 'http://newly-created-uri';
-
-        up.createURI = (callback: Function) => {
-          callback(null, uri);
-        };
-
-        up.set = (props: {}) => {
-          assert.deepStrictEqual(props, {uri});
-          done();
-        };
-
-        up.restart();
-      });
-
      it('should start uploading', done => {
        up.createURI = (callback: Function) => {
          up.startUploading = done;
          callback();
        };
@@ -1942,51 +1731,6 @@
    });
  });

-  describe('#get', () => {
-    it('should return the value from the config store', () => {
-      const prop = 'property';
-      const value = 'abc';
-      up.configStore = {
-        get(name: string) {
-          assert.strictEqual(name, up.cacheKey);
-          const obj: {[i: string]: string} = {};
-          obj[prop] = value;
-          return obj;
-        },
-      };
-      assert.strictEqual(up.get(prop), value);
-    });
-  });
-
-  describe('#set', () => {
-    it('should set the value to the config store', done => {
-      const props = {setting: true};
-      up.configStore = {
-        set(name: string, prps: {}) {
-          assert.strictEqual(name, up.cacheKey);
-          assert.strictEqual(prps, props);
-          done();
-        },
-      };
-      up.set(props);
-    });
-  });
-
-  describe('#deleteConfig', () => {
-    it('should delete the entry from the config store', done => {
-      const props = {setting: true};
-
-      up.configStore = {
-        delete(name: string) {
-          assert.strictEqual(name, up.cacheKey);
-          done();
-        },
-      };
-
-      up.deleteConfig(props);
-    });
-  });
-
  describe('#onResponse', () => {
    beforeEach(() => {
      up.numRetries = 0;
diff --git a/test/signer.ts b/test/signer.ts
index 9025ae31b..a2b9868dd 100644
--- a/test/signer.ts
+++ b/test/signer.ts
@@ -30,6 +30,17 @@ import {
 } from '../src/signer';
 import {encodeURI, formatAsUTCISO, qsStringify} from '../src/util';
 import {ExceptionMessages} from '../src/storage';
+import {OutgoingHttpHeaders} from 'http';
+
+interface SignedUrlArgs {
+  bucket: string;
+  method: 'GET' | 'POST' | 'PUT' | 'DELETE';
+  contentMd5?: string;
+  contentType?: string;
+  extensionHeaders?: OutgoingHttpHeaders;
+  expiration?: number;
+  file: string;
+}

 describe('signer', () => {
  const BUCKET_NAME = 'bucket-name';
@@ -119,7 +130,7 @@ describe('signer', () => {
      await signer.getSignedUrl(CONFIG);

      assert(v2.calledOnce);
-      const v2arg = v2.getCall(0).args[0];
+      const v2arg = v2.getCall(0).args[0] as SignedUrlArgs;
      assert.strictEqual(v2arg.bucket, bucket.name);
      assert.strictEqual(v2arg.method, CONFIG.method);
      assert.strictEqual(v2arg.contentMd5, CONFIG.contentMd5);
@@ -147,7 +158,7 @@ describe('signer', () => {
      await signer.getSignedUrl(CONFIG);

      assert(v4.calledOnce);
-      const v4arg = v4.getCall(0).args[0];
+      const v4arg = v4.getCall(0).args[0] as SignedUrlArgs;
      assert.strictEqual(v4arg.bucket, bucket.name);
      assert.strictEqual(v4arg.method, CONFIG.method);
      assert.strictEqual(v4arg.contentMd5, CONFIG.contentMd5);
@@ -273,7 +284,10 @@ describe('signer', () => {
        assert(parseExpires.calledOnceWith(CONFIG.expires));
        const expiresInSeconds = parseExpires.getCall(0).lastArg;

-        assert(v2.getCall(0).args[0].expiration, expiresInSeconds);
+        assert(
+          (v2.getCall(0).args[0] as SignedUrlArgs).expiration,
+          expiresInSeconds
+        );
      });
    });
@@ -369,7 +383,7 @@ describe('signer', () => {
          .resolves({});

        await signer.getSignedUrl(CONFIG);
-        const v2arg = v2.getCall(0).args[0];
+        const v2arg = v2.getCall(0).args[0] as SignedUrlArgs;
        assert.strictEqual(v2arg.file, encoded);
        assert(signedUrl.includes(encoded));
      });
diff --git a/tsconfig.json b/tsconfig.json
index dac360ec8..f010f8b3d 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -10,6 +10,7 @@
    "test/**/*.ts",
    "system-test/*.ts",
    "conformance-test/*.ts",
-    "conformance-test/scenarios/*.ts"
+    "conformance-test/scenarios/*.ts",
+    "internal-tooling/*.ts"
  ]
 }