Skip to content

Commit

Permalink
Revert "Pull request opts into a separate function"
Browse files Browse the repository at this point in the history
This reverts commit c2f4dfe.
  • Loading branch information
danieljbruce committed Jul 4, 2022
1 parent c2f4dfe commit 4817863
Showing 1 changed file with 29 additions and 34 deletions.
63 changes: 29 additions & 34 deletions src/table.ts
Original file line number Diff line number Diff line change
Expand Up @@ -723,6 +723,7 @@ Please use the format 'prezzy' or '${instance.name}/tables/prezzy'.`);
const maxRetries = is.number(this.maxRetries) ? this.maxRetries! : 3;
let activeRequestStream: AbortableDuplex | null;
let rowKeys: string[];
let filter: {} | null;
const rowsLimit = options.limit || 0;
const hasLimit = rowsLimit !== 0;
let rowsRead = 0;
Expand All @@ -740,6 +741,10 @@ Please use the format 'prezzy' or '${instance.name}/tables/prezzy'.`);
ranges.push({});
}

if (options.filter) {
filter = Filter.parse(options.filter);
}

const userStream = new PassThrough({objectMode: true});
const end = userStream.end.bind(userStream);
userStream.end = () => {
Expand All @@ -765,6 +770,11 @@ Please use the format 'prezzy' or '${instance.name}/tables/prezzy'.`);
// eslint-disable-next-line @typescript-eslint/no-explicit-any
chunkTransformer = new ChunkTransformer({decode: options.decode} as any);

const reqOpts = {
tableName: this.name,
appProfileId: this.bigtable.appProfileId,
} as google.bigtable.v2.IReadRowsRequest;

const retryOpts = {
currentRetryAttempt: numConsecutiveErrors,
// Handling retries in this client. Specify the retry options to
Expand Down Expand Up @@ -793,7 +803,25 @@ Please use the format 'prezzy' or '${instance.name}/tables/prezzy'.`);
}
}

const reqOpts: any = this.readRowsReqOpts(ranges, rowKeys, options);
// Create the new reqOpts
reqOpts.rows = {};

// TODO: preprocess all the keys and ranges to Bytes
reqOpts.rows.rowKeys = rowKeys.map(
Mutation.convertToBytes
) as {} as Uint8Array[];

reqOpts.rows.rowRanges = ranges.map(range =>
Filter.createRange(
range.start as BoundData,
range.end as BoundData,
'Key'
)
);

if (filter) {
reqOpts.filter = filter;
}

if (hasLimit) {
reqOpts.rowsLimit = rowsLimit - rowsRead;
Expand Down Expand Up @@ -1545,39 +1573,6 @@ Please use the format 'prezzy' or '${instance.name}/tables/prezzy'.`);
makeNextBatchRequest();
}

/**
 * Assemble the protobuf request options for a ReadRows RPC.
 *
 * @param ranges Row ranges to read; each boundary is converted to a
 *   byte-encoded range keyed on 'Key'.
 * @param rowKeys Individual row keys to read, encoded to bytes.
 * @param options Caller-supplied read options; only `filter` is consulted
 *   here and, when present, is parsed into a protobuf RowFilter.
 * @returns The populated IReadRowsRequest for this table.
 */
private readRowsReqOpts(
  ranges: PrefixRange[],
  rowKeys: string[],
  options: any
) {
  // Encode keys and range boundaries into the wire (bytes) representation.
  // TODO: preprocess all the keys and ranges to Bytes
  const encodedKeys = rowKeys.map(
    Mutation.convertToBytes
  ) as {} as Uint8Array[];
  const encodedRanges = ranges.map(range =>
    Filter.createRange(
      range.start as BoundData,
      range.end as BoundData,
      'Key'
    )
  );

  const reqOpts = {
    tableName: this.name,
    appProfileId: this.bigtable.appProfileId,
  } as google.bigtable.v2.IReadRowsRequest;

  // Attach the row set built from the encoded keys/ranges.
  reqOpts.rows = {rowKeys: encodedKeys, rowRanges: encodedRanges};

  // An absent filter leaves reqOpts.filter unset (server reads all cells).
  if (options.filter) {
    reqOpts.filter = Filter.parse(options.filter);
  }
  return reqOpts;
}

/**
* Get a reference to a table row.
*
Expand Down

0 comments on commit 4817863

Please sign in to comment.