Commit 48a699a
Allow max_execution_batch_size to be actually used by allowing
max_batch_size to be more than max(allowed_batch_sizes). Also
sanity-check that max_execution_batch_size is max(allowed_batch_sizes).

PiperOrigin-RevId: 435685377
netfs authored and tensorflow-copybara committed Mar 18, 2022
1 parent a3ed383 commit 48a699a
Showing 2 changed files with 13 additions and 6 deletions.
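For context, the sketch below (not part of this commit) shows the option combination the change permits: with large batch splitting enabled, max_batch_size may now exceed the largest entry of allowed_batch_sizes, while max_execution_batch_size must still equal allowed_batch_sizes.back(). The wrapper function, the numeric values, the tensor names, and the use of SplitInputTask as the split function are illustrative assumptions; only the option fields and CreateBasicBatchingSession itself appear in the diff below.

#include <memory>
#include <utility>

#include "tensorflow_serving/batching/batching_session.h"

// Sketch only: `session` is assumed to be an already-loaded tensorflow::Session;
// the tensor names "x"/"y" and the numeric values are placeholders.
tensorflow::Status MakeBatchingSession(
    std::unique_ptr<tensorflow::Session> session,
    std::unique_ptr<tensorflow::Session>* batching_session) {
  using tensorflow::serving::BasicBatchScheduler;
  using tensorflow::serving::BatchingSessionOptions;
  using tensorflow::serving::BatchingSessionTask;
  using tensorflow::serving::CreateBasicBatchingSession;
  using tensorflow::serving::SplitInputTask;

  BasicBatchScheduler<BatchingSessionTask>::Options schedule_options;
  schedule_options.enable_large_batch_splitting = true;
  schedule_options.split_input_task_func = SplitInputTask;
  // After this change, max_batch_size may exceed the largest allowed batch
  // size; oversized inputs are split into tasks of at most
  // max_execution_batch_size.
  schedule_options.max_batch_size = 64;
  schedule_options.max_execution_batch_size = 16;

  BatchingSessionOptions batching_session_options;
  // New sanity check: the last entry must equal max_execution_batch_size.
  batching_session_options.allowed_batch_sizes = {4, 8, 16};

  return CreateBasicBatchingSession(
      schedule_options, batching_session_options,
      {{"x"}, {"y"}},  // Input/output tensor names (placeholders).
      std::move(session), batching_session);
}

Before this change, the second check in batching_session.cc compared max_batch_size itself against allowed_batch_sizes.back(), which is what kept max_batch_size from exceeding the largest allowed size.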
4 changes: 2 additions & 2 deletions tensorflow_serving/batching/batching_session.cc
@@ -1012,13 +1012,13 @@ Status CreateBasicBatchingSession(
           max_allowed_batch_size, "; expected ",
           schedule_options.max_batch_size);
     }
-    if (schedule_options.max_batch_size != max_allowed_batch_size) {
+    if (schedule_options.max_execution_batch_size != max_allowed_batch_size) {
       return errors::InvalidArgument(
           "Last entry in allowed_batch_sizes must be equal to "
           "max_execution_batch_size; last "
           "entry was ",
           max_allowed_batch_size, "; expected ",
-          schedule_options.max_batch_size);
+          schedule_options.max_execution_batch_size);
     }
   } else if (allowed_batch_sizes.back() != schedule_options.max_batch_size) {
     // TODO(b/161641195):
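Conversely, a short sketch (hypothetical values, mirroring the updated test below) of a configuration the tightened check rejects: with splitting enabled, allowed_batch_sizes ending in 8 while max_execution_batch_size is 4 should make CreateBasicBatchingSession return an INVALID_ARGUMENT status whose message names max_execution_batch_size.

#include <memory>
#include <utility>

#include "tensorflow_serving/batching/batching_session.h"

// Sketch only: the returned status is expected to be INVALID_ARGUMENT because
// allowed_batch_sizes.back() (8) != max_execution_batch_size (4).
tensorflow::Status RejectedConfigExample(
    std::unique_ptr<tensorflow::Session> session,
    std::unique_ptr<tensorflow::Session>* batching_session) {
  namespace serving = tensorflow::serving;

  serving::BasicBatchScheduler<serving::BatchingSessionTask>::Options
      schedule_options;
  schedule_options.enable_large_batch_splitting = true;
  schedule_options.split_input_task_func = serving::SplitInputTask;
  schedule_options.max_batch_size = 16;           // May now exceed 8.
  schedule_options.max_execution_batch_size = 4;  // Mismatch with 8 below.

  serving::BatchingSessionOptions batching_session_options;
  batching_session_options.allowed_batch_sizes = {2, 8};

  return serving::CreateBasicBatchingSession(
      schedule_options, batching_session_options, {{"x"}, {"y"}},
      std::move(session), batching_session);
}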
15 changes: 11 additions & 4 deletions tensorflow_serving/batching/batching_session_test.cc
@@ -277,7 +277,11 @@ class BatchingSessionTest
     output_options.enable_lazy_split = enable_lazy_split();
     if (enable_large_batch_splitting()) {
       output_options.split_input_task_func = get_split_input_task_func();
+      // Bump up the max batch size, and set execution batch size to the max
+      // size we actually want -- this will allow us to exercise large batch
+      // splits (they trigger when execution_batch_size < max_batch_size).
       output_options.max_execution_batch_size = input_options.max_batch_size;
+      output_options.max_batch_size = input_options.max_batch_size * 2;
     }
     return output_options;
   }
@@ -832,10 +836,13 @@ TEST_P(BatchingSessionTest,
   BatchingSessionOptions batching_session_options;
   batching_session_options.allowed_batch_sizes = {2, 8};  // Final entry != 4.
   std::unique_ptr<Session> batching_session;
-  EXPECT_FALSE(CreateBasicBatchingSession(
-                   schedule_options, batching_session_options, {{"x"}, {"y"}},
-                   CreateHalfPlusTwoSession(), &batching_session)
-                   .ok());
+  auto status = CreateBasicBatchingSession(
+      schedule_options, batching_session_options, {{"x"}, {"y"}},
+      CreateHalfPlusTwoSession(), &batching_session);
+  EXPECT_EQ(status.code(), error::INVALID_ARGUMENT);
+  EXPECT_THAT(status.error_message(), HasSubstr(enable_large_batch_splitting()
+                                                    ? "max_execution_batch_size"
+                                                    : "max_batch_size"));
 }

TEST_P(BatchingSessionTest, DifferentOrderForInputAndOutputTensors) {
