diff --git a/WORKSPACE b/WORKSPACE index bca96f2d386..92cc0330b5a 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -11,8 +11,8 @@ load("//tensorflow_serving:repo.bzl", "tensorflow_http_archive") tensorflow_http_archive( name = "org_tensorflow", - sha256 = "cbd96914936ce3aacc39e02c2efb711f937f8ebcda888c349eab075185d7c914", - git_commit = "d8fac4cb80eb0c42d2550bcb720a80d29fc5f22d", + sha256 = "876f800cf9605375c4a05daf44bc280bcb85ef5b302a9c01592a828c61647574", + git_commit = "694892d4c0fba6bd4322e943f8b1483b36f1ae99", ) # TensorFlow depends on "io_bazel_rules_closure" so we need this here. diff --git a/tensorflow_serving/apis/BUILD b/tensorflow_serving/apis/BUILD index 3da7d9de710..fb9c83ba68d 100644 --- a/tensorflow_serving/apis/BUILD +++ b/tensorflow_serving/apis/BUILD @@ -146,6 +146,20 @@ serving_proto_library( ], ) +serving_proto_library_py( + name = "prediction_log_proto_py_pb2", + srcs = ["prediction_log.proto"], + proto_library = "prediction_log_proto", + deps = [ + ":classification_proto_py_pb2", + ":inference_proto_py_pb2", + ":predict_proto_py_pb2", + ":regression_proto_py_pb2", + ":session_service_proto_py_pb2", + "//tensorflow_serving/core:logging_proto_py_pb2", + ], +) + serving_proto_library( name = "prediction_service_proto", srcs = ["prediction_service.proto"], @@ -351,6 +365,16 @@ serving_proto_library( ], ) +serving_proto_library_py( + name = "session_service_proto_py_pb2", + srcs = ["session_service.proto"], + proto_library = "session_service_proto", + deps = [ + ":model_proto_py_pb2", + "@org_tensorflow//tensorflow/core:protos_all_py", + ], +) + tf_pyclif_proto_library( name = "regression_pyclif", proto_lib = ":regression_proto", diff --git a/tensorflow_serving/config/BUILD b/tensorflow_serving/config/BUILD index cc33650d4be..84dfe5364f6 100644 --- a/tensorflow_serving/config/BUILD +++ b/tensorflow_serving/config/BUILD @@ -57,6 +57,14 @@ serving_proto_library( ], ) +serving_proto_library_py( + name = "log_collector_config_proto_py_pb2", + srcs = 
["log_collector_config.proto"], + proto_library = "log_collector_config_proto", + deps = [ + ], +) + serving_proto_library( name = "logging_config_proto", srcs = ["logging_config.proto"], @@ -66,3 +74,12 @@ serving_proto_library( ":log_collector_config_proto", ], ) + +serving_proto_library_py( + name = "logging_config_proto_py_pb2", + srcs = ["logging_config.proto"], + proto_library = "logging_config_proto", + deps = [ + ":log_collector_config_proto_py_pb2", + ], +) diff --git a/tensorflow_serving/core/BUILD b/tensorflow_serving/core/BUILD index 4ec8fefcd1e..b11ab0895a4 100644 --- a/tensorflow_serving/core/BUILD +++ b/tensorflow_serving/core/BUILD @@ -733,6 +733,7 @@ cc_test( ) load("//tensorflow_serving:serving.bzl", "serving_proto_library") +load("//tensorflow_serving:serving.bzl", "serving_proto_library_py") serving_proto_library( name = "logging_proto", @@ -748,6 +749,19 @@ serving_proto_library( ], ) +serving_proto_library_py( + name = "logging_proto_py_pb2", + srcs = ["logging.proto"], + proto_library = "logging_proto", + visibility = [ + "//visibility:public", + ], + deps = [ + "//tensorflow_serving/apis:model_proto_py_pb2", + "//tensorflow_serving/config:logging_config_proto_py_pb2", + ], +) + cc_library( name = "request_logger", srcs = ["request_logger.cc"], diff --git a/tensorflow_serving/core/test_util/BUILD b/tensorflow_serving/core/test_util/BUILD index 082dd7f14ce..b74abc10ded 100644 --- a/tensorflow_serving/core/test_util/BUILD +++ b/tensorflow_serving/core/test_util/BUILD @@ -26,6 +26,7 @@ cc_library( testonly = 1, srcs = ["test_main.cc"], linkopts = ["-lm"], + tags = ["keep_dep"], # Tell build_cleaner to keep dependencies on this. 
deps = [ "@com_google_googletest//:gtest", "@org_tensorflow//tensorflow/core:testlib", diff --git a/tensorflow_serving/g3doc/api_rest.md b/tensorflow_serving/g3doc/api_rest.md new file mode 100644 index 00000000000..760f90c6af7 --- /dev/null +++ b/tensorflow_serving/g3doc/api_rest.md @@ -0,0 +1,290 @@ +# RESTful API + +In addition to [gRPC +APIs](https://github.com/tensorflow/serving/blob/master/tensorflow_serving/apis/prediction_service.proto) +TensorFlow ModelServer also supports RESTful APIs for classification, regression +and prediction on TensorFlow models. This page describes these API endpoints and +format of request/response involved in using them. + +TensorFlow ModelServer running on `host:port` accepts following REST API +requests: + +``` +POST http://host:port/<URI>:<VERB> + +URI: /v1/models/${MODEL_NAME}[/versions/${MODEL_VERSION}] +VERB: classify|regress|predict +``` + +`/versions/${MODEL_VERSION}` is optional. If omitted the latest version is used. + +This API closely follows the gRPC version of +[`PredictionService`](https://github.com/tensorflow/serving/blob/5369880e9143aa00d586ee536c12b04e945a977c/tensorflow_serving/apis/prediction_service.proto#L15) +API. + +Examples of request URLs: + +``` +http://host:port/v1/models/iris:classify +http://host:port/v1/models/mnist/versions/314:predict +``` + +The request and response is a JSON object. The composition of this object +depends on the request type or verb. See the API specific sections below for +details. + +In case of error, all APIs will return a JSON object in the response body with +`error` as key and the error message as the value: + +```json +{ + "error": <error message string> +} +``` + +## Classify and Regress API + +### Request format + +The request body for the `classify` and `regress` APIs must be a JSON object +formatted as follows: + +```json +{ + // Optional: serving signature to use. + // If unspecified default serving signature is used. + "signature_name": <string>, + + // Optional: Common context shared by all examples. 
+ // Features that appear here MUST NOT appear in examples (below). + "context": { + "<feature_name3>": <value>|<list> + "<feature_name4>": <value>|<list> + }, + + // List of Example objects + "examples": [ + { + // Example 1 + "<feature_name1>": <value>|<list>, + "<feature_name2>": <value>|<list>, + ... + }, + { + // Example 2 + "<feature_name1>": <value>|<list>, + "<feature_name2>": <value>|<list>, + ... + } + ... + ] +} +``` + +`<value>` is a JSON number (whole or decimal) or string, and `<list>` is a list +of such values. See [Encoding binary values](#encoding-binary-values) section +below for details on how to represent a binary (stream of bytes) value. This +format is similar to gRPC's `ClassificationRequest` and `RegressionRequest` +protos. Both versions accept list of +[`Example`](https://github.com/tensorflow/tensorflow/blob/92e6c3e4f5c1cabfda1e61547a6a1b268ef95fa5/tensorflow/core/example/example.proto#L13) +objects. + +### Response format + +A `classify` request returns a JSON object in the response body, formatted as +follows: + +```json +{ + "result": [ + // List of class label/score pairs for first Example (in request) + [ [<label1>, <score1>], [<label2>, <score2>], ... ], + + // List of class label/score pairs for next Example (in request) + [ [<label1>, <score1>], [<label2>, <score2>], ... ], + ... + ] +} +``` + +`