tensorflow / nmt

TensorFlow Neural Machine Translation Tutorial

Serving client needed #297

Closed: ptamas88 closed this issue 6 years ago

ptamas88 commented 6 years ago

Hello everyone! Can someone please show a working client.py code, or help me solve this issue? Here is the code I used for freezing:

```python
import os

import tensorflow as tf

# `path` points at the checkpoint directory, `export_path` at the
# SavedModel output directory; both are set elsewhere.
config = tf.ConfigProto(allow_soft_placement=True)
sess = tf.Session(config=config)

# Restore the trained NMT checkpoint into the default graph.
saver = tf.train.import_meta_graph(os.path.join(path, "translate.ckpt-2.meta"))
latest_ckpt = tf.train.latest_checkpoint(path)
saver.restore(sess, latest_ckpt)

builder = tf.saved_model.builder.SavedModelBuilder(export_path)

# Parse serialized tf.Example protos into string features for the signature.
feature_configs = {
    'x': tf.FixedLenFeature(shape=[1], dtype=tf.string),
    'y': tf.FixedLenFeature(shape=[1], dtype=tf.string)
}
serialized_example = tf.placeholder(tf.string, name="tf_example")
tf_example = tf.parse_example(serialized_example, feature_configs)
x = tf.identity(tf_example['x'], name='x')
y = tf.identity(tf_example['y'], name='y')
predict_input = x
predict_output = y

predict_signature_def_map = tf.saved_model.signature_def_utils.predict_signature_def(
    inputs={
        tf.saved_model.signature_constants.PREDICT_INPUTS: predict_input
    },
    outputs={
        tf.saved_model.signature_constants.PREDICT_OUTPUTS: predict_output
    }
)

# Initialize vocabulary lookup tables when the model is loaded for serving.
legacy_init_op = tf.group(tf.tables_initializer(), name="legacy_init_op")

builder.add_meta_graph_and_variables(
    sess=sess,
    tags=[tf.saved_model.tag_constants.SERVING],
    signature_def_map={
        "serving_default": predict_signature_def_map
    },
    legacy_init_op=legacy_init_op,
    assets_collection=None,
    clear_devices=True
)
builder.save()
```
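
Side note on the export: the signature above points at the fresh `x`/`y` placeholders rather than at tensors of the restored NMT graph, so I wonder whether it should instead look the real tensors up by name. A minimal sketch of what I mean, not a verified fix; both tensor names below are hypothetical and would have to be replaced with the actual input/output tensor names of the graph:

```python
# A minimal sketch: reference tensors of the restored NMT graph in the
# signature. The two tensor names below are hypothetical placeholders.
graph = tf.get_default_graph()
src_tokens = graph.get_tensor_by_name("src_placeholder:0")   # hypothetical input
translations = graph.get_tensor_by_name("decoded_tokens:0")  # hypothetical output

predict_signature_def_map = tf.saved_model.signature_def_utils.predict_signature_def(
    inputs={tf.saved_model.signature_constants.PREDICT_INPUTS: src_tokens},
    outputs={tf.saved_model.signature_constants.PREDICT_OUTPUTS: translations})
```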

After this I started the server with the following command: `tensorflow_model_server --port=9000 --model_name=saved_model --model_base_path=path_to_models_folder`
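
To double-check what actually got exported, the SavedModel can be loaded back and its `serving_default` signature printed; a minimal sketch, where `export_path` is the same export directory as above:

```python
# Reload the exported SavedModel and print the serving signature to see
# exactly which tensors "inputs" and "outputs" are mapped to.
import tensorflow as tf

with tf.Session(graph=tf.Graph()) as sess:
    meta_graph = tf.saved_model.loader.load(
        sess, [tf.saved_model.tag_constants.SERVING], export_path)
    print(meta_graph.signature_def["serving_default"])
```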

Then I tried the following client code, which I took from a working NMT model:

""Example of a translation client."""

from __future__ import print_function

import argparse

import tensorflow as tf

from grpc.beta import implementations

from tensorflow_serving.apis import predict_pb2
from tensorflow_serving.apis import prediction_service_pb2

def parse_translation_result(result):
  """Parses a translation result.

  Args:
    result: A `PredictResponse` proto.

  Returns:
    A list of tokens.
  """
  lengths = tf.make_ndarray(result.outputs["length"])[0]
  hypotheses = tf.make_ndarray(result.outputs["tokens"])[0]

  best_hypothesis = hypotheses[0]
  best_length = lengths[0]

  return best_hypothesis[0:best_length - 1] # Ignore </s>

def translate(stub, model_name, tokens, timeout=5.0):
  """Translates a sequence of tokens.

  Args:
    stub: The prediction service stub.
    model_name: The model to request.
    tokens: A list of tokens.
    timeout: Timeout after this many seconds.

  Returns:
    A future.
  """
  length = len(tokens)

  data = "hello"
  request2 = predict_pb2.PredictRequest()
  request2.model_spec.name = model_name
  request2.inputs["inputs"].CopyFrom(
      tf.contrib.util.make_tensor_proto(data, shape=(1,1)))

  #return stub.Predict.future(request, timeout)
  return stub.Predict.future(request,timeout)

def main():
  parser = argparse.ArgumentParser(description="Translation client example")
  parser.add_argument("--model_name", required=True,
                      help="model name")
  parser.add_argument("--host", default="localhost",
                      help="model server host")
  parser.add_argument("--port", type=int, default=9000,
                      help="model server port")
  parser.add_argument("--timeout", type=float, default=10.0,
                      help="request timeout")
  args = parser.parse_args()

  channel = implementations.insecure_channel(args.host, args.port)
  stub = prediction_service_pb2.beta_create_PredictionService_stub(channel)

  batch_tokens = [
      ["Hello", "world", "!"],
      ["My", "name", "is", "John", "."],
      ["I", "live", "on", "the", "West", "coast", "."]]

  futures = []
  for tokens in batch_tokens:
    future = translate(stub, args.model_name, tokens, timeout=args.timeout)
    futures.append(future)

  for tokens, future in zip(batch_tokens, futures):
    xy= future.result()
    result = parse_translation_result(xy)
    print("{} ||| {}".format(" ".join(tokens), " ".join(result)))

if __name__ == "__main__":
  main()

and I am getting the following error:

```
raise _abortion_error(rpc_error_call)
grpc.framework.interfaces.face.face.AbortionError: AbortionError(code=StatusCode.INVALID_ARGUMENT, details="You must feed a value for placeholder tensor 'tf_example' with dtype string [[Node: tf_example = Placeholder[_output_shapes=[], dtype=DT_STRING, shape=, _device="/job:localhost/replica:0/task:0/device:CPU:0"]()]]")
```
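
If I read the error correctly, the exported output `y` still depends on the `tf_example` placeholder, which the serving request never feeds. The same failure can be reproduced outside of Serving with a few lines; a minimal sketch for TF 1.x:

```python
# Fetching a tensor that depends on an unfed string placeholder named
# "tf_example" raises the same InvalidArgumentError as the serving call.
import tensorflow as tf

serialized = tf.placeholder(tf.string, name="tf_example")
parsed = tf.parse_example(
    serialized, {"y": tf.FixedLenFeature(shape=[1], dtype=tf.string)})
y = tf.identity(parsed["y"], name="y")

with tf.Session() as sess:
    sess.run(y)  # InvalidArgumentError: must feed placeholder 'tf_example'
```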



Thanks
luozhouyang commented 6 years ago

@ptamas88 #344

ptamas88 commented 6 years ago

Thank you very much, I will give it a try.