Closed leafjungle closed 2 months ago
I want to send a string, and have the server respond with a string.
` from pytriton.model_config import Tensor, ModelConfig, DynamicBatcher from pytriton.triton import Triton, TritonConfig from pytriton.decorators import batch from pytriton.client import ModelClient, AsyncioModelClient, FuturesModelClient
import sys import os import numpy as np
# Name under which the model is registered with Triton; must match between server and client.
MODEL_NAME = "HelloWorld"
class TisServer(object):
    """Hosts the HelloWorld model on a PyTriton server (gRPC on port 8200).

    Wipes and recreates the Triton workspace on construction, then binds a
    single bytes-in/bytes-out model whose inference function answers "AAA"
    for every request element.
    """

    def __init__(self):
        # fixed: was `def init`, which Python never calls on construction,
        # so run() crashed on missing attributes.
        self.model_name = MODEL_NAME
        self.work_space = "/opt/dl/logs/triton_workspace"
        # Start from a clean workspace. Fixed constant path, so no injection
        # risk; shutil.rmtree would avoid spawning a shell, though.
        os.system("rm -rf %s" % self.work_space)
        self.triton = self.create_triton()

    def run(self):
        """Serve forever, logging status every 10 seconds."""
        self.triton.serve(monitoring_period_s=10)

    @batch
    def w_process_request_1(self, request_list=None):
        """Inference callback: return one b"AAA" per request element.

        With @batch, `request_list` arrives as a batched ndarray of bytes.
        """
        print("w_process_request_1, request_list: type: %s" % type(request_list))
        print("w_process_request_1, request_list: content: %s" % str(request_list))
        if isinstance(request_list, np.ndarray):
            req_list_list = request_list.tolist()
        else:
            req_list_list = request_list
        ret = []
        for req_list in req_list_list:
            for req in req_list:
                ret.append("AAA")
        # NOTE(review): np.array([ret]) always has a leading dim of 1; for
        # batch sizes > 1 this may not match the incoming batch dimension
        # that @batch expects — confirm against pytriton's batching contract.
        result = np.array([ret])
        return [result]

    def create_triton(self):
        """Build a Triton instance with the HelloWorld model bound (gRPC :8200)."""
        triton_config = TritonConfig(
            allow_grpc=True,
            grpc_port=8200,
            log_verbose=1,
        )
        triton = Triton(config=triton_config, workspace=self.work_space)
        triton.bind(
            model_name=self.model_name,
            model_version=1,
            infer_func=self.w_process_request_1,
            inputs=[Tensor(name="request_list", shape=(-1,), dtype=np.bytes_)],
            outputs=[Tensor(name="resp", shape=(-1,), dtype=np.bytes_)],
            config=ModelConfig(max_batch_size=128),
            strict=True,
        )
        return triton

    def __del__(self):
        # Guard: __init__ may have failed before self.triton was assigned,
        # and __del__ still runs on the partially-built instance.
        if getattr(self, "triton", None) is not None:
            self.triton.stop()
class TisClient(object):
    """Sends batches of string samples to the HelloWorld model over gRPC."""

    def __init__(self):
        # fixed: was `def init`, which Python never calls on construction,
        # so run() crashed on missing attributes.
        self.server_name = MODEL_NAME
        self.server_addr = "grpc://localhost:8200"

    def run(self):
        """Fire 11 async inference requests, wait for all, print each response."""
        from concurrent.futures import wait, ALL_COMPLETED

        # Context manager ensures the client (and its channel) is closed.
        with FuturesModelClient(self.server_addr, self.server_name) as client:
            inputs = ["aaa", "bbb", "ccc"]
            task_list = []
            count = 0
            while True:
                d2 = np.array(inputs, dtype=np.bytes_)
                task = client.infer_sample(d2)
                task_list.append(task)
                count += 1
                if count > 10:
                    break
            # fixed: timeout=0 made wait() return immediately without waiting;
            # None blocks until every future completes.
            wait(fs=task_list, timeout=None, return_when=ALL_COMPLETED)
            for task in task_list:
                results = task.result()
                for resp_str in results["resp"].tolist():
                    print("resp: %s" % resp_str)
def __usage():
    """Write the command-line usage lines to stderr and exit with status 1."""
    prog = sys.argv[0]
    sys.stderr.write("python {n} server\n".format(n=prog))
    sys.stderr.write("python {n} client\n".format(n=prog))
    sys.exit(1)
def main():
    """Dispatch to server or client mode based on argv[1]; show usage otherwise."""
    if len(sys.argv) == 1:
        __usage()  # fixed: was `usage()`, an undefined name (NameError)
    cmd = sys.argv[1]
    if cmd == "server":
        server = TisServer()
        server.run()
    elif cmd == "client":
        client = TisClient()
        client.run()
    else:
        __usage()
# fixed: was `if name == "main": __main()` — both `name` and `__main` are
# undefined (markdown stripped the double underscores); the standard guard is:
if __name__ == "__main__":
    main()
`python demo.py server` --> GOOD. But when I run `python demo.py client`, I get the error: `_parser.HTTPParseError: ('Expected HTTP/', 8)`
I want to send a string, and have the server respond with a string.
` from pytriton.model_config import Tensor, ModelConfig, DynamicBatcher from pytriton.triton import Triton, TritonConfig from pytriton.decorators import batch from pytriton.client import ModelClient, AsyncioModelClient, FuturesModelClient
import sys import os import numpy as np
# Name under which the model is registered with Triton; must match between server and client.
MODEL_NAME = "HelloWorld"
class TisServer(object):
    """Hosts the HelloWorld model on a PyTriton server (workspace is recreated on start)."""

    def __init__(self):
        # fixed: was `def init`, which Python never calls on construction,
        # leaving the instance without model_name/work_space/triton.
        self.model_name = MODEL_NAME
        self.work_space = "/opt/dl/logs/triton_workspace"
        # Start from a clean workspace. Fixed constant path, so no injection risk.
        os.system("rm -rf %s" % self.work_space)
        self.triton = self.create_triton()
class TisClient(object):
    """Client for the HelloWorld model served over gRPC on localhost:8200."""

    def __init__(self):
        # fixed: was `def init`, which Python never calls on construction,
        # leaving the instance without server_name/server_addr.
        self.server_name = MODEL_NAME
        self.server_addr = "grpc://localhost:8200"
def __usage():
    """Write the command-line usage lines to stderr and exit with status 1."""
    prog = sys.argv[0]
    sys.stderr.write("python {n} server\n".format(n=prog))
    sys.stderr.write("python {n} client\n".format(n=prog))
    sys.exit(1)
def main():
    """Dispatch to server or client mode based on argv[1]; show usage otherwise."""
    if len(sys.argv) == 1:
        __usage()  # fixed: was `usage()`, an undefined name (NameError)
    cmd = sys.argv[1]
    if cmd == "server":
        server = TisServer()
        server.run()
    elif cmd == "client":
        client = TisClient()
        client.run()
    else:
        __usage()
# fixed: was `if name == "main": __main()` — both `name` and `__main` are
# undefined (markdown stripped the double underscores); the standard guard is:
if __name__ == "__main__":
    main()
`python demo.py server` --> GOOD. But when I run `python demo.py client`, I get the error: `_parser.HTTPParseError: ('Expected HTTP/', 8)`