From e9f348b10b7b92b5def43dc8e97f7068f794991f Mon Sep 17 00:00:00 2001
From: "a.razghandi"
Date: Wed, 21 Aug 2024 18:41:58 +0330
Subject: [PATCH] Fix the exiting bug in docker compose when using the
 scripts/launch_triton_server.py

---
 scripts/launch_triton_server.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/scripts/launch_triton_server.py b/scripts/launch_triton_server.py
index 8a9bc3e0..ae03cda8 100644
--- a/scripts/launch_triton_server.py
+++ b/scripts/launch_triton_server.py
@@ -159,4 +159,5 @@ def get_cmd(world_size, tritonserver, grpc_port, http_port, metrics_port,
     if args.multi_model:
         assert args.world_size == 1, 'World size must be 1 when using multi-model. Processes will be spawned automatically to run the multi-GPU models'
         env['TRTLLM_ORCHESTRATOR'] = '1'
-    subprocess.Popen(cmd, env=env)
+    child = subprocess.Popen(cmd, env=env)
+    child.communicate()
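
Context for the change: a bare subprocess.Popen returns immediately, so when this launcher is the container's entrypoint under docker compose, the script (and therefore the container) exits as soon as the server process is spawned. The patch keeps the launcher in the foreground by waiting on the child. Below is a minimal standalone sketch of that pattern; the cmd value is a placeholder, not the real mpirun command assembled by get_cmd(), and propagating the exit code is an optional extension not included in the patch.

import subprocess
import sys

# Placeholder command; the actual launcher builds an mpirun/tritonserver
# command via get_cmd().
cmd = ['tritonserver', '--model-repository=/models']

# Spawn the server and block until it exits, so the launcher (PID 1 in the
# container) stays alive for as long as the server runs.
child = subprocess.Popen(cmd)
child.communicate()

# Optional (not in the patch): propagate the server's exit status so docker
# compose can report failures correctly.
sys.exit(child.returncode)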