
Commit d9b7963

Update tensorrt_llm/llmapi/llm_args.py
Co-authored-by: pcastonguay <[email protected]>
Signed-off-by: Shunkangz <[email protected]>
1 parent 7b6e2c3 commit d9b7963

File tree: 1 file changed, +1 -2 lines changed


tensorrt_llm/llmapi/llm_args.py

Lines changed: 1 addition & 2 deletions
@@ -2101,8 +2101,7 @@ class TorchLlmArgs(BaseLlmArgs):
     batch_wait_timeout_ms: float = Field(
         default=0,
         description=
-        "If greater than 0, returns immediately when fetched requests exceed max_batch_size; "
-        "otherwise, waits up to batch_wait_timeout_ms to gather more. If 0, no waiting occurs.",
+        "If greater than 0, the request queue might wait up to batch_wait_timeout_ms to receive max_batch_size requests, if fewer than max_batch_size requests are currently available. If 0, no waiting occurs.",
         status="prototype")
 
     torch_compile_config: Optional[TorchCompileConfig] = Field(
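
For context on the option whose description is being reworded, here is a minimal, hypothetical sketch of setting batch_wait_timeout_ms through the PyTorch-backend LLM API. It assumes the LLM constructor forwards keyword arguments to TorchLlmArgs fields; the model path and the 50 ms value are placeholders and are not taken from this commit.

# Hypothetical usage sketch (not part of this commit): assumes the PyTorch-backend
# LLM constructor accepts TorchLlmArgs fields as keyword arguments; the model path
# and timeout value below are placeholders.
from tensorrt_llm import LLM

llm = LLM(
    model="/path/to/model",        # placeholder checkpoint path
    batch_wait_timeout_ms=50.0,    # wait up to 50 ms for max_batch_size requests to accumulate
)

# With batch_wait_timeout_ms=0 (the default), the request queue does not wait and
# batches whatever requests are currently available.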
