
Commit bb33483

Update tensorrt_llm/llmapi/llm_args.py
Co-authored-by: pcastonguay <[email protected]>
Signed-off-by: Shunkangz <[email protected]>
1 parent 8d6a1d1 commit bb33483

1 file changed: +1 −2 lines changed

tensorrt_llm/llmapi/llm_args.py

Lines changed: 1 addition & 2 deletions
@@ -2079,8 +2079,7 @@ class TorchLlmArgs(BaseLlmArgs):
     batch_wait_timeout_ms: float = Field(
         default=0,
         description=
-        "If greater than 0, returns immediately when fetched requests exceed max_batch_size; "
-        "otherwise, waits up to batch_wait_timeout_ms to gather more. If 0, no waiting occurs.",
+        "If greater than 0, the request queue might wait up to batch_wait_timeout_ms to receive max_batch_size requests, if fewer than max_batch_size requests are currently available. If 0, no waiting occurs.",
         status="prototype")
 
     torch_compile_config: Optional[TorchCompileConfig] = Field(
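
For context, a minimal usage sketch of the option this commit re-documents. It assumes batch_wait_timeout_ms is forwarded to the PyTorch-backend LLM constructor like other TorchLlmArgs fields (it is marked as a prototype option), and the model ID below is only a placeholder.

# Hypothetical sketch: pass the prototype batch_wait_timeout_ms option when
# constructing an LLM. A positive value lets the request queue wait up to that
# many milliseconds to accumulate max_batch_size requests; 0 disables waiting.
from tensorrt_llm import LLM, SamplingParams

llm = LLM(
    model="meta-llama/Llama-3.1-8B-Instruct",  # placeholder model ID
    batch_wait_timeout_ms=10,  # trade a little latency for fuller batches
)

outputs = llm.generate(["Hello, world"], SamplingParams(max_tokens=16))
print(outputs[0].outputs[0].text)

The trade-off the option expresses: a small wait can improve batching efficiency under bursty traffic, while the default of 0 keeps scheduling latency minimal.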
