Skip to content

Commit e6cf7a9

Browse files
committed
feat: validate request params for MistralTokenizer process
Signed-off-by: Guillaume Calmettes <[email protected]>
1 parent 98d01d3 commit e6cf7a9

File tree

3 files changed

+13
-3
lines changed

3 files changed

+13
-3
lines changed

vllm/entrypoints/openai/serving_chat.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,8 @@
3939
from vllm.sequence import Logprob
4040
from vllm.transformers_utils.tokenizer import AnyTokenizer, MistralTokenizer
4141
from vllm.transformers_utils.tokenizers import (maybe_serialize_tool_calls,
42-
truncate_tool_call_ids)
42+
truncate_tool_call_ids,
43+
validate_request_params)
4344

4445
logger = init_logger(__name__)
4546

@@ -159,6 +160,7 @@ async def create_chat_completion(
159160
# for more info: see comment in `maybe_serialize_tool_calls`
160161
maybe_serialize_tool_calls(request)
161162
truncate_tool_call_ids(request)
163+
validate_request_params(request)
162164

163165
if (request.tool_choice == "auto" and
164166
not (self.enable_auto_tools and tool_parser is not None)
Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,9 @@
11
# SPDX-License-Identifier: Apache-2.0
22

33
from .mistral import (MistralTokenizer, maybe_serialize_tool_calls,
4-
truncate_tool_call_ids)
4+
truncate_tool_call_ids, validate_request_params)
55

66
__all__ = [
7-
"MistralTokenizer", "maybe_serialize_tool_calls", "truncate_tool_call_ids"
7+
"MistralTokenizer", "maybe_serialize_tool_calls", "truncate_tool_call_ids",
8+
"validate_request_params"
89
]

vllm/transformers_utils/tokenizers/mistral.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -98,6 +98,13 @@ def truncate_tool_call_ids(request: "ChatCompletionRequest"):
9898
request.messages[i]["tool_call_id"] = tool_call_id
9999

100100

101+
def validate_request_params(request: "ChatCompletionRequest"):
    """Reject request options that Mistral tokenizers cannot honor.

    Mistral tokenizers always strip special tokens from their output, so a
    request that explicitly asks for ``skip_special_tokens=False`` cannot be
    satisfied.

    Raises:
        ValueError: if ``skip_special_tokens`` was explicitly set to a
            falsy value (an unset ``None`` is accepted).
    """
    skip_special_tokens = request.skip_special_tokens
    # Only an *explicit* falsy value is an error; None means "use default".
    if skip_special_tokens is not None and not skip_special_tokens:
        raise ValueError("skip_special_tokens=False is not supported "
                         "for Mistral tokenizers.")
106+
107+
101108
def list_local_repo_files(repo_id: str, revision: Optional[str]) -> List[str]:
102109
repo_cache = os.path.join(
103110
huggingface_hub.constants.HF_HUB_CACHE,

0 commit comments

Comments
 (0)