|
44 | 44 | # yapf: enable |
45 | 45 | from vllm.transformers_utils.processor import cached_get_processor |
46 | 46 | from vllm.transformers_utils.tokenizer import AnyTokenizer, MistralTokenizer |
47 | | -from vllm.utils import random_uuid |
| 47 | +from vllm.utils import deprecate_kwargs, random_uuid |
48 | 48 |
|
49 | 49 | logger = init_logger(__name__) |
50 | 50 |
|
@@ -329,11 +329,17 @@ def resolve_mistral_chat_template( |
329 | 329 | "so it will be ignored.") |
330 | 330 | return None |
331 | 331 |
|
| 332 | +@deprecate_kwargs( |
| 333 | + "trust_remote_code", |
| 334 | + additional_message="Please use `model_config.trust_remote_code` instead.", |
| 335 | +) |
332 | 336 | def resolve_hf_chat_template( |
333 | | - model_config: ModelConfig, |
334 | 337 | tokenizer: Union[PreTrainedTokenizer, PreTrainedTokenizerFast], |
335 | 338 | chat_template: Optional[str], |
336 | 339 | tools: Optional[list[dict[str, Any]]], |
| 340 | + *, |
| 341 | + model_config: ModelConfig, |
| 342 | +    trust_remote_code: Optional[bool] = None, |
337 | 343 | ) -> Optional[str]: |
338 | 344 | # 1st priority: The given chat template |
339 | 345 | if chat_template is not None: |
@@ -379,18 +385,19 @@ def resolve_hf_chat_template( |
379 | 385 |
|
380 | 386 |
|
381 | 387 | def _resolve_chat_template_content_format( |
382 | | - model_config: ModelConfig, |
383 | 388 | chat_template: Optional[str], |
384 | 389 | tools: Optional[list[dict[str, Any]]], |
385 | 390 | given_format: ChatTemplateContentFormatOption, |
386 | 391 | tokenizer: AnyTokenizer, |
| 392 | + *, |
| 393 | + model_config: ModelConfig, |
387 | 394 | ) -> _ChatTemplateContentFormat: |
388 | 395 | if isinstance(tokenizer, (PreTrainedTokenizer, PreTrainedTokenizerFast)): |
389 | 396 | hf_chat_template = resolve_hf_chat_template( |
390 | | - model_config, |
391 | 397 | tokenizer, |
392 | 398 | chat_template=chat_template, |
393 | 399 | tools=tools, |
| 400 | + model_config=model_config, |
394 | 401 | ) |
395 | 402 | else: |
396 | 403 | hf_chat_template = None |
@@ -428,19 +435,25 @@ def _log_chat_template_content_format( |
428 | 435 | ) |
429 | 436 |
|
430 | 437 |
|
| 438 | +@deprecate_kwargs( |
| 439 | + "trust_remote_code", |
| 440 | + additional_message="Please use `model_config.trust_remote_code` instead.", |
| 441 | +) |
431 | 442 | def resolve_chat_template_content_format( |
432 | | - model_config: ModelConfig, |
433 | 443 | chat_template: Optional[str], |
434 | 444 | tools: Optional[list[dict[str, Any]]], |
435 | 445 | given_format: ChatTemplateContentFormatOption, |
436 | 446 | tokenizer: AnyTokenizer, |
| 447 | + *, |
| 448 | + model_config: ModelConfig, |
| 449 | + trust_remote_code: Optional[bool] = None, |
437 | 450 | ) -> _ChatTemplateContentFormat: |
438 | 451 | detected_format = _resolve_chat_template_content_format( |
439 | | - model_config, |
440 | 452 | chat_template, |
441 | 453 | tools, |
442 | 454 | given_format, |
443 | 455 | tokenizer, |
| 456 | + model_config=model_config, |
444 | 457 | ) |
445 | 458 |
|
446 | 459 | _log_chat_template_content_format( |
@@ -1191,21 +1204,27 @@ def parse_chat_messages_futures( |
1191 | 1204 | return conversation, mm_tracker.all_mm_data() |
1192 | 1205 |
|
1193 | 1206 |
|
| 1207 | +@deprecate_kwargs( |
| 1208 | + "trust_remote_code", |
| 1209 | + additional_message="Please use `model_config.trust_remote_code` instead.", |
| 1210 | +) |
1194 | 1211 | def apply_hf_chat_template( |
1195 | | - model_config: ModelConfig, |
1196 | 1212 | tokenizer: Union[PreTrainedTokenizer, PreTrainedTokenizerFast], |
1197 | 1213 | conversation: list[ConversationMessage], |
1198 | 1214 | chat_template: Optional[str], |
1199 | 1215 | tools: Optional[list[dict[str, Any]]], |
1200 | 1216 | *, |
| 1217 | + model_config: ModelConfig, |
1201 | 1218 | tokenize: bool = False, # Different from HF's default |
| 1219 | +    # Deprecated, explicitly capture here so it doesn't slip into kwargs. |
| 1220 | + trust_remote_code: Optional[bool] = None, |
1202 | 1221 | **kwargs: Any, |
1203 | 1222 | ) -> str: |
1204 | 1223 | hf_chat_template = resolve_hf_chat_template( |
1205 | | - model_config, |
1206 | 1224 | tokenizer, |
1207 | 1225 | chat_template=chat_template, |
1208 | 1226 | tools=tools, |
| 1227 | + model_config=model_config, |
1209 | 1228 | ) |
1210 | 1229 |
|
1211 | 1230 | if hf_chat_template is None: |
|
0 commit comments