Skip to content

Commit ef65fc5

Browse files
authored
Add MistralLite, Intel, and OpenChat prompt formats (ggml-org#927)
* Add MistralLite format * Update llama_chat_format.py * Update llama_chat_format.py
1 parent 9d7c830 commit ef65fc5

File tree

1 file changed

+43
-0
lines changed

1 file changed

+43
-0
lines changed

llama_cpp/llama_chat_format.py

+43
Original file line numberDiff line numberDiff line change
@@ -529,6 +529,19 @@ def format_phind(
529529
_prompt = _format_add_colon_single(_system_message, _messages, _sep)
530530
return ChatFormatterResponse(prompt=_prompt)
531531

532+
@register_chat_format("intel")
def format_intel(
    messages: List[llama_types.ChatCompletionRequestMessage],
    **kwargs: Any,
) -> ChatFormatterResponse:
    """Format a conversation for Intel neural-chat style models.

    Uses "### System:" / "### User:" / "### Assistant:" headers with a
    newline separator, and appends an open assistant turn for generation.

    :param messages: OpenAI-style chat messages to render.
    :return: ChatFormatterResponse holding the rendered prompt.
    """
    _roles = dict(user="### User:", assistant="### Assistant:")
    _sep = "\n"
    # BUG FIX: the original passed the raw template straight through, so the
    # literal "{system_message}" placeholder ended up in the prompt. Fill it
    # with the conversation's actual system message (empty string if absent),
    # mirroring how format_mistrallite/format_openchat handle their templates.
    system_template = "### System:\n{system_message}"
    system_message = _get_system_message(messages)
    system_message = system_template.format(system_message=system_message)
    _messages = _map_roles(messages, _roles)
    # Trailing (assistant, None) turn cues the model to produce the reply.
    _messages.append((_roles["assistant"], None))
    _prompt = _format_add_colon_single(system_message, _messages, _sep)
    return ChatFormatterResponse(prompt=_prompt)
544+
532545

533546
@register_chat_format("open-orca")
534547
def format_open_orca(
@@ -557,6 +570,21 @@ def format_open_orca(
557570
return ChatFormatterResponse(prompt=_prompt, stop=stop_str)
558571

559572

573+
@register_chat_format("mistrallite")
def format_mistrallite(
    messages: List[llama_types.ChatCompletionRequestMessage],
    **kwargs: Any,
) -> ChatFormatterResponse:
    """Render a conversation using the MistralLite prompt layout.

    The system message is wrapped as ``<|system|>...</s>``; user turns are
    prefixed with ``<|prompter|>`` and assistant turns with an end-of-sequence
    token followed by ``<|assistant|>``. An open assistant turn is appended
    so the model continues from there.

    :param messages: OpenAI-style chat messages to render.
    :return: ChatFormatterResponse holding the rendered prompt.
    """
    role_prefixes = dict(user="<|prompter|>", assistant="</s>\n<|assistant|>")
    separator = " "
    sys_text = """<|system|>{system_message}</s>""".format(
        system_message=_get_system_message(messages)
    )
    turns = _map_roles(messages, role_prefixes)
    # Open assistant turn cues the model to generate the reply.
    turns.append((role_prefixes["assistant"], None))
    return ChatFormatterResponse(
        prompt=_format_no_colon_single(sys_text, turns, separator)
    )
587+
560588
@register_chat_format("chatml")
561589
def format_chatml(
562590
messages: List[llama_types.ChatCompletionRequestMessage],
@@ -573,6 +601,21 @@ def format_chatml(
573601
_prompt = _format_chatml(system_message, _messages, _sep)
574602
return ChatFormatterResponse(prompt=_prompt, stop=_sep)
575603

604+
@register_chat_format("openchat")
def format_openchat(
    messages: List[llama_types.ChatCompletionRequestMessage],
    **kwargs: Any,
) -> ChatFormatterResponse:
    """Render a conversation in the OpenChat "GPT4 Correct" format.

    Turns are delimited by ``<|end_of_turn|>``, which is also returned as
    the stop token so generation halts at the end of the assistant's reply.

    :param messages: OpenAI-style chat messages to render.
    :return: ChatFormatterResponse with the prompt and stop token.
    """
    end_of_turn = "<|end_of_turn|>"
    sys_text = "{system_message}<|end_of_turn|>".format(
        system_message=_get_system_message(messages)
    )
    role_prefixes = dict(
        user="GPT4 Correct User: ",
        assistant="<|end_of_turn|>GPT4 Correct Assistant: ",
    )
    turns = _map_roles(messages, role_prefixes)
    # Open assistant turn cues the model to generate the reply.
    turns.append((role_prefixes["assistant"], None))
    prompt = _format_chatml(sys_text, turns, end_of_turn)
    return ChatFormatterResponse(prompt=prompt, stop=end_of_turn)
618+
576619

577620
@register_chat_completion_handler("functionary")
578621
def functionary_chat_handler(

0 commit comments

Comments
 (0)