Skip to content

Commit

Permalink
fix: do not add BOS (with latest llama-cpp-python version)
Browse files Browse the repository at this point in the history
  • Loading branch information
jaluma committed Jul 30, 2024
1 parent 452a045 commit 8699644
Show file tree
Hide file tree
Showing 2 changed files with 6 additions and 7 deletions.
7 changes: 3 additions & 4 deletions private_gpt/components/llm/prompt_helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -169,7 +169,7 @@ class Llama3PromptStyle(AbstractPromptStyle):
"""

def _messages_to_prompt(self, messages: Sequence[ChatMessage]) -> str:
prompt = self.BOS
prompt = ""
has_system_message = False

for i, message in enumerate(messages):
Expand All @@ -189,8 +189,7 @@ def _messages_to_prompt(self, messages: Sequence[ChatMessage]) -> str:
# Add default system prompt if no system message was provided
if not has_system_message:
prompt = (
f"{self.BOS}{self.B_SYS}\n\n{self.DEFAULT_SYSTEM_PROMPT}{self.E_SYS}"
+ prompt[len(self.BOS) :]
f"{self.B_SYS}\n\n{self.DEFAULT_SYSTEM_PROMPT}{self.E_SYS}" + prompt
)

# TODO: Implement tool handling logic
Expand All @@ -199,7 +198,7 @@ def _messages_to_prompt(self, messages: Sequence[ChatMessage]) -> str:

def _completion_to_prompt(self, completion: str) -> str:
return (
f"{self.BOS}{self.B_SYS}\n\n{self.DEFAULT_SYSTEM_PROMPT}{self.E_SYS}"
f"{self.B_SYS}\n\n{self.DEFAULT_SYSTEM_PROMPT}{self.E_SYS}"
f"{self.B_INST}user{self.E_INST}\n\n{completion.strip()}{self.EOT}"
f"{self.ASSISTANT_INST}\n\n"
)
Expand Down
6 changes: 3 additions & 3 deletions tests/test_prompt_helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,7 @@ def test_llama3_prompt_style_format():
]

expected_prompt = (
"<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n"
"<|start_header_id|>system<|end_header_id|>\n\n"
"You are a helpful assistant<|eot_id|>"
"<|start_header_id|>user<|end_header_id|>\n\n"
"Hello, how are you doing?<|eot_id|>"
Expand All @@ -166,7 +166,7 @@ def test_llama3_prompt_style_with_default_system():
ChatMessage(content="Hello!", role=MessageRole.USER),
]
expected = (
"<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n"
"<|start_header_id|>system<|end_header_id|>\n\n"
f"{prompt_style.DEFAULT_SYSTEM_PROMPT}<|eot_id|>"
"<|start_header_id|>user<|end_header_id|>\n\nHello!<|eot_id|>"
"<|start_header_id|>assistant<|end_header_id|>\n\n"
Expand All @@ -185,7 +185,7 @@ def test_llama3_prompt_style_with_assistant_response():
]

expected_prompt = (
"<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n"
"<|start_header_id|>system<|end_header_id|>\n\n"
"You are a helpful assistant<|eot_id|>"
"<|start_header_id|>user<|end_header_id|>\n\n"
"What is the capital of France?<|eot_id|>"
Expand Down

0 comments on commit 8699644

Please sign in to comment.