Added system_prompt wrapper
NeonBohdan committed Jan 15, 2024
1 parent 00aad75 commit d1202a8
Showing 1 changed file with 6 additions and 0 deletions.
6 changes: 6 additions & 0 deletions neon_llm_gemini/gemini.py
@@ -142,6 +142,8 @@ def _assemble_prompt(self, message: str, chat_history: List[List[str]], persona:
         # Context N messages
         messages = []
         for role, content in chat_history[-self.context_depth:]:
+            if ((len(messages) == 0) and (role == "user")):
+                content = self._convert2instruction(content, system_prompt)
             role_gemini = self.convert_role(role)
             messages.append(Content(parts=[Part.from_text(content)], role = role_gemini))
         prompt = {
@@ -151,6 +153,10 @@ def _assemble_prompt(self, message: str, chat_history: List[List[str]], persona:

         return prompt

+    def _convert2instruction(self, content: str, system_prompt: str):
+        instruction = f"{system_prompt.strip()}\n\n{content.strip()}"
+        return instruction
+
     def _score(self, prompt: str, targets: List[str], persona: dict) -> List[float]:
         """
         Calculates logarithmic probabilities for the list of provided text sequences
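A minimal, standalone sketch of what this change does, for illustration only: the diff folds the persona/system prompt into the first user message of the chat history, a common workaround when a chat API exposes no separate system role (that motivation is inferred here, not stated in the commit). The helper names convert2instruction and wrap_history below, and the role labels in the example, are hypothetical and not part of the repository.

    from typing import List, Tuple


    def convert2instruction(content: str, system_prompt: str) -> str:
        # Mirror of _convert2instruction above: prepend the system prompt to the
        # user's text, separated by a blank line.
        return f"{system_prompt.strip()}\n\n{content.strip()}"


    def wrap_history(chat_history: List[Tuple[str, str]],
                     system_prompt: str) -> List[Tuple[str, str]]:
        # Fold the system prompt into the first message only when that message
        # comes from the user; every later turn passes through unchanged.
        wrapped: List[Tuple[str, str]] = []
        for role, content in chat_history:
            if not wrapped and role == "user":
                content = convert2instruction(content, system_prompt)
            wrapped.append((role, content))
        return wrapped


    history = [("user", "What can you do?"), ("assistant", "I answer questions.")]
    print(wrap_history(history, "You are a concise assistant."))
    # [('user', 'You are a concise assistant.\n\nWhat can you do?'),
    #  ('assistant', 'I answer questions.')]

Because only the first history entry is rewritten, and only when it comes from the user, later turns keep their original text and the system prompt is not repeated on every message.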
