Fix roles in chat interface
aresnow1 committed Jan 30, 2024
1 parent 5dad18a · commit 39aa2a6
Showing 3 changed files with 11 additions and 2 deletions.

xinference/core/chat_interface.py (8 additions, 1 deletion)
@@ -98,9 +98,16 @@ def flatten(matrix: List[List[str]]) -> List[str]:
             return flat_list

         def to_chat(lst: List[str]) -> List[ChatCompletionMessage]:
+            from ..model.llm import BUILTIN_LLM_PROMPT_STYLE
+
             res = []
+            prompt_style = BUILTIN_LLM_PROMPT_STYLE.get(self.model_name)
+            if prompt_style is None:
+                roles = ["assistant", "user"]
+            else:
+                roles = prompt_style.roles
             for i in range(len(lst)):
-                role = "assistant" if i % 2 == 1 else "user"
+                role = roles[0] if i % 2 == 1 else roles[1]
                 res.append(ChatCompletionMessage(role=role, content=lst[i]))
             return res

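Below is a minimal, self-contained sketch of the behavior this hunk introduces, using a simplified stand-in for the prompt style and plain dicts in place of ChatCompletionMessage (both are illustrative, not the real xinference types): the flattened gradio history alternates user and assistant turns, and the role labels now come from the model's registered prompt style, falling back to generic names when none is found.

    from dataclasses import dataclass, field
    from typing import Dict, List, Optional


    @dataclass
    class PromptStyle:
        # Stand-in for the object looked up in BUILTIN_LLM_PROMPT_STYLE;
        # only the `roles` attribute matters here.
        roles: List[str] = field(default_factory=lambda: ["assistant", "user"])


    def to_chat(lst: List[str], prompt_style: Optional[PromptStyle]) -> List[Dict[str, str]]:
        # Fall back to generic role names when the model has no registered prompt style.
        roles = ["assistant", "user"] if prompt_style is None else prompt_style.roles
        res = []
        for i, content in enumerate(lst):
            # Even indices are the user's turns, odd indices the model's replies.
            role = roles[0] if i % 2 == 1 else roles[1]
            res.append({"role": role, "content": content})
        return res


    print(to_chat(["Hi there", "Hello! How can I help?"], None))
    # [{'role': 'user', 'content': 'Hi there'},
    #  {'role': 'assistant', 'content': 'Hello! How can I help?'}]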

xinference/model/llm/core.py (2 additions, 0 deletions)
@@ -135,6 +135,8 @@ def to_dict(self):
             "model_description": self._llm_family.model_description,
             "model_format": self._llm_spec.model_format,
             "model_size_in_billions": self._llm_spec.model_size_in_billions,
+            "model_family": self._llm_family.model_family
+            or self._llm_family.model_name,
             "quantization": self._quantization,
             "model_hub": self._llm_spec.model_hub,
             "revision": self._llm_spec.model_revision,
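The `model_family` entry added above uses a simple `or` fallback: report the family when one is set, otherwise fall back to the model name itself. A small sketch of just that pattern, with an illustrative stand-in class rather than the actual LLM family and spec objects:

    from dataclasses import dataclass
    from typing import Optional


    @dataclass
    class FamilyInfo:
        # Illustrative stand-in; the real fields live on the LLM family object.
        model_name: str
        model_family: Optional[str] = None


    def reported_family(info: FamilyInfo) -> str:
        # `x or y` yields y when x is None (or empty), matching the diff above.
        return info.model_family or info.model_name


    print(reported_family(FamilyInfo("my-custom-llama", "llama-2-chat")))  # -> llama-2-chat
    print(reported_family(FamilyInfo("llama-2-chat")))                     # -> llama-2-chat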

xinference/web/ui/src/scenes/running_models/index.js (1 addition, 1 deletion)
@@ -193,7 +193,7 @@ const RunningModels = () => {
         },
         body: JSON.stringify({
           model_type: row.model_type,
-          model_name: row.model_name,
+          model_name: row.model_family,
           model_size_in_billions: row.model_size_in_billions,
           model_format: row.model_format,
           quantization: row.quantization,
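Taken together with the core.py change, the web UI change appears to rely on the `model_family` field that `to_dict` now reports for each running model: the request body built in running_models/index.js sends that family as `model_name` instead of the model's own name. A hedged Python sketch of the resulting payload, with every field value invented for illustration:

    import json

    # Example row as the running-models page might now receive it;
    # the concrete values here are made up.
    row = {
        "model_type": "LLM",
        "model_name": "my-custom-llama",    # name of the running instance
        "model_family": "llama-2-chat",     # family reported by to_dict above
        "model_size_in_billions": 7,
        "model_format": "pytorch",
        "quantization": "none",
    }

    # Equivalent of the JSON.stringify(...) body in the JS diff above.
    payload = json.dumps({
        "model_type": row["model_type"],
        "model_name": row["model_family"],  # was row["model_name"] before this commit
        "model_size_in_billions": row["model_size_in_billions"],
        "model_format": row["model_format"],
        "quantization": row["quantization"],
    })
    print(payload)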
