Commit cd3dc94

Merge pull request #155 from Chainlit/clement/bump-version-09-12

chore: bump version

clementsirieix authored Dec 9, 2024
2 parents 87274ae + 2141421 commit cd3dc94
Showing 7 changed files with 32 additions and 30 deletions.
3 changes: 3 additions & 0 deletions .github/workflows/CI.yml
@@ -4,6 +4,9 @@ on:
   workflow_dispatch:
   pull_request:
     branches: ["main"]
+  push:
+    branches:
+      - main
 
 permissions:
   contents: read
2 changes: 1 addition & 1 deletion examples/langchain_variable.py
@@ -32,7 +32,7 @@
 cb = lai.langchain_callback()
 
 # Returns a langchain_openai.ChatOpenAI instance.
-gpt_4o = init_chat_model(
+gpt_4o = init_chat_model(  # type: ignore
     model_provider=prompt.provider,
     **prompt.settings,
 )
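A note on the added `# type: ignore`: the provider and settings here come from a `Prompt` at runtime (`model_provider=prompt.provider`, `**prompt.settings`), which static checkers generally cannot verify against `init_chat_model`'s typed signature; the ignore silences that. A minimal sketch of the same call with static, illustrative values (the model name below is an assumption, not taken from the example):

from langchain.chat_models import init_chat_model

# Static arguments type-check cleanly; the example above needs the
# inline ignore only because its settings are unpacked at runtime.
model = init_chat_model(model="gpt-4o", model_provider="openai")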
33 changes: 15 additions & 18 deletions literalai/instrumentation/llamaindex/event_handler.py
@@ -100,7 +100,7 @@ def extract_document_info(nodes: List[NodeWithScore]):
 
 
 def build_message_dict(message: ChatMessage):
-    message_dict = {
+    message_dict: GenerationMessage = {
         "role": convert_message_role(message.role),
         "content": message.content,
     }
@@ -144,8 +144,8 @@ def extract_query(x: Union[str, QueryBundle]):
 class LiteralEventHandler(BaseEventHandler):
     """This class handles events coming from LlamaIndex."""
 
-    _client: "LiteralClient" = PrivateAttr(...)
-    _span_handler: "LiteralSpanHandler" = PrivateAttr(...)
+    _client: "LiteralClient" = PrivateAttr()
+    _span_handler: "LiteralSpanHandler" = PrivateAttr()
     runs: Dict[str, List[Step]] = {}
     streaming_run_ids: List[str] = []
     _standalone_step_id: Optional[str] = None
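On the `PrivateAttr` change: in Pydantic, passing `...` to `PrivateAttr` stores the `Ellipsis` object itself as the attribute's default rather than marking it required the way `Field(...)` does, so `PrivateAttr()` plus explicit assignment is the cleaner pattern. A standalone sketch mirroring the `object.__setattr__` assignment this handler's `__init__` uses (shown in the next hunk); the `Handler` model is illustrative:

from pydantic import BaseModel, PrivateAttr

class Handler(BaseModel):
    # No default here; the value is assigned once it is known.
    _client: str = PrivateAttr()

    def __init__(self, client: str, **data):
        super().__init__(**data)
        object.__setattr__(self, "_client", client)

print(Handler(client="literal")._client)  # -> literal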
@@ -163,21 +163,18 @@ def __init__(
         object.__setattr__(self, "_client", literal_client)
         object.__setattr__(self, "_span_handler", llama_index_span_handler)
 
-    def _convert_message(
-        self,
-        message: ChatMessage,
-    ):
+    def _convert_message(self, message: ChatMessage):
         tool_calls = message.additional_kwargs.get("tool_calls")
-        msg = GenerationMessage(
-            name=getattr(message, "name", None),
-            role=convert_message_role(message.role),
-            content="",
-        )
-
-        msg["content"] = message.content
-
-        if tool_calls:
-            msg["tool_calls"] = [tool_call.to_dict() for tool_call in tool_calls]
+        msg: GenerationMessage = {
+            "name": getattr(message, "name", None),
+            "role": convert_message_role(message.role),
+            "content": message.content,
+            "tool_calls": (
+                [tool_call.to_dict() for tool_call in tool_calls]
+                if tool_calls
+                else None
+            ),
+        }
 
         return msg
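The refactor above replaces construct-then-mutate with a single annotated dict literal, so a type checker can validate every key and the `tool_calls` branch in one place. A standalone sketch using a stand-in `GenerationMessage` TypedDict (the real type ships with literalai; the fields below are assumed for illustration):

from typing import Any, Dict, List, Optional, TypedDict

class GenerationMessage(TypedDict, total=False):  # stand-in, fields assumed
    name: Optional[str]
    role: str
    content: Optional[str]
    tool_calls: Optional[List[Dict[str, Any]]]

def convert_message(
    role: str,
    content: Optional[str],
    tool_calls: Optional[List[Dict[str, Any]]] = None,
) -> GenerationMessage:
    # One literal: keys are checked against GenerationMessage, and the
    # tool_calls branch collapses into a conditional expression.
    msg: GenerationMessage = {
        "role": role,
        "content": content,
        "tool_calls": list(tool_calls) if tool_calls else None,
    }
    return msg

print(convert_message("user", "hello"))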

@@ -238,7 +235,7 @@ def handle(self, event: BaseEvent, **kwargs) -> None:
                 thread_id=thread_id,
                 content=query,
             )
-
+        # Retrieval wraps the Embedding step in LlamaIndex
         if isinstance(event, RetrievalStartEvent):
             run = self._client.start_step(
14 changes: 8 additions & 6 deletions literalai/observability/generation.py
@@ -67,15 +67,16 @@ class BaseGeneration(Utils):
     to_dict(self) -> Dict:
         Converts the generation object to a dictionary.
     """
+
     id: Optional[str] = None
     prompt_id: Optional[str] = None
     provider: Optional[str] = None
     model: Optional[str] = None
     error: Optional[str] = None
-    settings: Optional[Dict] = Field(default_factory=dict)
-    variables: Optional[Dict] = Field(default_factory=dict)
-    tags: Optional[List[str]] = Field(default_factory=list)
-    metadata: Optional[Dict] = Field(default_factory=dict)
+    settings: Optional[Dict] = Field(default_factory=lambda: {})
+    variables: Optional[Dict] = Field(default_factory=lambda: {})
+    tags: Optional[List[str]] = Field(default_factory=lambda: [])
+    metadata: Optional[Dict] = Field(default_factory=lambda: {})
     tools: Optional[List[Dict]] = None
     token_count: Optional[int] = None
     input_token_count: Optional[int] = None
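Behavior is identical either way: `default_factory` is called per instance, so `lambda: {}` and `dict` both yield a fresh container each time. The likely motivation for the lambda spelling (an assumption, the commit does not say) is that type checkers infer a precise return type from the lambda, where the bare `dict`/`list` constructor can be flagged as too generic for an `Optional[...]` field. A minimal sketch:

from typing import Dict, List, Optional
from pydantic import BaseModel, Field

class Example(BaseModel):  # illustrative model, not from the SDK
    settings: Optional[Dict] = Field(default_factory=lambda: {})
    tags: Optional[List[str]] = Field(default_factory=lambda: [])

a, b = Example(), Example()
a.tags.append("x")
print(a.tags, b.tags)  # ['x'] [] -- each instance gets its own containers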
@@ -129,6 +130,7 @@ class CompletionGeneration(BaseGeneration, Utils):
         completion (Optional[str]): The generated completion text.
         type (GenerationType): The type of generation, which is set to GenerationType.COMPLETION.
     """
+
     prompt: Optional[str] = None
     completion: Optional[str] = None
     type = GenerationType.COMPLETION
@@ -177,8 +179,9 @@ class ChatGeneration(BaseGeneration, Utils):
         message_completion (Optional[GenerationMessage]): The completion message of the chat generation.
         type (GenerationType): The type of generation, which is set to GenerationType.CHAT.
     """
+
     type = GenerationType.CHAT
-    messages: Optional[List[GenerationMessage]] = Field(default_factory=list)
+    messages: Optional[List[GenerationMessage]] = Field(default_factory=lambda: [])
     message_completion: Optional[GenerationMessage] = None
 
     def to_dict(self):
@@ -213,4 +216,3 @@ def from_dict(self, generation_dict: Dict):
             messages=generation_dict.get("messages", []),
             message_completion=generation_dict.get("messageCompletion"),
         )
-
6 changes: 3 additions & 3 deletions literalai/prompt_engineering/prompt.py
@@ -67,7 +67,7 @@ class Prompt(Utils):
     Attributes
     ----------
-    template_messages : List[GenerationMessage]
+    template_messages : List[GenerationMessage]
        The messages that make up the prompt. Messages can be of type `text` or `image`.
        Messages can reference variables.
    variables : List[PromptVariable]
@@ -214,9 +214,9 @@ def to_langchain_chat_prompt_template(self, additional_messages=[]):
 
         class CustomChatPromptTemplate(ChatPromptTemplate):
             orig_messages: Optional[List[GenerationMessage]] = Field(
-                default_factory=list
+                default_factory=lambda: []
             )
-            default_vars: Optional[Dict] = Field(default_factory=dict)
+            default_vars: Optional[Dict] = Field(default_factory=lambda: {})
             prompt_id: Optional[str] = None
 
             def format_messages(self, **kwargs: Any) -> List[BaseMessage]:
2 changes: 1 addition & 1 deletion literalai/version.py
@@ -1 +1 @@
-__version__ = "0.1.102"
+__version__ = "0.1.103"
2 changes: 1 addition & 1 deletion setup.py
@@ -2,7 +2,7 @@
 
 setup(
     name="literalai",
-    version="0.1.102",  # update version in literalai/version.py
+    version="0.1.103",  # update version in literalai/version.py
     description="An SDK for observability in Python applications",
     long_description=open("README.md").read(),
     long_description_content_type="text/markdown",
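The inline comment is a reminder that the version string lives in two files. A hedged sketch of one common way to single-source it (this is not how the repository's setup.py is actually written):

from setuptools import setup

# Execute literalai/version.py so __version__ only needs bumping there.
version_ns: dict = {}
with open("literalai/version.py") as f:
    exec(f.read(), version_ns)

setup(
    name="literalai",
    version=version_ns["__version__"],
)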
