fix: add doc + try catch
desaxce committed Sep 27, 2024
1 parent 468ae23 commit 76b4e44
Showing 1 changed file with 14 additions and 3 deletions.
literalai/instrumentation/llamaindex/event_handler.py
@@ -187,7 +187,8 @@ def handle(self, event: BaseEvent, **kwargs) -> None:
thread_id = self._span_handler.get_thread_id(event.span_id)
run_id = self._span_handler.get_run_id(event.span_id)

"""The events are presented here roughly in chronological order"""
# AgentChatWithStep wraps several AgentRunStep events
# as the agent may want to perform multiple tool calls in a row.
if isinstance(event, AgentChatWithStepStartEvent) or isinstance(
event, AgentRunStepStartEvent
):
@@ -214,7 +215,12 @@ def handle(self, event: BaseEvent, **kwargs) -> None:
if isinstance(event, AgentChatWithStepEndEvent) or isinstance(
event, AgentRunStepEndEvent
):
-step = self.open_runs.pop()
+step = None  # ensure `step` is defined for the `if step:` guard below
+try:
+    step = self.open_runs.pop()
+except IndexError:
+    logging.error(
+        "[Literal] Error in LlamaIndex instrumentation: AgentRunStepEndEvent called without an open run."
+    )
if step:
step.end()

@@ -232,7 +238,8 @@ def handle(self, event: BaseEvent, **kwargs) -> None:
thread_id=thread_id,
content=query,
)
-
+
+# Retrieval wraps the Embedding step in LlamaIndex
if isinstance(event, RetrievalStartEvent):
run = self._client.start_step(
name="RAG",
@@ -290,6 +297,7 @@ def handle(self, event: BaseEvent, **kwargs) -> None:
retrieval_step.output = {"retrieved_documents": retrieved_documents}
retrieval_step.end()

+# Only event where we create LLM steps
if isinstance(event, LLMChatStartEvent):
if run_id:
self._client.step()
@@ -316,6 +324,7 @@ def handle(self, event: BaseEvent, **kwargs) -> None:
llm_step.generation = generation
llm_step.name = event.model_dict.get("model")

+# Actual creation of the event happens upon ending the event
if isinstance(event, LLMChatEndEvent):
llm_step = self.get_first_step_of_type(run_id=run_id, step_type="llm")
if not llm_step and self._standalone_step_id:
@@ -327,6 +336,8 @@ def handle(self, event: BaseEvent, **kwargs) -> None:

if llm_step and response:
chat_completion = response.raw
+
+# ChatCompletionChunk needed for chat stream methods
if isinstance(chat_completion, ChatCompletion) or isinstance(
chat_completion, ChatCompletionChunk
):
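
For context, the guarded pop added in the second hunk follows a common pattern in event-based instrumentation: start events push a run onto a stack, end events pop one, and an unmatched end event is logged rather than allowed to crash the handler. Below is a minimal, self-contained sketch of that pattern; the Run and RunTracker names are hypothetical stand-ins, not Literal AI or LlamaIndex types.

import logging
from dataclasses import dataclass, field
from typing import List, Optional

@dataclass
class Run:
    name: str

    def end(self) -> None:
        logging.info("run %s ended", self.name)

@dataclass
class RunTracker:
    open_runs: List[Run] = field(default_factory=list)

    def on_start(self, name: str) -> None:
        # A start event (e.g. AgentRunStepStartEvent) opens a run.
        self.open_runs.append(Run(name))

    def on_end(self) -> None:
        # Initialize `step` so an unmatched end event cannot hit a
        # NameError in the `if step:` guard below.
        step: Optional[Run] = None
        try:
            step = self.open_runs.pop()
        except IndexError:
            logging.error("end event received without an open run")
        if step:
            step.end()

tracker = RunTracker()
tracker.on_start("agent_step")
tracker.on_end()  # closes the open run
tracker.on_end()  # logs an error instead of raising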

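The comment in the last hunk concerns streaming: with the OpenAI client, a non-streamed chat call returns a ChatCompletion, while a streamed call yields ChatCompletionChunk objects, so instrumentation that inspects the raw response has to accept both. A rough illustration under that assumption follows; extract_model is a hypothetical helper, not part of the handler.

from typing import Union

from openai.types.chat import ChatCompletion, ChatCompletionChunk

def extract_model(raw: Union[ChatCompletion, ChatCompletionChunk]) -> str:
    # Both the full completion and each streamed chunk carry the model name,
    # so one accessor covers the stream and non-stream chat methods.
    # Note: isinstance accepts a tuple of types, which reads tighter than the
    # chained `isinstance(x, A) or isinstance(x, B)` form used in the diff.
    if isinstance(raw, (ChatCompletion, ChatCompletionChunk)):
        return raw.model
    raise TypeError(f"unexpected raw response type: {type(raw)!r}")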