Skip to content

Commit

Permalink
Merge pull request #124 from holoviz-topics/langchain_lcel
Browse files Browse the repository at this point in the history
Langchain lcel
  • Loading branch information
ahuang11 authored Jan 25, 2024
2 parents d75d94a + 7e26bc3 commit d0d719f
Show file tree
Hide file tree
Showing 6 changed files with 64 additions and 0 deletions.
Binary file modified docs/assets/thumbnails/langchain_lcel.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file modified docs/assets/videos/langchain_lcel.mp4
Binary file not shown.
Binary file not shown.
56 changes: 56 additions & 0 deletions docs/examples/langchain/langchain_streaming_lcel_with_memory.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
"""
Demonstrates how to use the `ChatInterface` to create a chatbot using
[LangChain Expression Language](https://python.langchain.com/docs/expression_language/) (LCEL)
with streaming and memory.
"""

from operator import itemgetter

import panel as pn
from langchain.memory import ConversationSummaryBufferMemory
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables import RunnableLambda, RunnablePassthrough
from langchain_openai import ChatOpenAI

pn.extension()  # load Panel's front-end resources before building the app

# Persona instruction sent as the system message on every turn.
SYSTEM_PROMPT = "Try to be a silly comedian."


async def callback(contents, user, instance):
    """Stream the chain's reply token by token, then persist the exchange.

    Yields the progressively accumulated text so the chat UI renders the
    response live; after streaming finishes, the completed turn is saved
    to conversation memory for use in later prompts.
    """
    chain_inputs = {"input": contents}
    response = ""
    async for chunk in chain.astream(chain_inputs):
        response += chunk
        yield response
    # Record the full exchange so future turns can reference it.
    memory.save_context(chain_inputs, {"output": response})


# LLM used both for generating replies and (below) for summarizing history.
model = ChatOpenAI(model="gpt-3.5-turbo")
# Keeps recent turns verbatim and condenses older ones with the same LLM;
# return_messages=True yields message objects for MessagesPlaceholder.
memory = ConversationSummaryBufferMemory(return_messages=True, llm=model)
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", SYSTEM_PROMPT),
        # Prior conversation turns are injected here under the "history" key.
        MessagesPlaceholder(variable_name="history"),
        ("human", "{input}"),
    ]
)
output_parser = StrOutputParser()
# LCEL pipeline: load memory into "history", render the prompt, call the
# model, and parse the output into a plain string suitable for streaming.
chain = (
    RunnablePassthrough.assign(
        history=RunnableLambda(memory.load_memory_variables) | itemgetter("history")
    )
    | prompt
    | model
    | output_parser
)

# Chat UI wired to the streaming callback; the seeded message invites input.
chat_interface = pn.chat.ChatInterface(
    pn.chat.ChatMessage(
        "Offer a topic and ChatGPT will try to be funny!", user="System"
    ),
    callback=callback,
    callback_user="ChatGPT",
)
chat_interface.servable()
8 changes: 8 additions & 0 deletions tests/ui/user.py
Original file line number Diff line number Diff line change
Expand Up @@ -175,6 +175,14 @@ def langchain_lcel(page: Page):
page.wait_for_timeout(5000)


def langchain_streaming_lcel_with_memory(page: Page):
    """Exercise the memory example: seed a fact, then ask the bot to recall it."""
    ui = ChatInterface(page)
    ui.send("Remember this number: 8. Be concise.")
    # Allow time for the streamed LLM response to complete.
    page.wait_for_timeout(10000)
    ui.send("What number did I just ask you to remember?")
    page.wait_for_timeout(10000)


def mistral_and_llama(page: Page):
chat = ChatInterface(page)
chat.send("What do you think about HoloViz in a single sentence?")
Expand Down

0 comments on commit d0d719f

Please sign in to comment.