chore: add py example #2250
marek-mihok committed Feb 12, 2024
1 parent 06b006a commit 7ed9ae3
Showing 10 changed files with 89 additions and 4 deletions.
47 changes: 47 additions & 0 deletions py/examples/chatbot_events_suggestions.py
@@ -0,0 +1,47 @@
# Chatbot / Events / Suggestions
# Use prompt suggestions to simplify user interaction.
# #chatbot #events #suggestions
# ---
from h2o_wave import main, app, Q, ui, data

first_suggestion = "I need more information about this."
second_suggestion = "I have another problem."
third_suggestion = "The information you provided is not correct."
fourth_suggestion = "I got this, thank you!"

@app('/demo')
async def serve(q: Q):
if not q.client.initialized:
q.page['example'] = ui.chatbot_card(
box='1 1 5 5',
data=data(fields='content from_user', t='list', rows=[
['Hi, my files are not loaded after plugging my USB in.', True],
['Hi, I am glad I can assist you today! Have you tried turning your PC off and on again?', False]
]),
name='chatbot',
events=['prompt_suggestion'],
prompt_suggestions=[
ui.chat_prompt_suggestion('sug1', label=first_suggestion),
ui.chat_prompt_suggestion('sug2', label=second_suggestion),
ui.chat_prompt_suggestion('sug3', label=third_suggestion),
ui.chat_prompt_suggestion('sug4', label=fourth_suggestion),
],
disabled=True
)
q.client.initialized = True

# Handle prompt_suggestion event.
elif q.events.chatbot and q.events.chatbot.prompt_suggestion:
# Append user message based on the suggestion.
if q.events.chatbot.prompt_suggestion == 'sug1':
q.page['example'].data += [first_suggestion, True]
elif q.events.chatbot.prompt_suggestion == 'sug2':
q.page['example'].data += [second_suggestion, True]
elif q.events.chatbot.prompt_suggestion == 'sug3':
q.page['example'].data += [third_suggestion, True]
elif q.events.chatbot.prompt_suggestion == 'sug4':
q.page['example'].data += [fourth_suggestion, True]
# Append bot response.
q.page['example'].data += ['I am a fake chatbot. Sorry, I cannot help you.', False]

await q.page.save()
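Note: the example keeps the chatbot's free-text input locked with disabled=True, so users can only respond through the predefined suggestions. A minimal sketch of re-opening the input later, assuming the usual Wave pattern of updating card attributes in place (the trigger condition below is purely illustrative, not part of this commit):

# Hypothetical extension: once the user signals they are done ('sug4'),
# unlock the text box again by updating the card attribute in place.
if q.events.chatbot and q.events.chatbot.prompt_suggestion == 'sug4':
    q.page['example'].disabled = False
    await q.page.save()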
1 change: 1 addition & 0 deletions py/examples/tour.conf
@@ -36,6 +36,7 @@ chatbot_stream.py
chatbot_events_stop.py
chatbot_events_scroll.py
chatbot_events_feedback.py
chatbot_events_suggestions.py
form.py
form_visibility.py
text.py
10 changes: 10 additions & 0 deletions py/h2o_lightwave/h2o_lightwave/types.py
@@ -8296,6 +8296,7 @@ def __init__(
events: Optional[List[str]] = None,
generating: Optional[bool] = None,
prompt_suggestions: Optional[List[ChatPromptSuggestion]] = None,
disabled: Optional[bool] = None,
commands: Optional[List[Command]] = None,
):
_guard_scalar('ChatbotCard.box', box, (str,), False, False, False)
@@ -8304,6 +8305,7 @@
_guard_vector('ChatbotCard.events', events, (str,), False, True, False)
_guard_scalar('ChatbotCard.generating', generating, (bool,), False, True, False)
_guard_vector('ChatbotCard.prompt_suggestions', prompt_suggestions, (ChatPromptSuggestion,), False, True, False)
_guard_scalar('ChatbotCard.disabled', disabled, (bool,), False, True, False)
_guard_vector('ChatbotCard.commands', commands, (Command,), False, True, False)
self.box = box
"""A string indicating how to place this component on the page."""
@@ -8319,6 +8321,8 @@ def __init__(
"""True to show a button to stop the text generation. Defaults to False."""
self.prompt_suggestions = prompt_suggestions
"""Clickable prompt suggestions shown below the last response."""
self.disabled = disabled
"""True if the user input should be disabled."""
self.commands = commands
"""Contextual menu commands for this component."""

@@ -8330,6 +8334,7 @@ def dump(self) -> Dict:
_guard_vector('ChatbotCard.events', self.events, (str,), False, True, False)
_guard_scalar('ChatbotCard.generating', self.generating, (bool,), False, True, False)
_guard_vector('ChatbotCard.prompt_suggestions', self.prompt_suggestions, (ChatPromptSuggestion,), False, True, False)
_guard_scalar('ChatbotCard.disabled', self.disabled, (bool,), False, True, False)
_guard_vector('ChatbotCard.commands', self.commands, (Command,), False, True, False)
return _dump(
view='chatbot',
@@ -8340,6 +8345,7 @@ def dump(self) -> Dict:
events=self.events,
generating=self.generating,
prompt_suggestions=None if self.prompt_suggestions is None else [__e.dump() for __e in self.prompt_suggestions],
disabled=self.disabled,
commands=None if self.commands is None else [__e.dump() for __e in self.commands],
)

@@ -8359,6 +8365,8 @@ def load(__d: Dict) -> 'ChatbotCard':
_guard_scalar('ChatbotCard.generating', __d_generating, (bool,), False, True, False)
__d_prompt_suggestions: Any = __d.get('prompt_suggestions')
_guard_vector('ChatbotCard.prompt_suggestions', __d_prompt_suggestions, (dict,), False, True, False)
__d_disabled: Any = __d.get('disabled')
_guard_scalar('ChatbotCard.disabled', __d_disabled, (bool,), False, True, False)
__d_commands: Any = __d.get('commands')
_guard_vector('ChatbotCard.commands', __d_commands, (dict,), False, True, False)
box: str = __d_box
@@ -8368,6 +8376,7 @@ def load(__d: Dict) -> 'ChatbotCard':
events: Optional[List[str]] = __d_events
generating: Optional[bool] = __d_generating
prompt_suggestions: Optional[List[ChatPromptSuggestion]] = None if __d_prompt_suggestions is None else [ChatPromptSuggestion.load(__e) for __e in __d_prompt_suggestions]
disabled: Optional[bool] = __d_disabled
commands: Optional[List[Command]] = None if __d_commands is None else [Command.load(__e) for __e in __d_commands]
return ChatbotCard(
box,
@@ -8377,6 +8386,7 @@ def load(__d: Dict) -> 'ChatbotCard':
events,
generating,
prompt_suggestions,
disabled,
commands,
)

3 changes: 3 additions & 0 deletions py/h2o_lightwave/h2o_lightwave/ui.py
@@ -2904,6 +2904,7 @@ def chatbot_card(
events: Optional[List[str]] = None,
generating: Optional[bool] = None,
prompt_suggestions: Optional[List[ChatPromptSuggestion]] = None,
disabled: Optional[bool] = None,
commands: Optional[List[Command]] = None,
) -> ChatbotCard:
"""Create a chatbot card to allow getting prompts from users and providing them with LLM generated answers.
@@ -2916,6 +2917,7 @@ def chatbot_card(
events: The events to capture on this chatbot. One of 'stop' | 'scroll_up' | 'feedback' | 'prompt_suggestion'.
generating: True to show a button to stop the text generation. Defaults to False.
prompt_suggestions: Clickable prompt suggestions shown below the last response.
disabled: True if the user input should be disabled.
commands: Contextual menu commands for this component.
Returns:
A `h2o_wave.types.ChatbotCard` instance.
@@ -2928,6 +2930,7 @@ def chatbot_card(
events,
generating,
prompt_suggestions,
disabled,
commands,
)

10 changes: 10 additions & 0 deletions py/h2o_wave/h2o_wave/types.py
@@ -8296,6 +8296,7 @@ def __init__(
events: Optional[List[str]] = None,
generating: Optional[bool] = None,
prompt_suggestions: Optional[List[ChatPromptSuggestion]] = None,
disabled: Optional[bool] = None,
commands: Optional[List[Command]] = None,
):
_guard_scalar('ChatbotCard.box', box, (str,), False, False, False)
@@ -8304,6 +8305,7 @@
_guard_vector('ChatbotCard.events', events, (str,), False, True, False)
_guard_scalar('ChatbotCard.generating', generating, (bool,), False, True, False)
_guard_vector('ChatbotCard.prompt_suggestions', prompt_suggestions, (ChatPromptSuggestion,), False, True, False)
_guard_scalar('ChatbotCard.disabled', disabled, (bool,), False, True, False)
_guard_vector('ChatbotCard.commands', commands, (Command,), False, True, False)
self.box = box
"""A string indicating how to place this component on the page."""
@@ -8319,6 +8321,8 @@ def __init__(
"""True to show a button to stop the text generation. Defaults to False."""
self.prompt_suggestions = prompt_suggestions
"""Clickable prompt suggestions shown below the last response."""
self.disabled = disabled
"""True if the user input should be disabled."""
self.commands = commands
"""Contextual menu commands for this component."""

@@ -8330,6 +8334,7 @@ def dump(self) -> Dict:
_guard_vector('ChatbotCard.events', self.events, (str,), False, True, False)
_guard_scalar('ChatbotCard.generating', self.generating, (bool,), False, True, False)
_guard_vector('ChatbotCard.prompt_suggestions', self.prompt_suggestions, (ChatPromptSuggestion,), False, True, False)
_guard_scalar('ChatbotCard.disabled', self.disabled, (bool,), False, True, False)
_guard_vector('ChatbotCard.commands', self.commands, (Command,), False, True, False)
return _dump(
view='chatbot',
@@ -8340,6 +8345,7 @@ def dump(self) -> Dict:
events=self.events,
generating=self.generating,
prompt_suggestions=None if self.prompt_suggestions is None else [__e.dump() for __e in self.prompt_suggestions],
disabled=self.disabled,
commands=None if self.commands is None else [__e.dump() for __e in self.commands],
)

@@ -8359,6 +8365,8 @@ def load(__d: Dict) -> 'ChatbotCard':
_guard_scalar('ChatbotCard.generating', __d_generating, (bool,), False, True, False)
__d_prompt_suggestions: Any = __d.get('prompt_suggestions')
_guard_vector('ChatbotCard.prompt_suggestions', __d_prompt_suggestions, (dict,), False, True, False)
__d_disabled: Any = __d.get('disabled')
_guard_scalar('ChatbotCard.disabled', __d_disabled, (bool,), False, True, False)
__d_commands: Any = __d.get('commands')
_guard_vector('ChatbotCard.commands', __d_commands, (dict,), False, True, False)
box: str = __d_box
@@ -8368,6 +8376,7 @@ def load(__d: Dict) -> 'ChatbotCard':
events: Optional[List[str]] = __d_events
generating: Optional[bool] = __d_generating
prompt_suggestions: Optional[List[ChatPromptSuggestion]] = None if __d_prompt_suggestions is None else [ChatPromptSuggestion.load(__e) for __e in __d_prompt_suggestions]
disabled: Optional[bool] = __d_disabled
commands: Optional[List[Command]] = None if __d_commands is None else [Command.load(__e) for __e in __d_commands]
return ChatbotCard(
box,
@@ -8377,6 +8386,7 @@ def load(__d: Dict) -> 'ChatbotCard':
events,
generating,
prompt_suggestions,
disabled,
commands,
)

3 changes: 3 additions & 0 deletions py/h2o_wave/h2o_wave/ui.py
@@ -2904,6 +2904,7 @@ def chatbot_card(
events: Optional[List[str]] = None,
generating: Optional[bool] = None,
prompt_suggestions: Optional[List[ChatPromptSuggestion]] = None,
disabled: Optional[bool] = None,
commands: Optional[List[Command]] = None,
) -> ChatbotCard:
"""Create a chatbot card to allow getting prompts from users and providing them with LLM generated answers.
@@ -2916,6 +2917,7 @@ def chatbot_card(
events: The events to capture on this chatbot. One of 'stop' | 'scroll_up' | 'feedback' | 'prompt_suggestion'.
generating: True to show a button to stop the text generation. Defaults to False.
prompt_suggestions: Clickable prompt suggestions shown below the last response.
disabled: True if the user input should be disabled.
commands: Contextual menu commands for this component.
Returns:
A `h2o_wave.types.ChatbotCard` instance.
@@ -2928,6 +2930,7 @@ def chatbot_card(
events,
generating,
prompt_suggestions,
disabled,
commands,
)
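The docstring above lists the new disabled argument alongside the existing chatbot_card parameters. For reference, a minimal construction mirroring the added example could look like the sketch below (box, card name, data and suggestion values are illustrative, not part of this commit; imports assumed as in the example: from h2o_wave import ui, data):

q.page['chat'] = ui.chatbot_card(
    box='1 1 4 6',
    name='chatbot',
    data=data(fields='content from_user', t='list'),
    events=['prompt_suggestion'],
    prompt_suggestions=[ui.chat_prompt_suggestion('sug1', label='Tell me more.')],
    disabled=True,  # free-text input stays locked until re-enabled
)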

4 changes: 4 additions & 0 deletions r/R/ui.R
@@ -3366,6 +3366,7 @@ ui_chat_prompt_suggestion <- function(
#' @param events The events to capture on this chatbot. One of 'stop' | 'scroll_up' | 'feedback' | 'prompt_suggestion'.
#' @param generating True to show a button to stop the text generation. Defaults to False.
#' @param prompt_suggestions Clickable prompt suggestions shown below the last response.
#' @param disabled True if the user input should be disabled.
#' @param commands Contextual menu commands for this component.
#' @return A ChatbotCard instance.
#' @export
@@ -3377,6 +3378,7 @@ ui_chatbot_card <- function(
events = NULL,
generating = NULL,
prompt_suggestions = NULL,
disabled = NULL,
commands = NULL) {
.guard_scalar("box", "character", box)
.guard_scalar("name", "character", name)
@@ -3385,6 +3387,7 @@
.guard_vector("events", "character", events)
.guard_scalar("generating", "logical", generating)
.guard_vector("prompt_suggestions", "WaveChatPromptSuggestion", prompt_suggestions)
.guard_scalar("disabled", "logical", disabled)
.guard_vector("commands", "WaveCommand", commands)
.o <- list(
box=box,
@@ -3394,6 +3397,7 @@
events=events,
generating=generating,
prompt_suggestions=prompt_suggestions,
disabled=disabled,
commands=commands,
view='chatbot')
class(.o) <- append(class(.o), c(.wave_obj, "WaveChatbotCard"))
@@ -1136,24 +1136,26 @@
<option name="Python" value="true"/>
</context>
</template>
<template name="w_full_chatbot" value="ui.chatbot(name='$name$',data=$data$,placeholder='$placeholder$',generating=$generating$,events=[&#10; $events$ &#10;],prev_items=[&#10; $prev_items$ &#10;],prompt_suggestions=[&#10; $prompt_suggestions$ &#10;]),$END$" description="Create Wave Chatbot with full attributes." toReformat="true" toShortenFQNames="true">
<template name="w_full_chatbot" value="ui.chatbot(name='$name$',data=$data$,placeholder='$placeholder$',generating=$generating$,disabled=$disabled$,events=[&#10; $events$ &#10;],prev_items=[&#10; $prev_items$ &#10;],prompt_suggestions=[&#10; $prompt_suggestions$ &#10;]),$END$" description="Create Wave Chatbot with full attributes." toReformat="true" toShortenFQNames="true">
<variable name="name" expression="" defaultValue="" alwaysStopAt="true"/>
<variable name="data" expression="" defaultValue="" alwaysStopAt="true"/>
<variable name="placeholder" expression="" defaultValue="" alwaysStopAt="true"/>
<variable name="generating" expression="" defaultValue="&quot;False&quot;" alwaysStopAt="true"/>
<variable name="disabled" expression="" defaultValue="&quot;False&quot;" alwaysStopAt="true"/>
<variable name="events" expression="" defaultValue="" alwaysStopAt="true"/>
<variable name="prev_items" expression="" defaultValue="" alwaysStopAt="true"/>
<variable name="prompt_suggestions" expression="" defaultValue="" alwaysStopAt="true"/>
<context>
<option name="Python" value="true"/>
</context>
</template>
<template name="w_full_chatbot_card" value="ui.chatbot_card(box='$box$',name='$name$',data=$data$,placeholder='$placeholder$',generating=$generating$,events=[&#10; $events$ &#10;],prompt_suggestions=[&#10; $prompt_suggestions$ &#10;],commands=[&#10; $commands$ &#10;])$END$" description="Create Wave ChatbotCard with full attributes." toReformat="true" toShortenFQNames="true">
<template name="w_full_chatbot_card" value="ui.chatbot_card(box='$box$',name='$name$',data=$data$,placeholder='$placeholder$',generating=$generating$,disabled=$disabled$,events=[&#10; $events$ &#10;],prompt_suggestions=[&#10; $prompt_suggestions$ &#10;],commands=[&#10; $commands$ &#10;])$END$" description="Create Wave ChatbotCard with full attributes." toReformat="true" toShortenFQNames="true">
<variable name="box" expression="" defaultValue="" alwaysStopAt="true"/>
<variable name="name" expression="" defaultValue="" alwaysStopAt="true"/>
<variable name="data" expression="" defaultValue="" alwaysStopAt="true"/>
<variable name="placeholder" expression="" defaultValue="" alwaysStopAt="true"/>
<variable name="generating" expression="" defaultValue="&quot;False&quot;" alwaysStopAt="true"/>
<variable name="disabled" expression="" defaultValue="&quot;False&quot;" alwaysStopAt="true"/>
<variable name="events" expression="" defaultValue="" alwaysStopAt="true"/>
<variable name="prompt_suggestions" expression="" defaultValue="" alwaysStopAt="true"/>
<variable name="commands" expression="" defaultValue="" alwaysStopAt="true"/>
4 changes: 2 additions & 2 deletions tools/vscode-extension/component-snippets.json
@@ -1073,14 +1073,14 @@
"Wave Full Chatbot": {
"prefix": "w_full_chatbot",
"body": [
"ui.chatbot(name='$1', data=$2, placeholder='$3', generating=${4:False}, events=[\n\t\t$5\t\t\n], prev_items=[\n\t\t$6\t\t\n], prompt_suggestions=[\n\t\t$7\t\t\n]),$0"
"ui.chatbot(name='$1', data=$2, placeholder='$3', generating=${4:False}, disabled=${5:False}, events=[\n\t\t$6\t\t\n], prev_items=[\n\t\t$7\t\t\n], prompt_suggestions=[\n\t\t$8\t\t\n]),$0"
],
"description": "Create a full Wave Chatbot."
},
"Wave Full ChatbotCard": {
"prefix": "w_full_chatbot_card",
"body": [
"ui.chatbot_card(box='$1', name='$2', data=$3, placeholder='$4', generating=${5:False}, events=[\n\t\t$6\t\t\n], prompt_suggestions=[\n\t\t$7\t\t\n], commands=[\n\t\t$8\t\t\n])$0"
"ui.chatbot_card(box='$1', name='$2', data=$3, placeholder='$4', generating=${5:False}, disabled=${6:False}, events=[\n\t\t$7\t\t\n], prompt_suggestions=[\n\t\t$8\t\t\n], commands=[\n\t\t$9\t\t\n])$0"
],
"description": "Create a full Wave ChatbotCard."
},
5 changes: 5 additions & 0 deletions ui/src/chatbot.tsx
@@ -123,6 +123,8 @@ export interface Chatbot {
prev_items?: ChatbotMessage[]
/** Clickable prompt suggestions shown below the last response. */
prompt_suggestions?: ChatPromptSuggestion[]
/** True if the user input should be disabled. */
disabled?: B
}

const processData = (data: Rec) => unpack<ChatbotMessage[]>(data).map(({ content, from_user }) => ({ content, from_user }))
@@ -260,6 +262,7 @@ export const XChatbot = (props: Chatbot) => {
multiline
autoAdjustHeight
placeholder={props.placeholder || 'Type your message'}
disabled={props.disabled}
styles={{
root: { flexGrow: 1 },
fieldGroup: { minHeight: INPUT_HEIGHT },
@@ -302,6 +305,8 @@ interface State {
generating?: B
/** Clickable prompt suggestions shown below the last response. */
prompt_suggestions?: ChatPromptSuggestion[]
/** True if the user input should be disabled. */
disabled?: B
}

export const View = bond(({ name, state, changed }: Model<State>) => {