feat: layout and handling #2250
marek-mihok committed Feb 12, 2024
1 parent 269c8d3 commit 06b006a
Showing 8 changed files with 226 additions and 23 deletions.
51 changes: 50 additions & 1 deletion py/h2o_lightwave/h2o_lightwave/types.py
@@ -8245,6 +8245,45 @@ def load(__d: Dict) -> 'ChatCard':
)


class ChatPromptSuggestion:
"""Create a chat prompt suggestion displayed as button below the last response in chatbot component.
"""
def __init__(
self,
name: str,
label: str,
):
_guard_scalar('ChatPromptSuggestion.name', name, (str,), True, False, False)
_guard_scalar('ChatPromptSuggestion.label', label, (str,), False, False, False)
self.name = name
"""An identifying name for this component."""
self.label = label
"""The text displayed for this suggestion."""

def dump(self) -> Dict:
"""Returns the contents of this object as a dict."""
_guard_scalar('ChatPromptSuggestion.name', self.name, (str,), True, False, False)
_guard_scalar('ChatPromptSuggestion.label', self.label, (str,), False, False, False)
return _dump(
name=self.name,
label=self.label,
)

@staticmethod
def load(__d: Dict) -> 'ChatPromptSuggestion':
"""Creates an instance of this class using the contents of a dict."""
__d_name: Any = __d.get('name')
_guard_scalar('ChatPromptSuggestion.name', __d_name, (str,), True, False, False)
__d_label: Any = __d.get('label')
_guard_scalar('ChatPromptSuggestion.label', __d_label, (str,), False, False, False)
name: str = __d_name
label: str = __d_label
return ChatPromptSuggestion(
name,
label,
)


class ChatbotCard:
"""Create a chatbot card to allow getting prompts from users and providing them with LLM generated answers.
"""
@@ -8256,13 +8295,15 @@ def __init__(
placeholder: Optional[str] = None,
events: Optional[List[str]] = None,
generating: Optional[bool] = None,
prompt_suggestions: Optional[List[ChatPromptSuggestion]] = None,
commands: Optional[List[Command]] = None,
):
_guard_scalar('ChatbotCard.box', box, (str,), False, False, False)
_guard_scalar('ChatbotCard.name', name, (str,), True, False, False)
_guard_scalar('ChatbotCard.placeholder', placeholder, (str,), False, True, False)
_guard_vector('ChatbotCard.events', events, (str,), False, True, False)
_guard_scalar('ChatbotCard.generating', generating, (bool,), False, True, False)
_guard_vector('ChatbotCard.prompt_suggestions', prompt_suggestions, (ChatPromptSuggestion,), False, True, False)
_guard_vector('ChatbotCard.commands', commands, (Command,), False, True, False)
self.box = box
"""A string indicating how to place this component on the page."""
@@ -8273,9 +8314,11 @@ def __init__(
self.placeholder = placeholder
"""Chat input box placeholder. Use for prompt examples."""
self.events = events
"""The events to capture on this chatbot. One of 'stop' | 'scroll_up' | 'feedback'."""
"""The events to capture on this chatbot. One of 'stop' | 'scroll_up' | 'feedback' | 'prompt_suggestion'."""
self.generating = generating
"""True to show a button to stop the text generation. Defaults to False."""
self.prompt_suggestions = prompt_suggestions
"""Clickable prompt suggestions shown below the last response."""
self.commands = commands
"""Contextual menu commands for this component."""

@@ -8286,6 +8329,7 @@ def dump(self) -> Dict:
_guard_scalar('ChatbotCard.placeholder', self.placeholder, (str,), False, True, False)
_guard_vector('ChatbotCard.events', self.events, (str,), False, True, False)
_guard_scalar('ChatbotCard.generating', self.generating, (bool,), False, True, False)
_guard_vector('ChatbotCard.prompt_suggestions', self.prompt_suggestions, (ChatPromptSuggestion,), False, True, False)
_guard_vector('ChatbotCard.commands', self.commands, (Command,), False, True, False)
return _dump(
view='chatbot',
Expand All @@ -8295,6 +8339,7 @@ def dump(self) -> Dict:
placeholder=self.placeholder,
events=self.events,
generating=self.generating,
prompt_suggestions=None if self.prompt_suggestions is None else [__e.dump() for __e in self.prompt_suggestions],
commands=None if self.commands is None else [__e.dump() for __e in self.commands],
)

@@ -8312,6 +8357,8 @@ def load(__d: Dict) -> 'ChatbotCard':
_guard_vector('ChatbotCard.events', __d_events, (str,), False, True, False)
__d_generating: Any = __d.get('generating')
_guard_scalar('ChatbotCard.generating', __d_generating, (bool,), False, True, False)
__d_prompt_suggestions: Any = __d.get('prompt_suggestions')
_guard_vector('ChatbotCard.prompt_suggestions', __d_prompt_suggestions, (dict,), False, True, False)
__d_commands: Any = __d.get('commands')
_guard_vector('ChatbotCard.commands', __d_commands, (dict,), False, True, False)
box: str = __d_box
@@ -8320,6 +8367,7 @@ def load(__d: Dict) -> 'ChatbotCard':
placeholder: Optional[str] = __d_placeholder
events: Optional[List[str]] = __d_events
generating: Optional[bool] = __d_generating
prompt_suggestions: Optional[List[ChatPromptSuggestion]] = None if __d_prompt_suggestions is None else [ChatPromptSuggestion.load(__e) for __e in __d_prompt_suggestions]
commands: Optional[List[Command]] = None if __d_commands is None else [Command.load(__e) for __e in __d_commands]
return ChatbotCard(
box,
Expand All @@ -8328,6 +8376,7 @@ def load(__d: Dict) -> 'ChatbotCard':
placeholder,
events,
generating,
prompt_suggestions,
commands,
)

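For orientation, here is a minimal round-trip sketch (not part of this commit) showing how the generated dump/load pair on the new class behaves; the import path is assumed from the file layout above, and the name/label values are illustrative.

from h2o_lightwave.types import ChatPromptSuggestion

# Build a suggestion, serialize it to a plain dict, then rebuild it.
suggestion = ChatPromptSuggestion(name='sug_explain', label='Explain in more detail')
serialized = suggestion.dump()                    # {'name': 'sug_explain', 'label': 'Explain in more detail'}
restored = ChatPromptSuggestion.load(serialized)  # equivalent instance reconstructed from the dict
assert (restored.name, restored.label) == (suggestion.name, suggestion.label)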
23 changes: 22 additions & 1 deletion py/h2o_lightwave/h2o_lightwave/ui.py
@@ -2878,13 +2878,32 @@ def chat_card(
)


def chat_prompt_suggestion(
name: str,
label: str,
) -> ChatPromptSuggestion:
"""Create a chat prompt suggestion displayed as button below the last response in chatbot component.
Args:
name: An identifying name for this component.
label: The text displayed for this suggestion.
Returns:
A `h2o_wave.types.ChatPromptSuggestion` instance.
"""
return ChatPromptSuggestion(
name,
label,
)


def chatbot_card(
box: str,
name: str,
data: PackedRecord,
placeholder: Optional[str] = None,
events: Optional[List[str]] = None,
generating: Optional[bool] = None,
prompt_suggestions: Optional[List[ChatPromptSuggestion]] = None,
commands: Optional[List[Command]] = None,
) -> ChatbotCard:
"""Create a chatbot card to allow getting prompts from users and providing them with LLM generated answers.
@@ -2894,8 +2913,9 @@ def chatbot_card(
name: An identifying name for this component.
data: Chat messages data. Requires cyclic buffer.
placeholder: Chat input box placeholder. Use for prompt examples.
events: The events to capture on this chatbot. One of 'stop' | 'scroll_up' | 'feedback'.
events: The events to capture on this chatbot. One of 'stop' | 'scroll_up' | 'feedback' | 'prompt_suggestion'.
generating: True to show a button to stop the text generation. Defaults to False.
prompt_suggestions: Clickable prompt suggestions shown below the last response.
commands: Contextual menu commands for this component.
Returns:
A `h2o_wave.types.ChatbotCard` instance.
@@ -2907,6 +2927,7 @@ def chatbot_card(
placeholder,
events,
generating,
prompt_suggestions,
commands,
)

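A hedged sketch of how the new factory composes with the existing chatbot card factory. Nothing below is taken from this commit: the box, names, and labels are illustrative, the imports are assumed to mirror h2o_wave's package-level exports of ui and data, and the data() call follows the pattern used in Wave's chatbot examples.

from h2o_lightwave import ui, data

card = ui.chatbot_card(
    box='1 1 5 8',
    name='chatbot',
    data=data(fields='content from_user', t='list'),   # list buffer of (content, from_user) pairs
    events=['prompt_suggestion'],                       # opt in to the new event
    prompt_suggestions=[
        ui.chat_prompt_suggestion('sug_shorten', label='Make it shorter'),
        ui.chat_prompt_suggestion('sug_example', label='Show me an example'),
    ],
)
print(card.dump()['prompt_suggestions'])                # suggestions serialized as dicts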
51 changes: 50 additions & 1 deletion py/h2o_wave/h2o_wave/types.py
@@ -8245,6 +8245,45 @@ def load(__d: Dict) -> 'ChatCard':
)


class ChatPromptSuggestion:
"""Create a chat prompt suggestion displayed as button below the last response in chatbot component.
"""
def __init__(
self,
name: str,
label: str,
):
_guard_scalar('ChatPromptSuggestion.name', name, (str,), True, False, False)
_guard_scalar('ChatPromptSuggestion.label', label, (str,), False, False, False)
self.name = name
"""An identifying name for this component."""
self.label = label
"""The text displayed for this suggestion."""

def dump(self) -> Dict:
"""Returns the contents of this object as a dict."""
_guard_scalar('ChatPromptSuggestion.name', self.name, (str,), True, False, False)
_guard_scalar('ChatPromptSuggestion.label', self.label, (str,), False, False, False)
return _dump(
name=self.name,
label=self.label,
)

@staticmethod
def load(__d: Dict) -> 'ChatPromptSuggestion':
"""Creates an instance of this class using the contents of a dict."""
__d_name: Any = __d.get('name')
_guard_scalar('ChatPromptSuggestion.name', __d_name, (str,), True, False, False)
__d_label: Any = __d.get('label')
_guard_scalar('ChatPromptSuggestion.label', __d_label, (str,), False, False, False)
name: str = __d_name
label: str = __d_label
return ChatPromptSuggestion(
name,
label,
)


class ChatbotCard:
"""Create a chatbot card to allow getting prompts from users and providing them with LLM generated answers.
"""
@@ -8256,13 +8295,15 @@ def __init__(
placeholder: Optional[str] = None,
events: Optional[List[str]] = None,
generating: Optional[bool] = None,
prompt_suggestions: Optional[List[ChatPromptSuggestion]] = None,
commands: Optional[List[Command]] = None,
):
_guard_scalar('ChatbotCard.box', box, (str,), False, False, False)
_guard_scalar('ChatbotCard.name', name, (str,), True, False, False)
_guard_scalar('ChatbotCard.placeholder', placeholder, (str,), False, True, False)
_guard_vector('ChatbotCard.events', events, (str,), False, True, False)
_guard_scalar('ChatbotCard.generating', generating, (bool,), False, True, False)
_guard_vector('ChatbotCard.prompt_suggestions', prompt_suggestions, (ChatPromptSuggestion,), False, True, False)
_guard_vector('ChatbotCard.commands', commands, (Command,), False, True, False)
self.box = box
"""A string indicating how to place this component on the page."""
@@ -8273,9 +8314,11 @@ def __init__(
self.placeholder = placeholder
"""Chat input box placeholder. Use for prompt examples."""
self.events = events
"""The events to capture on this chatbot. One of 'stop' | 'scroll_up' | 'feedback'."""
"""The events to capture on this chatbot. One of 'stop' | 'scroll_up' | 'feedback' | 'prompt_suggestion'."""
self.generating = generating
"""True to show a button to stop the text generation. Defaults to False."""
self.prompt_suggestions = prompt_suggestions
"""Clickable prompt suggestions shown below the last response."""
self.commands = commands
"""Contextual menu commands for this component."""

@@ -8286,6 +8329,7 @@ def dump(self) -> Dict:
_guard_scalar('ChatbotCard.placeholder', self.placeholder, (str,), False, True, False)
_guard_vector('ChatbotCard.events', self.events, (str,), False, True, False)
_guard_scalar('ChatbotCard.generating', self.generating, (bool,), False, True, False)
_guard_vector('ChatbotCard.prompt_suggestions', self.prompt_suggestions, (ChatPromptSuggestion,), False, True, False)
_guard_vector('ChatbotCard.commands', self.commands, (Command,), False, True, False)
return _dump(
view='chatbot',
Expand All @@ -8295,6 +8339,7 @@ def dump(self) -> Dict:
placeholder=self.placeholder,
events=self.events,
generating=self.generating,
prompt_suggestions=None if self.prompt_suggestions is None else [__e.dump() for __e in self.prompt_suggestions],
commands=None if self.commands is None else [__e.dump() for __e in self.commands],
)

@@ -8312,6 +8357,8 @@ def load(__d: Dict) -> 'ChatbotCard':
_guard_vector('ChatbotCard.events', __d_events, (str,), False, True, False)
__d_generating: Any = __d.get('generating')
_guard_scalar('ChatbotCard.generating', __d_generating, (bool,), False, True, False)
__d_prompt_suggestions: Any = __d.get('prompt_suggestions')
_guard_vector('ChatbotCard.prompt_suggestions', __d_prompt_suggestions, (dict,), False, True, False)
__d_commands: Any = __d.get('commands')
_guard_vector('ChatbotCard.commands', __d_commands, (dict,), False, True, False)
box: str = __d_box
@@ -8320,6 +8367,7 @@ def load(__d: Dict) -> 'ChatbotCard':
placeholder: Optional[str] = __d_placeholder
events: Optional[List[str]] = __d_events
generating: Optional[bool] = __d_generating
prompt_suggestions: Optional[List[ChatPromptSuggestion]] = None if __d_prompt_suggestions is None else [ChatPromptSuggestion.load(__e) for __e in __d_prompt_suggestions]
commands: Optional[List[Command]] = None if __d_commands is None else [Command.load(__e) for __e in __d_commands]
return ChatbotCard(
box,
Expand All @@ -8328,6 +8376,7 @@ def load(__d: Dict) -> 'ChatbotCard':
placeholder,
events,
generating,
prompt_suggestions,
commands,
)

23 changes: 22 additions & 1 deletion py/h2o_wave/h2o_wave/ui.py
@@ -2878,13 +2878,32 @@ def chat_card(
)


def chat_prompt_suggestion(
name: str,
label: str,
) -> ChatPromptSuggestion:
"""Create a chat prompt suggestion displayed as button below the last response in chatbot component.
Args:
name: An identifying name for this component.
label: The text displayed for this suggestion.
Returns:
A `h2o_wave.types.ChatPromptSuggestion` instance.
"""
return ChatPromptSuggestion(
name,
label,
)


def chatbot_card(
box: str,
name: str,
data: PackedRecord,
placeholder: Optional[str] = None,
events: Optional[List[str]] = None,
generating: Optional[bool] = None,
prompt_suggestions: Optional[List[ChatPromptSuggestion]] = None,
commands: Optional[List[Command]] = None,
) -> ChatbotCard:
"""Create a chatbot card to allow getting prompts from users and providing them with LLM generated answers.
@@ -2894,8 +2913,9 @@ def chatbot_card(
name: An identifying name for this component.
data: Chat messages data. Requires cyclic buffer.
placeholder: Chat input box placeholder. Use for prompt examples.
events: The events to capture on this chatbot. One of 'stop' | 'scroll_up' | 'feedback'.
events: The events to capture on this chatbot. One of 'stop' | 'scroll_up' | 'feedback' | 'prompt_suggestion'.
generating: True to show a button to stop the text generation. Defaults to False.
prompt_suggestions: Clickable prompt suggestions shown below the last response.
commands: Contextual menu commands for this component.
Returns:
A `h2o_wave.types.ChatbotCard` instance.
@@ -2907,6 +2927,7 @@ def chatbot_card(
placeholder,
events,
generating,
prompt_suggestions,
commands,
)

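An end-to-end sketch of how an app might wire up the new parameter and event. This is a hedged example, not taken from the commit: the route, card name, suggestion names, and especially the shape of the event payload are assumptions; the payload is assumed to carry the clicked suggestion's name.

from h2o_wave import main, app, Q, ui, data

@app('/chat_demo')
async def serve(q: Q):
    if not q.client.initialized:
        q.page['chat'] = ui.chatbot_card(
            box='1 1 5 8',
            name='chatbot',
            data=data(fields='content from_user', t='list'),
            events=['prompt_suggestion'],
            prompt_suggestions=[
                ui.chat_prompt_suggestion('sug_shorten', label='Make it shorter'),
                ui.chat_prompt_suggestion('sug_example', label='Show me an example'),
            ],
        )
        q.client.initialized = True
    elif q.events.chatbot and q.events.chatbot.prompt_suggestion:
        # Assumed payload: the name of the clicked suggestion.
        clicked = q.events.chatbot.prompt_suggestion
        q.page['chat'].data += [f'You picked: {clicked}', False]
    elif q.args.chatbot:
        # Echo the user's prompt; a real app would call an LLM here.
        q.page['chat'].data += [q.args.chatbot, True]
        q.page['chat'].data += ['(placeholder response)', False]
    await q.page.save()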
24 changes: 23 additions & 1 deletion r/R/ui.R
@@ -3339,14 +3339,33 @@ ui_chat_card <- function(
return(.o)
}

#' Create a chat prompt suggestion displayed as a button below the last response in the chatbot component.
#'
#' @param name An identifying name for this component.
#' @param label The text displayed for this suggestion.
#' @return A ChatPromptSuggestion instance.
#' @export
ui_chat_prompt_suggestion <- function(
name,
label) {
.guard_scalar("name", "character", name)
.guard_scalar("label", "character", label)
.o <- list(
name=name,
label=label)
class(.o) <- append(class(.o), c(.wave_obj, "WaveChatPromptSuggestion"))
return(.o)
}

#' Create a chatbot card to allow getting prompts from users and providing them with LLM generated answers.
#'
#' @param box A string indicating how to place this component on the page.
#' @param name An identifying name for this component.
#' @param data Chat messages data. Requires cyclic buffer.
#' @param placeholder Chat input box placeholder. Use for prompt examples.
#' @param events The events to capture on this chatbot. One of 'stop' | 'scroll_up' | 'feedback'.
#' @param events The events to capture on this chatbot. One of 'stop' | 'scroll_up' | 'feedback' | 'prompt_suggestion'.
#' @param generating True to show a button to stop the text generation. Defaults to False.
#' @param prompt_suggestions Clickable prompt suggestions shown below the last response.
#' @param commands Contextual menu commands for this component.
#' @return A ChatbotCard instance.
#' @export
@@ -3357,13 +3376,15 @@ ui_chatbot_card <- function(
placeholder = NULL,
events = NULL,
generating = NULL,
prompt_suggestions = NULL,
commands = NULL) {
.guard_scalar("box", "character", box)
.guard_scalar("name", "character", name)
# TODO Validate data: Rec
.guard_scalar("placeholder", "character", placeholder)
.guard_vector("events", "character", events)
.guard_scalar("generating", "logical", generating)
.guard_vector("prompt_suggestions", "WaveChatPromptSuggestion", prompt_suggestions)
.guard_vector("commands", "WaveCommand", commands)
.o <- list(
box=box,
@@ -3372,6 +3393,7 @@ ui_chatbot_card <- function(
placeholder=placeholder,
events=events,
generating=generating,
prompt_suggestions=prompt_suggestions,
commands=commands,
view='chatbot')
class(.o) <- append(class(.o), c(.wave_obj, "WaveChatbotCard"))