Skip to content

Commit

Permalink
experimental/widget voice commands (#583)
Browse files Browse the repository at this point in the history
* Add VoiceRecorder component and dependencies

* Refactor Tooltip component

* Added the voice recording ability

* Release 2.3.1

* Update dependencies in package.json and pnpm-lock.yaml

* Fix eslint rule and remove unused dependencies

* Remove unused dependencies, and update `pilot.js`

* Release 2.3.2

* Update dependencies in package.json and pnpm-lock.yaml

---------

Co-authored-by: gharbat <arbioun@gmail.com>
  • Loading branch information
faltawy and gharbat authored Jan 26, 2024
1 parent 6067be4 commit 1ac0b1f
Show file tree
Hide file tree
Showing 11 changed files with 375 additions and 141 deletions.
1 change: 1 addition & 0 deletions copilot-widget/.eslintrc.cjs
Original file line number Diff line number Diff line change
Expand Up @@ -10,5 +10,6 @@ module.exports = {
plugins: ['react-refresh'],
rules: {
'react-refresh/only-export-components': 'warn',
"@typescript-eslint/ban-ts-comment":"off"
},
}
2 changes: 1 addition & 1 deletion copilot-widget/index.html
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ <h2>
<script type="module" src="/src/main.tsx"></script>
<div id="opencopilot-root"></div>
<script>
const token = "v7dUtVpoHKfeL94l";
const token = "ofZTrhcaPVzK17oM";
const apiUrl = "http://localhost:8888/backend";
const socketUrl = "http://localhost:8888";
</script>
Expand Down
61 changes: 30 additions & 31 deletions copilot-widget/lib/components/ChatInputFooter.tsx
Original file line number Diff line number Diff line change
@@ -1,56 +1,53 @@
import TextareaAutosize from "react-textarea-autosize";
import {
SendHorizonal,
Redo2,
} from 'lucide-react'
import { SendHorizonal, Redo2 } from "lucide-react";
import { useChat } from "../contexts/Controller";
import { useRef, useState } from "react";
import { useInitialData } from "../contexts/InitialDataContext";
import { Tooltip, TooltipContent, TooltipTrigger } from "./ToolTip";
import { getId, isEmpty } from "@lib/utils/utils";
import now from "@lib/utils/timenow";
import { useDocumentDirection } from "@lib/hooks/useDocumentDirection";
import { VoiceRecorder } from "./VoiceRecorder";
/**
 * Renders the copilot's configured "initial questions" as one-tap suggestion
 * chips. Shown only while the conversation is still empty; tapping a chip
 * sends that question as a user message.
 *
 * NOTE: the rendered diff contained the pre- and post-refactor copies of this
 * JSX back-to-back, which would render the suggestion list twice; only the
 * single (new) copy is kept here.
 */
function MessageSuggestions() {
  const { data } = useInitialData();
  const { messages, sendMessage } = useChat();

  return (
    <>
      {/* `inital_questions` is the backend's key (typo included) — do not rename. */}
      {isEmpty(messages) && !isEmpty(data?.inital_questions) && (
        <div className="opencopilot-flex no-scrollbar opencopilot-items-center opencopilot-flex-wrap opencopilot-justify-start opencopilot-gap-2 opencopilot-flex-1">
          {data?.inital_questions?.map((q, index) => (
            <button
              className="opencopilot-text-sm opencopilot-font-medium opencopilot-whitespace-nowrap opencopilot-px-2.5 opencopilot-py-1.5 opencopilot-rounded-lg opencopilot-bg-accent opencopilot-text-primary"
              key={index}
              onClick={() => {
                // Suggestions are plain user messages: same shape as typed input.
                sendMessage({
                  from: "user",
                  content: q,
                  id: getId(),
                  timestamp: now(),
                });
              }}
            >
              {q}
            </button>
          ))}
        </div>
      )}
    </>
  );
}
// curl --location 'http://localhost:8888/backend/chat/transcribe' \
// --form 'file=@"/Users/gharbat/Downloads/Neets.ai-example-us-female-2.mp3"'

function ChatInputFooter() {
const [input, setInput] = useState("");
const textAreaRef = useRef<HTMLTextAreaElement>(null);
const { sendMessage, reset, messages } = useChat();
const { loading } = useChat();
const canSend = input.trim().length > 0;
const {
direction
} = useDocumentDirection();
const { direction } = useDocumentDirection();

const handleTextareaChange = (
event: React.ChangeEvent<HTMLTextAreaElement>
) => {
Expand Down Expand Up @@ -94,9 +91,10 @@ function ChatInputFooter() {
</div>
<div
dir={direction}
className="opencopilot-flex opencopilot-items-center opencopilot-justify-center opencopilot-gap-2 opencopilot-h-fit opencopilot-px-2 opencopilot-text-lg">
className="opencopilot-flex opencopilot-items-center opencopilot-justify-center opencopilot-gap-2 opencopilot-h-fit opencopilot-px-2 opencopilot-text-lg"
>
<Tooltip>
<TooltipTrigger asChild>
<TooltipTrigger asChild hidden>
<button
onClick={reset}
className="opencopilot-text-xl disabled:opencopilot-opacity-40 disabled:opencopilot-pointer-events-none disabled:opencopilot-cursor-not-allowed opencopilot-text-[#5e5c5e] opencopilot-transition-all"
Expand All @@ -107,6 +105,7 @@ function ChatInputFooter() {
</TooltipTrigger>
<TooltipContent>reset chat</TooltipContent>
</Tooltip>
<VoiceRecorder onSuccess={(text) => setInput(text)} />
<button
onClick={handleInputSubmit}
className="opencopilot-text-xl disabled:opencopilot-opacity-40 disabled:opencopilot-pointer-events-none disabled:opencopilot-cursor-not-allowed opencopilot-text-[#5e5c5e] opencopilot-transition-all"
Expand Down
30 changes: 13 additions & 17 deletions copilot-widget/lib/components/ToolTip.tsx
Original file line number Diff line number Diff line change
@@ -1,17 +1,21 @@
"use client";
import * as TooltipPrimitive from "@radix-ui/react-tooltip";
import { type ReactNode, forwardRef } from "react";
import { forwardRef, ComponentProps } from "react";
import cn from "../utils/cn";

const TooltipTrigger = TooltipPrimitive.Trigger;
const TooltipRoot = TooltipPrimitive.Root;

/**
 * Tooltip root: wraps Radix's Root in a Provider so each Tooltip is
 * self-contained. Accepts all `TooltipPrimitive.Root` props (e.g. `open`,
 * `delayDuration`) and forwards them to the Root.
 *
 * NOTE(review): the `ref` received from `forwardRef` is accepted but never
 * attached to anything — either forward it or drop `forwardRef`; confirm
 * whether any caller relies on passing a ref.
 */
const Tooltip = forwardRef(
  ({ children, ...rest }: ComponentProps<typeof TooltipRoot>, ref) => {
    return (
      <TooltipPrimitive.Provider>
        <TooltipRoot {...rest}>{children}</TooltipRoot>
      </TooltipPrimitive.Provider>
    );
  }
);

function Tooltip({ children }: { children: ReactNode }) {
return (
<TooltipPrimitive.Provider>
<TooltipPrimitive.Root>{children}</TooltipPrimitive.Root>
</TooltipPrimitive.Provider>
);
}
Tooltip.displayName = "ToolTip";
const TooltipContent = forwardRef<
React.ElementRef<typeof TooltipPrimitive.Content>,
Expand All @@ -30,15 +34,7 @@ const TooltipContent = forwardRef<
)}
{...props}
>
<>
{children}
<TooltipPrimitive.Arrow
className={cn(
"opencopilot-fill-current opencopilot-text-accent opencopilot-animate-in opencopilot-slide-in-from-top-1 opencopilot-ease-out data-[state=closed]:opencopilot-animate-out",
arrowClassName
)}
/>
</>
{children}
</TooltipPrimitive.Content>
));
TooltipContent.displayName = TooltipPrimitive.Content.displayName;
Expand Down
70 changes: 70 additions & 0 deletions copilot-widget/lib/components/VoiceRecorder.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
import { Square, MicIcon } from "lucide-react";
import { Tooltip, TooltipContent, TooltipTrigger } from "./ToolTip";
import { useAxiosInstance } from "@lib/contexts/axiosInstance";
import now from "@lib/utils/timenow";
import { useEffect } from "react";
import useAudioRecorder from "@lib/hooks/useAudioRecord";

/**
 * Microphone button for the chat footer: records audio, posts it to the
 * backend `/chat/transcribe` endpoint, and hands the transcribed text to the
 * caller via `onSuccess` (used to populate the input field).
 *
 * While recording, the button shows a stop icon and a tooltip with the
 * elapsed seconds.
 */
export function VoiceRecorder({
  onSuccess,
}: {
  onSuccess?: (text: string) => void;
}) {
  const { axiosInstance } = useAxiosInstance();

  const {
    startRecording,
    stopRecording,
    isRecording,
    recordingTime,
    recordingBlob,
  } = useAudioRecorder({
    noiseSuppression: true,
    echoCancellation: true,
  });

  // When a finished recording becomes available, upload it for transcription.
  useEffect(() => {
    // Guards against calling onSuccess after unmount (or after a newer
    // recording superseded this one).
    let cancelled = false;

    async function transcribe() {
      if (!recordingBlob || isRecording) return;
      try {
        const { data } = await axiosInstance.postForm<{ text: string }>(
          "/chat/transcribe",
          {
            file: new File([recordingBlob], now() + ".mp3", {
              type: "audio/mp3",
            }),
          }
        );
        if (!cancelled && data) {
          onSuccess?.(data.text);
        }
      } catch {
        // Best-effort feature: a failed transcription must not crash the
        // widget. The previous code awaited without a catch, leaving an
        // unhandled promise rejection on network/server errors.
      }
    }

    void transcribe();
    return () => {
      cancelled = true;
    };
    // Only re-run when a new blob arrives; the other values are stable or
    // re-read inside the effect.
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [recordingBlob]);

  // Toggle between recording and stopped; nothing here is awaited.
  function handleClick() {
    if (isRecording) {
      stopRecording();
    } else {
      startRecording();
    }
  }

  return (
    <Tooltip open={isRecording}>
      <TooltipContent sideOffset={5} side="top">
        Recording {recordingTime}s
      </TooltipContent>
      <TooltipTrigger asChild>
        <button
          onClick={handleClick}
          className="opencopilot-flex opencopilot-items-center opencopilot-justify-center opencopilot-shrink-0 opencopilot-bg-emerald-500 opencopilot-rounded-full opencopilot-size-6 [&>svg]:opencopilot-size-4"
        >
          {isRecording ? (
            <Square strokeLinecap="round" className="opencopilot-text-accent" />
          ) : (
            <MicIcon strokeLinecap="round" className="opencopilot-text-white" />
          )}
        </button>
      </TooltipTrigger>
    </Tooltip>
  );
}
154 changes: 154 additions & 0 deletions copilot-widget/lib/hooks/useAudioRecord.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,154 @@
// https://github.com/samhirtarif/react-audio-recorder/blob/master/src/hooks/useAudioRecorder.ts
import { useState, useCallback } from "react";

/**
 * Control surface returned by the `useAudioRecorder` hook.
 */
export interface recorderControls {
  /** Begins capturing audio; sets `isRecording` to true. */
  startRecording: () => void;
  /** Stops the capture; the result becomes available in `recordingBlob`. */
  stopRecording: () => void;
  /** Pauses a running recording, or resumes a paused one; toggles `isPaused`. */
  togglePauseResume: () => void;
  /** The recorded audio, present only after `stopRecording` has completed. */
  recordingBlob?: Blob;
  /** True while a recording is in progress (paused or not). */
  isRecording: boolean;
  /** True while an in-progress recording is paused. */
  isPaused: boolean;
  /** Elapsed recording time in whole seconds; resets to 0 on stop. */
  recordingTime: number;
  /** The underlying MediaRecorder, defined only while recording. */
  mediaRecorder?: MediaRecorder;
}

/**
 * The subset of `MediaTrackConstraints` that is meaningful for audio tracks,
 * passed through to `getUserMedia({ audio: ... })`.
 */
export type MediaAudioTrackConstraints = Pick<
  MediaTrackConstraints,
  | "deviceId"
  | "groupId"
  | "autoGainControl"
  | "channelCount"
  | "echoCancellation"
  | "noiseSuppression"
  | "sampleRate"
  | "sampleSize"
>;

/**
* @returns Controls for the recording. Details of returned controls are given below
*
* @param `audioTrackConstraints`: Takes a {@link https://developer.mozilla.org/en-US/docs/Web/API/MediaTrackSettings#instance_properties_of_audio_tracks subset} of `MediaTrackConstraints` that apply to the audio track
* @param `onNotAllowedOrFound`: A method that gets called when the getUserMedia promise is rejected. It receives the DOMException as its input.
*
* @details `startRecording`: Calling this method would result in the recording to start. Sets `isRecording` to true
* @details `stopRecording`: This results in a recording in progress being stopped and the resulting audio being present in `recordingBlob`. Sets `isRecording` to false
* @details `togglePauseResume`: Calling this method would pause the recording if it is currently running or resume if it is paused. Toggles the value `isPaused`
* @details `recordingBlob`: This is the recording blob that is created after `stopRecording` has been called
* @details `isRecording`: A boolean value that represents whether a recording is currently in progress
* @details `isPaused`: A boolean value that represents whether a recording in progress is paused
* @details `recordingTime`: Number of seconds that the recording has gone on. This is updated every second
* @details `mediaRecorder`: The current mediaRecorder in use
*/
const useAudioRecorder: (
  audioTrackConstraints?: MediaAudioTrackConstraints,
  // Callbacks whose return value is ignored should be typed `void`, not `any`.
  onNotAllowedOrFound?: (exception: DOMException) => void,
  mediaRecorderOptions?: MediaRecorderOptions
) => recorderControls = (
  audioTrackConstraints,
  onNotAllowedOrFound,
  mediaRecorderOptions
) => {
  const [isRecording, setIsRecording] = useState(false);
  const [isPaused, setIsPaused] = useState(false);
  // Whole seconds elapsed since recording started; driven by _startTimer.
  const [recordingTime, setRecordingTime] = useState(0);
  const [mediaRecorder, setMediaRecorder] = useState<MediaRecorder>();
  // This hook runs in the browser, where setInterval returns a number, not
  // NodeJS.Timer. Using ReturnType<typeof setInterval> is correct in both
  // environments and removes the @ts-ignore previously needed at clearInterval.
  const [timerInterval, setTimerInterval] =
    useState<ReturnType<typeof setInterval>>();
  const [recordingBlob, setRecordingBlob] = useState<Blob>();

  // Starts the once-per-second elapsed-time counter.
  const _startTimer: () => void = useCallback(() => {
    const interval = setInterval(() => {
      setRecordingTime((time) => time + 1);
    }, 1000);
    setTimerInterval(interval);
  }, [setRecordingTime, setTimerInterval]);

  // Stops the elapsed-time counter (the counter value itself is reset by
  // stopRecording, not here, so pause/resume keeps the running total).
  const _stopTimer: () => void = useCallback(() => {
    if (timerInterval != null) {
      clearInterval(timerInterval);
    }
    setTimerInterval(undefined);
  }, [timerInterval, setTimerInterval]);

  /**
   * Calling this method would result in the recording to start. Sets `isRecording` to true
   */
  const startRecording: () => void = useCallback(() => {
    // A live timer means a recording is already running (or paused with the
    // timer active) — ignore re-entrant starts.
    if (timerInterval != null) return;

    navigator.mediaDevices
      .getUserMedia({ audio: audioTrackConstraints ?? true })
      .then((stream) => {
        setIsRecording(true);
        const recorder: MediaRecorder = new MediaRecorder(
          stream,
          mediaRecorderOptions
        );
        setMediaRecorder(recorder);
        recorder.start();
        _startTimer();

        // No timeslice was passed to start(), so "dataavailable" fires once,
        // after stop(), with the full recording.
        recorder.addEventListener("dataavailable", (event) => {
          setRecordingBlob(event.data);
          // Release the microphone so the browser's "recording" indicator
          // goes away.
          recorder.stream.getTracks().forEach((t) => t.stop());
          setMediaRecorder(undefined);
        });
      })
      .catch((err: DOMException) => {
        // Permission denied or no input device: surface to the caller.
        onNotAllowedOrFound?.(err);
      });
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [
    timerInterval,
    setIsRecording,
    setMediaRecorder,
    _startTimer,
    setRecordingBlob,
    onNotAllowedOrFound,
    mediaRecorderOptions,
  ]);

  /**
   * Calling this method results in a recording in progress being stopped and the resulting audio being present in `recordingBlob`. Sets `isRecording` to false
   */
  const stopRecording: () => void = useCallback(() => {
    mediaRecorder?.stop();
    _stopTimer();
    setRecordingTime(0);
    setIsRecording(false);
    setIsPaused(false);
  }, [
    mediaRecorder,
    setRecordingTime,
    setIsRecording,
    setIsPaused,
    _stopTimer,
  ]);

  /**
   * Calling this method would pause the recording if it is currently running or resume if it is paused. Toggles the value `isPaused`
   */
  const togglePauseResume: () => void = useCallback(() => {
    if (isPaused) {
      setIsPaused(false);
      mediaRecorder?.resume();
      _startTimer();
    } else {
      setIsPaused(true);
      _stopTimer();
      mediaRecorder?.pause();
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [mediaRecorder, setIsPaused, _startTimer, _stopTimer]);

  return {
    startRecording,
    stopRecording,
    togglePauseResume,
    recordingBlob,
    isRecording,
    isPaused,
    recordingTime,
    mediaRecorder,
  };
};

export default useAudioRecorder;
Loading

0 comments on commit 1ac0b1f

Please sign in to comment.