Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 8 additions & 5 deletions apps/web/src/components/app-sidebar.tsx
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { useCallback, useEffect, useRef, useState } from "react";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { useNavigate, useParams } from "@tanstack/react-router";
import { useQuery } from "convex/react";
import { api } from "@server/convex/_generated/api";
Expand Down Expand Up @@ -56,13 +56,12 @@ function ChatItemSkeleton({ delay = 0 }: { delay?: number }) {
);
}

function groupChatsByTime(chats: Array<ChatItem>) {
function groupChatsByTime(chats: Array<ChatItem>, now: number) {
const today: Array<ChatItem> = [];
const last7Days: Array<ChatItem> = [];
const last30Days: Array<ChatItem> = [];
const older: Array<ChatItem> = [];

const now = Date.now();
const oneDayMs = 1000 * 60 * 60 * 24;

for (const chat of chats) {
Expand Down Expand Up @@ -314,8 +313,12 @@ export function AppSidebar() {
? convexUser === undefined || chatsResult === undefined
: false;

const grouped = groupChatsByTime(chats);
const deleteChat = deleteChatId ? chats.find((chat) => chat._id === deleteChatId) : null;
const dayKey = new Date().toDateString();
const grouped = useMemo(() => groupChatsByTime(chats, Date.now()), [chats, dayKey]);
const deleteChat = useMemo(
() => (deleteChatId ? chats.find((chat) => chat._id === deleteChatId) : null),
[deleteChatId, chats],
);

const handleNewChat = () => {
if (isMobile) {
Expand Down
276 changes: 149 additions & 127 deletions apps/web/src/components/chat-interface.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
* - Convex persistence for chat history
*/

import { useCallback, useEffect, useRef, useState } from "react";
import { memo, useCallback, useEffect, useMemo, useRef, useState } from "react";
import { createPortal } from "react-dom";
import { useNavigate } from "@tanstack/react-router";
import { ArrowUpIcon, BrainIcon, ChevronDownIcon, GlobeIcon,
Expand Down Expand Up @@ -1261,6 +1261,145 @@ export function ChatInterface({ chatId }: ChatInterfaceProps) {
);
}

/** Props for {@link ChatMessageList}. */
interface ChatMessageListProps {
  /**
   * Chat transcript in render order. `parts` carries the AI SDK message
   * parts (text / file / reasoning / tool); `role` is kept wide as `string`
   * and narrowed at render time.
   */
  messages: Array<{
    id: string;
    role: string;
    parts?: Array<UIMessagePart<UIDataTypes, UITools>>;
  }>;
  /** True while a response is streaming; marks the last message as live. */
  isLoading: boolean;
  /** True for a brand-new chat; shows the start screen when empty. */
  isNewChat: boolean;
  /** Invoked with the chosen suggestion when a start-screen prompt is clicked. */
  onPromptSelect: (prompt: string) => void;
}

/**
 * Memoized message list for the chat transcript.
 *
 * Pre-processes every message once per (messages, isLoading) change via
 * useMemo — filtering parts, building chain-of-thought steps, and deciding
 * which empty assistant messages to skip — so the JSX below stays cheap.
 * Wrapped in `memo` so unrelated re-renders of the parent (e.g. input
 * keystrokes) do not re-render the whole transcript.
 */
const ChatMessageList = memo(function ChatMessageList({
  messages,
  isLoading,
  isNewChat,
  onPromptSelect,
}: ChatMessageListProps) {
  const processedMessages = useMemo(() => {
    if (messages.length === 0) return [];
    // Only the final message can be streaming, and only while loading.
    const streamingId = isLoading ? messages[messages.length - 1]?.id : null;

    return messages.map((message) => {
      // Widen to include the app's custom error fields carried on messages.
      const msg = message as typeof message & {
        error?: {
          code: string;
          message: string;
          details?: string;
          provider?: string;
          retryable?: boolean;
        };
        messageType?: "text" | "error" | "system";
      };

      const allParts = message.parts || [];
      const textParts = allParts.filter((p): p is { type: "text"; text: string } => p.type === "text");
      const fileParts = allParts.filter((p): p is { type: "file"; filename?: string; url?: string; mediaType?: string } => p.type === "file");

      // Reasoning/tool parts are folded into ordered "thinking" steps.
      // NOTE(review): exact step semantics live in buildChainOfThoughtSteps.
      const {
        steps: thinkingSteps,
        isAnyStreaming: isAnyStepStreaming,
        hasTextContent,
      } = buildChainOfThoughtSteps(allParts);

      const textContent = textParts.map((p) => p.text).join("").trim();
      const hasReasoning = allParts.some((p) => p.type === "reasoning");
      const hasFiles = fileParts.length > 0;
      const isCurrentlyStreaming = streamingId === message.id;

      // Hide assistant messages with no meaningful content (e.g. empty
      // messages the SDK creates on error) — but never while they are
      // still streaming, and never error messages.
      const shouldSkip =
        msg.messageType !== "error" &&
        message.role === "assistant" &&
        !textContent &&
        !hasReasoning &&
        !hasFiles &&
        !isCurrentlyStreaming;

      return {
        message,
        msg,
        textParts,
        fileParts,
        thinkingSteps,
        isAnyStepStreaming,
        hasTextContent,
        isCurrentlyStreaming,
        shouldSkip,
      };
    });
  }, [messages, isLoading]);

  return (
    <Conversation className="flex-1 px-2 md:px-4" showScrollButton>
      <AutoScroll messageCount={messages.length} />
      {/* Mobile: extra top padding to clear hamburger menu (fixed left-3 top-3 size-11 = 12px + 44px + 8px breathing room = 64px) */}
      <ConversationContent className="mx-auto max-w-3xl pt-16 md:pt-6 pb-16 px-2 md:px-4">
        {messages.length === 0 && isNewChat ? (
          <StartScreen onPromptSelect={onPromptSelect} />
        ) : messages.length === 0 ? null : (
          <>
            {processedMessages.map((item) => {
              if (item.shouldSkip) return null;

              {/* Error messages get dedicated inline styling. */}
              if (item.msg.messageType === "error" && item.msg.error) {
                return (
                  <div key={item.message.id}>
                    <Message from={item.message.role as "user" | "assistant"}>
                      <MessageContent>
                        <InlineErrorMessage error={item.msg.error} />
                      </MessageContent>
                    </Message>
                  </div>
                );
              }

              // Regular message: thinking steps, then text, then files.
              return (
                <div key={item.message.id}>
                  <Message from={item.message.role as "user" | "assistant"}>
                    <MessageContent>
                      {item.thinkingSteps.length > 0 && (
                        <ChainOfThought
                          steps={item.thinkingSteps}
                          isStreaming={item.isAnyStepStreaming}
                          hasTextContent={item.hasTextContent || item.textParts.length > 0}
                        />
                      )}

                      {/* Only the last text part of the streaming message animates. */}
                      {item.textParts.map((part, partIndex) => (
                        <MessageResponse
                          key={`text-${partIndex}`}
                          isStreaming={item.isCurrentlyStreaming && partIndex === item.textParts.length - 1}
                        >
                          {part.text || ""}
                        </MessageResponse>
                      ))}

                      {item.fileParts.map((part, partIndex) => (
                        <MessageFile
                          key={`file-${partIndex}`}
                          filename={part.filename}
                          url={part.url}
                          mediaType={part.mediaType}
                        />
                      ))}
                    </MessageContent>
                  </Message>
                </div>
              );
            })}
            {/* Spinner between the user's message and the first assistant token. */}
            {isLoading && messages[messages.length - 1]?.role === "user" && (
              <LoadingIndicator />
            )}
            {/* Note: Errors are now shown inline as messages via InlineErrorMessage */}
          </>
        )}
      </ConversationContent>
    </Conversation>
  );
});

// Inner content component that has access to PromptInputProvider context
interface ChatInterfaceContentProps {
chatId: string | null;
Expand Down Expand Up @@ -1324,15 +1463,16 @@ function ChatInterfaceContent({
}, [textareaRef]);

// Handler for StartScreen prompt selection - populates input and focuses
const setInput = controller.textInput.setInput;
const onPromptSelect = useCallback(
(prompt: string) => {
controller.textInput.setInput(prompt);
setInput(prompt);
// Focus the textarea after setting the value
setTimeout(() => {
textareaRef.current?.focus();
}, 0);
},
[controller.textInput, textareaRef],
[setInput, textareaRef],
);

// Wrap handleSubmit to clear the draft after successful submission
Expand All @@ -1348,130 +1488,12 @@ function ChatInterfaceContent({

return (
<div className="flex h-full flex-col">
{/* Messages area - using AI Elements Conversation */}
<Conversation className="flex-1 px-2 md:px-4" showScrollButton>
<AutoScroll messageCount={messages.length} />
{/* Mobile: extra top padding to clear hamburger menu (fixed left-3 top-3 size-11 = 12px + 44px + 8px breathing room = 64px) */}
<ConversationContent className="mx-auto max-w-3xl pt-16 md:pt-6 pb-16 px-2 md:px-4">
{messages.length === 0 && isNewChat ? (
<StartScreen onPromptSelect={onPromptSelect} />
) : messages.length === 0 ? null : (
<>
{messages.map((message) => {
// Cast to include our custom error fields
const msg = message as typeof message & {
error?: {
code: string;
message: string;
details?: string;
provider?: string;
retryable?: boolean;
};
messageType?: "text" | "error" | "system";
};

// Render error messages with special styling (like T3.chat)
if (msg.messageType === "error" && msg.error) {
return (
<div key={message.id}>
<Message from={message.role as "user" | "assistant"}>
<MessageContent>
<InlineErrorMessage error={msg.error} />
</MessageContent>
</Message>
</div>
);
}

// Skip rendering assistant messages with no meaningful content
// This handles cases where AI SDK creates empty messages on error
const allParts = message.parts || [];
const textContent = allParts
.filter((p): p is { type: "text"; text: string } => p.type === "text")
.map((p) => p.text)
.join("")
.trim();
const hasReasoning = allParts.some((p) => p.type === "reasoning");
const hasFiles = allParts.some((p) => p.type === "file");

// Skip empty assistant messages (no text, reasoning, or files)
// But don't skip during streaming (messages are being built)
const isCurrentlyStreaming =
isLoading && messages[messages.length - 1]?.id === message.id;
if (
message.role === "assistant" &&
!textContent &&
!hasReasoning &&
!hasFiles &&
!isCurrentlyStreaming
) {
return null;
}

// Regular message rendering
// Use buildChainOfThoughtSteps to process parts IN ORDER
// This preserves the exact stream order and merges consecutive reasoning

const textParts = allParts.filter((p) => p.type === "text") as unknown as Array<{
type: "text";
text: string;
}>;
const fileParts = allParts.filter((p) => p.type === "file") as unknown as Array<{
type: "file";
filename?: string;
url?: string;
mediaType?: string;
}>;

// Build thinking steps from reasoning and tool parts
const {
steps: thinkingSteps,
isAnyStreaming: isAnyStepStreaming,
hasTextContent,
} = buildChainOfThoughtSteps(allParts);

return (
<div key={message.id}>
<Message from={message.role as "user" | "assistant"}>
<MessageContent>
{thinkingSteps.length > 0 && (
<ChainOfThought
steps={thinkingSteps}
isStreaming={isAnyStepStreaming}
hasTextContent={hasTextContent || textParts.length > 0}
/>
)}

{textParts.map((part, partIndex) => (
<MessageResponse
key={`text-${partIndex}`}
isStreaming={isCurrentlyStreaming && partIndex === textParts.length - 1}
>
{part.text || ""}
</MessageResponse>
))}

{fileParts.map((part, partIndex) => (
<MessageFile
key={`file-${partIndex}`}
filename={part.filename}
url={part.url}
mediaType={part.mediaType}
/>
))}
</MessageContent>
</Message>
</div>
);
})}
{isLoading && messages[messages.length - 1]?.role === "user" && (
<LoadingIndicator />
)}
{/* Note: Errors are now shown inline as messages via InlineErrorMessage */}
</>
)}
</ConversationContent>
</Conversation>
<ChatMessageList
messages={messages}
isLoading={isLoading}
isNewChat={isNewChat}
onPromptSelect={onPromptSelect}
/>

<div className="px-2 md:px-4 pt-2 md:pt-4 pb-[max(0.5rem,env(safe-area-inset-bottom))] md:pb-4">
<div className="mx-auto max-w-3xl">
Expand Down
25 changes: 24 additions & 1 deletion apps/web/src/providers/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,30 @@ import { ThemeProvider } from "./theme-provider";
import { PostHogProvider } from "./posthog";

if (typeof window !== "undefined") {
  // Defer model prefetching until the page has loaded and the browser is
  // idle, and skip it on constrained connections — keeping it off the
  // critical rendering path.
  //
  // Fix: the eager top-level `prefetchModels()` call must not remain
  // alongside this scheduler; running both fires the prefetch twice and
  // the eager call bypasses the save-data / slow-link guards below.
  const schedulePrefetch = () => {
    // Network Information API is not in lib.dom types everywhere, hence the cast.
    const connection = (navigator as Navigator & {
      connection?: { effectiveType?: string; saveData?: boolean };
    }).connection;
    // Respect the user's data-saver preference.
    if (connection?.saveData) return;
    // Skip prefetch on very slow links ("2g" and "slow-2g" both match).
    if (connection?.effectiveType && /2g/.test(connection.effectiveType)) return;

    const run = () => prefetchModels();
    // requestIdleCallback is missing in some browsers (notably Safari),
    // so feature-detect and fall back to a timer.
    const requestIdle = (window as Window & {
      requestIdleCallback?: (cb: () => void, options?: { timeout: number }) => number;
    }).requestIdleCallback;

    if (requestIdle) {
      // The timeout guarantees the prefetch eventually runs even if the
      // browser never reports an idle period.
      requestIdle(run, { timeout: 2000 });
    } else {
      setTimeout(run, 1500);
    }
  };

  if (document.readyState === "complete") {
    schedulePrefetch();
  } else {
    // { once: true } auto-removes the listener after the first load event.
    window.addEventListener("load", schedulePrefetch, { once: true });
  }
}

const queryClient = new QueryClient({
Expand Down
Loading