diff --git a/app/api/chat/route.ts b/app/api/chat/route.ts
index f550fdb..24c1b16 100644
--- a/app/api/chat/route.ts
+++ b/app/api/chat/route.ts
@@ -1,17 +1,43 @@
-import { NextRequest, NextResponse } from 'next/server';
-
-// Remove the Edge runtime specification
-// export const runtime = 'edge';
+import { NextRequest } from 'next/server';
export async function POST(req: NextRequest) {
- try {
- const { message } = await req.json();
+ const { message, model } = await req.json();
- // Process the chat message here
- // For now, let's just echo the message back
+ const response = await fetch('http://localhost:11434/api/chat', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ model,
+ messages: [{ role: 'user', content: message }],
+ stream: true,
+ }),
+ });
- return NextResponse.json({ reply: `You said: ${message}` });
- } catch (error) {
- return NextResponse.json({ error: 'Internal server error' }, { status: 500 });
+ if (!response.ok) {
+ throw new Error('Failed to fetch from Ollama');
}
+
+ const stream = new ReadableStream({
+ async start(controller) {
+ const reader = response.body?.getReader();
+ while (true) {
+ const { done, value } = await reader!.read();
+ if (done) break;
+ const chunk = new TextDecoder().decode(value);
+ try {
+ const parsed = JSON.parse(chunk);
+ if (parsed.message?.content) {
+ controller.enqueue(parsed.message.content);
+ }
+ } catch (e) {
+ console.error('Error parsing JSON:', e);
+ }
+ }
+ controller.close();
+ },
+ });
+
+ return new Response(stream, {
+ headers: { 'Content-Type': 'text/plain' },
+ });
}
diff --git a/app/api/tags/route.ts b/app/api/tags/route.ts
new file mode 100644
index 0000000..50fbfb4
--- /dev/null
+++ b/app/api/tags/route.ts
@@ -0,0 +1,16 @@
+import { NextRequest, NextResponse } from 'next/server';
+
+export async function GET(req: NextRequest) {
+ try {
+ const response = await fetch('http://localhost:11434/api/tags');
+ if (!response.ok) {
+ throw new Error('Failed to fetch models from Ollama');
+ }
+ const data = await response.json();
+ const models = data.models.map((model: { name: string }) => model.name);
+ return NextResponse.json({ models });
+ } catch (error) {
+ console.error('Error fetching models:', error);
+ return NextResponse.json({ error: 'Failed to fetch models' }, { status: 500 });
+ }
+}
\ No newline at end of file
diff --git a/app/layout.tsx b/app/layout.tsx
index 7789487..fb0aad4 100644
--- a/app/layout.tsx
+++ b/app/layout.tsx
@@ -1,7 +1,9 @@
'use client';
-import React, { ReactNode } from 'react';
+import React from 'react';
import { ThemeProvider } from 'next-themes';
+import '@/styles/globals.css';
+import Layout from '@/components/Layout';
export default function RootLayout({
children,
@@ -12,7 +14,7 @@ export default function RootLayout({
- {children}
+ {children}
diff --git a/app/settings/page.tsx b/app/settings/page.tsx
index cca368f..696dd26 100644
--- a/app/settings/page.tsx
+++ b/app/settings/page.tsx
@@ -7,38 +7,24 @@ import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@
import { Switch } from "@/components/ui/switch"
import { Button } from "@/components/ui/button"
import { toast } from "react-hot-toast"
+import { Settings, defaultSettings } from '@/lib/settings'
export default function SettingsPage() {
- const [settings, setSettings] = useState({
- theme: 'light',
- language: 'en',
- notifications_enabled: true,
- model: ''
- })
+ const [settings, setSettings] = useState(defaultSettings)
const [availableModels, setAvailableModels] = useState([])
- //DO NOT REMOVE curl http://localhost:11434/api/tags
- //https://github.com/ollama/ollama/blob/main/docs/api.md#list-local-models
useEffect(() => {
- // Fetch available models from Ollama API
const fetchModels = async () => {
try {
- const response = await fetch('http://localhost:11434/api/tags') // Replace with actual API endpoint
+ const response = await fetch('/api/tags')
const data = await response.json()
- setAvailableModels(data.models) // Adjust based on actual API response structure
+ setAvailableModels(data.models)
} catch (error) {
console.error('Error fetching models:', error)
}
}
fetchModels()
-
- // Set default model based on environment
- if (window.location.hostname === 'localhost') {
- setSettings((prev) => ({ ...prev, model: 'llama3.2' }))
- } else {
- setSettings((prev) => ({ ...prev, model: 'gpt-4o-mini' }))
- }
}, [])
const handleSaveGeneral = () => {
@@ -61,100 +47,35 @@ export default function SettingsPage() {
Model Config
+ {/* Existing general settings content */}
+
+
- General Settings
- Manage your account settings and preferences.
+ Model Configuration
+ Select your preferred AI model.
-
-
-
- setSettings({ ...settings, notifications_enabled: checked })}
- />
-
-
-
+
-
-
-
- Model Configuration
- Select your preferred AI model.
-
-
-
-
- {settings.model}
-
-
- {availableModels.map((model) => (
- {model}
- ))}
-
-
-
-
-
-
-
-
- Security Settings
- Manage your password and account security.
-
-
-
-
-
-
-
-
-
- Advanced Settings
- Manage advanced settings for your account.
-
-
-
-
Delete Account
-
- Once you delete your account, there is no going back. Please be certain.
-
-
-
-
-
-
+ {/* Existing security and advanced tabs content */}
)
diff --git a/components/ChatModal.tsx b/components/ChatModal.tsx
index bf5c891..015e533 100644
--- a/components/ChatModal.tsx
+++ b/components/ChatModal.tsx
@@ -1,6 +1,6 @@
'use client';
-import React, { useState } from 'react';
+import React, { useState, useEffect } from 'react';
import { X } from 'lucide-react';
import supabase from '../utils/supabase'; // Adjust the import based on your file structure
@@ -8,70 +8,94 @@ interface ChatModalProps {
onClose: () => void;
}
+interface Message {
+ role: 'user' | 'assistant';
+ content: string;
+}
+
const ChatModal: React.FC = ({ onClose }) => {
- const [messages, setMessages] = useState([]);
+ const [messages, setMessages] = useState([]);
const [inputMessage, setInputMessage] = useState('');
- const [loading, setLoading] = useState(false); // No loading state needed for local messages
+ const [loading, setLoading] = useState(false);
+ const [models, setModels] = useState([]);
+ const [selectedModel, setSelectedModel] = useState('');
- const handleSendMessage = async () => {
- if (inputMessage.trim()) {
- // Send message to Supabase
- const { data, error } = await supabase
- .from('messages') // Replace with your actual table name
- .insert([{ content: inputMessage }]); // Adjust based on your table structure
-
- if (error) {
- console.error('Error sending message to Supabase:', error.message);
- return; // Exit if there's an error
- } else {
- console.log('Message sent to Supabase:', data);
- setMessages((prevMessages) => [...prevMessages, inputMessage]);
- setInputMessage('');
-
- // Fetch response from Ollama
- const response = await fetchOllamaResponse(inputMessage);
- if (response) {
- setMessages((prevMessages) => [...prevMessages, response]);
- }
+ useEffect(() => {
+ fetchModels();
+ }, []);
+
+ const fetchModels = async () => {
+ try {
+ const response = await fetch('/api/tags');
+ if (!response.ok) {
+ throw new Error('Failed to fetch models');
}
- } else {
- console.warn('Input message is empty.'); // Log if input is empty
+ const data = await response.json();
+ setModels(data.models);
+ if (data.models.length > 0) {
+ setSelectedModel(data.models[0]);
+ }
+ } catch (error) {
+ console.error('Error fetching models:', error);
}
};
- // Function to fetch response from Ollama
- const fetchOllamaResponse = async (message: string) => {
- try {
- const res = await fetch('/api/ollama', { // Correct API route
- method: 'POST',
- headers: {
- 'Content-Type': 'application/json',
- },
- body: JSON.stringify({ message }),
- });
-
- if (!res.ok) {
- throw new Error(`Network response was not ok: ${res.statusText}`);
- }
+ const handleSendMessage = async () => {
+ if (inputMessage.trim() && !loading) {
+ setLoading(true);
+ const userMessage: Message = { role: 'user', content: inputMessage };
+ setMessages(prevMessages => [...prevMessages, userMessage]);
+ setInputMessage('');
- const data = await res.json();
- console.log('Ollama response:', data); // Log the response from Ollama
- return data.reply; // Adjust based on the structure of the response
- } catch (error) {
- console.error('Error fetching Ollama response:', error);
- return null;
+ try {
+ const response = await fetch('/api/chat', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({ message: inputMessage, model: selectedModel }),
+ });
+
+ if (!response.ok) {
+ throw new Error('Failed to fetch response');
+ }
+
+ const reader = response.body?.getReader();
+ let assistantMessage = '';
+
+ while (true) {
+ const { done, value } = await reader!.read();
+ if (done) break;
+ const chunk = new TextDecoder().decode(value);
+ assistantMessage += chunk;
+ setMessages(prevMessages => [
+ ...prevMessages.slice(0, -1),
+ { role: 'assistant', content: assistantMessage }
+ ]);
+ }
+
+ // Save messages to Supabase
+ await supabase.from('messages').insert([
+ { content: inputMessage, role: 'user' },
+ { content: assistantMessage, role: 'assistant' }
+ ]);
+
+ } catch (error) {
+ console.error('Error:', error);
+ } finally {
+ setLoading(false);
+ }
}
};
const handleKeyDown = (e: React.KeyboardEvent) => {
- if (e.key === 'Enter') {
+ if (e.key === 'Enter' && !e.shiftKey) {
+ e.preventDefault();
handleSendMessage();
}
};
return (
-
+
Chat
{messages.map((msg, index) => (
-
-
{msg}
+
+
+ {msg.content}
+
))}
+ {loading && (
+
+ )}
-
diff --git a/components/Layout.tsx b/components/Layout.tsx
index 77c43dd..8af4b75 100644
--- a/components/Layout.tsx
+++ b/components/Layout.tsx
@@ -1,20 +1,27 @@
+'use client';
+
import React from 'react';
-import Nav from './Nav';
-import RobotTransformerWallpaper from '@/components/RobotTransformerWallpaper'; // Ensure this path is correct
+import TopBar from './TopBar';
+import { usePathname } from 'next/navigation';
+
+interface LayoutProps {
+ children: React.ReactNode;
+}
+
+const Layout: React.FC
= ({ children }) => {
+ const pathname = usePathname();
+
+ // Add routes where you don't want the TopBar to appear
+ const routesWithoutTopBar = ['/login', '/register', '/landing'];
+
+ const showTopBar = !routesWithoutTopBar.includes(pathname);
-const Layout: React.FC<{ children: React.ReactNode }> = ({ children }) => {
- return (
-
-
{/* Add the wallpaper component */}
-
-
- {children}
-
-
-
- );
+ return (
+ <>
+ {showTopBar && }
+ {children}
+ >
+ );
};
export default Layout;
\ No newline at end of file
diff --git a/lib/settings.ts b/lib/settings.ts
index 291f177..20d7a25 100644
--- a/lib/settings.ts
+++ b/lib/settings.ts
@@ -3,4 +3,13 @@ export interface Settings {
theme: 'light' | 'dark';
notifications: boolean;
language: string;
+ model: string; // Added model field
}
+
+export const defaultSettings: Settings = {
+ userId: '',
+ theme: 'light',
+ notifications: true,
+ language: 'en',
+ model: 'llama2', // Default Ollama model
+};
diff --git a/next.config.js b/next.config.js
index 714da3e..652bfaf 100644
--- a/next.config.js
+++ b/next.config.js
@@ -1,9 +1,7 @@
/** @type {import('next').NextConfig} */
const nextConfig = {
  // Strict Mode double-invokes render-phase logic in development to
  // surface unsafe patterns early.
  reactStrictMode: true,
};

module.exports = nextConfig;
\ No newline at end of file
diff --git a/tailwind.config.js b/tailwind.config.js
index 68ca458..5aee9f4 100644
--- a/tailwind.config.js
+++ b/tailwind.config.js
@@ -2,9 +2,9 @@
module.exports = {
darkMode: 'class', // Enable dark mode
content: [
- './app/**/*.{js,ts,jsx,tsx}', // Adjust paths as necessary
- './components/**/*.{js,ts,jsx,tsx}',
- './pages/**/*.{js,ts,jsx,tsx}',
+ './pages/**/*.{js,ts,jsx,tsx,mdx}',
+ './components/**/*.{js,ts,jsx,tsx,mdx}',
+ './app/**/*.{js,ts,jsx,tsx,mdx}',
],
theme: {
extend: {},