Merge pull request #133 from supabase-community/feat/telemetry
Telemetry
gregnr authored Nov 13, 2024
2 parents 6da1e9b + 5784722 commit 4900c04
Showing 4 changed files with 123 additions and 11 deletions.
4 changes: 4 additions & 0 deletions apps/postgres-new/.env.example
@@ -14,3 +14,7 @@ KV_REST_API_TOKEN="local_token"
NEXT_PUBLIC_LEGACY_DOMAIN=https://postgres.new
NEXT_PUBLIC_CURRENT_DOMAIN=https://database.build
REDIRECT_LEGACY_DOMAIN=false

# Optional
#LOGFLARE_SOURCE="<logflare-source>"
#LOGFLARE_API_KEY="<logflare-api-key>"
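
Telemetry is opt-in: when these two variables are unset, the logEvent helper added below in apps/postgres-new/utils/telemetry.ts skips the Logflare request entirely and only echoes events to the console when DEBUG is set. A hypothetical .env.local with placeholder values (not real credentials) might look like:

  # .env.local — placeholders for illustration only
  LOGFLARE_SOURCE="<logflare-source>"
  LOGFLARE_API_KEY="<logflare-api-key>"
  # DEBUG=1   # with the Logflare variables left unset, events are printed to the console instead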
57 changes: 46 additions & 11 deletions apps/postgres-new/app/api/chat/route.ts
@@ -5,6 +5,7 @@ import { ToolInvocation, convertToCoreMessages, streamText } from 'ai'
import { codeBlock } from 'common-tags'
import { convertToCoreTools, maxMessageContext, maxRowLimit, tools } from '~/lib/tools'
import { createClient } from '~/utils/supabase/server'
import { logEvent } from '~/utils/telemetry'

// Allow streaming responses up to 30 seconds
export const maxDuration = 30
@@ -46,20 +47,31 @@ export async function POST(req: Request) {
return new Response('Unauthorized', { status: 401 })
}

const { user } = data
const {
user: { id: userId },
} = data

const { remaining: inputRemaining } = await inputTokenRateLimit.getRemaining(user.id)
const { remaining: outputRemaining } = await outputTokenRateLimit.getRemaining(user.id)
const { remaining: inputTokensRemaining } = await inputTokenRateLimit.getRemaining(userId)
const { remaining: outputTokensRemaining } = await outputTokenRateLimit.getRemaining(userId)

if (inputRemaining <= 0 || outputRemaining <= 0) {
const { messages, databaseId }: { messages: Message[]; databaseId: string } = await req.json()

if (inputTokensRemaining <= 0 || outputTokensRemaining <= 0) {
logEvent('chat-rate-limit', {
databaseId,
userId,
inputTokensRemaining,
outputTokensRemaining,
})
return new Response('Rate limited', { status: 429 })
}

const { messages }: { messages: Message[] } = await req.json()

// Trim the message context sent to the LLM to mitigate token abuse
const trimmedMessageContext = messages.slice(-maxMessageContext)

const coreMessages = convertToCoreMessages(trimmedMessageContext)
const coreTools = convertToCoreTools(tools)

const result = await streamText({
system: codeBlock`
You are a helpful database assistant. Under the hood you have access to an in-browser Postgres database called PGlite (https://github.com/electric-sql/pglite).
@@ -104,15 +116,38 @@ export async function POST(req: Request) {
Feel free to suggest corrections for suspected typos.
`,
model: openai(chatModel),
messages: convertToCoreMessages(trimmedMessageContext),
tools: convertToCoreTools(tools),
async onFinish({ usage }) {
await inputTokenRateLimit.limit(user.id, {
messages: coreMessages,
tools: coreTools,
async onFinish({ usage, finishReason, toolCalls }) {
await inputTokenRateLimit.limit(userId, {
rate: usage.promptTokens,
})
await outputTokenRateLimit.limit(user.id, {
await outputTokenRateLimit.limit(userId, {
rate: usage.completionTokens,
})

// The last message should always be an input message (user message or tool result)
const inputMessage = coreMessages.at(-1)
if (!inputMessage || (inputMessage.role !== 'user' && inputMessage.role !== 'tool')) {
return
}

// `tool` role indicates a tool result, `user` role indicates a user message
const inputType = inputMessage.role === 'tool' ? 'tool-result' : 'user-message'

// +1 for the assistant message just received
const messageCount = coreMessages.length + 1

logEvent('chat-inference', {
databaseId,
userId,
messageCount,
inputType,
inputTokens: usage.promptTokens,
outputTokens: usage.completionTokens,
finishReason,
toolCalls: toolCalls?.map((toolCall) => toolCall.toolName),
})
},
})

3 changes: 3 additions & 0 deletions apps/postgres-new/components/workspace.tsx
@@ -71,6 +71,9 @@ export default function Workspace({
maxToolRoundtrips: 10,
keepLastMessageOnError: true,
onToolCall: onToolCall as any, // our `OnToolCall` type is more specific than `ai` SDK's
body: {
databaseId,
},
initialMessages:
existingMessages && existingMessages.length > 0 ? existingMessages : initialMessages,
async onFinish(message) {
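
The new body option is the client half of the databaseId plumbing: the ai SDK's useChat hook merges body into every POST it sends to the chat route, which is how route.ts above can read databaseId from req.json() alongside messages. A condensed sketch of the surrounding hook call, showing only the options visible in this diff plus an assumed api path matching the route's location:

  import { useChat } from 'ai/react'

  const { messages, handleSubmit } = useChat({
    api: '/api/chat', // assumed from apps/postgres-new/app/api/chat/route.ts
    maxToolRoundtrips: 10,
    keepLastMessageOnError: true,
    // Extra fields sent with every request body, next to `messages`
    body: {
      databaseId,
    },
  })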
70 changes: 70 additions & 0 deletions apps/postgres-new/utils/telemetry.ts
@@ -0,0 +1,70 @@
/**
 * Event for an AI chat rate limit. Includes the
 * remaining input and output tokens in the rate
 * limit window (one of these will be <= 0).
 */
export type ChatRateLimitEvent = {
  type: 'chat-rate-limit'
  metadata: {
    databaseId: string
    userId: string
    inputTokensRemaining: number
    outputTokensRemaining: number
  }
}

/**
 * Event for an AI chat inference request-response.
 * Includes both input and output metadata.
 */
export type ChatInferenceEvent = {
  type: 'chat-inference'
  metadata: {
    databaseId: string
    userId: string
    messageCount: number
    inputType: 'user-message' | 'tool-result'
    inputTokens: number
    outputTokens: number
    finishReason:
      | 'stop'
      | 'length'
      | 'content-filter'
      | 'tool-calls'
      | 'error'
      | 'other'
      | 'unknown'
    toolCalls?: string[]
  }
}

export type TelemetryEvent = ChatRateLimitEvent | ChatInferenceEvent

export async function logEvent<E extends TelemetryEvent>(type: E['type'], metadata: E['metadata']) {
  if (!process.env.LOGFLARE_SOURCE || !process.env.LOGFLARE_API_KEY) {
    if (process.env.DEBUG) {
      console.log(type, metadata)
    }
    return
  }

  const response = await fetch(
    `https://api.logflare.app/logs?source=${process.env.LOGFLARE_SOURCE}`,
    {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'X-API-KEY': process.env.LOGFLARE_API_KEY,
      },
      body: JSON.stringify({
        event_message: type,
        metadata,
      }),
    }
  )

  if (!response.ok) {
    const { error } = await response.json()
    console.error('failed to send logflare event', error)
  }
}
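
A hypothetical call from application code, mirroring the rate-limit event logged in route.ts above (all values are placeholders). When LOGFLARE_SOURCE and LOGFLARE_API_KEY are unset this is a no-op, unless DEBUG is set, in which case the event is only printed to the console:

  import { logEvent } from '~/utils/telemetry'

  await logEvent('chat-rate-limit', {
    databaseId: 'db_0000',       // placeholder id
    userId: 'user_0000',         // placeholder id
    inputTokensRemaining: 0,     // the exhausted budget that triggered the event
    outputTokensRemaining: 1024, // placeholder remaining output tokens
  })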
