28 changes: 28 additions & 0 deletions src/api/providers/__tests__/minimax.spec.ts
@@ -279,6 +279,34 @@ describe("MiniMaxHandler", () => {
 				undefined,
 			)
 		})
+
+		it("should handle streaming chunks with null choices array", async () => {
+			const testContent = "Content after null choices"
+
+			mockCreate.mockImplementationOnce(() => {
+				return {
+					[Symbol.asyncIterator]: () => ({
+						next: vitest
+							.fn()
+							.mockResolvedValueOnce({
+								done: false,
+								value: { choices: null },
+							})
+							.mockResolvedValueOnce({
+								done: false,
+								value: { choices: [{ delta: { content: testContent } }] },
+							})
+							.mockResolvedValueOnce({ done: true }),
+					}),
+				}
+			})
+
+			const stream = handler.createMessage("system prompt", [])
+			const firstChunk = await stream.next()
+
+			expect(firstChunk.done).toBe(false)
+			expect(firstChunk.value).toEqual({ type: "text", text: testContent })
+		})
 	})

 	describe("Model Configuration", () => {
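The mock above drives the handler through the SDK's async-iterator protocol by hand. As a hedged sketch (helper name and chunk shapes are illustrative, not the SDK's types), the same pattern can be factored into a reusable helper:

```ts
// Hypothetical helper: builds an object that satisfies `for await`,
// yielding the given values in order and then completing.
function mockStreamOf<T>(...values: T[]) {
	const queue = [...values]
	return {
		[Symbol.asyncIterator]: () => ({
			next: async (): Promise<IteratorResult<T>> =>
				queue.length > 0
					? { done: false, value: queue.shift() as T }
					: { done: true, value: undefined },
		}),
	}
}

// Usage mirroring the test: an error-style chunk with null choices,
// then a normal content chunk.
// mockCreate.mockImplementationOnce(() =>
// 	mockStreamOf({ choices: null }, { choices: [{ delta: { content: "hi" } }] }),
// )
```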
20 changes: 18 additions & 2 deletions src/api/providers/base-openai-compatible-provider.ts
@@ -116,7 +116,15 @@ export abstract class BaseOpenAiCompatibleProvider<ModelName extends string>
 		)

 		for await (const chunk of stream) {
-			const delta = chunk.choices[0]?.delta
+			// Check for provider-specific error responses (e.g., MiniMax base_resp)
+			const chunkAny = chunk as any
+			if (chunkAny.base_resp?.status_code && chunkAny.base_resp.status_code !== 0) {
+				throw new Error(
+					`${this.providerName} API Error (${chunkAny.base_resp.status_code}): ${chunkAny.base_resp.status_msg || "Unknown error"}`,
+				)
+			}
+
+			const delta = chunk.choices?.[0]?.delta

 			if (delta?.content) {
 				for (const processedChunk of matcher.update(delta.content)) {
@@ -155,7 +163,15 @@ export abstract class BaseOpenAiCompatibleProvider<ModelName extends string>
 				messages: [{ role: "user", content: prompt }],
 			})

-			return response.choices[0]?.message.content || ""
+			// Check for provider-specific error responses (e.g., MiniMax base_resp)
+			const responseAny = response as any
+			if (responseAny.base_resp?.status_code && responseAny.base_resp.status_code !== 0) {
+				throw new Error(
+					`${this.providerName} API Error (${responseAny.base_resp.status_code}): ${responseAny.base_resp.status_msg || "Unknown error"}`,
+				)
+			}
+
+			return response.choices?.[0]?.message.content || ""
 		} catch (error) {
 			throw handleOpenAIError(error, this.providerName)
 		}
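For reference, a minimal sketch of the envelope this guard assumes (the `base_resp` field is MiniMax-specific, and the shape below is inferred from the check itself rather than taken from published typings):

```ts
// Assumed MiniMax error envelope: a status_code of 0 means success;
// any non-zero code is an error the client should surface.
interface MiniMaxBaseResp {
	status_code: number
	status_msg?: string
}

// Hypothetical standalone version of the inline guard above.
function throwIfProviderError(payload: { base_resp?: MiniMaxBaseResp }, providerName: string): void {
	const resp = payload.base_resp
	if (resp?.status_code && resp.status_code !== 0) {
		throw new Error(`${providerName} API Error (${resp.status_code}): ${resp.status_msg || "Unknown error"}`)
	}
}
```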
10 changes: 5 additions & 5 deletions src/api/providers/openai.ts
@@ -191,7 +191,7 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandler
 			let lastUsage

 			for await (const chunk of stream) {
-				const delta = chunk.choices[0]?.delta ?? {}
+				const delta = chunk.choices?.[0]?.delta ?? {}

 				if (delta.content) {
 					for (const chunk of matcher.update(delta.content)) {
@@ -242,7 +242,7 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandler

 			yield {
 				type: "text",
-				text: response.choices[0]?.message.content || "",
+				text: response.choices?.[0]?.message.content || "",
 			}

 			yield this.processUsageMetrics(response.usage, modelInfo)
@@ -290,7 +290,7 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandler
 			throw handleOpenAIError(error, this.providerName)
 		}

-		return response.choices[0]?.message.content || ""
+		return response.choices?.[0]?.message.content || ""
 	} catch (error) {
 		if (error instanceof Error) {
 			throw new Error(`${this.providerName} completion error: ${error.message}`)
@@ -373,15 +373,15 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandler

 			yield {
 				type: "text",
-				text: response.choices[0]?.message.content || "",
+				text: response.choices?.[0]?.message.content || "",
 			}
 			yield this.processUsageMetrics(response.usage)
 		}
 	}

 	private async *handleStreamResponse(stream: AsyncIterable<OpenAI.Chat.Completions.ChatCompletionChunk>): ApiStream {
 		for await (const chunk of stream) {
-			const delta = chunk.choices[0]?.delta
+			const delta = chunk.choices?.[0]?.delta
 			if (delta?.content) {
 				yield {
 					type: "text",
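A small illustration of what the repeated `choices?.[0]` change buys (chunk values are hypothetical; the null-choices case matches the MiniMax behavior tested above):

```ts
type Chunk = { choices: Array<{ delta?: { content?: string } }> | null }

const errorChunk: Chunk = { choices: null } // e.g. a provider error or keep-alive frame
const contentChunk: Chunk = { choices: [{ delta: { content: "hello" } }] }

// Without optional chaining, `errorChunk.choices[0]` throws
// "TypeError: Cannot read properties of null (reading '0')" at runtime.
// With it, the delta is simply undefined and the chunk is skipped.
console.log(errorChunk.choices?.[0]?.delta) // undefined
console.log(contentChunk.choices?.[0]?.delta?.content) // "hello"
```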