Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
126 changes: 126 additions & 0 deletions src/api/providers/__tests__/openrouter.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,15 @@ vitest.mock("../fetchers/modelCache", () => ({
excludedTools: ["existing_excluded"],
includedTools: ["existing_included"],
},
"mistralai/devstral-2512": {
maxTokens: 16384,
contextWindow: 128000,
supportsImages: false,
supportsPromptCache: false,
inputPrice: 0.1,
outputPrice: 0.3,
description: "Devstral 2512",
},
})
}),
}))
Expand Down Expand Up @@ -527,6 +536,123 @@ describe("OpenRouterHandler", () => {
})
})

describe("Mistral/Devstral message formatting", () => {
	it("merges tool result text into tool messages for devstral models to avoid user-after-tool error", async () => {
		const handler = new OpenRouterHandler({
			...mockOptions,
			openRouterModelId: "mistralai/devstral-2512",
		})

		// Minimal streaming response: one content delta, then a usage chunk.
		const mockStream = {
			async *[Symbol.asyncIterator]() {
				yield {
					id: "test-id",
					choices: [{ delta: { content: "test response" } }],
				}
				yield {
					id: "test-id",
					choices: [{ delta: {} }],
					usage: { prompt_tokens: 10, completion_tokens: 20 },
				}
			},
		}

		const mockCreate = vitest.fn().mockResolvedValue(mockStream)
		;(OpenAI as any).prototype.chat = {
			completions: { create: mockCreate },
		} as any

		// Simulate a tool result followed by text content in the same user message.
		// This is the pattern that causes "Unexpected role 'user' after role 'tool'"
		// errors from Mistral-family models.
		const messages: Anthropic.Messages.MessageParam[] = [
			{ role: "user", content: "Use the read_file tool" },
			{
				role: "assistant",
				content: [
					{
						type: "tool_use",
						id: "tool_call_1",
						name: "read_file",
						input: { path: "test.ts" },
					},
				],
			},
			{
				role: "user",
				content: [
					{
						type: "tool_result",
						tool_use_id: "tool_call_1",
						content: "file contents here",
					},
					{
						type: "text",
						text: "Now analyze this file",
					},
				],
			},
		]

		const generator = handler.createMessage("test system", messages)
		for await (const _chunk of generator) {
			// Consume the stream so the request is actually issued.
		}

		// Inspect the payload sent to the OpenAI-compatible API. Assert the call
		// happened first so a missing request fails loudly instead of with a
		// TypeError on mock.calls[0].
		expect(mockCreate).toHaveBeenCalled()
		const apiMessages = mockCreate.mock.calls[0][0].messages

		// With mergeToolResultText enabled, trailing text must be folded into the
		// preceding tool message rather than emitted as a separate user message,
		// so no user message may directly follow a tool message.
		const userAfterTool = apiMessages.some(
			(msg: any, i: number) => i > 0 && msg.role === "user" && apiMessages[i - 1].role === "tool",
		)
		expect(userAfterTool).toBe(false)
	})

	it("passes a devstral model ID through to the completions API", async () => {
		const handler = new OpenRouterHandler({
			...mockOptions,
			openRouterModelId: "mistralai/devstral-2512",
		})

		const mockStream = {
			async *[Symbol.asyncIterator]() {
				yield {
					id: "test-id",
					choices: [{ delta: { content: "ok" } }],
				}
				yield {
					id: "test-id",
					choices: [{ delta: {} }],
					usage: { prompt_tokens: 5, completion_tokens: 5 },
				}
			},
		}

		const mockCreate = vitest.fn().mockResolvedValue(mockStream)
		;(OpenAI as any).prototype.chat = {
			completions: { create: mockCreate },
		} as any

		// A plain message without tool results — only model routing is under test.
		const messages: Anthropic.Messages.MessageParam[] = [{ role: "user", content: "Hello" }]

		const generator = handler.createMessage("test", messages)
		for await (const _chunk of generator) {
			// Consume the stream.
		}

		expect(mockCreate).toHaveBeenCalled()
		const callArgs = mockCreate.mock.calls[0][0]
		expect(callArgs.model).toBe("mistralai/devstral-2512")
	})
})

describe("completePrompt", () => {
it("returns correct response", async () => {
const handler = new OpenRouterHandler(mockOptions)
Expand Down
6 changes: 4 additions & 2 deletions src/api/providers/openrouter.ts
Original file line number Diff line number Diff line change
Expand Up @@ -233,12 +233,14 @@ export class OpenRouterHandler extends BaseProvider implements SingleCompletionH

// Convert Anthropic messages to OpenAI format.
// Pass normalization function for Mistral compatibility (requires 9-char alphanumeric IDs)
const isMistral = modelId.toLowerCase().includes("mistral")
// Also detect "devstral" models (e.g. mistralai/devstral-2512) which have the same requirements
const lowerModelId = modelId.toLowerCase()
const isMistral = lowerModelId.includes("mistral") || lowerModelId.includes("devstral")
let openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = [
{ role: "system", content: systemPrompt },
...convertToOpenAiMessages(
messages,
isMistral ? { normalizeToolCallId: normalizeMistralToolCallId } : undefined,
isMistral ? { normalizeToolCallId: normalizeMistralToolCallId, mergeToolResultText: true } : undefined,
),
]

Expand Down
Loading