diff --git a/src/api/providers/__tests__/openrouter.spec.ts b/src/api/providers/__tests__/openrouter.spec.ts
index e03abea635..f2361b411a 100644
--- a/src/api/providers/__tests__/openrouter.spec.ts
+++ b/src/api/providers/__tests__/openrouter.spec.ts
@@ -80,10 +80,41 @@ vitest.mock("../fetchers/modelCache", () => ({
 				excludedTools: ["existing_excluded"],
 				includedTools: ["existing_included"],
 			},
+			"mistralai/mistral-large-latest": {
+				maxTokens: 8192,
+				contextWindow: 128000,
+				supportsImages: false,
+				supportsPromptCache: false,
+				inputPrice: 2,
+				outputPrice: 6,
+				description: "Mistral Large",
+			},
+			"mistralai/devstral-2512": {
+				maxTokens: 8192,
+				contextWindow: 128000,
+				supportsImages: false,
+				supportsPromptCache: false,
+				inputPrice: 1,
+				outputPrice: 3,
+				description: "Devstral",
+			},
 		})
 	}),
 }))
 
+const mockConvertToOpenAiMessages = vitest.fn().mockReturnValue([])
+
+vitest.mock("../../transform/openai-format", async (importOriginal) => {
+	const actual = (await importOriginal()) as Record<string, unknown>
+	return {
+		...actual,
+		convertToOpenAiMessages: (...args: unknown[]) => {
+			mockConvertToOpenAiMessages(...args)
+			return (actual.convertToOpenAiMessages as (...a: unknown[]) => unknown)(...args)
+		},
+	}
+})
+
 describe("OpenRouterHandler", () => {
 	const mockOptions: ApiHandlerOptions = {
 		openRouterApiKey: "test-key",
@@ -527,6 +558,93 @@ describe("OpenRouterHandler", () => {
 		})
 	})
 
+	describe("Mistral/Devstral model detection", () => {
+		const createMockStream = () => ({
+			async *[Symbol.asyncIterator]() {
+				yield {
+					id: "test-id",
+					choices: [{ delta: { content: "response" } }],
+				}
+				yield {
+					id: "test-id",
+					choices: [{ delta: {} }],
+					usage: { prompt_tokens: 10, completion_tokens: 5 },
+				}
+			},
+		})
+
+		const setupMockCreate = () => {
+			const mockCreate = vitest.fn().mockResolvedValue(createMockStream())
+			;(OpenAI as any).prototype.chat = {
+				completions: { create: mockCreate },
+			} as any
+			return mockCreate
+		}
+
+		const messages: Anthropic.Messages.MessageParam[] = [{ role: "user" as const, content: "test" }]
+
+		beforeEach(() => {
+			mockConvertToOpenAiMessages.mockClear()
+		})
+
+		it("passes mergeToolResultText and normalizeToolCallId for Mistral models", async () => {
+			const handler = new OpenRouterHandler({
+				openRouterApiKey: "test-key",
+				openRouterModelId: "mistralai/mistral-large-latest",
+			})
+			setupMockCreate()
+
+			const generator = handler.createMessage("system", messages)
+			for await (const _chunk of generator) {
+				// consume stream
+			}
+
+			expect(mockConvertToOpenAiMessages).toHaveBeenCalledWith(
+				messages,
+				expect.objectContaining({
+					normalizeToolCallId: expect.any(Function),
+					mergeToolResultText: true,
+				}),
+			)
+		})
+
+		it("passes mergeToolResultText and normalizeToolCallId for Devstral models", async () => {
+			const handler = new OpenRouterHandler({
+				openRouterApiKey: "test-key",
+				openRouterModelId: "mistralai/devstral-2512",
+			})
+			setupMockCreate()
+
+			const generator = handler.createMessage("system", messages)
+			for await (const _chunk of generator) {
+				// consume stream
+			}
+
+			expect(mockConvertToOpenAiMessages).toHaveBeenCalledWith(
+				messages,
+				expect.objectContaining({
+					normalizeToolCallId: expect.any(Function),
+					mergeToolResultText: true,
+				}),
+			)
+		})
+
+		it("does not pass Mistral options for non-Mistral models", async () => {
+			const handler = new OpenRouterHandler({
+				openRouterApiKey: "test-key",
+				openRouterModelId: "anthropic/claude-sonnet-4",
+			})
+			setupMockCreate()
+
+			const generator = handler.createMessage("system", messages)
+			for await (const _chunk of generator) {
+				// consume stream
+			}
+
+			expect(mockConvertToOpenAiMessages).toHaveBeenCalledWith(messages, undefined)
+		})
+	})
+
 	describe("completePrompt", () => {
 		it("returns correct response", async () => {
 			const handler = new OpenRouterHandler(mockOptions)
diff --git a/src/api/providers/openrouter.ts b/src/api/providers/openrouter.ts
index 7fcc24b15f..1af59f64f8 100644
--- a/src/api/providers/openrouter.ts
+++ b/src/api/providers/openrouter.ts
@@ -232,13 +232,17 @@ export class OpenRouterHandler extends BaseProvider implements SingleCompletionH
 		}
 
 		// Convert Anthropic messages to OpenAI format.
-		// Pass normalization function for Mistral compatibility (requires 9-char alphanumeric IDs)
-		const isMistral = modelId.toLowerCase().includes("mistral")
+		// Pass normalization function for Mistral/Devstral compatibility (requires 9-char alphanumeric IDs)
+		// Also merge tool result text to avoid "Unexpected role 'user' after role 'tool'" errors
+		const modelIdLower = modelId.toLowerCase()
+		const isMistralFamily = modelIdLower.includes("mistral") || modelIdLower.includes("devstral")
 		let openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = [
 			{ role: "system", content: systemPrompt },
 			...convertToOpenAiMessages(
 				messages,
-				isMistral ? { normalizeToolCallId: normalizeMistralToolCallId } : undefined,
+				isMistralFamily
+					? { normalizeToolCallId: normalizeMistralToolCallId, mergeToolResultText: true }
+					: undefined,
 			),
 		]
 