diff --git a/packages/types/src/global-settings.ts b/packages/types/src/global-settings.ts index 91b37f3d6d..93432b0cc0 100644 --- a/packages/types/src/global-settings.ts +++ b/packages/types/src/global-settings.ts @@ -279,6 +279,7 @@ export const SECRET_STATE_KEYS = [ "fireworksApiKey", "vercelAiGatewayApiKey", "basetenApiKey", + "brainiallApiKey", ] as const // Global secrets that are part of GlobalSettings (not ProviderSettings) diff --git a/packages/types/src/provider-settings.ts b/packages/types/src/provider-settings.ts index 859792d7c3..b8cc94514e 100644 --- a/packages/types/src/provider-settings.ts +++ b/packages/types/src/provider-settings.ts @@ -5,6 +5,7 @@ import { codebaseIndexProviderSchema } from "./codebase-index.js" import { anthropicModels, basetenModels, + brainiallModels, bedrockModels, deepSeekModels, fireworksModels, @@ -105,6 +106,7 @@ export const providerNames = [ "anthropic", "bedrock", "baseten", + "brainiall", "deepseek", "fireworks", "gemini", @@ -377,6 +379,10 @@ const vercelAiGatewaySchema = baseProviderSettingsSchema.extend({ vercelAiGatewayModelId: z.string().optional(), }) +const brainiallSchema = apiModelIdProviderModelSchema.extend({ + brainiallApiKey: z.string().optional(), +}) + const basetenSchema = apiModelIdProviderModelSchema.extend({ basetenApiKey: z.string().optional(), }) @@ -407,6 +413,7 @@ export const providerSettingsSchemaDiscriminated = z.discriminatedUnion("apiProv fakeAiSchema.merge(z.object({ apiProvider: z.literal("fake-ai") })), xaiSchema.merge(z.object({ apiProvider: z.literal("xai") })), basetenSchema.merge(z.object({ apiProvider: z.literal("baseten") })), + brainiallSchema.merge(z.object({ apiProvider: z.literal("brainiall") })), litellmSchema.merge(z.object({ apiProvider: z.literal("litellm") })), sambaNovaSchema.merge(z.object({ apiProvider: z.literal("sambanova") })), zaiSchema.merge(z.object({ apiProvider: z.literal("zai") })), @@ -440,6 +447,7 @@ export const providerSettingsSchema = z.object({ 
...fakeAiSchema.shape, ...xaiSchema.shape, ...basetenSchema.shape, + ...brainiallSchema.shape, ...litellmSchema.shape, ...sambaNovaSchema.shape, ...zaiSchema.shape, @@ -515,6 +523,7 @@ export const modelIdKeysByProvider: Record = { unbound: "unboundModelId", xai: "apiModelId", baseten: "apiModelId", + brainiall: "apiModelId", litellm: "litellmModelId", sambanova: "apiModelId", zai: "apiModelId", @@ -630,6 +639,7 @@ export const MODELS_BY_PROVIDER: Record< xai: { id: "xai", label: "xAI (Grok)", models: Object.keys(xaiModels) }, zai: { id: "zai", label: "Z.ai", models: Object.keys(internationalZAiModels) }, baseten: { id: "baseten", label: "Baseten", models: Object.keys(basetenModels) }, + brainiall: { id: "brainiall", label: "Brainiall", models: Object.keys(brainiallModels) }, // Dynamic providers; models pulled from remote APIs. litellm: { id: "litellm", label: "LiteLLM", models: [] }, diff --git a/packages/types/src/providers/brainiall.ts b/packages/types/src/providers/brainiall.ts new file mode 100644 index 0000000000..ec22aac1a2 --- /dev/null +++ b/packages/types/src/providers/brainiall.ts @@ -0,0 +1,169 @@ +import type { ModelInfo } from "../model.js" + +// https://brainiall.com +export type BrainiallModelId = keyof typeof brainiallModels + +export const brainiallDefaultModelId: BrainiallModelId = "claude-sonnet-4-6" + +export const brainiallModels = { + "claude-opus-4-6": { + maxTokens: 64_000, + contextWindow: 200_000, + supportsImages: true, + supportsPromptCache: true, + inputPrice: 5.0, + outputPrice: 25.0, + cacheWritesPrice: 6.25, + cacheReadsPrice: 0.5, + description: "Claude Opus 4.6", + }, + "claude-sonnet-4-6": { + maxTokens: 64_000, + contextWindow: 200_000, + supportsImages: true, + supportsPromptCache: true, + inputPrice: 3.0, + outputPrice: 15.0, + cacheWritesPrice: 3.75, + cacheReadsPrice: 0.3, + description: "Claude Sonnet 4.6", + }, + "claude-haiku-4-5": { + maxTokens: 8192, + contextWindow: 200_000, + supportsImages: true, + 
supportsPromptCache: true, + inputPrice: 1.0, + outputPrice: 5.0, + cacheWritesPrice: 1.25, + cacheReadsPrice: 0.1, + description: "Claude Haiku 4.5", + }, + "claude-opus-4-5": { + maxTokens: 64_000, + contextWindow: 200_000, + supportsImages: true, + supportsPromptCache: true, + supportsReasoningBudget: true, + inputPrice: 15.0, + outputPrice: 75.0, + description: "Claude Opus 4.5", + }, + "deepseek-r1": { + maxTokens: 64_000, + contextWindow: 128_000, + supportsImages: false, + supportsPromptCache: false, + preserveReasoning: true, + inputPrice: 1.35, + outputPrice: 5.4, + description: "DeepSeek R1", + }, + "deepseek-v3": { + maxTokens: 8192, + contextWindow: 128_000, + supportsPromptCache: false, + inputPrice: 0.27, + outputPrice: 1.1, + description: "DeepSeek V3.2", + }, + "llama-3.3-70b": { + maxTokens: 8192, + contextWindow: 128_000, + supportsPromptCache: false, + inputPrice: 0.72, + outputPrice: 0.72, + description: "Meta Llama 3.3 70B", + }, + "llama-4-scout": { + maxTokens: 8192, + contextWindow: 512_000, + supportsPromptCache: false, + inputPrice: 0.17, + outputPrice: 0.17, + description: "Meta Llama 4 Scout 17B", + }, + "nova-pro": { + maxTokens: 5120, + contextWindow: 300_000, + supportsImages: true, + supportsPromptCache: false, + inputPrice: 0.8, + outputPrice: 3.2, + description: "Amazon Nova Pro", + }, + "nova-lite": { + maxTokens: 5120, + contextWindow: 300_000, + supportsImages: true, + supportsPromptCache: false, + inputPrice: 0.06, + outputPrice: 0.24, + description: "Amazon Nova Lite", + }, + "nova-micro": { + maxTokens: 5120, + contextWindow: 300_000, + supportsPromptCache: false, + inputPrice: 0.04, + outputPrice: 0.14, + description: "Amazon Nova Micro", + }, + "mistral-large-3": { + maxTokens: 8192, + contextWindow: 128_000, + supportsImages: true, + supportsPromptCache: false, + inputPrice: 2.0, + outputPrice: 6.0, + description: "Mistral Large 3 675B", + }, + "devstral-2": { + maxTokens: 8192, + contextWindow: 128_000, + 
supportsPromptCache: false, + inputPrice: 0.5, + outputPrice: 1.5, + description: "Devstral 2 123B", + }, + "qwen3-80b": { + maxTokens: 8192, + contextWindow: 128_000, + supportsPromptCache: false, + inputPrice: 0.5, + outputPrice: 0.5, + description: "Qwen3 80B", + }, + "qwen3-32b": { + maxTokens: 8192, + contextWindow: 128_000, + supportsPromptCache: false, + inputPrice: 0.35, + outputPrice: 0.35, + description: "Qwen3 32B", + }, + "minimax-m2": { + maxTokens: 16_384, + contextWindow: 1_000_000, + supportsPromptCache: false, + inputPrice: 1.0, + outputPrice: 5.0, + description: "MiniMax M2", + }, + "kimi-k2.5": { + maxTokens: 8192, + contextWindow: 128_000, + supportsPromptCache: false, + inputPrice: 0.6, + outputPrice: 2.4, + description: "Moonshot Kimi K2.5", + }, + "gpt-oss-120b": { + maxTokens: 16_384, + contextWindow: 200_000, + supportsPromptCache: false, + inputPrice: 1.35, + outputPrice: 5.4, + description: "GPT OSS 120B", + }, +} as const satisfies Record<string, ModelInfo> diff --git a/packages/types/src/providers/index.ts b/packages/types/src/providers/index.ts index 6bb959c705..73b760c85e 100644 --- a/packages/types/src/providers/index.ts +++ b/packages/types/src/providers/index.ts @@ -1,5 +1,6 @@ export * from "./anthropic.js" export * from "./baseten.js" +export * from "./brainiall.js" export * from "./bedrock.js" export * from "./deepseek.js" export * from "./fireworks.js" @@ -27,6 +28,7 @@ export * from "./minimax.js" import { anthropicDefaultModelId } from "./anthropic.js" import { basetenDefaultModelId } from "./baseten.js" +import { brainiallDefaultModelId } from "./brainiall.js" import { bedrockDefaultModelId } from "./bedrock.js" import { deepSeekDefaultModelId } from "./deepseek.js" import { fireworksDefaultModelId } from "./fireworks.js" @@ -71,6 +73,8 @@ export function getProviderDefaultModelId( return xaiDefaultModelId case "baseten": return basetenDefaultModelId + case "brainiall": + return brainiallDefaultModelId case "bedrock": return
bedrockDefaultModelId case "vertex": diff --git a/src/api/index.ts b/src/api/index.ts index ebc2682a1a..9deefaec43 100644 --- a/src/api/index.ts +++ b/src/api/index.ts @@ -33,6 +33,7 @@ import { VercelAiGatewayHandler, MiniMaxHandler, BasetenHandler, + BrainiallHandler, } from "./providers" import { NativeOllamaHandler } from "./providers/native-ollama" @@ -176,6 +177,8 @@ export function buildApiHandler(configuration: ProviderSettings): ApiHandler { return new MiniMaxHandler(options) case "baseten": return new BasetenHandler(options) + case "brainiall": + return new BrainiallHandler(options) default: return new AnthropicHandler(options) } diff --git a/src/api/providers/brainiall.ts b/src/api/providers/brainiall.ts new file mode 100644 index 0000000000..ac6853e845 --- /dev/null +++ b/src/api/providers/brainiall.ts @@ -0,0 +1,172 @@ +import { Anthropic } from "@anthropic-ai/sdk" +import OpenAI from "openai" + +import { type BrainiallModelId, brainiallDefaultModelId, brainiallModels, ApiProviderError } from "@roo-code/types" +import { TelemetryService } from "@roo-code/telemetry" + +import { NativeToolCallParser } from "../../core/assistant-message/NativeToolCallParser" +import type { ApiHandlerOptions } from "../../shared/api" + +import { ApiStream } from "../transform/stream" +import { convertToOpenAiMessages } from "../transform/openai-format" +import { getModelParams } from "../transform/model-params" + +import { DEFAULT_HEADERS } from "./constants" +import { BaseProvider } from "./base-provider" +import type { SingleCompletionHandler, ApiHandlerCreateMessageMetadata } from "../index" +import { handleOpenAIError } from "./utils/openai-error-handler" + +const BRAINIALL_DEFAULT_TEMPERATURE = 0 + +export class BrainiallHandler extends BaseProvider implements SingleCompletionHandler { + protected options: ApiHandlerOptions + private client: OpenAI + private readonly providerName = "Brainiall" + + constructor(options: ApiHandlerOptions) { + super() + this.options = 
options + + const apiKey = this.options.brainiallApiKey ?? "not-provided" + + this.client = new OpenAI({ + baseURL: "https://apim-ai-apis.azure-api.net/v1", + apiKey: apiKey, + defaultHeaders: DEFAULT_HEADERS, + }) + } + + override getModel() { + const id = + this.options.apiModelId && this.options.apiModelId in brainiallModels + ? (this.options.apiModelId as BrainiallModelId) + : brainiallDefaultModelId + + const info = brainiallModels[id] + const params = getModelParams({ + format: "openai", + modelId: id, + model: info, + settings: this.options, + defaultTemperature: BRAINIALL_DEFAULT_TEMPERATURE, + }) + return { id, info, ...params } + } + + override async *createMessage( + systemPrompt: string, + messages: Anthropic.Messages.MessageParam[], + metadata?: ApiHandlerCreateMessageMetadata, + ): ApiStream { + const { id: modelId, info: modelInfo, reasoning } = this.getModel() + + // Use the OpenAI-compatible API. + const requestOptions = { + model: modelId, + max_tokens: modelInfo.maxTokens, + temperature: this.options.modelTemperature ?? BRAINIALL_DEFAULT_TEMPERATURE, + messages: [ + { role: "system", content: systemPrompt }, + ...convertToOpenAiMessages(messages), + ] as OpenAI.Chat.ChatCompletionMessageParam[], + stream: true as const, + stream_options: { include_usage: true }, + ...(reasoning && reasoning), + tools: this.convertToolsForOpenAI(metadata?.tools), + tool_choice: metadata?.tool_choice, + parallel_tool_calls: metadata?.parallelToolCalls ?? true, + } + + let stream + try { + stream = await this.client.chat.completions.create(requestOptions) + } catch (error) { + const errorMessage = error instanceof Error ? 
error.message : String(error) + const apiError = new ApiProviderError(errorMessage, this.providerName, modelId, "createMessage") + TelemetryService.instance.captureException(apiError) + throw handleOpenAIError(error, this.providerName) + } + + for await (const chunk of stream) { + const delta = chunk.choices[0]?.delta + const finishReason = chunk.choices[0]?.finish_reason + + if (delta?.content) { + yield { + type: "text", + text: delta.content, + } + } + + if (delta && "reasoning_content" in delta && delta.reasoning_content) { + yield { + type: "reasoning", + text: delta.reasoning_content as string, + } + } + + // Handle tool calls in stream - emit partial chunks for NativeToolCallParser + if (delta?.tool_calls) { + for (const toolCall of delta.tool_calls) { + yield { + type: "tool_call_partial", + index: toolCall.index, + id: toolCall.id, + name: toolCall.function?.name, + arguments: toolCall.function?.arguments, + } + } + } + + // Process finish_reason to emit tool_call_end events + // This ensures tool calls are finalized even if the stream doesn't properly close + if (finishReason) { + const endEvents = NativeToolCallParser.processFinishReason(finishReason) + for (const event of endEvents) { + yield event + } + } + + if (chunk.usage) { + // Extract detailed token information if available + // First check for prompt_tokens_details structure (real API response) + const promptDetails = "prompt_tokens_details" in chunk.usage ? chunk.usage.prompt_tokens_details : null + const cachedTokens = promptDetails && "cached_tokens" in promptDetails ? promptDetails.cached_tokens : 0 + + // Fall back to direct fields in usage (used in test mocks) + const readTokens = + cachedTokens || + ("cache_read_input_tokens" in chunk.usage ? (chunk.usage as any).cache_read_input_tokens : 0) + const writeTokens = + "cache_creation_input_tokens" in chunk.usage ? 
(chunk.usage as any).cache_creation_input_tokens : 0 + + yield { + type: "usage", + inputTokens: chunk.usage.prompt_tokens || 0, + outputTokens: chunk.usage.completion_tokens || 0, + cacheReadTokens: readTokens, + cacheWriteTokens: writeTokens, + } + } + } + } + + async completePrompt(prompt: string): Promise<string> { + const { id: modelId, reasoning } = this.getModel() + + try { + const response = await this.client.chat.completions.create({ + model: modelId, + messages: [{ role: "user", content: prompt }], + ...(reasoning && reasoning), + }) + + return response.choices[0]?.message.content || "" + } catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error) + const apiError = new ApiProviderError(errorMessage, this.providerName, modelId, "completePrompt") + TelemetryService.instance.captureException(apiError) + throw handleOpenAIError(error, this.providerName) + } + } +} diff --git a/src/api/providers/index.ts b/src/api/providers/index.ts index b6de795210..6a801756ac 100644 --- a/src/api/providers/index.ts +++ b/src/api/providers/index.ts @@ -27,3 +27,4 @@ export { RooHandler } from "./roo" export { VercelAiGatewayHandler } from "./vercel-ai-gateway" export { MiniMaxHandler } from "./minimax" export { BasetenHandler } from "./baseten" +export { BrainiallHandler } from "./brainiall" diff --git a/src/shared/ProfileValidator.ts b/src/shared/ProfileValidator.ts index 7246a90177..f8e8e1a168 100644 --- a/src/shared/ProfileValidator.ts +++ b/src/shared/ProfileValidator.ts @@ -61,6 +61,7 @@ export class ProfileValidator { case "mistral": case "deepseek": case "xai": + case "brainiall": case "sambanova": case "fireworks": return profile.apiModelId diff --git a/webview-ui/src/components/settings/ApiOptions.tsx b/webview-ui/src/components/settings/ApiOptions.tsx index 4d914a4833..cbcdf63b62 100644 --- a/webview-ui/src/components/settings/ApiOptions.tsx +++ b/webview-ui/src/components/settings/ApiOptions.tsx @@ -22,6 +22,7 @@ import {
mistralDefaultModelId, xaiDefaultModelId, basetenDefaultModelId, + brainiallDefaultModelId, bedrockDefaultModelId, vertexDefaultModelId, sambaNovaDefaultModelId, @@ -68,6 +69,7 @@ import { Anthropic, Baseten, + Brainiall, Bedrock, DeepSeek, Gemini, @@ -345,6 +347,7 @@ const ApiOptions = ({ mistral: { field: "apiModelId", default: mistralDefaultModelId }, xai: { field: "apiModelId", default: xaiDefaultModelId }, baseten: { field: "apiModelId", default: basetenDefaultModelId }, + brainiall: { field: "apiModelId", default: brainiallDefaultModelId }, bedrock: { field: "apiModelId", default: bedrockDefaultModelId }, vertex: { field: "apiModelId", default: vertexDefaultModelId }, sambanova: { field: "apiModelId", default: sambaNovaDefaultModelId }, @@ -575,6 +578,13 @@ const ApiOptions = ({ /> )} + {selectedProvider === "brainiall" && ( + <Brainiall apiConfiguration={apiConfiguration} setApiConfigurationField={setApiConfigurationField} /> + )} + + {selectedProvider === "bedrock" && ( >> = { anthropic: anthropicModels, bedrock: bedrockModels, + brainiall: brainiallModels, deepseek: deepSeekModels, moonshot: moonshotModels, gemini: geminiModels, @@ -64,5 +66,6 @@ export const PROVIDERS = [ { value: "vercel-ai-gateway", label: "Vercel AI Gateway", proxy: false }, { value: "minimax", label: "MiniMax", proxy: false }, { value: "baseten", label: "Baseten", proxy: false }, + { value: "brainiall", label: "Brainiall", proxy: false }, { value: "unbound", label: "Unbound", proxy: false }, ].sort((a, b) => a.label.localeCompare(b.label)) diff --git a/webview-ui/src/components/settings/providers/Brainiall.tsx b/webview-ui/src/components/settings/providers/Brainiall.tsx new file mode 100644 index 0000000000..314c61c647 --- /dev/null +++ b/webview-ui/src/components/settings/providers/Brainiall.tsx @@ -0,0 +1,50 @@ +import { useCallback } from "react" +import { VSCodeTextField } from "@vscode/webview-ui-toolkit/react" + +import type { ProviderSettings } from "@roo-code/types" + +import { useAppTranslation } from "@src/i18n/TranslationContext" +import { VSCodeButtonLink } from
"@src/components/common/VSCodeButtonLink" + +import { inputEventTransform } from "../transforms" + +type BrainiallProps = { + apiConfiguration: ProviderSettings + setApiConfigurationField: (field: keyof ProviderSettings, value: ProviderSettings[keyof ProviderSettings]) => void +} + +export const Brainiall = ({ apiConfiguration, setApiConfigurationField }: BrainiallProps) => { + const { t } = useAppTranslation() + + const handleInputChange = useCallback( + <K extends keyof ProviderSettings, E>( + field: K, + transform: (event: E) => ProviderSettings[K] = inputEventTransform, + ) => + (event: E | Event) => { + setApiConfigurationField(field, transform(event as E)) + }, + [setApiConfigurationField], + ) + + return ( + <> + <VSCodeTextField + value={apiConfiguration?.brainiallApiKey || ""} + type="password" + onInput={handleInputChange("brainiallApiKey")} + placeholder={t("settings:placeholders.apiKey")} + className="w-full"> + <label className="block font-medium mb-1">{t("settings:providers.brainiallApiKey")}</label> + </VSCodeTextField> + <div className="text-sm text-vscode-descriptionForeground -mt-2">
+ {t("settings:providers.apiKeyStorageNotice")} + </div>
+ {!apiConfiguration?.brainiallApiKey && ( + <VSCodeButtonLink href="https://brainiall.com" appearance="secondary"> + {t("settings:providers.getBrainiallApiKey")} + </VSCodeButtonLink> + )} + </> + ) +} diff --git a/webview-ui/src/components/settings/providers/index.ts b/webview-ui/src/components/settings/providers/index.ts index 597caffd1d..757b0c0488 100644 --- a/webview-ui/src/components/settings/providers/index.ts +++ b/webview-ui/src/components/settings/providers/index.ts @@ -1,5 +1,6 @@ export { Anthropic } from "./Anthropic" export { Bedrock } from "./Bedrock" +export { Brainiall } from "./Brainiall" export { DeepSeek } from "./DeepSeek" export { Gemini } from "./Gemini" export { LMStudio } from "./LMStudio" diff --git a/webview-ui/src/components/settings/utils/providerModelConfig.ts b/webview-ui/src/components/settings/utils/providerModelConfig.ts index fa71814390..bd2e259bff 100644 --- a/webview-ui/src/components/settings/utils/providerModelConfig.ts +++ b/webview-ui/src/components/settings/utils/providerModelConfig.ts @@ -16,6 +16,7 @@ import { fireworksDefaultModelId, minimaxDefaultModelId, basetenDefaultModelId, + brainiallDefaultModelId, } from "@roo-code/types" import { MODELS_BY_PROVIDER } from "../constants" @@ -41,6 +42,7 @@ export const PROVIDER_SERVICE_CONFIG: Partial> = fireworks: fireworksDefaultModelId, minimax: minimaxDefaultModelId, baseten: basetenDefaultModelId, + brainiall: brainiallDefaultModelId, } export const getProviderServiceConfig = (provider: ProviderName): ProviderServiceConfig => { diff --git a/webview-ui/src/components/ui/hooks/useSelectedModel.ts b/webview-ui/src/components/ui/hooks/useSelectedModel.ts index c32a08990c..e3e1efb51f 100644 --- a/webview-ui/src/components/ui/hooks/useSelectedModel.ts +++ b/webview-ui/src/components/ui/hooks/useSelectedModel.ts @@ -23,6 +23,7 @@ import { mainlandZAiModels, fireworksModels, basetenModels, + brainiallModels, qwenCodeModels, litellmDefaultModelInfo, lMStudioDefaultModelInfo, @@ -179,6 +180,11 @@ function getSelectedModel({ const info = basetenModels[id as keyof typeof
basetenModels] return { id, info } } + case "brainiall": { + const id = apiConfiguration.apiModelId ?? defaultModelId + const info = brainiallModels[id as keyof typeof brainiallModels] + return info ? { id, info } : { id, info: undefined } + } case "bedrock": { const id = apiConfiguration.apiModelId ?? defaultModelId const baseInfo = bedrockModels[id as keyof typeof bedrockModels] diff --git a/webview-ui/src/i18n/locales/ca/settings.json b/webview-ui/src/i18n/locales/ca/settings.json index 2c83cabbbc..7ea167e15a 100644 --- a/webview-ui/src/i18n/locales/ca/settings.json +++ b/webview-ui/src/i18n/locales/ca/settings.json @@ -408,6 +408,8 @@ "codestralBaseUrlDesc": "Establir una URL alternativa per al model Codestral.", "xaiApiKey": "Clau API de xAI", "getXaiApiKey": "Obtenir clau API de xAI", + "brainiallApiKey": "Brainiall API Key", + "getBrainiallApiKey": "Get Brainiall API Key", "litellmApiKey": "Clau API de LiteLLM", "litellmBaseUrl": "URL base de LiteLLM", "awsCredentials": "Credencials d'AWS", diff --git a/webview-ui/src/i18n/locales/de/settings.json b/webview-ui/src/i18n/locales/de/settings.json index c31d29147d..c685c9240b 100644 --- a/webview-ui/src/i18n/locales/de/settings.json +++ b/webview-ui/src/i18n/locales/de/settings.json @@ -408,6 +408,8 @@ "codestralBaseUrlDesc": "Legen Sie eine alternative URL für das Codestral-Modell fest.", "xaiApiKey": "xAI API-Schlüssel", "getXaiApiKey": "xAI API-Schlüssel erhalten", + "brainiallApiKey": "Brainiall API Key", + "getBrainiallApiKey": "Get Brainiall API Key", "litellmApiKey": "LiteLLM API-Schlüssel", "litellmBaseUrl": "LiteLLM Basis-URL", "awsCredentials": "AWS Anmeldedaten", diff --git a/webview-ui/src/i18n/locales/en/settings.json b/webview-ui/src/i18n/locales/en/settings.json index 3b2497aaee..d2be489617 100644 --- a/webview-ui/src/i18n/locales/en/settings.json +++ b/webview-ui/src/i18n/locales/en/settings.json @@ -471,6 +471,8 @@ "codestralBaseUrlDesc": "Set an alternative URL for the Codestral model.", 
"xaiApiKey": "xAI API Key", "getXaiApiKey": "Get xAI API Key", + "brainiallApiKey": "Brainiall API Key", + "getBrainiallApiKey": "Get Brainiall API Key", "litellmApiKey": "LiteLLM API Key", "litellmBaseUrl": "LiteLLM Base URL", "awsCredentials": "AWS Credentials", diff --git a/webview-ui/src/i18n/locales/es/settings.json b/webview-ui/src/i18n/locales/es/settings.json index 6595c4f907..47aaf7baa6 100644 --- a/webview-ui/src/i18n/locales/es/settings.json +++ b/webview-ui/src/i18n/locales/es/settings.json @@ -408,6 +408,8 @@ "codestralBaseUrlDesc": "Establecer una URL alternativa para el modelo Codestral.", "xaiApiKey": "Clave API de xAI", "getXaiApiKey": "Obtener clave API de xAI", + "brainiallApiKey": "Brainiall API Key", + "getBrainiallApiKey": "Get Brainiall API Key", "litellmApiKey": "Clave API de LiteLLM", "litellmBaseUrl": "URL base de LiteLLM", "awsCredentials": "Credenciales de AWS", diff --git a/webview-ui/src/i18n/locales/fr/settings.json b/webview-ui/src/i18n/locales/fr/settings.json index 56337bda14..95f44d067b 100644 --- a/webview-ui/src/i18n/locales/fr/settings.json +++ b/webview-ui/src/i18n/locales/fr/settings.json @@ -408,6 +408,8 @@ "codestralBaseUrlDesc": "Définir une URL alternative pour le modèle Codestral.", "xaiApiKey": "Clé API xAI", "getXaiApiKey": "Obtenir la clé API xAI", + "brainiallApiKey": "Brainiall API Key", + "getBrainiallApiKey": "Get Brainiall API Key", "litellmApiKey": "Clé API LiteLLM", "litellmBaseUrl": "URL de base LiteLLM", "awsCredentials": "Identifiants AWS", diff --git a/webview-ui/src/i18n/locales/hi/settings.json b/webview-ui/src/i18n/locales/hi/settings.json index abd334bec0..47e457c41a 100644 --- a/webview-ui/src/i18n/locales/hi/settings.json +++ b/webview-ui/src/i18n/locales/hi/settings.json @@ -408,6 +408,8 @@ "codestralBaseUrlDesc": "Codestral मॉडल के लिए वैकल्पिक URL सेट करें।", "xaiApiKey": "xAI API कुंजी", "getXaiApiKey": "xAI API कुंजी प्राप्त करें", + "brainiallApiKey": "Brainiall API Key", + "getBrainiallApiKey": 
"Get Brainiall API Key", "litellmApiKey": "LiteLLM API कुंजी", "litellmBaseUrl": "LiteLLM आधार URL", "awsCredentials": "AWS क्रेडेंशियल्स", diff --git a/webview-ui/src/i18n/locales/id/settings.json b/webview-ui/src/i18n/locales/id/settings.json index 1ebcf2073b..a98666b3a8 100644 --- a/webview-ui/src/i18n/locales/id/settings.json +++ b/webview-ui/src/i18n/locales/id/settings.json @@ -408,6 +408,8 @@ "codestralBaseUrlDesc": "Atur URL alternatif untuk model Codestral.", "xaiApiKey": "xAI API Key", "getXaiApiKey": "Dapatkan xAI API Key", + "brainiallApiKey": "Brainiall API Key", + "getBrainiallApiKey": "Get Brainiall API Key", "litellmApiKey": "LiteLLM API Key", "litellmBaseUrl": "LiteLLM Base URL", "awsCredentials": "AWS Credentials", diff --git a/webview-ui/src/i18n/locales/it/settings.json b/webview-ui/src/i18n/locales/it/settings.json index 4a0c716165..968fa81cec 100644 --- a/webview-ui/src/i18n/locales/it/settings.json +++ b/webview-ui/src/i18n/locales/it/settings.json @@ -408,6 +408,8 @@ "codestralBaseUrlDesc": "Imposta un URL opzionale per i modelli Codestral.", "xaiApiKey": "Chiave API xAI", "getXaiApiKey": "Ottieni chiave API xAI", + "brainiallApiKey": "Brainiall API Key", + "getBrainiallApiKey": "Get Brainiall API Key", "litellmApiKey": "Chiave API LiteLLM", "litellmBaseUrl": "URL base LiteLLM", "awsCredentials": "Credenziali AWS", diff --git a/webview-ui/src/i18n/locales/ja/settings.json b/webview-ui/src/i18n/locales/ja/settings.json index b0d921571a..0641be9435 100644 --- a/webview-ui/src/i18n/locales/ja/settings.json +++ b/webview-ui/src/i18n/locales/ja/settings.json @@ -408,6 +408,8 @@ "codestralBaseUrlDesc": "Codestralモデルの代替URLを設定します。", "xaiApiKey": "xAI APIキー", "getXaiApiKey": "xAI APIキーを取得", + "brainiallApiKey": "Brainiall API Key", + "getBrainiallApiKey": "Get Brainiall API Key", "litellmApiKey": "LiteLLM APIキー", "litellmBaseUrl": "LiteLLM ベースURL", "awsCredentials": "AWS認証情報", diff --git a/webview-ui/src/i18n/locales/ko/settings.json 
b/webview-ui/src/i18n/locales/ko/settings.json index 88fc8e6d79..0520c3692f 100644 --- a/webview-ui/src/i18n/locales/ko/settings.json +++ b/webview-ui/src/i18n/locales/ko/settings.json @@ -408,6 +408,8 @@ "codestralBaseUrlDesc": "Codestral 모델의 대체 URL을 설정합니다.", "xaiApiKey": "xAI API 키", "getXaiApiKey": "xAI API 키 받기", + "brainiallApiKey": "Brainiall API Key", + "getBrainiallApiKey": "Get Brainiall API Key", "litellmApiKey": "LiteLLM API 키", "litellmBaseUrl": "LiteLLM 기본 URL", "awsCredentials": "AWS 자격 증명", diff --git a/webview-ui/src/i18n/locales/nl/settings.json b/webview-ui/src/i18n/locales/nl/settings.json index fcfad37d37..d7be6e93c3 100644 --- a/webview-ui/src/i18n/locales/nl/settings.json +++ b/webview-ui/src/i18n/locales/nl/settings.json @@ -408,6 +408,8 @@ "codestralBaseUrlDesc": "Stel een alternatieve URL in voor het Codestral-model.", "xaiApiKey": "xAI API-sleutel", "getXaiApiKey": "xAI API-sleutel ophalen", + "brainiallApiKey": "Brainiall API Key", + "getBrainiallApiKey": "Get Brainiall API Key", "litellmApiKey": "LiteLLM API-sleutel", "litellmBaseUrl": "LiteLLM basis-URL", "awsCredentials": "AWS-inloggegevens", diff --git a/webview-ui/src/i18n/locales/pl/settings.json b/webview-ui/src/i18n/locales/pl/settings.json index fa48bc6b21..1435712e1f 100644 --- a/webview-ui/src/i18n/locales/pl/settings.json +++ b/webview-ui/src/i18n/locales/pl/settings.json @@ -408,6 +408,8 @@ "codestralBaseUrlDesc": "Ustaw opcjonalny URL dla modeli Codestral.", "xaiApiKey": "Klucz API xAI", "getXaiApiKey": "Uzyskaj klucz API xAI", + "brainiallApiKey": "Brainiall API Key", + "getBrainiallApiKey": "Get Brainiall API Key", "litellmApiKey": "Klucz API LiteLLM", "litellmBaseUrl": "URL bazowy LiteLLM", "awsCredentials": "Poświadczenia AWS", diff --git a/webview-ui/src/i18n/locales/pt-BR/settings.json b/webview-ui/src/i18n/locales/pt-BR/settings.json index a8387e0512..891fe5304a 100644 --- a/webview-ui/src/i18n/locales/pt-BR/settings.json +++ 
b/webview-ui/src/i18n/locales/pt-BR/settings.json @@ -408,6 +408,8 @@ "codestralBaseUrlDesc": "Defina uma URL alternativa para o modelo Codestral.", "xaiApiKey": "Chave de API xAI", "getXaiApiKey": "Obter chave de API xAI", + "brainiallApiKey": "Brainiall API Key", + "getBrainiallApiKey": "Get Brainiall API Key", "litellmApiKey": "Chave API LiteLLM", "litellmBaseUrl": "URL base LiteLLM", "awsCredentials": "Credenciais AWS", diff --git a/webview-ui/src/i18n/locales/ru/settings.json b/webview-ui/src/i18n/locales/ru/settings.json index fe24ebee29..52ad240f6c 100644 --- a/webview-ui/src/i18n/locales/ru/settings.json +++ b/webview-ui/src/i18n/locales/ru/settings.json @@ -408,6 +408,8 @@ "codestralBaseUrlDesc": "Укажите альтернативный URL для модели Codestral.", "xaiApiKey": "xAI API-ключ", "getXaiApiKey": "Получить xAI API-ключ", + "brainiallApiKey": "Brainiall API Key", + "getBrainiallApiKey": "Get Brainiall API Key", "litellmApiKey": "API-ключ LiteLLM", "litellmBaseUrl": "Базовый URL LiteLLM", "awsCredentials": "AWS-учётные данные", diff --git a/webview-ui/src/i18n/locales/tr/settings.json b/webview-ui/src/i18n/locales/tr/settings.json index 7171718f1c..c436b8fe56 100644 --- a/webview-ui/src/i18n/locales/tr/settings.json +++ b/webview-ui/src/i18n/locales/tr/settings.json @@ -408,6 +408,8 @@ "codestralBaseUrlDesc": "Codestral modeli için alternatif URL ayarlayın.", "xaiApiKey": "xAI API Anahtarı", "getXaiApiKey": "xAI API Anahtarı Al", + "brainiallApiKey": "Brainiall API Key", + "getBrainiallApiKey": "Get Brainiall API Key", "litellmApiKey": "LiteLLM API Anahtarı", "litellmBaseUrl": "LiteLLM Temel URL", "awsCredentials": "AWS Kimlik Bilgileri", diff --git a/webview-ui/src/i18n/locales/vi/settings.json b/webview-ui/src/i18n/locales/vi/settings.json index 95b4f2d686..c04c84a3a0 100644 --- a/webview-ui/src/i18n/locales/vi/settings.json +++ b/webview-ui/src/i18n/locales/vi/settings.json @@ -408,6 +408,8 @@ "codestralBaseUrlDesc": "Đặt URL thay thế cho mô hình Codestral.", 
"xaiApiKey": "Khóa API xAI", "getXaiApiKey": "Lấy khóa API xAI", + "brainiallApiKey": "Brainiall API Key", + "getBrainiallApiKey": "Get Brainiall API Key", "litellmApiKey": "Khóa API LiteLLM", "litellmBaseUrl": "URL cơ sở LiteLLM", "awsCredentials": "Thông tin xác thực AWS", diff --git a/webview-ui/src/i18n/locales/zh-CN/settings.json b/webview-ui/src/i18n/locales/zh-CN/settings.json index eeba6bb079..a6011cbd10 100644 --- a/webview-ui/src/i18n/locales/zh-CN/settings.json +++ b/webview-ui/src/i18n/locales/zh-CN/settings.json @@ -408,6 +408,8 @@ "codestralBaseUrlDesc": "为 Codestral 模型设置替代 URL。", "xaiApiKey": "xAI API 密钥", "getXaiApiKey": "获取 xAI API 密钥", + "brainiallApiKey": "Brainiall API Key", + "getBrainiallApiKey": "Get Brainiall API Key", "litellmApiKey": "LiteLLM API 密钥", "litellmBaseUrl": "LiteLLM 基础 URL", "awsCredentials": "AWS 凭证", diff --git a/webview-ui/src/i18n/locales/zh-TW/settings.json b/webview-ui/src/i18n/locales/zh-TW/settings.json index 9f4241c3dd..274c559c61 100644 --- a/webview-ui/src/i18n/locales/zh-TW/settings.json +++ b/webview-ui/src/i18n/locales/zh-TW/settings.json @@ -418,6 +418,8 @@ "codestralBaseUrlDesc": "設定 Codestral 模型的替代 URL。", "xaiApiKey": "xAI API 金鑰", "getXaiApiKey": "取得 xAI API 金鑰", + "brainiallApiKey": "Brainiall API Key", + "getBrainiallApiKey": "Get Brainiall API Key", "litellmApiKey": "LiteLLM API 金鑰", "litellmBaseUrl": "LiteLLM 基礎 URL", "awsCredentials": "AWS 認證", diff --git a/webview-ui/src/utils/validate.ts b/webview-ui/src/utils/validate.ts index a4c950f8dd..08045a26c9 100644 --- a/webview-ui/src/utils/validate.ts +++ b/webview-ui/src/utils/validate.ts @@ -128,6 +128,11 @@ function validateModelsAndKeysProvided(apiConfiguration: ProviderSettings): stri return i18next.t("settings:validation.apiKey") } break + case "brainiall": + if (!apiConfiguration.brainiallApiKey) { + return i18next.t("settings:validation.apiKey") + } + break } return undefined