Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions packages/types/src/global-settings.ts
Original file line number Diff line number Diff line change
Expand Up @@ -279,6 +279,7 @@ export const SECRET_STATE_KEYS = [
"fireworksApiKey",
"vercelAiGatewayApiKey",
"basetenApiKey",
"brainiallApiKey",
] as const

// Global secrets that are part of GlobalSettings (not ProviderSettings)
Expand Down
10 changes: 10 additions & 0 deletions packages/types/src/provider-settings.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import { codebaseIndexProviderSchema } from "./codebase-index.js"
import {
anthropicModels,
basetenModels,
brainiallModels,
bedrockModels,
deepSeekModels,
fireworksModels,
Expand Down Expand Up @@ -105,6 +106,7 @@ export const providerNames = [
"anthropic",
"bedrock",
"baseten",
"brainiall",
"deepseek",
"fireworks",
"gemini",
Expand Down Expand Up @@ -377,6 +379,10 @@ const vercelAiGatewaySchema = baseProviderSettingsSchema.extend({
vercelAiGatewayModelId: z.string().optional(),
})

// Brainiall provider settings: reuses the shared apiModelId field and adds an
// optional API key (registered as a secret in SECRET_STATE_KEYS).
const brainiallSchema = apiModelIdProviderModelSchema.extend({
	brainiallApiKey: z.string().optional(),
})

// Baseten provider settings: reuses the shared apiModelId field and adds an
// optional API key.
const basetenSchema = apiModelIdProviderModelSchema.extend({
	basetenApiKey: z.string().optional(),
})
Expand Down Expand Up @@ -407,6 +413,7 @@ export const providerSettingsSchemaDiscriminated = z.discriminatedUnion("apiProv
fakeAiSchema.merge(z.object({ apiProvider: z.literal("fake-ai") })),
xaiSchema.merge(z.object({ apiProvider: z.literal("xai") })),
basetenSchema.merge(z.object({ apiProvider: z.literal("baseten") })),
brainiallSchema.merge(z.object({ apiProvider: z.literal("brainiall") })),
litellmSchema.merge(z.object({ apiProvider: z.literal("litellm") })),
sambaNovaSchema.merge(z.object({ apiProvider: z.literal("sambanova") })),
zaiSchema.merge(z.object({ apiProvider: z.literal("zai") })),
Expand Down Expand Up @@ -440,6 +447,7 @@ export const providerSettingsSchema = z.object({
...fakeAiSchema.shape,
...xaiSchema.shape,
...basetenSchema.shape,
...brainiallSchema.shape,
...litellmSchema.shape,
...sambaNovaSchema.shape,
...zaiSchema.shape,
Expand Down Expand Up @@ -515,6 +523,7 @@ export const modelIdKeysByProvider: Record<TypicalProvider, ModelIdKey> = {
unbound: "unboundModelId",
xai: "apiModelId",
baseten: "apiModelId",
brainiall: "apiModelId",
litellm: "litellmModelId",
sambanova: "apiModelId",
zai: "apiModelId",
Expand Down Expand Up @@ -630,6 +639,7 @@ export const MODELS_BY_PROVIDER: Record<
xai: { id: "xai", label: "xAI (Grok)", models: Object.keys(xaiModels) },
zai: { id: "zai", label: "Z.ai", models: Object.keys(internationalZAiModels) },
baseten: { id: "baseten", label: "Baseten", models: Object.keys(basetenModels) },
brainiall: { id: "brainiall", label: "Brainiall", models: Object.keys(brainiallModels) },

// Dynamic providers; models pulled from remote APIs.
litellm: { id: "litellm", label: "LiteLLM", models: [] },
Expand Down
169 changes: 169 additions & 0 deletions packages/types/src/providers/brainiall.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,169 @@
import type { ModelInfo } from "../model.js"

// https://brainiall.com
export type BrainiallModelId = keyof typeof brainiallModels

export const brainiallDefaultModelId: BrainiallModelId = "claude-sonnet-4-6"

/**
 * Static model catalog for the Brainiall provider.
 *
 * Prices are USD per million tokens. For the Claude entries the cache prices
 * follow the ratio used consistently in this table: cache writes = 1.25x the
 * input price, cache reads = 0.1x the input price.
 */
export const brainiallModels = {
	"claude-opus-4-6": {
		maxTokens: 64_000,
		contextWindow: 200_000,
		supportsImages: true,
		supportsPromptCache: true,
		inputPrice: 5.0,
		outputPrice: 25.0,
		cacheWritesPrice: 6.25,
		cacheReadsPrice: 0.5,
		description: "Claude Opus 4.6",
	},
	"claude-sonnet-4-6": {
		maxTokens: 64_000,
		contextWindow: 200_000,
		supportsImages: true,
		supportsPromptCache: true,
		inputPrice: 3.0,
		outputPrice: 15.0,
		cacheWritesPrice: 3.75,
		cacheReadsPrice: 0.3,
		description: "Claude Sonnet 4.6",
	},
	"claude-haiku-4-5": {
		maxTokens: 8192,
		contextWindow: 200_000,
		supportsImages: true,
		supportsPromptCache: true,
		inputPrice: 1.0,
		outputPrice: 5.0,
		cacheWritesPrice: 1.25,
		cacheReadsPrice: 0.1,
		description: "Claude Haiku 4.5",
	},
	"claude-opus-4-5": {
		maxTokens: 64_000,
		contextWindow: 200_000,
		supportsImages: true,
		supportsPromptCache: true,
		supportsReasoningBudget: true,
		inputPrice: 15.0,
		outputPrice: 75.0,
		// Fix: supportsPromptCache was true but the cache prices were missing,
		// unlike every other Claude entry. Values follow the table's 1.25x /
		// 0.1x input-price convention.
		cacheWritesPrice: 18.75,
		cacheReadsPrice: 1.5,
		description: "Claude Opus 4.5",
	},
	"deepseek-r1": {
		maxTokens: 64_000,
		contextWindow: 128_000,
		supportsImages: false,
		supportsPromptCache: false,
		// R1 is a reasoning model; keep its reasoning trace in the transcript.
		preserveReasoning: true,
		inputPrice: 1.35,
		outputPrice: 5.4,
		description: "DeepSeek R1",
	},
	"deepseek-v3": {
		maxTokens: 8192,
		contextWindow: 128_000,
		supportsPromptCache: false,
		inputPrice: 0.27,
		outputPrice: 1.1,
		// NOTE(review): id says "v3" but description says "V3.2" — confirm
		// against the upstream Brainiall model list which one is intended.
		description: "DeepSeek V3.2",
	},
	"llama-3.3-70b": {
		maxTokens: 8192,
		contextWindow: 128_000,
		supportsPromptCache: false,
		inputPrice: 0.72,
		outputPrice: 0.72,
		description: "Meta Llama 3.3 70B",
	},
	"llama-4-scout": {
		maxTokens: 8192,
		contextWindow: 512_000,
		supportsPromptCache: false,
		inputPrice: 0.17,
		outputPrice: 0.17,
		description: "Meta Llama 4 Scout 17B",
	},
	"nova-pro": {
		maxTokens: 5120,
		contextWindow: 300_000,
		supportsImages: true,
		supportsPromptCache: false,
		inputPrice: 0.8,
		outputPrice: 3.2,
		description: "Amazon Nova Pro",
	},
	"nova-lite": {
		maxTokens: 5120,
		contextWindow: 300_000,
		supportsImages: true,
		supportsPromptCache: false,
		inputPrice: 0.06,
		outputPrice: 0.24,
		description: "Amazon Nova Lite",
	},
	"nova-micro": {
		maxTokens: 5120,
		contextWindow: 300_000,
		supportsPromptCache: false,
		inputPrice: 0.04,
		outputPrice: 0.14,
		description: "Amazon Nova Micro",
	},
	"mistral-large-3": {
		maxTokens: 8192,
		contextWindow: 128_000,
		supportsImages: true,
		supportsPromptCache: false,
		inputPrice: 2.0,
		outputPrice: 6.0,
		description: "Mistral Large 3 675B",
	},
	"devstral-2": {
		maxTokens: 8192,
		contextWindow: 128_000,
		supportsPromptCache: false,
		inputPrice: 0.5,
		outputPrice: 1.5,
		description: "Devstral 2 123B",
	},
	"qwen3-80b": {
		maxTokens: 8192,
		contextWindow: 128_000,
		supportsPromptCache: false,
		inputPrice: 0.5,
		outputPrice: 0.5,
		description: "Qwen3 80B",
	},
	"qwen3-32b": {
		maxTokens: 8192,
		contextWindow: 128_000,
		supportsPromptCache: false,
		inputPrice: 0.35,
		outputPrice: 0.35,
		description: "Qwen3 32B",
	},
	"minimax-m2": {
		maxTokens: 16_384,
		contextWindow: 1_000_000,
		supportsPromptCache: false,
		inputPrice: 1.0,
		outputPrice: 5.0,
		description: "MiniMax M2",
	},
	"kimi-k2.5": {
		maxTokens: 8192,
		contextWindow: 128_000,
		supportsPromptCache: false,
		inputPrice: 0.6,
		outputPrice: 2.4,
		description: "Moonshot Kimi K2.5",
	},
	"gpt-oss-120b": {
		maxTokens: 16_384,
		contextWindow: 200_000,
		supportsPromptCache: false,
		inputPrice: 1.35,
		outputPrice: 5.4,
		description: "GPT OSS 120B",
	},
} as const satisfies Record<string, ModelInfo>
4 changes: 4 additions & 0 deletions packages/types/src/providers/index.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
export * from "./anthropic.js"
export * from "./baseten.js"
export * from "./brainiall.js"
export * from "./bedrock.js"
export * from "./deepseek.js"
export * from "./fireworks.js"
Expand Down Expand Up @@ -27,6 +28,7 @@ export * from "./minimax.js"

import { anthropicDefaultModelId } from "./anthropic.js"
import { basetenDefaultModelId } from "./baseten.js"
import { brainiallDefaultModelId } from "./brainiall.js"
import { bedrockDefaultModelId } from "./bedrock.js"
import { deepSeekDefaultModelId } from "./deepseek.js"
import { fireworksDefaultModelId } from "./fireworks.js"
Expand Down Expand Up @@ -71,6 +73,8 @@ export function getProviderDefaultModelId(
return xaiDefaultModelId
case "baseten":
return basetenDefaultModelId
case "brainiall":
return brainiallDefaultModelId
case "bedrock":
return bedrockDefaultModelId
case "vertex":
Expand Down
3 changes: 3 additions & 0 deletions src/api/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ import {
VercelAiGatewayHandler,
MiniMaxHandler,
BasetenHandler,
BrainiallHandler,
} from "./providers"
import { NativeOllamaHandler } from "./providers/native-ollama"

Expand Down Expand Up @@ -176,6 +177,8 @@ export function buildApiHandler(configuration: ProviderSettings): ApiHandler {
return new MiniMaxHandler(options)
case "baseten":
return new BasetenHandler(options)
case "brainiall":
return new BrainiallHandler(options)
default:
return new AnthropicHandler(options)
}
Expand Down
Loading
Loading