diff --git a/packages/components/credentials/AIBadgrApi.credential.ts b/packages/components/credentials/AIBadgrApi.credential.ts
new file mode 100644
index 00000000000..3f1842ed3fe
--- /dev/null
+++ b/packages/components/credentials/AIBadgrApi.credential.ts
@@ -0,0 +1,23 @@
+import { INodeParams, INodeCredential } from '../src/Interface'
+
+class AIBadgrApi implements INodeCredential {
+ label: string
+ name: string
+ version: number
+ inputs: INodeParams[]
+
+ constructor() {
+ this.label = 'AI Badgr API'
+ this.name = 'aiBadgrApi'
+ this.version = 1.0
+ this.inputs = [
+ {
+ label: 'AI Badgr Api Key',
+ name: 'aiBadgrApiKey',
+ type: 'password'
+ }
+ ]
+ }
+}
+
+module.exports = { credClass: AIBadgrApi }
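Note on wiring (illustrative, mirroring the node code further down in this diff): the `aiBadgrApi` credential name declared here is what the nodes list under `credentialNames`, and the `aiBadgrApiKey` input name is the key the nodes read back at runtime:

    const credentialData = await getCredentialData(nodeData.credential ?? '', options)
    const apiKey = getCredentialParam('aiBadgrApiKey', credentialData, nodeData)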
diff --git a/packages/components/nodes/chatmodels/ChatAIBadgr/ChatAIBadgr.ts b/packages/components/nodes/chatmodels/ChatAIBadgr/ChatAIBadgr.ts
new file mode 100644
index 00000000000..d4655c02988
--- /dev/null
+++ b/packages/components/nodes/chatmodels/ChatAIBadgr/ChatAIBadgr.ts
@@ -0,0 +1,176 @@
+import { BaseCache } from '@langchain/core/caches'
+import { ChatOpenAI, ChatOpenAIFields } from '@langchain/openai'
+import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
+import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
+
+class ChatAIBadgr_ChatModels implements INode {
+ readonly baseURL: string = 'https://aibadgr.com/api/v1'
+ label: string
+ name: string
+ version: number
+ type: string
+ icon: string
+ category: string
+ description: string
+ baseClasses: string[]
+ credential: INodeParams
+ inputs: INodeParams[]
+
+ constructor() {
+ this.label = 'ChatAIBadgr'
+ this.name = 'chatAIBadgr'
+ this.version = 1.0
+ this.type = 'ChatAIBadgr'
+ this.icon = 'aibadgr.svg'
+ this.category = 'Chat Models'
+ this.description = 'Wrapper around AI Badgr large language models that use the Chat endpoint'
+ this.baseClasses = [this.type, ...getBaseClasses(ChatOpenAI)]
+ this.credential = {
+ label: 'Connect Credential',
+ name: 'credential',
+ type: 'credential',
+ credentialNames: ['aiBadgrApi']
+ }
+ this.inputs = [
+ {
+ label: 'Cache',
+ name: 'cache',
+ type: 'BaseCache',
+ optional: true
+ },
+ {
+ label: 'Model Name',
+ name: 'modelName',
+ type: 'string',
+ default: 'gpt-4o',
+ description: 'Name of the model to use. Refer to the AI Badgr models page for the list of available models'
+ },
+ {
+ label: 'Temperature',
+ name: 'temperature',
+ type: 'number',
+ step: 0.1,
+ default: 0.7,
+ optional: true
+ },
+ {
+ label: 'Streaming',
+ name: 'streaming',
+ type: 'boolean',
+ default: true,
+ optional: true,
+ additionalParams: true
+ },
+ {
+ label: 'Max Tokens',
+ name: 'maxTokens',
+ type: 'number',
+ step: 1,
+ optional: true,
+ additionalParams: true
+ },
+ {
+ label: 'Top Probability',
+ name: 'topP',
+ type: 'number',
+ step: 0.1,
+ optional: true,
+ additionalParams: true
+ },
+ {
+ label: 'Frequency Penalty',
+ name: 'frequencyPenalty',
+ type: 'number',
+ step: 0.1,
+ optional: true,
+ additionalParams: true
+ },
+ {
+ label: 'Presence Penalty',
+ name: 'presencePenalty',
+ type: 'number',
+ step: 0.1,
+ optional: true,
+ additionalParams: true
+ },
+ {
+ label: 'Base Options',
+ name: 'baseOptions',
+ type: 'json',
+ optional: true,
+ additionalParams: true,
+ description: 'Additional options to pass to the AI Badgr client. This should be a JSON object.'
+ }
+ ]
+ }
+
+ async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
+ const temperature = nodeData.inputs?.temperature as string
+ const modelName = nodeData.inputs?.modelName as string
+ const maxTokens = nodeData.inputs?.maxTokens as string
+ const topP = nodeData.inputs?.topP as string
+ const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string
+ const presencePenalty = nodeData.inputs?.presencePenalty as string
+ const streaming = nodeData.inputs?.streaming as boolean
+ const baseOptions = nodeData.inputs?.baseOptions
+
+ if (nodeData.inputs?.credentialId) {
+ nodeData.credential = nodeData.inputs?.credentialId
+ }
+ const credentialData = await getCredentialData(nodeData.credential ?? '', options)
+ const openAIApiKey = getCredentialParam('aiBadgrApiKey', credentialData, nodeData)
+
+ // Custom error handling for missing API key
+ if (!openAIApiKey || openAIApiKey.trim() === '') {
+ throw new Error(
+ 'AI Badgr API Key is missing or empty. Please provide a valid AI Badgr API key in the credential configuration.'
+ )
+ }
+
+ // Custom error handling for missing model name
+ if (!modelName || modelName.trim() === '') {
+ throw new Error('Model Name is required. Please enter a valid model name (e.g., gpt-4o).')
+ }
+
+ const cache = nodeData.inputs?.cache as BaseCache
+
+ const obj: ChatOpenAIFields = {
+ modelName,
+ openAIApiKey,
+ apiKey: openAIApiKey,
+ streaming: streaming ?? true
+ }
+
+ if (temperature) obj.temperature = parseFloat(temperature)
+ if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
+ if (topP) obj.topP = parseFloat(topP)
+ if (frequencyPenalty) obj.frequencyPenalty = parseFloat(frequencyPenalty)
+ if (presencePenalty) obj.presencePenalty = parseFloat(presencePenalty)
+ if (cache) obj.cache = cache
+
+ let parsedBaseOptions: any | undefined = undefined
+
+ if (baseOptions) {
+ try {
+ parsedBaseOptions = typeof baseOptions === 'object' ? baseOptions : JSON.parse(baseOptions)
+ if (parsedBaseOptions.baseURL) {
+ console.warn("The 'baseURL' parameter is not allowed when using the ChatAIBadgr node.")
+ parsedBaseOptions.baseURL = undefined
+ }
+ } catch (exception) {
+ throw new Error('Invalid JSON in the BaseOptions: ' + exception)
+ }
+ }
+
+ const model = new ChatOpenAI({
+ ...obj,
+ configuration: {
+ baseURL: this.baseURL,
+ ...parsedBaseOptions
+ }
+ })
+ return model
+ }
+}
+
+module.exports = { nodeClass: ChatAIBadgr_ChatModels }
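For reference, a minimal sketch of what `init()` produces when given a valid credential: a `ChatOpenAI` instance whose requests are routed to the AI Badgr endpoint via `configuration.baseURL`. The environment variable name and the prompt are illustrative assumptions, not part of this PR:

    import { ChatOpenAI } from '@langchain/openai'

    // Assumption: the AI Badgr key is exposed as AIBADGR_API_KEY for this sketch only.
    const model = new ChatOpenAI({
        modelName: 'gpt-4o',
        apiKey: process.env.AIBADGR_API_KEY,
        streaming: true,
        temperature: 0.7,
        configuration: {
            // Same base URL the node hard-codes; baseOptions cannot override it.
            baseURL: 'https://aibadgr.com/api/v1'
        }
    })
    const response = await model.invoke('Hello from AI Badgr!')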
diff --git a/packages/components/nodes/chatmodels/ChatAIBadgr/aibadgr.svg b/packages/components/nodes/chatmodels/ChatAIBadgr/aibadgr.svg
new file mode 100644
index 00000000000..163df36aed0
--- /dev/null
+++ b/packages/components/nodes/chatmodels/ChatAIBadgr/aibadgr.svg
@@ -0,0 +1,174 @@
+
\ No newline at end of file
diff --git a/packages/components/nodes/embeddings/AIBadgrEmbedding/AIBadgrEmbedding.ts b/packages/components/nodes/embeddings/AIBadgrEmbedding/AIBadgrEmbedding.ts
new file mode 100644
index 00000000000..289538ad868
--- /dev/null
+++ b/packages/components/nodes/embeddings/AIBadgrEmbedding/AIBadgrEmbedding.ts
@@ -0,0 +1,123 @@
+import { ClientOptions, OpenAIEmbeddings, OpenAIEmbeddingsParams } from '@langchain/openai'
+import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
+import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
+
+class AIBadgrEmbedding_Embeddings implements INode {
+ readonly baseURL: string = 'https://aibadgr.com/api/v1'
+ label: string
+ name: string
+ version: number
+ type: string
+ icon: string
+ category: string
+ description: string
+ baseClasses: string[]
+ credential: INodeParams
+ inputs: INodeParams[]
+
+ constructor() {
+ this.label = 'AIBadgrEmbedding'
+ this.name = 'aiBadgrEmbedding'
+ this.version = 3.0
+ this.type = 'AIBadgrEmbedding'
+ this.icon = 'aibadgr.svg'
+ this.category = 'Embeddings'
+ this.description = 'AI Badgr API to generate embeddings for a given text'
+ this.baseClasses = [this.type, ...getBaseClasses(OpenAIEmbeddings)]
+ this.credential = {
+ label: 'Connect Credential',
+ name: 'credential',
+ type: 'credential',
+ credentialNames: ['aiBadgrApi']
+ }
+ this.inputs = [
+ {
+ label: 'Strip New Lines',
+ name: 'stripNewLines',
+ type: 'boolean',
+ optional: true,
+ additionalParams: true
+ },
+ {
+ label: 'Batch Size',
+ name: 'batchSize',
+ type: 'number',
+ optional: true,
+ additionalParams: true
+ },
+ {
+ label: 'Timeout',
+ name: 'timeout',
+ type: 'number',
+ optional: true,
+ additionalParams: true
+ },
+ {
+ label: 'Base Options',
+ name: 'baseOptions',
+ type: 'json',
+ optional: true,
+ additionalParams: true,
+ description: 'Additional options to pass to the AI Badgr client. This should be a JSON object.'
+ },
+ {
+ label: 'Model Name',
+ name: 'modelName',
+ type: 'string',
+ optional: true
+ },
+ {
+ label: 'Dimensions',
+ name: 'dimensions',
+ type: 'number',
+ optional: true,
+ additionalParams: true
+ }
+ ]
+ }
+
+ async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
+ const stripNewLines = nodeData.inputs?.stripNewLines as boolean
+ const batchSize = nodeData.inputs?.batchSize as string
+ const timeout = nodeData.inputs?.timeout as string
+ const modelName = nodeData.inputs?.modelName as string
+ const dimensions = nodeData.inputs?.dimensions as string
+ const baseOptions = nodeData.inputs?.baseOptions
+
+ const credentialData = await getCredentialData(nodeData.credential ?? '', options)
+ const openAIApiKey = getCredentialParam('aiBadgrApiKey', credentialData, nodeData)
+
+ const obj: Partial<OpenAIEmbeddingsParams> & { openAIApiKey?: string; configuration?: ClientOptions } = {
+ openAIApiKey
+ }
+
+ if (stripNewLines) obj.stripNewLines = stripNewLines
+ if (batchSize) obj.batchSize = parseInt(batchSize, 10)
+ if (timeout) obj.timeout = parseInt(timeout, 10)
+ if (modelName) obj.modelName = modelName
+ if (dimensions) obj.dimensions = parseInt(dimensions, 10)
+
+ let parsedBaseOptions: any | undefined = undefined
+ if (baseOptions) {
+ try {
+ parsedBaseOptions = typeof baseOptions === 'object' ? baseOptions : JSON.parse(baseOptions)
+ if (parsedBaseOptions.baseURL) {
+ console.warn("The 'baseURL' parameter is not allowed when using the AIBadgrEmbedding node.")
+ parsedBaseOptions.baseURL = undefined
+ }
+ } catch (exception) {
+ throw new Error("Invalid JSON in the AIBadgrEmbedding's BaseOptions: " + exception)
+ }
+ }
+
+ obj.configuration = {
+ baseURL: this.baseURL,
+ ...(parsedBaseOptions ?? {})
+ }
+
+ const model = new OpenAIEmbeddings(obj)
+ return model
+ }
+}
+
+module.exports = { nodeClass: AIBadgrEmbedding_Embeddings }
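Similarly, a minimal sketch of what the embeddings node builds: an `OpenAIEmbeddings` client pointed at the AI Badgr base URL. The environment variable and the `text-embedding-3-small` model name are illustrative assumptions, not values taken from this PR:

    import { OpenAIEmbeddings } from '@langchain/openai'

    // Assumption: key and model name are placeholders for this sketch.
    const embeddings = new OpenAIEmbeddings({
        openAIApiKey: process.env.AIBADGR_API_KEY,
        modelName: 'text-embedding-3-small',
        configuration: {
            baseURL: 'https://aibadgr.com/api/v1'
        }
    })
    const vector = await embeddings.embedQuery('hello world')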
diff --git a/packages/components/nodes/embeddings/AIBadgrEmbedding/aibadgr.svg b/packages/components/nodes/embeddings/AIBadgrEmbedding/aibadgr.svg
new file mode 100644
index 00000000000..163df36aed0
--- /dev/null
+++ b/packages/components/nodes/embeddings/AIBadgrEmbedding/aibadgr.svg
@@ -0,0 +1,174 @@
+
\ No newline at end of file