Commit
* Feat: Add deepseek models to components configuration
* Feat: Implement Deepseek API integration with chat models and add SVG icon
* Refactor: Remove image input options and add missing baseOptions in deepseek chat node
1 parent 0381a99 · commit 93f3a5d

Showing 4 changed files with 263 additions and 0 deletions.
@@ -0,0 +1,23 @@
import { INodeCredential, INodeParams } from '../src/Interface'

class DeepseekApi implements INodeCredential {
    label: string
    name: string
    version: number
    inputs: INodeParams[]

    constructor() {
        this.label = 'DeepseekAI API'
        this.name = 'deepseekApi'
        this.version = 1.0
        this.inputs = [
            {
                label: 'DeepseekAI API Key',
                name: 'deepseekApiKey',
                type: 'password'
            }
        ]
    }
}

module.exports = { credClass: DeepseekApi }
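For context on how this credential ties into the chat node below: the class is exported as `credClass`, its `name` ('deepseekApi') is what the node lists under `credentialNames`, and the `deepseekApiKey` input is the field the node later reads via `getCredentialParam`. A minimal sketch of exercising this module in isolation (the require path is an assumption, since the commit view does not show where the file lives):

```ts
// The require path is hypothetical: the diff view does not show where this credential file lives.
const { credClass } = require('./DeepseekApi.credential')

const cred = new credClass()
console.log(cred.name) // 'deepseekApi', the value the chat node references via credentialNames
console.log(cred.inputs.map((input: { name: string }) => input.name)) // ['deepseekApiKey'], read later by getCredentialParam
```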
packages/components/nodes/chatmodels/Deepseek/Deepseek.ts (197 additions, 0 deletions)
@@ -0,0 +1,197 @@
import { BaseCache } from '@langchain/core/caches'
import { BaseChatModelParams } from '@langchain/core/language_models/chat_models'
import { ChatOpenAI, LegacyOpenAIInput, OpenAIChatInput } from '@langchain/openai'
import type { ClientOptions } from 'openai'
import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
import { getModels, MODEL_TYPE } from '../../../src/modelLoader'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'

class Deepseek_ChatModels implements INode {
    readonly baseURL: string = 'https://api.deepseek.com'
    label: string
    name: string
    version: number
    type: string
    icon: string
    category: string
    description: string
    baseClasses: string[]
    credential: INodeParams
    inputs: INodeParams[]

    constructor() {
        this.label = 'ChatDeepseek'
        this.name = 'chatDeepseek'
        this.version = 1.0
        this.type = 'chatDeepseek'
        this.icon = 'deepseek.svg'
        this.category = 'Chat Models'
        this.description = 'Wrapper around Deepseek large language models that use the Chat endpoint'
        this.baseClasses = [this.type, ...getBaseClasses(ChatOpenAI)]
        this.credential = {
            label: 'Connect Credential',
            name: 'credential',
            type: 'credential',
            credentialNames: ['deepseekApi']
        }
        this.inputs = [
            {
                label: 'Cache',
                name: 'cache',
                type: 'BaseCache',
                optional: true
            },
            {
                label: 'Model Name',
                name: 'modelName',
                type: 'asyncOptions',
                loadMethod: 'listModels',
                default: 'deepseek-chat'
            },
            {
                label: 'Temperature',
                name: 'temperature',
                type: 'number',
                step: 0.1,
                default: 0.7,
                optional: true
            },
            {
                label: 'Streaming',
                name: 'streaming',
                type: 'boolean',
                default: true,
                optional: true,
                additionalParams: true
            },
            {
                label: 'Max Tokens',
                name: 'maxTokens',
                type: 'number',
                step: 1,
                optional: true,
                additionalParams: true
            },
            {
                label: 'Top Probability',
                name: 'topP',
                type: 'number',
                step: 0.1,
                optional: true,
                additionalParams: true
            },
            {
                label: 'Frequency Penalty',
                name: 'frequencyPenalty',
                type: 'number',
                step: 0.1,
                optional: true,
                additionalParams: true
            },
            {
                label: 'Presence Penalty',
                name: 'presencePenalty',
                type: 'number',
                step: 0.1,
                optional: true,
                additionalParams: true
            },
            {
                label: 'Timeout',
                name: 'timeout',
                type: 'number',
                step: 1,
                optional: true,
                additionalParams: true
            },
            {
                label: 'Stop Sequence',
                name: 'stopSequence',
                type: 'string',
                rows: 4,
                optional: true,
                description: 'List of stop words to use when generating. Use comma to separate multiple stop words.',
                additionalParams: true
            },
            {
                label: 'Base Options',
                name: 'baseOptions',
                type: 'json',
                optional: true,
                additionalParams: true,
                description: 'Additional options to pass to the Deepseek client. This should be a JSON object.'
            }
        ]
    }

    //@ts-ignore
    loadMethods = {
        async listModels(): Promise<INodeOptionsValue[]> {
            return await getModels(MODEL_TYPE.CHAT, 'deepseek')
        }
    }

    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        const temperature = nodeData.inputs?.temperature as string
        const modelName = nodeData.inputs?.modelName as string
        const maxTokens = nodeData.inputs?.maxTokens as string
        const topP = nodeData.inputs?.topP as string
        const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string
        const presencePenalty = nodeData.inputs?.presencePenalty as string
        const timeout = nodeData.inputs?.timeout as string
        const stopSequence = nodeData.inputs?.stopSequence as string
        const streaming = nodeData.inputs?.streaming as boolean
        const baseOptions = nodeData.inputs?.baseOptions

        // Resolve the stored Deepseek API key from the connected credential
        if (nodeData.inputs?.credentialId) {
            nodeData.credential = nodeData.inputs?.credentialId
        }
        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
        const openAIApiKey = getCredentialParam('deepseekApiKey', credentialData, nodeData)

        const cache = nodeData.inputs?.cache as BaseCache

        // Base ChatOpenAI parameters; optional inputs are only added when provided
        const obj: Partial<OpenAIChatInput> & BaseChatModelParams & { configuration?: ClientOptions & LegacyOpenAIInput } = {
            temperature: parseFloat(temperature),
            modelName,
            openAIApiKey,
            streaming: streaming ?? true
        }

        if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
        if (topP) obj.topP = parseFloat(topP)
        if (frequencyPenalty) obj.frequencyPenalty = parseFloat(frequencyPenalty)
        if (presencePenalty) obj.presencePenalty = parseFloat(presencePenalty)
        if (timeout) obj.timeout = parseInt(timeout, 10)
        if (cache) obj.cache = cache
        if (stopSequence) {
            const stopSequenceArray = stopSequence.split(',').map((item) => item.trim())
            obj.stop = stopSequenceArray
        }

        let parsedBaseOptions: any | undefined = undefined

        // Base Options must be valid JSON; a user-supplied baseURL is warned about and cleared
        if (baseOptions) {
            try {
                parsedBaseOptions = typeof baseOptions === 'object' ? baseOptions : JSON.parse(baseOptions)
                if (parsedBaseOptions.baseURL) {
                    console.warn("The 'baseURL' parameter is not allowed when using the ChatDeepseek node.")
                    parsedBaseOptions.baseURL = undefined
                }
            } catch (exception) {
                throw new Error('Invalid JSON in the BaseOptions: ' + exception)
            }
        }

        // Reuse the OpenAI-compatible client, pointed at the Deepseek endpoint
        const model = new ChatOpenAI({
            ...obj,
            configuration: {
                baseURL: this.baseURL,
                ...parsedBaseOptions
            }
        })
        return model
    }
}

module.exports = { nodeClass: Deepseek_ChatModels }
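The least obvious part of `init` is the Base Options handling: the input is parsed as JSON, a user-supplied `baseURL` triggers a warning and is cleared, and whatever remains is spread over a configuration pinned to `https://api.deepseek.com`. Below is a standalone sketch of that merge (a simplified illustration, not part of the commit); it strips the key with `delete` rather than assigning `undefined` as the committed code does, because a property that is merely set to `undefined` is still copied by object spread and would otherwise override the pinned endpoint.

```ts
// Standalone illustration of the Base Options merge in init(); not part of the commit.
const DEEPSEEK_BASE_URL = 'https://api.deepseek.com'

function buildConfiguration(baseOptions?: string | Record<string, any>): Record<string, any> {
    let parsed: Record<string, any> = {}
    if (baseOptions) {
        // Accept either an already-parsed object or a JSON string, as the node's init() does
        parsed = typeof baseOptions === 'object' ? baseOptions : JSON.parse(baseOptions)
        if (parsed.baseURL) {
            console.warn("The 'baseURL' parameter is not allowed when using the ChatDeepseek node.")
            // delete (rather than assigning undefined) so the key is not copied by the spread below
            delete parsed.baseURL
        }
    }
    // The pinned Deepseek endpoint comes first; remaining user options are layered on top
    return { baseURL: DEEPSEEK_BASE_URL, ...parsed }
}

// Extra client options survive; the attempted baseURL override does not
console.log(buildConfiguration('{"defaultHeaders": {"X-Trace": "1"}, "baseURL": "https://example.invalid"}'))
// -> { baseURL: 'https://api.deepseek.com', defaultHeaders: { 'X-Trace': '1' } }
```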
packages/components/nodes/chatmodels/Deepseek/deepseek.svg (30 additions, 0 deletions)
(SVG icon contents not rendered in this view.)