Mirror of https://github.com/Abdulazizzn/n8n-enterprise-unlocked.git (synced 2025-12-20 03:12:15 +00:00)
feat(AI Agent Tool Node): Add Agent Tool (#17108)
packages/@n8n/nodes-langchain/nodes/agents/Agent/V2/utils.ts (new file, 95 additions)
@@ -0,0 +1,95 @@
// Function used in the inputs expression to figure out which inputs to

import {
	type INodeInputConfiguration,
	type INodeInputFilter,
	type NodeConnectionType,
} from 'n8n-workflow';

// display based on the agent type
export function getInputs(
	hasMainInput?: boolean,
	hasOutputParser?: boolean,
	needsFallback?: boolean,
): Array<NodeConnectionType | INodeInputConfiguration> {
	interface SpecialInput {
		type: NodeConnectionType;
		filter?: INodeInputFilter;
		displayName: string;
		required?: boolean;
	}

	const getInputData = (
		inputs: SpecialInput[],
	): Array<NodeConnectionType | INodeInputConfiguration> => {
		return inputs.map(({ type, filter, displayName, required }) => {
			const input: INodeInputConfiguration = {
				type,
				displayName,
				required,
				maxConnections: ['ai_languageModel', 'ai_memory', 'ai_outputParser'].includes(type)
					? 1
					: undefined,
			};

			if (filter) {
				input.filter = filter;
			}

			return input;
		});
	};

	let specialInputs: SpecialInput[] = [
		{
			type: 'ai_languageModel',
			displayName: 'Chat Model',
			required: true,
			filter: {
				excludedNodes: [
					'@n8n/n8n-nodes-langchain.lmCohere',
					'@n8n/n8n-nodes-langchain.lmOllama',
					'n8n/n8n-nodes-langchain.lmOpenAi',
					'@n8n/n8n-nodes-langchain.lmOpenHuggingFaceInference',
				],
			},
		},
		{
			type: 'ai_languageModel',
			displayName: 'Fallback Model',
			required: true,
			filter: {
				excludedNodes: [
					'@n8n/n8n-nodes-langchain.lmCohere',
					'@n8n/n8n-nodes-langchain.lmOllama',
					'n8n/n8n-nodes-langchain.lmOpenAi',
					'@n8n/n8n-nodes-langchain.lmOpenHuggingFaceInference',
				],
			},
		},
		{
			displayName: 'Memory',
			type: 'ai_memory',
		},
		{
			displayName: 'Tool',
			type: 'ai_tool',
		},
		{
			displayName: 'Output Parser',
			type: 'ai_outputParser',
		},
	];

	if (hasOutputParser === false) {
		specialInputs = specialInputs.filter((input) => input.type !== 'ai_outputParser');
	}
	if (needsFallback === false) {
		specialInputs = specialInputs.filter((input) => input.displayName !== 'Fallback Model');
	}

	// Note cannot use NodeConnectionType.Main
	// otherwise expression won't evaluate correctly on the FE
	const mainInputs = hasMainInput ? ['main' as NodeConnectionType] : [];
	return [...mainInputs, ...getInputData(specialInputs)];
}
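For orientation, here is a small hypothetical usage sketch, not part of the commit. The file's own comment says the helper is meant to be used in the node's inputs expression; the direct call and the './utils' import path below are assumptions for illustration, while the resulting values follow from the function above.

// Hypothetical usage sketch (assumed, not part of the commit): call the helper
// directly to see which input connections it produces for one flag combination.
import { getInputs } from './utils';

// Main input present, no output parser, no fallback model.
const inputs = getInputs(true, false, false);

// Based on the implementation above, `inputs` evaluates to:
// [
//   'main',
//   {
//     type: 'ai_languageModel',
//     displayName: 'Chat Model',
//     required: true,
//     maxConnections: 1,
//     filter: { excludedNodes: [/* the four excluded language-model nodes */] },
//   },
//   { type: 'ai_memory', displayName: 'Memory', required: undefined, maxConnections: 1 },
//   { type: 'ai_tool', displayName: 'Tool', required: undefined, maxConnections: undefined },
// ]
console.log(inputs);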