feat: Introduce prompt type option for Agent, Basic LLM Chain, and QA Chain nodes (#8697)

Signed-off-by: Oleg Ivaniv <me@olegivaniv.com>
Co-authored-by: Michael Kret <michael.k@radency.com>
Author: oleg
Date: 2024-02-21 14:59:37 +01:00
Committed by: GitHub
Parent: 40aecd1715
Commit: 2068f186ff
13 changed files with 348 additions and 55 deletions


@@ -22,7 +22,11 @@ import { LLMChain } from 'langchain/chains';
import type { BaseChatModel } from 'langchain/chat_models/base';
import { HumanMessage } from 'langchain/schema';
import { getTemplateNoticeField } from '../../../utils/sharedFields';
-import { isChatInstance } from '../../../utils/helpers';
+import {
+getOptionalOutputParsers,
+getPromptInputByType,
+isChatInstance,
+} from '../../../utils/helpers';
interface MessagesTemplate {
type: string;
@@ -204,7 +208,7 @@ function getInputs(parameters: IDataObject) {
},
];
-// If `hasOutputParser` is undefined it must be version 1.1 or earlier so we
+// If `hasOutputParser` is undefined it must be version 1.3 or earlier so we
// always add the output parser input
if (hasOutputParser === undefined || hasOutputParser === true) {
inputs.push({ displayName: 'Output Parser', type: NodeConnectionType.AiOutputParser });
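For context, the `hasOutputParser === undefined` branch in the hunk above is what keeps older workflows working after the version bump. A minimal sketch of how `getInputs()` plausibly builds that part of the dynamic connection list; the `getParserInputs` name and the `INodeInputConfiguration` typing are assumptions, only the parameter and connection type come from the diff:

```typescript
import { NodeConnectionType } from 'n8n-workflow';
import type { IDataObject, INodeInputConfiguration } from 'n8n-workflow';

function getParserInputs(parameters: IDataObject): INodeInputConfiguration[] {
	const hasOutputParser = parameters.hasOutputParser as boolean | undefined;
	const inputs: INodeInputConfiguration[] = [];

	// Workflows saved before the toggle existed have `hasOutputParser === undefined`,
	// so the Output Parser connection stays available for them.
	if (hasOutputParser === undefined || hasOutputParser === true) {
		inputs.push({ displayName: 'Output Parser', type: NodeConnectionType.AiOutputParser });
	}
	return inputs;
}
```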
@@ -218,7 +222,7 @@ export class ChainLlm implements INodeType {
name: 'chainLlm',
icon: 'fa:link',
group: ['transform'],
-version: [1, 1.1, 1.2, 1.3],
+version: [1, 1.1, 1.2, 1.3, 1.4],
description: 'A simple chain to prompt a large language model',
defaults: {
name: 'Basic LLM Chain',
@@ -279,6 +283,59 @@ export class ChainLlm implements INodeType {
},
},
},
+{
+displayName: 'Prompt',
+name: 'promptType',
+type: 'options',
+options: [
+{
+// eslint-disable-next-line n8n-nodes-base/node-param-display-name-miscased
+name: 'Take from previous node automatically',
+value: 'auto',
+description: 'Looks for an input field called chatInput',
+},
+{
+// eslint-disable-next-line n8n-nodes-base/node-param-display-name-miscased
+name: 'Define below',
+value: 'define',
+description:
+'Use an expression to reference data in previous nodes or enter static text',
+},
+],
+displayOptions: {
+hide: {
+'@version': [1, 1.1, 1.2, 1.3],
+},
+},
+default: 'auto',
+},
+{
+displayName: 'Text',
+name: 'text',
+type: 'string',
+required: true,
+default: '',
+placeholder: 'e.g. Hello, how can you help me?',
+typeOptions: {
+rows: 2,
+},
+displayOptions: {
+show: {
+promptType: ['define'],
+},
+},
+},
+{
+displayName: 'Require Specific Output Format',
+name: 'hasOutputParser',
+type: 'boolean',
+default: false,
+displayOptions: {
+hide: {
+'@version': [1, 1.1, 1.3],
+},
+},
+},
{
displayName: 'Chat Messages (if Using a Chat Model)',
name: 'messages',
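The two new fields give the node its 'auto' vs 'define' prompt behaviour, while the `hide: { '@version': ... }` rules keep them off nodes created on older versions. The actual resolution is done by `getPromptInputByType`, whose implementation is not part of this diff; the following is a rough sketch of what such a helper does, where `resolvePrompt` and the expression lookup are assumptions rather than the real `utils/helpers` code:

```typescript
import { NodeOperationError } from 'n8n-workflow';
import type { IExecuteFunctions } from 'n8n-workflow';

// Hypothetical stand-in for getPromptInputByType, sketched from the options above.
function resolvePrompt(ctx: IExecuteFunctions, itemIndex: number): string {
	const promptType = ctx.getNodeParameter('promptType', itemIndex) as string;

	if (promptType === 'auto') {
		// 'Take from previous node automatically' looks for a chatInput field on the item.
		const chatInput = ctx.evaluateExpression('{{ $json.chatInput }}', itemIndex);
		if (chatInput === undefined || chatInput === null) {
			throw new NodeOperationError(ctx.getNode(), 'No "chatInput" field found in the input item');
		}
		return String(chatInput);
	}

	// 'Define below' simply reads the Text parameter, which may itself contain expressions.
	return ctx.getNodeParameter('text', itemIndex) as string;
}
```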
@@ -419,17 +476,6 @@ export class ChainLlm implements INodeType {
},
],
},
-{
-displayName: 'Require Specific Output Format',
-name: 'hasOutputParser',
-type: 'boolean',
-default: false,
-displayOptions: {
-show: {
-'@version': [1.2],
-},
-},
-},
{
displayName: `Connect an <a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='${NodeConnectionType.AiOutputParser}'>output parser</a> on the canvas to specify the output format you require`,
name: 'notice',
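The block removed above is the same toggle that now sits next to the new Prompt fields; only its display rule changed. The difference between the old `show` rule and the new `hide` rule is what makes it carry over to future versions (the values are taken from this diff, the variable names are illustrative only):

```typescript
// Old rule (removed here): the toggle was shown only when the node was exactly v1.2.
const oldDisplayOptions = { show: { '@version': [1.2] } };

// New rule (added earlier in this diff): hidden on 1, 1.1 and 1.3, so it remains
// visible on 1.2 and automatically applies to 1.4 and any later versions.
const newDisplayOptions = { hide: { '@version': [1, 1.1, 1.3] } };
```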
@@ -454,17 +500,20 @@ export class ChainLlm implements INodeType {
0,
)) as BaseLanguageModel;
-let outputParsers: BaseOutputParser[] = [];
-if (this.getNodeParameter('hasOutputParser', 0, true) === true) {
-outputParsers = (await this.getInputConnectionData(
-NodeConnectionType.AiOutputParser,
-0,
-)) as BaseOutputParser[];
-}
+const outputParsers = await getOptionalOutputParsers(this);
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
-const prompt = this.getNodeParameter('prompt', itemIndex) as string;
+let prompt: string;
+if (this.getNode().typeVersion <= 1.2) {
+prompt = this.getNodeParameter('prompt', itemIndex) as string;
+} else {
+prompt = getPromptInputByType({
+ctx: this,
+i: itemIndex,
+inputKey: 'text',
+promptTypeKey: 'promptType',
+});
+}
const messages = this.getNodeParameter(
'messages.messageValues',
itemIndex,
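`getOptionalOutputParsers` replaces the inline `hasOutputParser` check that previously lived in `execute()`. Judging purely from the code it removes, a helper along these lines would be equivalent; this is a sketch under that assumption, not the actual `utils/helpers` implementation, and the imports simply follow those already used in the node file:

```typescript
import { NodeConnectionType } from 'n8n-workflow';
import type { IExecuteFunctions } from 'n8n-workflow';
import type { BaseOutputParser } from 'langchain/schema/output_parser';

// Mirrors the removed inline logic: parsers are only fetched when the
// 'Require Specific Output Format' toggle is on (defaulting to true for
// node versions where the parameter does not exist).
async function getOptionalOutputParsersSketch(
	ctx: IExecuteFunctions,
): Promise<BaseOutputParser[]> {
	if (ctx.getNodeParameter('hasOutputParser', 0, true) === true) {
		return (await ctx.getInputConnectionData(
			NodeConnectionType.AiOutputParser,
			0,
		)) as BaseOutputParser[];
	}
	return [];
}
```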