From 71e5584d28771400b7d8478d9c519de65c35ee6f Mon Sep 17 00:00:00 2001
From: Benjamin Schroth <68321970+schrothbn@users.noreply.github.com>
Date: Wed, 14 May 2025 16:40:51 +0200
Subject: [PATCH] fix(Text Classifier Node): Return error for empty inputText
 (#15390)

---
 .../TextClassifier/TextClassifier.node.ts     |   2 -
 .../chains/TextClassifier/processItem.ts      |   2 +-
 .../test/TextClassifier.node.test.ts          | 199 ++++++++++++++++++
 .../TextClassifier/test/processItem.test.ts   | 144 +++++++++++++
 4 files changed, 344 insertions(+), 3 deletions(-)
 create mode 100644 packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/test/TextClassifier.node.test.ts
 create mode 100644 packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/test/processItem.test.ts

diff --git a/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/TextClassifier.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/TextClassifier.node.ts
index 656b3c829d..25c097a8c6 100644
--- a/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/TextClassifier.node.ts
+++ b/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/TextClassifier.node.ts
@@ -241,7 +241,6 @@ export class TextClassifier implements INodeType {
 			const batchPromises = batch.map(async (_item, batchItemIndex) => {
 				const itemIndex = i + batchItemIndex;
 				const item = items[itemIndex];
-				item.pairedItem = { item: itemIndex };
 
 				return await processItem(
 					this,
@@ -291,7 +290,6 @@
 		} else {
 			for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
 				const item = items[itemIndex];
-				item.pairedItem = { item: itemIndex };
 
 				try {
 					const output = await processItem(
diff --git a/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/processItem.ts b/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/processItem.ts
index 5c4da3de08..0aacd59f48 100644
--- a/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/processItem.ts
+++ b/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/processItem.ts
@@ -20,7 +20,7 @@ export async function processItem(
 ): Promise> {
 	const input = ctx.getNodeParameter('inputText', itemIndex) as string;
 
-	if (input === undefined || input === null) {
+	if (!input) {
 		throw new NodeOperationError(
 			ctx.getNode(),
 			`Text to classify for item ${itemIndex} is not defined`,
diff --git a/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/test/TextClassifier.node.test.ts b/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/test/TextClassifier.node.test.ts
new file mode 100644
index 0000000000..86b1b54a8b
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/test/TextClassifier.node.test.ts
@@ -0,0 +1,199 @@
+import { FakeChatModel } from '@langchain/core/utils/testing';
+import { mock } from 'jest-mock-extended';
+import type { IExecuteFunctions, INode } from 'n8n-workflow';
+
+import { processItem } from '../processItem';
+import { TextClassifier } from '../TextClassifier.node';
+
+jest.mock('../processItem', () => ({
+	processItem: jest.fn(),
+}));
+
+describe('TextClassifier Node', () => {
+	let node: TextClassifier;
+	let mockExecuteFunction: jest.Mocked<IExecuteFunctions>;
+
+	beforeEach(() => {
+		jest.resetAllMocks();
+		node = new TextClassifier();
+		mockExecuteFunction = mock<IExecuteFunctions>();
+
+		mockExecuteFunction.logger = {
+			debug: jest.fn(),
+			info: jest.fn(),
+			warn: jest.fn(),
+			error: jest.fn(),
+		};
+
+		mockExecuteFunction.getInputData.mockReturnValue([{ json: { testValue: 'none' } }]);
+		mockExecuteFunction.getNode.mockReturnValue({
+			name: 'Text Classifier',
+			typeVersion: 1.1,
+			parameters: {},
+		} as INode);
+
+		mockExecuteFunction.getNodeParameter.mockImplementation((param, _itemIndex, defaultValue) => {
+			if (param === 'inputText') return 'Test input';
+			if (param === 'categories.categories')
+				return [{ category: 'test', description: 'test category' }];
+			return defaultValue;
+		});
+
+		const fakeLLM = new FakeChatModel({});
+		mockExecuteFunction.getInputConnectionData.mockResolvedValue(fakeLLM);
+	});
+
+	describe('execute', () => {
+		it('should process items with correct parameters', async () => {
+			(processItem as jest.Mock).mockResolvedValue({ test: true });
+
+			const result = await node.execute.call(mockExecuteFunction);
+
+			expect(processItem).toHaveBeenCalledWith(
+				mockExecuteFunction,
+				0,
+				{ json: { testValue: 'none' } },
+				expect.any(FakeChatModel),
+				expect.any(Object),
+				[{ category: 'test', description: 'test category' }],
+				expect.any(String),
+				'If there is not a very fitting category, select none of the categories.',
+			);
+
+			expect(result).toEqual([[{ json: { testValue: 'none' } }]]);
+		});
+
+		it('should handle multiple input items', async () => {
+			mockExecuteFunction.getNodeParameter.mockImplementation((param, _itemIndex, defaultValue) => {
+				if (param === 'inputText') return 'Test input';
+				if (param === 'categories.categories')
+					return [
+						{ category: 'test1', description: 'test category' },
+						{ category: 'test2', description: 'some other category' },
+					];
+				return defaultValue;
+			});
+			mockExecuteFunction.getInputData.mockReturnValue([
+				{ json: { item: 1 } },
+				{ json: { item: 2 } },
+			]);
+
+			(processItem as jest.Mock)
+				.mockResolvedValueOnce({ test1: true, test2: false })
+				.mockResolvedValueOnce({ test1: false, test2: true });
+
+			const result = await node.execute.call(mockExecuteFunction);
+
+			expect(processItem).toHaveBeenCalledTimes(2);
+			expect(result).toHaveLength(2);
+			expect(result[0][0].json).toEqual({ item: 1 });
+			expect(result[1][0].json).toEqual({ item: 2 });
+		});
+
+		it('should process items in batches when batchSize is set', async () => {
+			mockExecuteFunction.getNodeParameter.mockImplementation((param, _itemIndex, defaultValue) => {
+				if (param === 'inputText') return 'Test input';
+				if (param === 'categories.categories')
+					return [{ category: 'test', description: 'test category' }];
+				if (param === 'batchSize') return 2;
+				return defaultValue;
+			});
+
+			mockExecuteFunction.getInputData.mockReturnValue([
+				{ json: { item: 1 } },
+				{ json: { item: 2 } },
+				{ json: { item: 3 } },
+				{ json: { item: 4 } },
+			]);
+
+			(processItem as jest.Mock)
+				.mockResolvedValueOnce({ test: true })
+				.mockResolvedValueOnce({ test: true })
+				.mockResolvedValueOnce({ test: true })
+				.mockResolvedValueOnce({ test: true });
+
+			const result = await node.execute.call(mockExecuteFunction);
+
+			expect(processItem).toHaveBeenCalledTimes(4);
+			expect(result[0]).toHaveLength(4);
+			expect(result[0]).toEqual([
+				{ json: { item: 1 } },
+				{ json: { item: 2 } },
+				{ json: { item: 3 } },
+				{ json: { item: 4 } },
+			]);
+		});
+
+		it('should respect delayBetweenBatches', async () => {
+			mockExecuteFunction.getNodeParameter.mockImplementation((param, _itemIndex, defaultValue) => {
+				if (param === 'inputText') return 'Test input';
+				if (param === 'categories.categories')
+					return [{ category: 'test', description: 'test category' }];
+				if (param === 'options.batching.batchSize') return 2;
+				if (param === 'options.batching.delayBetweenBatches') return 100;
+				return defaultValue;
+			});
+
+			mockExecuteFunction.getInputData.mockReturnValue([
+				{ json: { item: 1 } },
+				{ json: { item: 2 } },
+				{ json: { item: 3 } },
+				{ json: { item: 4 } },
+				{ json: { item: 5 } },
+				{ json: { item: 6 } },
+			]);
+
+			(processItem as jest.Mock).mockResolvedValue({ test: true });
+
+			const startTime = Date.now();
+			await node.execute.call(mockExecuteFunction);
+			const endTime = Date.now();
+
+			expect(endTime - startTime).toBeGreaterThanOrEqual(200);
+		});
+
+		it('should handle errors in batch processing', async () => {
+			mockExecuteFunction.getNodeParameter.mockImplementation((param, _itemIndex, defaultValue) => {
+				if (param === 'inputText') return 'Test input';
+				if (param === 'categories.categories')
+					return [{ category: 'test', description: 'test category' }];
+				if (param === 'batchSize') return 2;
+				return defaultValue;
+			});
+
+			mockExecuteFunction.getInputData.mockReturnValue([
+				{ json: { item: 1 } },
+				{ json: { item: 2 } },
+				{ json: { item: 3 } },
+			]);
+
+			(processItem as jest.Mock)
+				.mockResolvedValueOnce({ test: true })
+				.mockRejectedValueOnce(new Error('Batch error'))
+				.mockResolvedValueOnce({ test: true });
+
+			mockExecuteFunction.continueOnFail.mockReturnValue(true);
+
+			const result = await node.execute.call(mockExecuteFunction);
+
+			expect(result[0]).toHaveLength(3);
+			expect(result[0][1].json).toHaveProperty('error', 'Batch error');
+		});
+
+		it('should throw error when continueOnFail is false', async () => {
+			mockExecuteFunction.continueOnFail.mockReturnValue(false);
+			(processItem as jest.Mock).mockRejectedValue(new Error('Test error'));
+
+			await expect(node.execute.call(mockExecuteFunction)).rejects.toThrow('Test error');
+		});
+
+		it('should continue on failure when configured', async () => {
+			mockExecuteFunction.continueOnFail.mockReturnValue(true);
+			(processItem as jest.Mock).mockRejectedValue(new Error('Test error'));
+
+			const result = await node.execute.call(mockExecuteFunction);
+
+			expect(result).toEqual([[{ json: { error: 'Test error' }, pairedItem: { item: 0 } }]]);
+		});
+	});
+});
diff --git a/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/test/processItem.test.ts b/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/test/processItem.test.ts
new file mode 100644
index 0000000000..5078accf00
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/test/processItem.test.ts
@@ -0,0 +1,144 @@
+import { ChatPromptTemplate } from '@langchain/core/prompts';
+import { FakeChatModel } from '@langchain/core/utils/testing';
+import { mock } from 'jest-mock-extended';
+import type { IExecuteFunctions } from 'n8n-workflow';
+import { NodeOperationError } from 'n8n-workflow';
+
+import * as tracing from '@utils/tracing';
+
+import { processItem } from '../processItem';
+
+jest.mock('@utils/tracing', () => ({
+	getTracingConfig: jest.fn(() => ({})),
+}));
+
+jest.mock('@langchain/core/prompts', () => ({
+	ChatPromptTemplate: {
+		fromMessages: jest.fn(),
+	},
+	SystemMessagePromptTemplate: {
+		fromTemplate: jest.fn().mockReturnValue({
+			format: jest.fn(),
+		}),
+	},
+}));
+
+describe('processItem', () => {
+	let mockContext: jest.Mocked<IExecuteFunctions>;
+	let fakeLLM: FakeChatModel;
+
+	beforeEach(() => {
+		mockContext = mock<IExecuteFunctions>();
+		fakeLLM = new FakeChatModel({});
+
+		mockContext.getNodeParameter.mockImplementation((param, _itemIndex, defaultValue) => {
+			if (param === 'inputText') return 'Test input';
+			if (param === 'options.systemPromptTemplate') return 'Test system prompt';
+			return defaultValue;
+		});
+
+		jest.clearAllMocks();
+	});
+
+	it('should throw error for empty input text', async () => {
+		mockContext.getNodeParameter.mockImplementation((param, _itemIndex, defaultValue) => {
+			if (param === 'inputText') return '';
+			return defaultValue;
+		});
+
+		await expect(
+			processItem(
+				mockContext,
+				0,
+				{ json: {} },
+				fakeLLM,
+				{ getFormatInstructions: () => 'format instructions' } as any,
+				[{ category: 'test', description: 'test category' }],
+				'multi class prompt',
+				undefined,
+			),
+		).rejects.toThrow(NodeOperationError);
+	});
+
+	it('should process item with correct parameters', async () => {
+		const mockParser = {
+			getFormatInstructions: () => '[format instructions]',
+		};
+
+		const mockChain = {
+			invoke: jest.fn().mockResolvedValue({ test: true }),
+		};
+
+		const mockPipe = jest.fn().mockReturnValue({
+			pipe: jest.fn().mockReturnValue({
+				withConfig: jest.fn().mockReturnValue(mockChain),
+			}),
+		});
+
+		const mockPrompt = {
+			pipe: mockPipe,
+		};
+
+		jest.mocked(ChatPromptTemplate.fromMessages).mockReturnValue(mockPrompt as any);
+
+		const result = await processItem(
+			mockContext,
+			0,
+			{ json: {} },
+			fakeLLM,
+			mockParser as any,
+			[{ category: 'test', description: 'test category' }],
+			'multi class prompt',
+			undefined,
+		);
+
+		expect(result).toEqual({ test: true });
+		expect(mockContext.getNodeParameter).toHaveBeenCalledWith('inputText', 0);
+		expect(mockContext.getNodeParameter).toHaveBeenCalledWith(
+			'options.systemPromptTemplate',
+			0,
+			expect.any(String),
+		);
+		expect(tracing.getTracingConfig).toHaveBeenCalledWith(mockContext);
+	});
+
+	it('should handle fallback prompt', async () => {
+		const mockParser = {
+			getFormatInstructions: () => '[format instructions]',
+		};
+
+		const mockChain = {
+			invoke: jest.fn().mockResolvedValue({ category: 'test' }),
+		};
+
+		const mockPipe = jest.fn().mockReturnValue({
+			pipe: jest.fn().mockReturnValue({
+				withConfig: jest.fn().mockReturnValue(mockChain),
+			}),
+		});
+
+		const mockPrompt = {
+			pipe: mockPipe,
+		};
+
+		jest.mocked(ChatPromptTemplate.fromMessages).mockReturnValue(mockPrompt as any);
+
+		await processItem(
+			mockContext,
+			0,
+			{ json: {} },
+			fakeLLM,
+			mockParser as any,
+			[{ category: 'test', description: 'test category' }],
+			'multi class prompt',
+			'fallback prompt',
+		);
+
+		expect(mockContext.getNodeParameter).toHaveBeenCalledWith(
+			'options.systemPromptTemplate',
+			0,
+			expect.any(String),
+		);
+		expect(tracing.getTracingConfig).toHaveBeenCalledWith(mockContext);
+	});
+});