Mirror of https://github.com/Abdulazizzn/n8n-enterprise-unlocked.git, synced 2025-12-18 10:31:15 +00:00
feat: Optimize langchain calls in batching mode (#15011)
Co-authored-by: कारतोफ्फेलस्क्रिप्ट™ <aditya@netroy.in>
@@ -3,7 +3,7 @@
 import { FakeChatModel } from '@langchain/core/utils/testing';
 import { mock } from 'jest-mock-extended';
 import type { IExecuteFunctions, INode } from 'n8n-workflow';
-import { NodeConnectionTypes } from 'n8n-workflow';
+import { NodeConnectionTypes, UnexpectedError } from 'n8n-workflow';

 import * as helperModule from '@utils/helpers';
 import * as outputParserModule from '@utils/output_parsers/N8nOutputParser';
@@ -12,6 +12,11 @@ import { ChainLlm } from '../ChainLlm.node';
 import * as executeChainModule from '../methods/chainExecutor';
 import * as responseFormatterModule from '../methods/responseFormatter';

+jest.mock('n8n-workflow', () => ({
+	...jest.requireActual('n8n-workflow'),
+	sleep: jest.fn(),
+}));
+
 jest.mock('@utils/helpers', () => ({
 	getPromptInputByType: jest.fn(),
 }));
@@ -25,12 +30,7 @@ jest.mock('../methods/chainExecutor', () => ({
 }));

 jest.mock('../methods/responseFormatter', () => ({
-	formatResponse: jest.fn().mockImplementation((response) => {
-		if (typeof response === 'string') {
-			return { text: response.trim() };
-		}
-		return response;
-	}),
+	formatResponse: jest.fn(),
 }));

 describe('ChainLlm Node', () => {
@@ -38,6 +38,8 @@ describe('ChainLlm Node', () => {
 	let mockExecuteFunction: jest.Mocked<IExecuteFunctions>;

 	beforeEach(() => {
+		jest.resetAllMocks();
+
 		node = new ChainLlm();
 		mockExecuteFunction = mock<IExecuteFunctions>();

@@ -63,7 +65,12 @@ describe('ChainLlm Node', () => {
 		const fakeLLM = new FakeChatModel({});
 		mockExecuteFunction.getInputConnectionData.mockResolvedValue(fakeLLM);

-		jest.clearAllMocks();
+		(responseFormatterModule.formatResponse as jest.Mock).mockImplementation((response) => {
+			if (typeof response === 'string') {
+				return { text: response.trim() };
+			}
+			return response;
+		});
 	});

 	describe('description', () => {
@@ -164,15 +171,14 @@ describe('ChainLlm Node', () => {
 	});

 	it('should continue on failure when configured', async () => {
-		mockExecuteFunction.continueOnFail.mockReturnValue(true);
 		(helperModule.getPromptInputByType as jest.Mock).mockReturnValue('Test prompt');

-		const error = new Error('Test error');
-		(executeChainModule.executeChain as jest.Mock).mockRejectedValue(error);
+		mockExecuteFunction.continueOnFail.mockReturnValue(true);
+		(executeChainModule.executeChain as jest.Mock).mockRejectedValueOnce(
+			new UnexpectedError('Test error'),
+		);

 		const result = await node.execute.call(mockExecuteFunction);

 		expect(result).toEqual([[{ json: { error: 'Test error' }, pairedItem: { item: 0 } }]]);
 	});
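The test changes above hint at what the batching-mode path relies on: sleep from n8n-workflow is stubbed so inter-batch delays cost nothing in the suite, UnexpectedError is the error type the chain executor is now expected to reject with, and continueOnFail() turns failures into { json: { error }, pairedItem } items. As a rough illustration only, not the actual ChainLlm implementation, a batched execute loop along those lines could look like the sketch below; the executeInBatches name, the 'batching.batchSize' / 'batching.delayBetweenBatches' parameter paths, the 'text' parameter, and the executeChain signature are all assumptions made for this sketch.

import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
import { sleep } from 'n8n-workflow';

// Stand-in for '../methods/chainExecutor'; the real signature is not shown in this diff.
declare function executeChain(args: {
	context: IExecuteFunctions;
	itemIndex: number;
	query: string;
}): Promise<string>;

// Illustrative batching loop: run each batch concurrently, pause between batches,
// and map failures to error items when "continue on fail" is enabled.
export async function executeInBatches(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
	const items = this.getInputData();
	const returnData: INodeExecutionData[] = [];

	// Assumed parameter paths; the defaults here are placeholders.
	const batchSize = this.getNodeParameter('batching.batchSize', 0, 5) as number;
	const delayBetweenBatches = this.getNodeParameter('batching.delayBetweenBatches', 0, 0) as number;

	for (let start = 0; start < items.length; start += batchSize) {
		const batch = items.slice(start, start + batchSize);

		// Execute every prompt in the current batch concurrently.
		const settled = await Promise.allSettled(
			batch.map(async (_item, offset) => {
				const itemIndex = start + offset;
				const query = this.getNodeParameter('text', itemIndex, '') as string;
				return await executeChain({ context: this, itemIndex, query });
			}),
		);

		settled.forEach((result, offset) => {
			const itemIndex = start + offset;
			if (result.status === 'rejected') {
				if (this.continueOnFail()) {
					// Matches the shape asserted in the continue-on-fail test above.
					returnData.push({ json: { error: result.reason.message }, pairedItem: { item: itemIndex } });
					return;
				}
				throw result.reason;
			}
			returnData.push({ json: { text: result.value }, pairedItem: { item: itemIndex } });
		});

		// Wait between batches; the test suite stubs sleep(), so this adds no real delay there.
		if (delayBetweenBatches > 0 && start + batchSize < items.length) {
			await sleep(delayBetweenBatches);
		}
	}

	return [returnData];
}

Because the suite replaces sleep with a jest.fn(), the inter-batch wait resolves immediately under test while still being assertable.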