Mirror of https://github.com/Abdulazizzn/n8n-enterprise-unlocked.git (synced 2025-12-22 12:19:09 +00:00)
refactor(Basic LLM Chain Node): Refactor Basic LLM Chain & add tests (#13850)
@@ -0,0 +1,187 @@
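// Test suite for the ChainLlm node: verifies the node description and the execute() flow
// (chain parameters, multiple input items, older node versions, empty prompts, continueOnFail,
// and multi-item responses), with the helpers, output parser, and chain executor mocked out.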
/* eslint-disable @typescript-eslint/no-unsafe-return */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import { FakeChatModel } from '@langchain/core/utils/testing';
import { mock } from 'jest-mock-extended';
import type { IExecuteFunctions, INode } from 'n8n-workflow';
import { NodeConnectionType } from 'n8n-workflow';

import * as helperModule from '@utils/helpers';
import * as outputParserModule from '@utils/output_parsers/N8nOutputParser';

import { ChainLlm } from '../ChainLlm.node';
import * as executeChainModule from '../methods/chainExecutor';

jest.mock('@utils/helpers', () => ({
	getPromptInputByType: jest.fn(),
}));

jest.mock('@utils/output_parsers/N8nOutputParser', () => ({
	getOptionalOutputParser: jest.fn(),
}));

jest.mock('../methods/chainExecutor', () => ({
	executeChain: jest.fn(),
}));

describe('ChainLlm Node', () => {
	let node: ChainLlm;
	let mockExecuteFunction: jest.Mocked<IExecuteFunctions>;

	beforeEach(() => {
		node = new ChainLlm();
		mockExecuteFunction = mock<IExecuteFunctions>();

		mockExecuteFunction.logger = {
			debug: jest.fn(),
			info: jest.fn(),
			warn: jest.fn(),
			error: jest.fn(),
		};

		mockExecuteFunction.getInputData.mockReturnValue([{ json: {} }]);
		mockExecuteFunction.getNode.mockReturnValue({
			name: 'Chain LLM',
			typeVersion: 1.5,
			parameters: {},
		} as INode);

		mockExecuteFunction.getNodeParameter.mockImplementation((param, _itemIndex, defaultValue) => {
			if (param === 'messages.messageValues') return [];
			return defaultValue;
		});

		const fakeLLM = new FakeChatModel({});
		mockExecuteFunction.getInputConnectionData.mockResolvedValue(fakeLLM);

		jest.clearAllMocks();
	});

	describe('description', () => {
		it('should have the expected properties', () => {
			expect(node.description).toBeDefined();
			expect(node.description.name).toBe('chainLlm');
			expect(node.description.displayName).toBe('Basic LLM Chain');
			expect(node.description.version).toContain(1.5);
			expect(node.description.properties).toBeDefined();
			expect(node.description.inputs).toBeDefined();
			expect(node.description.outputs).toEqual([NodeConnectionType.Main]);
		});
	});

	describe('execute', () => {
		it('should execute the chain with the correct parameters', async () => {
			(helperModule.getPromptInputByType as jest.Mock).mockReturnValue('Test prompt');

			(outputParserModule.getOptionalOutputParser as jest.Mock).mockResolvedValue(undefined);

			(executeChainModule.executeChain as jest.Mock).mockResolvedValue(['Test response']);

			const result = await node.execute.call(mockExecuteFunction);

			expect(executeChainModule.executeChain).toHaveBeenCalledWith({
				context: mockExecuteFunction,
				itemIndex: 0,
				query: 'Test prompt',
				llm: expect.any(FakeChatModel),
				outputParser: undefined,
				messages: [],
			});

			expect(mockExecuteFunction.logger.debug).toHaveBeenCalledWith('Executing Basic LLM Chain');

			expect(result).toEqual([[{ json: expect.any(Object) }]]);
		});

		it('should handle multiple input items', async () => {
			// Set up multiple input items
			mockExecuteFunction.getInputData.mockReturnValue([
				{ json: { item: 1 } },
				{ json: { item: 2 } },
			]);

			(helperModule.getPromptInputByType as jest.Mock)
				.mockReturnValueOnce('Test prompt 1')
				.mockReturnValueOnce('Test prompt 2');

			(outputParserModule.getOptionalOutputParser as jest.Mock).mockResolvedValue(undefined);

			(executeChainModule.executeChain as jest.Mock)
				.mockResolvedValueOnce(['Response 1'])
				.mockResolvedValueOnce(['Response 2']);

			const result = await node.execute.call(mockExecuteFunction);

			expect(executeChainModule.executeChain).toHaveBeenCalledTimes(2);

			expect(result[0]).toHaveLength(2);
		});

		it('should use the prompt parameter directly for older versions', async () => {
			// Set an older version
			mockExecuteFunction.getNode.mockReturnValue({
				name: 'Chain LLM',
				typeVersion: 1.3,
				parameters: {},
			} as INode);

			mockExecuteFunction.getNodeParameter.mockImplementation((param, _itemIndex, defaultValue) => {
				if (param === 'prompt') return 'Old version prompt';
				if (param === 'messages.messageValues') return [];
				return defaultValue;
			});

			(executeChainModule.executeChain as jest.Mock).mockResolvedValue(['Test response']);

			(outputParserModule.getOptionalOutputParser as jest.Mock).mockResolvedValue(undefined);

			await node.execute.call(mockExecuteFunction);

			expect(executeChainModule.executeChain).toHaveBeenCalledWith({
				context: mockExecuteFunction,
				itemIndex: 0,
				query: 'Old version prompt',
				llm: expect.any(Object),
				outputParser: undefined,
				messages: expect.any(Array),
			});
		});

		it('should throw an error if prompt is empty', async () => {
			(helperModule.getPromptInputByType as jest.Mock).mockReturnValue(undefined);

			(outputParserModule.getOptionalOutputParser as jest.Mock).mockResolvedValue(undefined);

			mockExecuteFunction.getNode.mockReturnValue({ name: 'Test Node' } as INode);

			await expect(node.execute.call(mockExecuteFunction)).rejects.toThrow(/prompt.*empty/);
		});

		it('should continue on failure when configured', async () => {
			(helperModule.getPromptInputByType as jest.Mock).mockReturnValue('Test prompt');

			const error = new Error('Test error');
			(executeChainModule.executeChain as jest.Mock).mockRejectedValue(error);

			mockExecuteFunction.continueOnFail.mockReturnValue(true);

			const result = await node.execute.call(mockExecuteFunction);

			expect(result).toEqual([[{ json: { error: 'Test error' }, pairedItem: { item: 0 } }]]);
		});

		it('should handle multiple response items from executeChain', async () => {
			(helperModule.getPromptInputByType as jest.Mock).mockReturnValue('Test prompt');

			(outputParserModule.getOptionalOutputParser as jest.Mock).mockResolvedValue(undefined);

			(executeChainModule.executeChain as jest.Mock).mockResolvedValue([
				'Response 1',
				'Response 2',
			]);

			const result = await node.execute.call(mockExecuteFunction);

			expect(result[0]).toHaveLength(2);
		});
	});
});
@@ -0,0 +1,223 @@
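// Test suite for executeChain(): plain chains, chains with an output parser, array wrapping of
// responses, cancel-signal propagation, tracing config, and chat models, with the prompt → LLM →
// parser pipe chain stubbed so invoke() can be intercepted.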
import { StringOutputParser } from '@langchain/core/output_parsers';
import { ChatPromptTemplate, PromptTemplate } from '@langchain/core/prompts';
import { FakeLLM, FakeChatModel } from '@langchain/core/utils/testing';
import { mock } from 'jest-mock-extended';
import type { IExecuteFunctions } from 'n8n-workflow';

import type { N8nOutputParser } from '@utils/output_parsers/N8nOutputParser';
import * as tracing from '@utils/tracing';

import { executeChain } from '../methods/chainExecutor';
import * as promptUtils from '../methods/promptUtils';

jest.mock('@utils/tracing', () => ({
	getTracingConfig: jest.fn(() => ({})),
}));

jest.mock('../methods/promptUtils', () => ({
	createPromptTemplate: jest.fn(),
}));

describe('chainExecutor', () => {
	let mockContext: jest.Mocked<IExecuteFunctions>;

	beforeEach(() => {
		mockContext = mock<IExecuteFunctions>();
		mockContext.getExecutionCancelSignal = jest.fn().mockReturnValue(undefined);
		jest.clearAllMocks();
	});

	describe('executeChain', () => {
		it('should execute a simple chain without output parsers', async () => {
			const fakeLLM = new FakeLLM({ response: 'Test response' });
			const mockPromptTemplate = new PromptTemplate({
				template: '{query}',
				inputVariables: ['query'],
			});

			const mockChain = {
				invoke: jest.fn().mockResolvedValue('Test response'),
			};
			const withConfigMock = jest.fn().mockReturnValue(mockChain);
			const pipeStringOutputParserMock = jest.fn().mockReturnValue({
				withConfig: withConfigMock,
			});
			const pipeMock = jest.fn().mockReturnValue({
				pipe: pipeStringOutputParserMock,
			});

			mockPromptTemplate.pipe = pipeMock;
			fakeLLM.pipe = jest.fn();

			(promptUtils.createPromptTemplate as jest.Mock).mockResolvedValue(mockPromptTemplate);

			const result = await executeChain({
				context: mockContext,
				itemIndex: 0,
				query: 'Hello',
				llm: fakeLLM,
			});

			expect(promptUtils.createPromptTemplate).toHaveBeenCalledWith({
				context: mockContext,
				itemIndex: 0,
				llm: fakeLLM,
				messages: undefined,
				query: 'Hello',
			});

			expect(pipeMock).toHaveBeenCalledWith(fakeLLM);
			expect(pipeStringOutputParserMock).toHaveBeenCalledWith(expect.any(StringOutputParser));
			expect(withConfigMock).toHaveBeenCalledWith(expect.any(Object));

			expect(result).toEqual(['Test response']);

			expect(tracing.getTracingConfig).toHaveBeenCalledWith(mockContext);
		});

		it('should execute a chain with a single output parser', async () => {
			const fakeLLM = new FakeLLM({ response: 'Test response' });
			const mockPromptTemplate = new PromptTemplate({
				template: '{query}\n{formatInstructions}',
				inputVariables: ['query'],
				partialVariables: { formatInstructions: 'Format as JSON' },
			});

			const mockChain = {
				invoke: jest.fn().mockResolvedValue({ result: 'Test response' }),
			};
			const withConfigMock = jest.fn().mockReturnValue(mockChain);
			const pipeOutputParserMock = jest.fn().mockReturnValue({
				withConfig: withConfigMock,
			});
			const pipeMock = jest.fn().mockReturnValue({
				pipe: pipeOutputParserMock,
			});

			mockPromptTemplate.pipe = pipeMock;
			fakeLLM.pipe = jest.fn();

			(promptUtils.createPromptTemplate as jest.Mock).mockResolvedValue(mockPromptTemplate);

			const result = await executeChain({
				context: mockContext,
				itemIndex: 0,
				query: 'Hello',
				llm: fakeLLM,
				outputParser: mock<N8nOutputParser>(),
			});

			expect(promptUtils.createPromptTemplate).toHaveBeenCalledWith({
				context: mockContext,
				itemIndex: 0,
				llm: fakeLLM,
				messages: undefined,
				query: 'Hello',
			});

			expect(result).toEqual([{ result: 'Test response' }]);
		});

		it('should wrap non-array responses in an array', async () => {
			const fakeLLM = new FakeLLM({ response: 'Test response' });
			const mockPromptTemplate = new PromptTemplate({
				template: '{query}',
				inputVariables: ['query'],
			});

			const mockOutputParser = mock<N8nOutputParser>();

			const mockChain = {
				invoke: jest.fn().mockResolvedValue({ result: 'Test response' }),
			};
			const withConfigMock = jest.fn().mockReturnValue(mockChain);
			const pipeOutputParserMock = jest.fn().mockReturnValue({
				withConfig: withConfigMock,
			});
			const pipeMock = jest.fn().mockReturnValue({
				pipe: pipeOutputParserMock,
			});

			mockPromptTemplate.pipe = pipeMock;
			fakeLLM.pipe = jest.fn();

			(promptUtils.createPromptTemplate as jest.Mock).mockResolvedValue(mockPromptTemplate);

			const result = await executeChain({
				context: mockContext,
				itemIndex: 0,
				query: 'Hello',
				llm: fakeLLM,
				outputParser: mockOutputParser,
			});

			expect(Array.isArray(result)).toBe(true);
			expect(result).toEqual([{ result: 'Test response' }]);
		});

		it('should pass the execution cancel signal to the chain', async () => {
			// For this test, we'll just verify that getExecutionCancelSignal is called
			const fakeLLM = new FakeLLM({ response: 'Test response' });
			const mockPromptTemplate = new PromptTemplate({
				template: '{query}',
				inputVariables: ['query'],
			});

			const mockChain = {
				invoke: jest.fn().mockResolvedValue('Test response'),
			};
			const withConfigMock = jest.fn().mockReturnValue(mockChain);
			const pipeStringOutputParserMock = jest.fn().mockReturnValue({
				withConfig: withConfigMock,
			});
			const pipeMock = jest.fn().mockReturnValue({
				pipe: pipeStringOutputParserMock,
			});

			mockPromptTemplate.pipe = pipeMock;
			fakeLLM.pipe = jest.fn();

			(promptUtils.createPromptTemplate as jest.Mock).mockResolvedValue(mockPromptTemplate);

			await executeChain({
				context: mockContext,
				itemIndex: 0,
				query: 'Hello',
				llm: fakeLLM,
			});

			expect(mockContext.getExecutionCancelSignal).toHaveBeenCalled();
			expect(mockChain.invoke).toHaveBeenCalled();
		});

		it('should support chat models', async () => {
			const fakeChatModel = new FakeChatModel({});
			const mockChatPromptTemplate = ChatPromptTemplate.fromMessages([]);

			const mockChain = {
				invoke: jest.fn().mockResolvedValue('Test chat response'),
			};
			const withConfigMock = jest.fn().mockReturnValue(mockChain);
			const pipeStringOutputParserMock = jest.fn().mockReturnValue({
				withConfig: withConfigMock,
			});
			const pipeMock = jest.fn().mockReturnValue({
				pipe: pipeStringOutputParserMock,
			});

			mockChatPromptTemplate.pipe = pipeMock;
			fakeChatModel.pipe = jest.fn();

			(promptUtils.createPromptTemplate as jest.Mock).mockResolvedValue(mockChatPromptTemplate);

			const result = await executeChain({
				context: mockContext,
				itemIndex: 0,
				query: 'Hello',
				llm: fakeChatModel,
			});

			expect(result).toEqual(['Test chat response']);
		});
	});
});
@@ -0,0 +1,49 @@
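// Test suite for the node config helpers: getInputs() connection types with and without an
// output parser, and the nodeProperties definition (prompt, messages, hasOutputParser).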
import { NodeConnectionType } from 'n8n-workflow';

import { getInputs, nodeProperties } from '../methods/config';

describe('config', () => {
	describe('getInputs', () => {
		it('should return basic inputs for all parameters', () => {
			const inputs = getInputs({});

			expect(inputs).toHaveLength(3);
			expect(inputs[0].type).toBe(NodeConnectionType.Main);
			expect(inputs[1].type).toBe(NodeConnectionType.AiLanguageModel);
			expect(inputs[2].type).toBe(NodeConnectionType.AiOutputParser);
		});

		it('should exclude the OutputParser when hasOutputParser is false', () => {
			const inputs = getInputs({ hasOutputParser: false });

			expect(inputs).toHaveLength(2);
			expect(inputs[0].type).toBe(NodeConnectionType.Main);
			expect(inputs[1].type).toBe(NodeConnectionType.AiLanguageModel);
		});

		it('should include the OutputParser when hasOutputParser is true', () => {
			const inputs = getInputs({ hasOutputParser: true });

			expect(inputs).toHaveLength(3);
			expect(inputs[2].type).toBe(NodeConnectionType.AiOutputParser);
		});
	});

	describe('nodeProperties', () => {
		it('should have the expected properties', () => {
			expect(Array.isArray(nodeProperties)).toBe(true);
			expect(nodeProperties.length).toBeGreaterThan(0);

			const promptParams = nodeProperties.filter((prop) => prop.name === 'prompt');
			expect(promptParams.length).toBeGreaterThan(0);

			const messagesParam = nodeProperties.find((prop) => prop.name === 'messages');
			expect(messagesParam).toBeDefined();
			expect(messagesParam?.type).toBe('fixedCollection');

			const hasOutputParserParam = nodeProperties.find((prop) => prop.name === 'hasOutputParser');
			expect(hasOutputParserParam).toBeDefined();
			expect(hasOutputParserParam?.type).toBe('boolean');
		});
	});
});
@@ -0,0 +1,262 @@
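// Test suite for the image helpers: dataUriFromImageData() and createImageMessage() for URL and
// binary images, including the different payload shapes used for GoogleGenerativeAI and Ollama
// models and the error cases (missing binary data, unsupported MIME types).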
/* eslint-disable @typescript-eslint/no-unsafe-call */
import { HumanMessage } from '@langchain/core/messages';
import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
import { ChatOllama } from '@langchain/ollama';
import { mock } from 'jest-mock-extended';
import type { IExecuteFunctions, IBinaryData, INode } from 'n8n-workflow';
import { NodeOperationError } from 'n8n-workflow';

import {
	createImageMessage,
	dataUriFromImageData,
	UnsupportedMimeTypeError,
} from '../methods/imageUtils';
import type { MessageTemplate } from '../methods/types';

// Mock ChatGoogleGenerativeAI and ChatOllama
jest.mock('@langchain/google-genai', () => ({
	ChatGoogleGenerativeAI: class MockChatGoogleGenerativeAI {},
}));

jest.mock('@langchain/ollama', () => ({
	ChatOllama: class MockChatOllama {},
}));

// Create a better mock for IExecuteFunctions that includes helpers
const createMockExecuteFunctions = () => {
	const mockExec = mock<IExecuteFunctions>();
	// Add missing helpers property with mocked getBinaryDataBuffer
	// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
	mockExec.helpers = {
		getBinaryDataBuffer: jest.fn().mockResolvedValue(Buffer.from('Test image data')),
	} as any;
	return mockExec;
};

describe('imageUtils', () => {
	describe('dataUriFromImageData', () => {
		it('should convert image data to data URI', () => {
			const mockBuffer = Buffer.from('Test data');
			const mockBinaryData = mock<IBinaryData>({ mimeType: 'image/jpeg' });

			const dataUri = dataUriFromImageData(mockBinaryData, mockBuffer);
			expect(dataUri).toBe('data:image/jpeg;base64,VGVzdCBkYXRh');
		});

		it('should throw UnsupportedMimeTypeError for non-images', () => {
			const mockBuffer = Buffer.from('Test data');
			const mockBinaryData = mock<IBinaryData>({ mimeType: 'text/plain' });

			expect(() => {
				dataUriFromImageData(mockBinaryData, mockBuffer);
			}).toThrow(UnsupportedMimeTypeError);
		});
	});

	describe('createImageMessage', () => {
		let mockContext: jest.Mocked<IExecuteFunctions>;
		let mockBuffer: Buffer;
		let mockBinaryData: jest.Mocked<IBinaryData>;

		beforeEach(() => {
			mockContext = createMockExecuteFunctions();
			mockBuffer = Buffer.from('Test image data');
			mockBinaryData = mock<IBinaryData>({ mimeType: 'image/png' });

			// Mock required methods
			mockContext.getInputData.mockReturnValue([{ binary: { data: mockBinaryData }, json: {} }]);
			(mockContext.helpers.getBinaryDataBuffer as jest.Mock).mockResolvedValue(mockBuffer);
			mockContext.getInputConnectionData.mockResolvedValue({});
			mockContext.getNode.mockReturnValue({ name: 'TestNode' } as INode);
		});

		it('should throw an error for invalid message type', async () => {
			const message: MessageTemplate = {
				type: 'HumanMessagePromptTemplate',
				message: '',
				messageType: 'text', // Invalid for this test case
			};

			await expect(
				createImageMessage({
					context: mockContext,
					itemIndex: 0,
					message,
				}),
			).rejects.toThrow(NodeOperationError);
		});

		it('should handle image URL messages', async () => {
			const message: MessageTemplate = {
				type: 'HumanMessagePromptTemplate',
				message: '',
				messageType: 'imageUrl',
				imageUrl: 'https://example.com/image.jpg',
				imageDetail: 'high',
			};

			const result = await createImageMessage({
				context: mockContext,
				itemIndex: 0,
				message,
			});

			expect(result).toBeInstanceOf(HumanMessage);
			expect(result.content).toEqual([
				{
					type: 'image_url',
					image_url: {
						url: 'https://example.com/image.jpg',
						detail: 'high',
					},
				},
			]);
		});

		it('should handle image URL messages with auto detail', async () => {
			const message: MessageTemplate = {
				type: 'HumanMessagePromptTemplate',
				message: '',
				messageType: 'imageUrl',
				imageUrl: 'https://example.com/image.jpg',
				imageDetail: 'auto',
			};

			const result = await createImageMessage({
				context: mockContext,
				itemIndex: 0,
				message,
			});

			expect(result).toBeInstanceOf(HumanMessage);
			expect(result.content).toEqual([
				{
					type: 'image_url',
					image_url: {
						url: 'https://example.com/image.jpg',
						detail: undefined, // Auto becomes undefined
					},
				},
			]);
		});

		it('should throw an error when binary data is missing', async () => {
			// Set up missing binary data
			mockContext.getInputData.mockReturnValue([{ json: {} }]); // No binary data

			const message: MessageTemplate = {
				type: 'HumanMessagePromptTemplate',
				message: '',
				messageType: 'imageBinary',
				binaryImageDataKey: 'data',
			};

			await expect(
				createImageMessage({
					context: mockContext,
					itemIndex: 0,
					message,
				}),
			).rejects.toThrow('No binary data set.');
		});

		it('should handle binary image data for regular models', async () => {
			const message: MessageTemplate = {
				type: 'HumanMessagePromptTemplate',
				message: '',
				messageType: 'imageBinary',
				binaryImageDataKey: 'data',
				imageDetail: 'low',
			};

			const result = await createImageMessage({
				context: mockContext,
				itemIndex: 0,
				message,
			});

			expect(result).toBeInstanceOf(HumanMessage);
			expect(result.content).toEqual([
				{
					type: 'image_url',
					image_url: {
						url: 'data:image/png;base64,VGVzdCBpbWFnZSBkYXRh',
						detail: 'low',
					},
				},
			]);
		});

		it('should handle image data differently for GoogleGenerativeAI models', async () => {
			// Mock a Google model - using our mocked class
			mockContext.getInputConnectionData.mockResolvedValue(new ChatGoogleGenerativeAI());

			const message: MessageTemplate = {
				type: 'HumanMessagePromptTemplate',
				message: '',
				messageType: 'imageBinary',
				binaryImageDataKey: 'data',
			};

			const result = await createImageMessage({
				context: mockContext,
				itemIndex: 0,
				message,
			});

			expect(result).toBeInstanceOf(HumanMessage);
			expect(result.content).toEqual([
				{
					type: 'image_url',
					image_url: 'data:image/png;base64,VGVzdCBpbWFnZSBkYXRh',
				},
			]);
		});

		it('should handle image data differently for Ollama models', async () => {
			// Mock an Ollama model - using our mocked class
			mockContext.getInputConnectionData.mockResolvedValue(new ChatOllama());

			const message: MessageTemplate = {
				type: 'HumanMessagePromptTemplate',
				message: '',
				messageType: 'imageBinary',
				binaryImageDataKey: 'data',
			};

			const result = await createImageMessage({
				context: mockContext,
				itemIndex: 0,
				message,
			});

			expect(result).toBeInstanceOf(HumanMessage);
			expect(result.content).toEqual([
				{
					type: 'image_url',
					image_url: 'data:image/png;base64,VGVzdCBpbWFnZSBkYXRh',
				},
			]);
		});

		it('should pass through UnsupportedMimeTypeError', async () => {
			// Mock a non-image mime type
			mockBinaryData.mimeType = 'application/pdf';

			const message: MessageTemplate = {
				type: 'HumanMessagePromptTemplate',
				message: '',
				messageType: 'imageBinary',
				binaryImageDataKey: 'data',
			};

			await expect(
				createImageMessage({
					context: mockContext,
					itemIndex: 0,
					message,
				}),
			).rejects.toThrow(NodeOperationError);
		});
	});
});
@@ -0,0 +1,218 @@
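// Test suite for createPromptTemplate(): completion vs. chat models, format instructions,
// text and image messages, curly-brace escaping, invalid message types, and appending the
// query to an existing human message.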
import { HumanMessage } from '@langchain/core/messages';
import { ChatPromptTemplate, PromptTemplate } from '@langchain/core/prompts';
import { FakeLLM, FakeChatModel } from '@langchain/core/utils/testing';
import { mock } from 'jest-mock-extended';
import type { IExecuteFunctions } from 'n8n-workflow';
import { OperationalError } from 'n8n-workflow';

import * as imageUtils from '../methods/imageUtils';
import { createPromptTemplate } from '../methods/promptUtils';
import type { MessageTemplate } from '../methods/types';

jest.mock('../methods/imageUtils', () => ({
	createImageMessage: jest.fn(),
}));

describe('promptUtils', () => {
	describe('createPromptTemplate', () => {
		let mockContext: jest.Mocked<IExecuteFunctions>;

		beforeEach(() => {
			mockContext = mock<IExecuteFunctions>();
			jest.clearAllMocks();
		});

		it('should create a simple prompt template for non-chat models', async () => {
			const fakeLLM = new FakeLLM({});
			const result = await createPromptTemplate({
				context: mockContext,
				itemIndex: 0,
				llm: fakeLLM,
				query: 'Test query',
			});

			expect(result).toBeInstanceOf(PromptTemplate);
			expect(result.inputVariables).toContain('query');
		});

		it('should create a prompt template with format instructions', async () => {
			const fakeLLM = new FakeLLM({});
			const formatInstructions = 'Format your response as JSON';

			const result = await createPromptTemplate({
				context: mockContext,
				itemIndex: 0,
				llm: fakeLLM,
				formatInstructions,
				query: 'Test query',
			});

			expect(result).toBeInstanceOf(PromptTemplate);
			expect(result.inputVariables).toContain('query');

			// Check that format instructions are included in the template
			const formattedResult = await result.format({ query: 'Test' });
			expect(formattedResult).toContain(formatInstructions);
		});

		it('should create a chat prompt template for chat models', async () => {
			const fakeChatModel = new FakeChatModel({});

			const result = await createPromptTemplate({
				context: mockContext,
				itemIndex: 0,
				llm: fakeChatModel,
				query: 'Test query',
			});

			expect(result).toBeInstanceOf(ChatPromptTemplate);
		});

		it('should process text messages correctly', async () => {
			const fakeChatModel = new FakeChatModel({});
			const messages: MessageTemplate[] = [
				{
					type: 'SystemMessagePromptTemplate',
					message: 'You are a helpful assistant',
					messageType: 'text',
				},
				{
					type: 'AIMessagePromptTemplate',
					message: 'How can I help you?',
					messageType: 'text',
				},
			];

			const result = await createPromptTemplate({
				context: mockContext,
				itemIndex: 0,
				llm: fakeChatModel,
				messages,
				query: 'Tell me a joke',
			});

			expect(result).toBeInstanceOf(ChatPromptTemplate);

			const formattedMessages = await (result as ChatPromptTemplate).formatMessages({
				query: 'Tell me a joke',
			});
			expect(formattedMessages).toHaveLength(3); // 2 messages + 1 query
			expect(formattedMessages[0].content).toBe('You are a helpful assistant');
			expect(formattedMessages[1].content).toBe('How can I help you?');
		});

		it('should escape curly braces in messages', async () => {
			const fakeChatModel = new FakeChatModel({});
			const messages: MessageTemplate[] = [
				{
					type: 'SystemMessagePromptTemplate',
					message: 'You are a {helpful} assistant',
					messageType: 'text',
				},
			];

			const result = await createPromptTemplate({
				context: mockContext,
				itemIndex: 0,
				llm: fakeChatModel,
				messages,
				query: 'Tell me a joke',
			});

			// Validate the messages have escaped curly braces
			const formattedMessages = await (result as ChatPromptTemplate).formatMessages({
				query: 'Tell me a joke',
			});
			expect(formattedMessages[0].content).toBe('You are a {helpful} assistant');
		});

		it('should handle image messages by calling createImageMessage', async () => {
			const fakeChatModel = new FakeChatModel({});
			const imageMessage: MessageTemplate = {
				type: 'HumanMessagePromptTemplate',
				message: '',
				messageType: 'imageUrl',
				imageUrl: 'https://example.com/image.jpg',
			};

			// Mock the image message creation
			const mockHumanMessage = new HumanMessage({
				content: [{ type: 'image_url', image_url: { url: 'https://example.com/image.jpg' } }],
			});
			(imageUtils.createImageMessage as jest.Mock).mockResolvedValue(mockHumanMessage);

			await createPromptTemplate({
				context: mockContext,
				itemIndex: 0,
				llm: fakeChatModel,
				messages: [imageMessage],
				query: 'Describe this image',
			});

			expect(imageUtils.createImageMessage).toHaveBeenCalledWith({
				context: mockContext,
				itemIndex: 0,
				message: imageMessage,
			});
		});

		it('should throw an error for invalid message types', async () => {
			const fakeChatModel = new FakeChatModel({});
			const messages: MessageTemplate[] = [
				{
					type: 'InvalidMessageType',
					message: 'This is an invalid message',
					messageType: 'text',
				},
			];

			await expect(
				createPromptTemplate({
					context: mockContext,
					itemIndex: 0,
					llm: fakeChatModel,
					messages,
					query: 'Test query',
				}),
			).rejects.toThrow(OperationalError);
		});

		it('should add the query to an existing human message with content if it exists', async () => {
			const fakeChatModel = new FakeChatModel({});

			// Create a mock image message with content array
			const mockHumanMessage = new HumanMessage({
				content: [{ type: 'image_url', image_url: { url: 'https://example.com/image.jpg' } }],
			});
			(imageUtils.createImageMessage as jest.Mock).mockResolvedValue(mockHumanMessage);

			const imageMessage: MessageTemplate = {
				type: 'HumanMessagePromptTemplate',
				message: '',
				messageType: 'imageUrl',
				imageUrl: 'https://example.com/image.jpg',
			};

			const result = await createPromptTemplate({
				context: mockContext,
				itemIndex: 0,
				llm: fakeChatModel,
				messages: [imageMessage],
				query: 'Describe this image',
			});

			// Format the message and check that the query was added to the existing content
			const formattedMessages = await (result as ChatPromptTemplate).formatMessages({
				query: 'Describe this image',
			});
			expect(formattedMessages).toHaveLength(1);

			// The content should now have the original image and the text query
			const content = formattedMessages[0].content as any[];
			expect(content).toHaveLength(2);
			expect(content[0].type).toBe('image_url');
			expect(content[1].type).toBe('text');
			expect(content[1].text).toContain('Describe this image');
		});
	});
});
@@ -0,0 +1,45 @@
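// Test suite for formatResponse(): string trimming, arrays, objects, and primitive values.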
import { formatResponse } from '../methods/responseFormatter';

describe('responseFormatter', () => {
	describe('formatResponse', () => {
		it('should format string responses', () => {
			const result = formatResponse('Test response');
			expect(result).toEqual({
				response: {
					text: 'Test response',
				},
			});
		});

		it('should trim string responses', () => {
			const result = formatResponse(' Test response with whitespace ');
			expect(result).toEqual({
				response: {
					text: 'Test response with whitespace',
				},
			});
		});

		it('should handle array responses', () => {
			const testArray = [{ item: 1 }, { item: 2 }];
			const result = formatResponse(testArray);
			expect(result).toEqual({ data: testArray });
		});

		it('should handle object responses', () => {
			const testObject = { key: 'value', nested: { key: 'value' } };
			const result = formatResponse(testObject);
			expect(result).toEqual(testObject);
		});

		it('should handle primitive non-string responses', () => {
			const testNumber = 42;
			const result = formatResponse(testNumber);
			expect(result).toEqual({
				response: {
					text: 42,
				},
			});
		});
	});
});
@@ -1,23 +0,0 @@
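// Removed file: the previous dataUriFromImageData tests (imported from '../utils'), now
// superseded by the imageUtils suite above.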
import { mock } from 'jest-mock-extended';
import type { IBinaryData } from 'n8n-workflow';

import { dataUriFromImageData, UnsupportedMimeTypeError } from '../utils';

describe('dataUriFromImageData', () => {
	it('should not throw an error on images', async () => {
		const mockBuffer = Buffer.from('Test data');
		const mockBinaryData = mock<IBinaryData>({ mimeType: 'image/jpeg' });

		const dataUri = dataUriFromImageData(mockBinaryData, mockBuffer);
		expect(dataUri).toBe('data:image/jpeg;base64,VGVzdCBkYXRh');
	});

	it('should throw an UnsupportetMimeTypeError on non-images', async () => {
		const mockBuffer = Buffer.from('Test data');
		const mockBinaryData = mock<IBinaryData>({ mimeType: 'text/plain' });

		expect(() => {
			dataUriFromImageData(mockBinaryData, mockBuffer);
		}).toThrow(UnsupportedMimeTypeError);
	});
});