Mirror of https://github.com/Abdulazizzn/n8n-enterprise-unlocked.git (synced 2025-12-17 10:02:05 +00:00)
fix: Run evaluations successfully when offload manual executions is true with queue mode (#16307)
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
@@ -416,7 +416,7 @@ describe('TestRunnerService', () => {
     }
   });

-  test('should call workflowRunner.run with correct data', async () => {
+  test('should call workflowRunner.run with correct data in normal execution mode', async () => {
     // Create workflow with a trigger node
     const triggerNodeName = 'Dataset Trigger';
     const workflow = mock<IWorkflowBase>({
@@ -460,13 +460,79 @@ describe('TestRunnerService', () => {
     expect(runCallArg).toHaveProperty('userId', metadata.userId);
     expect(runCallArg).toHaveProperty('partialExecutionVersion', 2);

-    // Verify node execution stack contains the requestDataset flag
     expect(runCallArg).toHaveProperty('executionData.executionData.nodeExecutionStack');
     const nodeExecutionStack = runCallArg.executionData?.executionData?.nodeExecutionStack;
     expect(nodeExecutionStack).toBeInstanceOf(Array);
     expect(nodeExecutionStack).toHaveLength(1);
     expect(nodeExecutionStack?.[0]).toHaveProperty('node.name', triggerNodeName);
-    expect(nodeExecutionStack?.[0]).toHaveProperty('data.main[0][0].json.requestDataset', true);
+    expect(nodeExecutionStack?.[0]).toHaveProperty('node.forceCustomOperation', {
+      resource: 'dataset',
+      operation: 'getRows',
+    });
+    expect(nodeExecutionStack?.[0]).toHaveProperty('data.main[0][0].json', {});
+    expect(runCallArg).toHaveProperty('workflowData.nodes[0].forceCustomOperation', {
+      resource: 'dataset',
+      operation: 'getRows',
+    });
+  });
+
+  test('should call workflowRunner.run with correct data in queue execution mode and manual offload', async () => {
+    config.set('executions.mode', 'queue');
+    process.env.OFFLOAD_MANUAL_EXECUTIONS_TO_WORKERS = 'true';
+
+    // Create workflow with a trigger node
+    const triggerNodeName = 'Dataset Trigger';
+    const workflow = mock<IWorkflowBase>({
+      nodes: [
+        {
+          id: 'node1',
+          name: triggerNodeName,
+          type: EVALUATION_TRIGGER_NODE_TYPE,
+          typeVersion: 1,
+          position: [0, 0],
+          parameters: {},
+        },
+      ],
+      connections: {},
+      settings: {
+        saveDataErrorExecution: 'all',
+      },
+    });
+
+    const metadata = {
+      testRunId: 'test-run-id',
+      userId: 'user-id',
+    };
+
+    // Call the method
+    await (testRunnerService as any).runDatasetTrigger(workflow, metadata);
+
+    // Verify workflowRunner.run was called
+    expect(workflowRunner.run).toHaveBeenCalledTimes(1);
+
+    // Get the argument passed to workflowRunner.run
+    const runCallArg = workflowRunner.run.mock.calls[0][0];
+
+    // Verify it has the correct structure
+    expect(runCallArg).toHaveProperty('destinationNode', triggerNodeName);
+    expect(runCallArg).toHaveProperty('executionMode', 'manual');
+    expect(runCallArg).toHaveProperty('workflowData.settings.saveManualExecutions', false);
+    expect(runCallArg).toHaveProperty('workflowData.settings.saveDataErrorExecution', 'none');
+    expect(runCallArg).toHaveProperty('workflowData.settings.saveDataSuccessExecution', 'none');
+    expect(runCallArg).toHaveProperty('workflowData.settings.saveExecutionProgress', false);
+    expect(runCallArg).toHaveProperty('userId', metadata.userId);
+    expect(runCallArg).toHaveProperty('partialExecutionVersion', 2);
+
+    expect(runCallArg).not.toHaveProperty('executionData.executionData');
+    expect(runCallArg).not.toHaveProperty('executionData.executionData.nodeExecutionStack');
+    expect(runCallArg).toHaveProperty('workflowData.nodes[0].forceCustomOperation', {
+      resource: 'dataset',
+      operation: 'getRows',
+    });
+
+    // after reset
+    config.set('executions.mode', 'regular');
+    delete process.env.OFFLOAD_MANUAL_EXECUTIONS_TO_WORKERS;
   });

   test('should wait for execution to finish and return result', async () => {
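Reviewer note on the new queue-mode test above: the config and environment overrides are restored inline at the end of the test body, so a failing expectation would skip the cleanup and could leak state into later tests. A hedged alternative, not part of this commit, is to scope the overrides with beforeEach/afterEach. The sketch below is self-contained; `testConfig` is a stand-in for the config service the real test imports.

// Sketch only, not part of the commit. `testConfig` stands in for the config
// helper used by the real test file.
const testConfig = {
  values: new Map<string, string>(),
  set(key: string, value: string) {
    this.values.set(key, value);
  },
};

describe('queue mode with manual offload (cleanup pattern)', () => {
  beforeEach(() => {
    testConfig.set('executions.mode', 'queue');
    process.env.OFFLOAD_MANUAL_EXECUTIONS_TO_WORKERS = 'true';
  });

  afterEach(() => {
    // Runs even when an expectation in the test body throws,
    // unlike cleanup placed at the end of the test itself.
    testConfig.set('executions.mode', 'regular');
    delete process.env.OFFLOAD_MANUAL_EXECUTIONS_TO_WORKERS;
  });

  test.todo('exercise runDatasetTrigger under queue mode with offloaded manual executions');
});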
@@ -718,6 +784,7 @@ describe('TestRunnerService', () => {
         typeVersion: 1,
         position: [0, 0],
         parameters: {},
+        forceCustomOperation: undefined,
       },
     ],
     connections: {},

@@ -13,10 +13,10 @@ import type {
   IRun,
   IWorkflowBase,
   IWorkflowExecutionDataProcess,
-  IExecuteData,
   INodeExecutionData,
   AssignmentCollectionValue,
   GenericValue,
+  IExecuteData,
 } from 'n8n-workflow';
 import assert from 'node:assert';

@@ -259,16 +259,11 @@ export class TestRunnerService {
       throw new TestRunError('EVALUATION_TRIGGER_NOT_FOUND');
     }

-    // Initialize the input data for dataset trigger
-    // Provide a flag indicating that we want to get the whole dataset
-    const nodeExecutionStack: IExecuteData[] = [];
-    nodeExecutionStack.push({
-      node: triggerNode,
-      data: {
-        main: [[{ json: { requestDataset: true } }]],
-      },
-      source: null,
-    });
+    // Call custom operation to fetch the whole dataset
+    triggerNode.forceCustomOperation = {
+      resource: 'dataset',
+      operation: 'getRows',
+    };

     const data: IWorkflowExecutionDataProcess = {
       destinationNode: triggerNode.name,
@@ -293,13 +288,6 @@
       resultData: {
         runData: {},
       },
-      executionData: {
-        contextData: {},
-        metadata: {},
-        nodeExecutionStack,
-        waitingExecution: {},
-        waitingExecutionSource: {},
-      },
       manualData: {
         userId: metadata.userId,
         partialExecutionVersion: 2,
@@ -313,6 +301,33 @@
       },
     };

+    if (
+      !(
+        config.get('executions.mode') === 'queue' &&
+        process.env.OFFLOAD_MANUAL_EXECUTIONS_TO_WORKERS === 'true'
+      ) &&
+      data.executionData
+    ) {
+      const nodeExecutionStack: IExecuteData[] = [];
+      nodeExecutionStack.push({
+        node: triggerNode,
+        data: {
+          main: [[{ json: {} }]],
+        },
+        source: null,
+      });
+
+      data.executionData.executionData = {
+        contextData: {},
+        metadata: {},
+        // workflow does not evaluate correctly if this is passed in queue mode with offload manual executions
+        // but this is expected otherwise in regular execution mode
+        nodeExecutionStack,
+        waitingExecution: {},
+        waitingExecutionSource: {},
+      };
+    }
+
     // Trigger the workflow under test with mocked data
     const executionId = await this.workflowRunner.run(data);
     assert(executionId);
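The new branch above only pre-seeds the node execution stack when the manual execution will not be offloaded to a worker; with queue mode plus OFFLOAD_MANUAL_EXECUTIONS_TO_WORKERS the worker builds its own execution data, and a pre-filled stack makes the evaluation misbehave (per the inline comment). A minimal standalone sketch of that decision follows; `getMode` is a stand-in for the config lookup used in the diff.

type ExecutionsMode = 'regular' | 'queue';

// Returns true when runDatasetTrigger should attach a pre-built
// nodeExecutionStack to the execution data (regular path), and false when
// the manual execution is offloaded to a worker in queue mode.
function shouldPreseedExecutionStack(
  getMode: () => ExecutionsMode,
  env: Record<string, string | undefined>,
): boolean {
  const offloadedToWorker =
    getMode() === 'queue' && env.OFFLOAD_MANUAL_EXECUTIONS_TO_WORKERS === 'true';
  return !offloadedToWorker;
}

// Example: queue mode with offload enabled skips the pre-seeded stack.
console.log(shouldPreseedExecutionStack(() => 'queue', { OFFLOAD_MANUAL_EXECUTIONS_TO_WORKERS: 'true' })); // false
console.log(shouldPreseedExecutionStack(() => 'regular', {})); // true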

@@ -33,6 +33,7 @@ import type {
   WorkflowTestData,
   RelatedExecution,
   IExecuteFunctions,
+  IDataObject,
 } from 'n8n-workflow';
 import {
   ApplicationError,
@@ -1982,75 +1983,113 @@ describe('WorkflowExecute', () => {

   describe('customOperations', () => {
     const nodeTypes = mock<INodeTypes>();
-    const testNode = mock<INode>();
-
-    const workflow = new Workflow({
-      nodeTypes,
-      nodes: [testNode],
-      connections: {},
-      active: false,
-    });
-
-    const executionData = mock<IExecuteData>({
-      node: { parameters: { resource: 'test', operation: 'test' } },
-      data: { main: [[{ json: {} }]] },
-    });
     const runExecutionData = mock<IRunExecutionData>();
     const additionalData = mock<IWorkflowExecuteAdditionalData>();
     const workflowExecute = new WorkflowExecute(additionalData, 'manual');

-    test('should execute customOperations', async () => {
-      const nodeType = mock<INodeType>({
-        description: {
-          properties: [],
-        },
-        execute: undefined,
-        customOperations: {
-          test: {
-            async test(this: IExecuteFunctions) {
-              return [[{ json: { customOperationsRun: true } }]];
+    const testCases: Array<{
+      title: string;
+      parameters?: INode['parameters'];
+      forceCustomOperation?: INode['forceCustomOperation'];
+      expectedOutput: IDataObject | undefined;
+    }> = [
+      {
+        title: 'only parameters are set',
+        parameters: { resource: 'test', operation: 'test1' },
+        forceCustomOperation: undefined,
+        expectedOutput: { data: [[{ json: { customOperationsRun: 1 } }]], hints: [] },
+      },
+      {
+        title: 'both parameters and forceCustomOperation are set',
+        parameters: { resource: 'test', operation: 'test1' },
+        forceCustomOperation: { resource: 'test', operation: 'test2' },
+        expectedOutput: { data: [[{ json: { customOperationsRun: 2 } }]], hints: [] },
+      },
+      {
+        title: 'only forceCustomOperation is set',
+        parameters: undefined,
+        forceCustomOperation: { resource: 'test', operation: 'test1' },
+        expectedOutput: { data: [[{ json: { customOperationsRun: 1 } }]], hints: [] },
+      },
+      {
+        title: 'neither option is set',
+        parameters: undefined,
+        forceCustomOperation: undefined,
+        expectedOutput: { data: undefined },
+      },
+      {
+        title: 'non relevant parameters are set',
+        parameters: { test: 1 },
+        forceCustomOperation: undefined,
+        expectedOutput: { data: undefined },
+      },
+      {
+        title: 'only parameter.resource is set',
+        parameters: { resource: 'test' },
+        forceCustomOperation: undefined,
+        expectedOutput: { data: undefined },
+      },
+      {
+        title: 'only parameter.operation is set',
+        parameters: { operation: 'test1' },
+        forceCustomOperation: undefined,
+        expectedOutput: { data: undefined },
+      },
+      {
+        title: 'unknown parameter.resource is set',
+        parameters: { resource: 'unknown', operation: 'test1' },
+        forceCustomOperation: undefined,
+        expectedOutput: { data: undefined },
+      },
+      {
+        title: 'unknown parameter.operation is set',
+        parameters: { resource: 'test', operation: 'unknown' },
+        forceCustomOperation: undefined,
+        expectedOutput: { data: undefined, hints: [] },
+      },
+    ];
+    testCases.forEach(({ title, parameters, forceCustomOperation, expectedOutput }) => {
+      test(`should execute customOperations - ${title}`, async () => {
+        const testNode = mock<INode>({
+          name: 'nodeName',
+          parameters,
+          forceCustomOperation,
+        });
+
+        const workflow = new Workflow({
+          nodeTypes,
+          nodes: [testNode],
+          connections: {},
+          active: false,
+        });
+
+        const executionData: IExecuteData = {
+          node: testNode,
+          data: { main: [[{ json: {} }]] },
+          source: null,
+        };
+
+        const nodeType = mock<INodeType>({
+          description: {
+            properties: [],
+          },
+          execute: undefined,
+          customOperations: {
+            test: {
+              async test1(this: IExecuteFunctions) {
+                return [[{ json: { customOperationsRun: 1 } }]];
+              },
+              async test2(this: IExecuteFunctions) {
+                return [[{ json: { customOperationsRun: 2 } }]];
+              },
             },
           },
-        },
-      });
+        });

         nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);

         const runPromise = workflowExecute.runNode(
-        workflow,
-        executionData,
-        runExecutionData,
-        0,
-        additionalData,
-        'manual',
-      );
-
-      const result = await runPromise;
-
-      expect(result).toEqual({ data: [[{ json: { customOperationsRun: true } }]], hints: [] });
-    });
-
-    test('should throw error if customOperation and execute both defined', async () => {
-      const nodeType = mock<INodeType>({
-        description: {
-          properties: [],
-        },
-        async execute(this: IExecuteFunctions) {
-          return [];
-        },
-        customOperations: {
-          test: {
-            async test(this: IExecuteFunctions) {
-              return [];
-            },
-          },
-        },
-      });
-
-      nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
-
-      try {
-        await workflowExecute.runNode(
           workflow,
           executionData,
           runExecutionData,
@@ -2058,11 +2097,11 @@
           additionalData,
           'manual',
         );
-      } catch (error) {
-        expect(error.message).toBe(
-          'Node type cannot have both customOperations and execute defined',
-        );
-      }
+
+        const result = await runPromise;
+
+        expect(result).toEqual(expectedOutput);
+      });
     });
   });
 });

@@ -1050,11 +1050,10 @@ export class WorkflowExecute {

   private getCustomOperation(node: INode, type: INodeType) {
     if (!type.customOperations) return undefined;
-    if (!node.parameters) return undefined;
+    if (!node.parameters && !node.forceCustomOperation) return undefined;

     const { customOperations } = type;
-    const { resource, operation } = node.parameters;
+    const { resource, operation } = node.forceCustomOperation ?? node.parameters;

     if (typeof resource !== 'string' || typeof operation !== 'string') return undefined;
     if (!customOperations[resource] || !customOperations[resource][operation]) return undefined;
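The two-line change to getCustomOperation above is the core lookup rule: an explicit forceCustomOperation on the node wins over whatever resource/operation its parameters carry, and either source alone is enough to qualify. A simplified standalone sketch of the same precedence, with the types reduced to the fields involved (the real method lives on WorkflowExecute and receives the full INodeType):

type CustomOperationHandler = () => unknown;
type CustomOperations = Record<string, Record<string, CustomOperationHandler> | undefined>;

interface MinimalNode {
  parameters?: Record<string, unknown>;
  forceCustomOperation?: { resource: string; operation: string };
}

// Mirrors WorkflowExecute.getCustomOperation after this commit:
// forceCustomOperation takes precedence over the node's regular parameters.
function resolveCustomOperation(
  node: MinimalNode,
  customOperations?: CustomOperations,
): CustomOperationHandler | undefined {
  if (!customOperations) return undefined;
  if (!node.parameters && !node.forceCustomOperation) return undefined;

  const source: Record<string, unknown> = node.forceCustomOperation ?? node.parameters ?? {};
  const { resource, operation } = source;
  if (typeof resource !== 'string' || typeof operation !== 'string') return undefined;

  return customOperations[resource]?.[operation];
}

// Example mirroring the "both parameters and forceCustomOperation are set" test case:
const ops: CustomOperations = { test: { test1: () => 1, test2: () => 2 } };
const handler = resolveCustomOperation(
  {
    parameters: { resource: 'test', operation: 'test1' },
    forceCustomOperation: { resource: 'test', operation: 'test2' },
  },
  ops,
);
console.log(handler?.()); // 2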

@@ -4,6 +4,7 @@ import type {
   INodeTypeDescription,
   IExecuteFunctions,
   INodeExecutionData,
+  NodeExecutionWithMetadata,
 } from 'n8n-workflow';
 import { NodeConnectionTypes, NodeOperationError } from 'n8n-workflow';

@@ -22,6 +23,8 @@ import {

 export const DEFAULT_STARTING_ROW = 2;

+const MAX_ROWS = 1000;
+
 export class EvaluationTrigger implements INodeType {
   description: INodeTypeDescription = {
     displayName: 'Evaluation Trigger',
@@ -108,10 +111,8 @@ export class EvaluationTrigger implements INodeType {
   async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
     const inputData = this.getInputData();

-    const MAX_ROWS = 1000;
-    const maxRows = this.getNodeParameter('limitRows', 0)
-      ? (this.getNodeParameter('maxRows', 0) as number) + 1
+    const maxRows = this.getNodeParameter('limitRows', 0, false)
+      ? (this.getNodeParameter('maxRows', 0, MAX_ROWS) as number) + 1
       : MAX_ROWS;

     const previousRunRowNumber = inputData?.[0]?.json?.row_number;
@@ -133,21 +134,6 @@ export class EvaluationTrigger implements INodeType {

     const allRows = await getResults.call(this, [], googleSheetInstance, googleSheet, rangeOptions);

-    // This is for test runner which requires a different return format
-    if (inputData[0].json.requestDataset) {
-      const testRunnerResult = await getResults.call(
-        this,
-        [],
-        googleSheetInstance,
-        googleSheet,
-        {},
-      );
-
-      const result = testRunnerResult.slice(0, maxRows - 1);
-
-      return [result];
-    }
-
     const hasFilter = this.getNodeParameter('filtersUI.values', 0, []) as ILookupValues[];

     if (hasFilter.length > 0) {
@@ -188,4 +174,28 @@ export class EvaluationTrigger implements INodeType {
       return [[currentRow]];
     }
   }
+
+  customOperations = {
+    dataset: {
+      async getRows(
+        this: IExecuteFunctions,
+      ): Promise<INodeExecutionData[][] | NodeExecutionWithMetadata[][] | null> {
+        try {
+          const maxRows = this.getNodeParameter('limitRows', 0, false)
+            ? (this.getNodeParameter('maxRows', 0, MAX_ROWS) as number) + 1
+            : MAX_ROWS;
+
+          const googleSheetInstance = getGoogleSheet.call(this);
+          const googleSheet = await getSheet.call(this, googleSheetInstance);
+
+          const results = await getResults.call(this, [], googleSheetInstance, googleSheet, {});
+          const result = results.slice(0, maxRows - 1);
+
+          return [result];
+        } catch (error) {
+          throw new NodeOperationError(this.getNode(), error);
+        }
+      },
+    },
+  };
 }
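A note on the limit arithmetic that now appears twice in the node (in execute and in customOperations.dataset.getRows): when limitRows is enabled, maxRows is the user's value plus one and the final slice uses maxRows - 1, so the user-facing limit is what ends up in the output; without a limit it falls back to the module-level MAX_ROWS of 1000. The standalone sketch below only reproduces that arithmetic, with plain arrays in place of the rows returned by getResults:

const DEFAULT_MAX_ROWS = 1000; // stands in for the module-level MAX_ROWS constant

function effectiveMaxRows(limitRows: boolean, requestedMaxRows: number): number {
  // `limitRows` / `requestedMaxRows` mirror the node parameters read via getNodeParameter.
  return limitRows ? requestedMaxRows + 1 : DEFAULT_MAX_ROWS;
}

function applyLimit<T>(rows: T[], maxRows: number): T[] {
  // Mirrors `results.slice(0, maxRows - 1)` from the diff.
  return rows.slice(0, maxRows - 1);
}

// Example: a configured limit of 1 keeps exactly one data row.
const rows = [{ row_number: 2 }, { row_number: 3 }, { row_number: 4 }];
console.log(applyLimit(rows, effectiveMaxRows(true, 1))); // [ { row_number: 2 } ]
console.log(applyLimit(rows, effectiveMaxRows(false, 0)).length); // 3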

@@ -14,12 +14,303 @@ describe('Evaluation Trigger Node', () => {
     getNode: jest.fn().mockReturnValue({ typeVersion: 4.6 }),
   });

-  describe('Without filters', () => {
+  describe('execute', () => {
+    describe('without filters', () => {
+      beforeEach(() => {
+        jest.resetAllMocks();
+
+        mockExecuteFunctions = mock<IExecuteFunctions>({
+          getInputData: jest.fn().mockReturnValue([{ json: {} }]),
+          getNode: jest.fn().mockReturnValue({ typeVersion: 4.6 }),
+        });
+
+        jest.spyOn(GoogleSheet.prototype, 'spreadsheetGetSheet').mockImplementation(async () => {
+          return { sheetId: 1, title: sheetName };
+        });
+
+        // Mocks getResults() and getRowsLeft()
+        jest.spyOn(GoogleSheet.prototype, 'getData').mockImplementation(async (range: string) => {
+          if (range === `${sheetName}!1:1`) {
+            return [['Header1', 'Header2']];
+          } else if (range === `${sheetName}!2:1000`) {
+            return [
+              ['Header1', 'Header2'],
+              ['Value1', 'Value2'],
+              ['Value3', 'Value4'],
+            ];
+          } else if (range === `${sheetName}!2:2`) {
+            // getRowsLeft with limit
+            return [];
+          } else if (range === sheetName) {
+            return [
+              ['Header1', 'Header2'],
+              ['Value1', 'Value2'],
+              ['Value3', 'Value4'],
+            ];
+          } else {
+            return [];
+          }
+        });
+      });
+
+      test('should return a single row from google sheet', async () => {
+        mockExecuteFunctions.getNodeParameter.mockImplementation(
+          (key: string, _: number, fallbackValue?: string | number | boolean | object) => {
+            const mockParams: { [key: string]: unknown } = {
+              options: {},
+              'filtersUI.values': [],
+              combineFilters: 'AND',
+              documentId: {
+                mode: 'id',
+                value: spreadsheetId,
+              },
+              sheetName,
+              sheetMode: 'id',
+            };
+            return mockParams[key] ?? fallbackValue;
+          },
+        );
+
+        const result = await new EvaluationTrigger().execute.call(mockExecuteFunctions);
+
+        expect(result).toEqual([
+          [
+            {
+              json: {
+                row_number: 2,
+                Header1: 'Value1',
+                Header2: 'Value2',
+                _rowsLeft: 2,
+              },
+              pairedItem: {
+                item: 0,
+              },
+            },
+          ],
+        ]);
+      });
+
+      test('should return the next row from google sheet', async () => {
+        mockExecuteFunctions.getInputData.mockReturnValue([
+          {
+            json: {
+              row_number: 2,
+              Header1: 'Value1',
+              Header2: 'Value2',
+              _rowsLeft: 1,
+            },
+            pairedItem: {
+              item: 0,
+              input: undefined,
+            },
+          },
+        ]);
+        mockExecuteFunctions.getNodeParameter.mockImplementation(
+          (key: string, _: number, fallbackValue?: string | number | boolean | object) => {
+            const mockParams: { [key: string]: unknown } = {
+              options: {},
+              'filtersUI.values': [],
+              combineFilters: 'AND',
+              documentId: {
+                mode: 'id',
+                value: spreadsheetId,
+              },
+              sheetName,
+              sheetMode: 'id',
+            };
+            return mockParams[key] ?? fallbackValue;
+          },
+        );
+
+        const result = await new EvaluationTrigger().execute.call(mockExecuteFunctions);
+
+        expect(result).toEqual([
+          [
+            {
+              json: {
+                row_number: 3,
+                Header1: 'Value3',
+                Header2: 'Value4',
+                _rowsLeft: 0,
+              },
+              pairedItem: {
+                item: 0,
+              },
+            },
+          ],
+        ]);
+      });
+
+      test('should return the first row from google sheet if no rows left', async () => {
+        mockExecuteFunctions.getInputData.mockReturnValue([
+          {
+            json: {
+              row_number: 3,
+              Header1: 'Value3',
+              Header2: 'Value4',
+              _rowsLeft: 0,
+            },
+            pairedItem: {
+              item: 0,
+              input: undefined,
+            },
+          },
+        ]);
+        mockExecuteFunctions.getNodeParameter.mockImplementation(
+          (key: string, _: number, fallbackValue?: string | number | boolean | object) => {
+            const mockParams: { [key: string]: unknown } = {
+              options: {},
+              'filtersUI.values': [],
+              combineFilters: 'AND',
+              documentId: {
+                mode: 'id',
+                value: spreadsheetId,
+              },
+              sheetName,
+              sheetMode: 'id',
+            };
+            return mockParams[key] ?? fallbackValue;
+          },
+        );
+
+        const result = await new EvaluationTrigger().execute.call(mockExecuteFunctions);
+
+        expect(result).toEqual([
+          [
+            {
+              json: {
+                row_number: 2,
+                Header1: 'Value1',
+                Header2: 'Value2',
+                _rowsLeft: 2,
+              },
+              pairedItem: {
+                item: 0,
+              },
+            },
+          ],
+        ]);
+      });
+
+      test('should return a single row from google sheet with limit', async () => {
+        mockExecuteFunctions.getNodeParameter.mockImplementation(
+          (key: string, _: number, fallbackValue?: string | number | boolean | object) => {
+            const mockParams: { [key: string]: unknown } = {
+              options: {},
+              'filtersUI.values': [],
+              combineFilters: 'AND',
+              documentId: {
+                mode: 'id',
+                value: spreadsheetId,
+              },
+              sheetName,
+              sheetMode: 'id',
+              limitRows: true,
+              maxRows: 1,
+            };
+            return mockParams[key] ?? fallbackValue;
+          },
+        );
+
+        const result = await new EvaluationTrigger().execute.call(mockExecuteFunctions);
+
+        expect(result).toEqual([
+          [
+            {
+              json: {
+                row_number: 2,
+                Header1: 'Value1',
+                Header2: 'Value2',
+                _rowsLeft: 0,
+              },
+              pairedItem: {
+                item: 0,
+              },
+            },
+          ],
+        ]);
+      });
+    });
+
+    describe('with filters', () => {
+      beforeEach(() => {
+        jest.resetAllMocks();
+
+        mockExecuteFunctions = mock<IExecuteFunctions>({
+          getInputData: jest.fn().mockReturnValue([{ json: {} }]),
+          getNode: jest.fn().mockReturnValue({ typeVersion: 4.6 }),
+        });
+
+        jest.spyOn(GoogleSheet.prototype, 'spreadsheetGetSheet').mockImplementation(async () => {
+          return { sheetId: 1, title: sheetName };
+        });
+      });
+
+      test('should return a single row from google sheet using filter', async () => {
+        jest
+          .spyOn(GoogleSheet.prototype, 'getData')
+          .mockResolvedValueOnce([
+            // operationResult
+            ['Header1', 'Header2'],
+            ['Value1', 'Value2'],
+            ['Value3', 'Value4'],
+          ])
+          .mockResolvedValueOnce([
+            // rowsLeft
+            ['Header1', 'Header2'],
+            ['Value1', 'Value2'],
+            ['Value3', 'Value4'],
+          ]);
+
+        mockExecuteFunctions.getNodeParameter.mockImplementation(
+          (key: string, _: number, fallbackValue?: string | number | boolean | object) => {
+            const mockParams: { [key: string]: unknown } = {
+              limitRows: true,
+              maxRows: 2,
+              'filtersUI.values': [{ lookupColumn: 'Header1', lookupValue: 'Value1' }],
+              options: {},
+              combineFilters: 'AND',
+              documentId: {
+                mode: 'id',
+                value: spreadsheetId,
+              },
+              sheetName,
+              sheetMode: 'id',
+            };
+            return mockParams[key] ?? fallbackValue;
+          },
+        );
+
+        jest.spyOn(utils, 'getRowsLeft').mockResolvedValue(0);
+
+        const evaluationTrigger = new EvaluationTrigger();
+
+        const result = await evaluationTrigger.execute.call(mockExecuteFunctions);
+
+        expect(result).toEqual([
+          [
+            {
+              json: {
+                row_number: 2,
+                Header1: 'Value1',
+                Header2: 'Value2',
+                _rowsLeft: 0,
+              },
+              pairedItem: {
+                item: 0,
+              },
+            },
+          ],
+        ]);
+      });
+    });
+  });
+
+  describe('customOperations.dataset.getRows', () => {
     beforeEach(() => {
       jest.resetAllMocks();

       mockExecuteFunctions = mock<IExecuteFunctions>({
-        getInputData: jest.fn().mockReturnValue([{ json: {} }]),
         getNode: jest.fn().mockReturnValue({ typeVersion: 4.6 }),
       });

@@ -52,187 +343,7 @@ describe('Evaluation Trigger Node', () => {
       });
     });

-    test('should return a single row from google sheet', async () => {
-      mockExecuteFunctions.getNodeParameter.mockImplementation(
-        (key: string, _: number, fallbackValue?: string | number | boolean | object) => {
-          const mockParams: { [key: string]: unknown } = {
-            options: {},
-            'filtersUI.values': [],
-            combineFilters: 'AND',
-            documentId: {
-              mode: 'id',
-              value: spreadsheetId,
-            },
-            sheetName,
-            sheetMode: 'id',
-          };
-          return mockParams[key] ?? fallbackValue;
-        },
-      );
-
-      const result = await new EvaluationTrigger().execute.call(mockExecuteFunctions);
-
-      expect(result).toEqual([
-        [
-          {
-            json: {
-              row_number: 2,
-              Header1: 'Value1',
-              Header2: 'Value2',
-              _rowsLeft: 2,
-            },
-            pairedItem: {
-              item: 0,
-            },
-          },
-        ],
-      ]);
-    });
-
-    test('should return the next row from google sheet', async () => {
-      mockExecuteFunctions.getInputData.mockReturnValue([
-        {
-          json: {
-            row_number: 2,
-            Header1: 'Value1',
-            Header2: 'Value2',
-            _rowsLeft: 1,
-          },
-          pairedItem: {
-            item: 0,
-            input: undefined,
-          },
-        },
-      ]);
-      mockExecuteFunctions.getNodeParameter.mockImplementation(
-        (key: string, _: number, fallbackValue?: string | number | boolean | object) => {
-          const mockParams: { [key: string]: unknown } = {
-            options: {},
-            'filtersUI.values': [],
-            combineFilters: 'AND',
-            documentId: {
-              mode: 'id',
-              value: spreadsheetId,
-            },
-            sheetName,
-            sheetMode: 'id',
-          };
-          return mockParams[key] ?? fallbackValue;
-        },
-      );
-
-      const result = await new EvaluationTrigger().execute.call(mockExecuteFunctions);
-
-      expect(result).toEqual([
-        [
-          {
-            json: {
-              row_number: 3,
-              Header1: 'Value3',
-              Header2: 'Value4',
-              _rowsLeft: 0,
-            },
-            pairedItem: {
-              item: 0,
-            },
-          },
-        ],
-      ]);
-    });
-
-    test('should return the first row from google sheet if no rows left', async () => {
-      mockExecuteFunctions.getInputData.mockReturnValue([
-        {
-          json: {
-            row_number: 3,
-            Header1: 'Value3',
-            Header2: 'Value4',
-            _rowsLeft: 0,
-          },
-          pairedItem: {
-            item: 0,
-            input: undefined,
-          },
-        },
-      ]);
-      mockExecuteFunctions.getNodeParameter.mockImplementation(
-        (key: string, _: number, fallbackValue?: string | number | boolean | object) => {
-          const mockParams: { [key: string]: unknown } = {
-            options: {},
-            'filtersUI.values': [],
-            combineFilters: 'AND',
-            documentId: {
-              mode: 'id',
-              value: spreadsheetId,
-            },
-            sheetName,
-            sheetMode: 'id',
-          };
-          return mockParams[key] ?? fallbackValue;
-        },
-      );
-
-      const result = await new EvaluationTrigger().execute.call(mockExecuteFunctions);
-
-      expect(result).toEqual([
-        [
-          {
-            json: {
-              row_number: 2,
-              Header1: 'Value1',
-              Header2: 'Value2',
-              _rowsLeft: 2,
-            },
-            pairedItem: {
-              item: 0,
-            },
-          },
-        ],
-      ]);
-    });
-
-    test('should return a single row from google sheet with limit', async () => {
-      mockExecuteFunctions.getNodeParameter.mockImplementation(
-        (key: string, _: number, fallbackValue?: string | number | boolean | object) => {
-          const mockParams: { [key: string]: unknown } = {
-            options: {},
-            'filtersUI.values': [],
-            combineFilters: 'AND',
-            documentId: {
-              mode: 'id',
-              value: spreadsheetId,
-            },
-            sheetName,
-            sheetMode: 'id',
-            limitRows: true,
-            maxRows: 1,
-          };
-          return mockParams[key] ?? fallbackValue;
-        },
-      );
-
-      const result = await new EvaluationTrigger().execute.call(mockExecuteFunctions);
-
-      expect(result).toEqual([
-        [
-          {
-            json: {
-              row_number: 2,
-              Header1: 'Value1',
-              Header2: 'Value2',
-              _rowsLeft: 0,
-            },
-            pairedItem: {
-              item: 0,
-            },
-          },
-        ],
-      ]);
-    });
-
-    test('should return the sheet with limits applied when test runner is enabled', async () => {
-      mockExecuteFunctions.getInputData.mockReturnValue([{ json: { requestDataset: true } }]);
-
+    test('should return the sheet with limits applied, without filters', async () => {
       mockExecuteFunctions.getNodeParameter.mockImplementation(
         (key: string, _: number, fallbackValue?: string | number | boolean | object) => {
           const mockParams: { [key: string]: unknown } = {
@@ -252,7 +363,9 @@ describe('Evaluation Trigger Node', () => {
         },
       );

-      const result = await new EvaluationTrigger().execute.call(mockExecuteFunctions);
+      const result = await new EvaluationTrigger().customOperations.dataset.getRows.call(
+        mockExecuteFunctions,
+      );

       expect(result).toEqual([
         [
@@ -279,24 +392,9 @@ describe('Evaluation Trigger Node', () => {
         ],
       ]);
     });
-  });

-  describe('With filters', () => {
-    beforeEach(() => {
-      jest.resetAllMocks();
-
-      mockExecuteFunctions = mock<IExecuteFunctions>({
-        getInputData: jest.fn().mockReturnValue([{ json: {} }]),
-        getNode: jest.fn().mockReturnValue({ typeVersion: 4.6 }),
-      });
-
-      jest.spyOn(GoogleSheet.prototype, 'spreadsheetGetSheet').mockImplementation(async () => {
-        return { sheetId: 1, title: sheetName };
-      });
-    });
-
-    test('should return all relevant rows from google sheet using filter and test runner enabled', async () => {
-      mockExecuteFunctions.getInputData.mockReturnValue([{ json: { requestDataset: true } }]);
+    test('should return all relevant rows from google sheet using filters', async () => {
+      mockExecuteFunctions.getInputData.mockReturnValue([{ json: {} }]);

       jest
         .spyOn(GoogleSheet.prototype, 'getData')
@@ -336,7 +434,8 @@ describe('Evaluation Trigger Node', () => {

       const evaluationTrigger = new EvaluationTrigger();

-      const result = await evaluationTrigger.execute.call(mockExecuteFunctions);
+      const result =
+        await evaluationTrigger.customOperations.dataset.getRows.call(mockExecuteFunctions);

       expect(result).toEqual([
         [
@@ -355,63 +454,5 @@ describe('Evaluation Trigger Node', () => {
         ],
       ]);
     });
-
-    test('should return a single row from google sheet using filter', async () => {
-      jest
-        .spyOn(GoogleSheet.prototype, 'getData')
-        .mockResolvedValueOnce([
-          // operationResult
-          ['Header1', 'Header2'],
-          ['Value1', 'Value2'],
-          ['Value3', 'Value4'],
-        ])
-        .mockResolvedValueOnce([
-          // rowsLeft
-          ['Header1', 'Header2'],
-          ['Value1', 'Value2'],
-          ['Value3', 'Value4'],
-        ]);
-
-      mockExecuteFunctions.getNodeParameter.mockImplementation(
-        (key: string, _: number, fallbackValue?: string | number | boolean | object) => {
-          const mockParams: { [key: string]: unknown } = {
-            limitRows: true,
-            maxRows: 2,
-            'filtersUI.values': [{ lookupColumn: 'Header1', lookupValue: 'Value1' }],
-            options: {},
-            combineFilters: 'AND',
-            documentId: {
-              mode: 'id',
-              value: spreadsheetId,
-            },
-            sheetName,
-            sheetMode: 'id',
-          };
-          return mockParams[key] ?? fallbackValue;
-        },
-      );
-
-      jest.spyOn(utils, 'getRowsLeft').mockResolvedValue(0);
-
-      const evaluationTrigger = new EvaluationTrigger();
-
-      const result = await evaluationTrigger.execute.call(mockExecuteFunctions);
-
-      expect(result).toEqual([
-        [
-          {
-            json: {
-              row_number: 2,
-              Header1: 'Value1',
-              Header2: 'Value2',
-              _rowsLeft: 0,
-            },
-            pairedItem: {
-              item: 0,
-            },
-          },
-        ],
-      ]);
-    });
   });
 });

@@ -1148,6 +1148,15 @@ export interface INode {
   webhookId?: string;
   extendsCredential?: string;
   rewireOutputLogTo?: NodeConnectionType;
+
+  // forces the node to execute a particular custom operation
+  // based on resource and operation
+  // instead of calling default execute function
+  // used by evaluations test-runner
+  forceCustomOperation?: {
+    resource: string;
+    operation: string;
+  };
 }

 export interface IPinData {
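The interface addition above is the hook everything else in this commit hangs off: any code that can write to an INode can pin which custom operation the node will run, without touching its parameters. A reduced sketch of how the test-runner-style override is expressed against this shape (field names from the diff; the node object is trimmed to what the example needs):

interface NodeWithForcedOperation {
  name: string;
  parameters: Record<string, unknown>;
  // Same shape as the forceCustomOperation field added to INode above.
  forceCustomOperation?: { resource: string; operation: string };
}

const triggerNode: NodeWithForcedOperation = {
  name: 'Dataset Trigger',
  parameters: {},
};

// What TestRunnerService.runDatasetTrigger now does instead of pushing a
// requestDataset flag into a pre-built node execution stack:
triggerNode.forceCustomOperation = { resource: 'dataset', operation: 'getRows' };

console.log(triggerNode.forceCustomOperation); // { resource: 'dataset', operation: 'getRows' }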