Mirror of https://github.com/Abdulazizzn/n8n-enterprise-unlocked.git, synced 2025-12-16 17:46:45 +00:00
fix: Run evaluations loop manually always from first row (#15794)
@@ -0,0 +1,172 @@
+import { describe, it, expect, vi, beforeEach } from 'vitest';
+import { mock } from 'vitest-mock-extended';
+import { continueEvaluationLoop, type SimplifiedExecution } from './executionFinished';
+import type { ITaskData } from 'n8n-workflow';
+import { EVALUATION_TRIGGER_NODE_TYPE } from 'n8n-workflow';
+import type { INodeUi } from '@/Interface';
+import type { Router } from 'vue-router';
+
+const runWorkflow = vi.fn();
+
+vi.mock('@/composables/useRunWorkflow', () => ({
+	useRunWorkflow: vi.fn(() => ({
+		runWorkflow,
+	})),
+}));
+
+describe('continueEvaluationLoop()', () => {
+	beforeEach(() => {
+		vi.resetAllMocks();
+	});
+
+	it('should call runWorkflow() if workflow has eval trigger that executed successfully with rows left', () => {
+		const evalTriggerNodeName = 'eval-trigger';
+		const evalTriggerNodeData = mock<ITaskData>({
+			data: {
+				main: [
+					[
+						{
+							json: {
+								row_number: 1,
+								_rowsLeft: 1,
+								header1: 'value1',
+							},
+						},
+					],
+				],
+			},
+		});
+		const execution = mock<SimplifiedExecution>({
+			status: 'success',
+			workflowData: {
+				nodes: [
+					mock<INodeUi>({
+						type: EVALUATION_TRIGGER_NODE_TYPE,
+						name: evalTriggerNodeName,
+					}),
+				],
+			},
+			data: {
+				resultData: {
+					runData: {
+						[evalTriggerNodeName]: [evalTriggerNodeData],
+					},
+				},
+			},
+		});
+
+		continueEvaluationLoop(execution, mock<Router>());
+
+		expect(runWorkflow).toHaveBeenCalledWith({
+			triggerNode: evalTriggerNodeName,
+			nodeData: evalTriggerNodeData,
+			rerunTriggerNode: true,
+		});
+	});
+
+	it('should not call runWorkflow() if workflow execution status is not success', () => {
+		const execution = mock<SimplifiedExecution>({
+			status: 'error',
+			workflowData: {
+				nodes: [
+					mock<INodeUi>({
+						type: EVALUATION_TRIGGER_NODE_TYPE,
+						name: 'eval-trigger',
+					}),
+				],
+			},
+			data: {
+				resultData: {
+					runData: {},
+				},
+			},
+		});
+
+		continueEvaluationLoop(execution, mock<Router>());
+
+		expect(runWorkflow).not.toHaveBeenCalled();
+	});
+
+	it('should not call runWorkflow() if eval trigger node does not exist in workflow', () => {
+		const execution = mock<SimplifiedExecution>({
+			status: 'success',
+			workflowData: {
+				nodes: [],
+			},
+			data: {
+				resultData: {
+					runData: {},
+				},
+			},
+		});
+
+		continueEvaluationLoop(execution, mock<Router>());
+
+		expect(runWorkflow).not.toHaveBeenCalled();
+	});
+
+	it('should not call runWorkflow() if eval trigger node exists but has no run data', () => {
+		const evalTriggerNodeName = 'eval-trigger';
+		const execution = mock<SimplifiedExecution>({
+			status: 'success',
+			workflowData: {
+				nodes: [
+					mock<INodeUi>({
+						type: EVALUATION_TRIGGER_NODE_TYPE,
+						name: evalTriggerNodeName,
+					}),
+				],
+			},
+			data: {
+				resultData: {
+					runData: {},
+				},
+			},
+		});
+
+		continueEvaluationLoop(execution, mock<Router>());
+
+		expect(runWorkflow).not.toHaveBeenCalled();
+	});
+
+	it('should not call runWorkflow() if eval trigger node run data has no rows left', () => {
+		const evalTriggerNodeName = 'eval-trigger';
+		const evalTriggerNodeData = mock<ITaskData>({
+			data: {
+				main: [
+					[
+						{
+							json: {
+								row_number: 1,
+								_rowsLeft: 0,
+								header1: 'value1',
+							},
+						},
+					],
+				],
+			},
+		});
+		const execution = mock<SimplifiedExecution>({
+			status: 'success',
+			workflowData: {
+				nodes: [
+					mock<INodeUi>({
+						type: EVALUATION_TRIGGER_NODE_TYPE,
+						name: evalTriggerNodeName,
+					}),
+				],
+			},
+			data: {
+				resultData: {
+					runData: {
+						[evalTriggerNodeName]: [evalTriggerNodeData],
+					},
+				},
+			},
+		});
+
+		continueEvaluationLoop(execution, mock<Router>());
+
+		expect(runWorkflow).not.toHaveBeenCalled();
+	});
+});
@@ -25,6 +25,7 @@ import { getTriggerNodeServiceName } from '@/utils/nodeTypesUtils';
 import { useExternalHooks } from '@/composables/useExternalHooks';
 import { useNodeHelpers } from '@/composables/useNodeHelpers';
 import { useNodeTypesStore } from '@/stores/nodeTypes.store';
+import { useRunWorkflow } from '@/composables/useRunWorkflow';
 
 export type SimplifiedExecution = Pick<
 	IExecutionResponse,
@@ -94,34 +95,6 @@ export async function executionFinished(
 		}
 	}
 
-	// Implicit looping: This will re-trigger the evaluation trigger if it exists on a successful execution of the workflow.
-	if (execution.status === 'success' && execution.data?.startData?.destinationNode === undefined) {
-		// check if we have an evaluation trigger in our workflow and whether it has any run data
-		const evalTrigger = execution.workflowData.nodes.find(
-			(node) => node.type === EVALUATION_TRIGGER_NODE_TYPE,
-		);
-		const triggerRunData = evalTrigger
-			? execution?.data?.resultData?.runData[evalTrigger.name]
-			: undefined;
-
-		if (evalTrigger && triggerRunData !== undefined) {
-			const mainData = triggerRunData[0]?.data?.main[0];
-			const rowsLeft = mainData ? (mainData[0]?.json?._rowsLeft as number) : 0;
-
-			if (rowsLeft && rowsLeft > 0) {
-				// Find the button that belongs to the evaluation trigger, and click it.
-				const testId = `execute-workflow-button-${evalTrigger.name}`;
-
-				setTimeout(() => {
-					const button = Array.from(document.querySelectorAll('[data-test-id]')).filter((x) =>
-						(x as HTMLElement)?.dataset?.testId?.startsWith(testId),
-					)[0];
-					(button as HTMLElement)?.click();
-				}, 2);
-			}
-		}
-	}
-
 	const runExecutionData = getRunExecutionData(execution);
 	uiStore.setProcessingExecutionResults(false);
 
@@ -134,6 +107,47 @@ export async function executionFinished(
 	}
 
 	setRunExecutionData(execution, runExecutionData);
+
+	continueEvaluationLoop(execution, options.router);
 }
 
+/**
+ * Implicit looping: This will re-trigger the evaluation trigger if it exists on a successful execution of the workflow.
+ * @param execution
+ * @param router
+ */
+export function continueEvaluationLoop(
+	execution: SimplifiedExecution,
+	router: ReturnType<typeof useRouter>,
+) {
+	if (execution.status !== 'success' || execution.data?.startData?.destinationNode !== undefined) {
+		return;
+	}
+
+	// check if we have an evaluation trigger in our workflow and whether it has any run data
+	const evaluationTrigger = execution.workflowData.nodes.find(
+		(node) => node.type === EVALUATION_TRIGGER_NODE_TYPE,
+	);
+	const triggerRunData = evaluationTrigger
+		? execution?.data?.resultData?.runData[evaluationTrigger.name]
+		: undefined;
+
+	if (!evaluationTrigger || triggerRunData === undefined) {
+		return;
+	}
+
+	const mainData = triggerRunData[0]?.data?.main[0];
+	const rowsLeft = mainData ? (mainData[0]?.json?._rowsLeft as number) : 0;
+
+	if (rowsLeft && rowsLeft > 0) {
+		const { runWorkflow } = useRunWorkflow({ router });
+		void runWorkflow({
+			triggerNode: evaluationTrigger.name,
+			// pass output of previous node run to trigger next run
+			nodeData: triggerRunData[0],
+			rerunTriggerNode: true,
+		});
+	}
+}
+
 /**
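Note (not part of the diff): a minimal standalone paraphrase of the gate above, using the same item shape as the test fixtures; the names here are illustrative only.

type TriggerItem = { json: { row_number: number; _rowsLeft: number } };

// Re-run only full, successful executions whose evaluation trigger still reports rows left.
function shouldContinueLoop(
	status: string,
	destinationNode: string | undefined,
	lastItem?: TriggerItem,
): boolean {
	if (status !== 'success' || destinationNode !== undefined) return false;
	return (lastItem?.json._rowsLeft ?? 0) > 0;
}

// shouldContinueLoop('success', undefined, { json: { row_number: 2, _rowsLeft: 3 } }) === true
// shouldContinueLoop('success', 'SomeNode', { json: { row_number: 2, _rowsLeft: 3 } }) === false
// shouldContinueLoop('success', undefined, { json: { row_number: 9, _rowsLeft: 0 } }) === false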
@@ -511,6 +511,77 @@ describe('useRunWorkflow({ router })', () => {
 					name: triggerNode,
 					data: nodeData,
 				},
 				startNodes: [],
 			}),
 		);
 	});
 
+	it('should retrigger workflow from child node if triggerNode and nodeData are passed in', async () => {
+		// ARRANGE
+		const composable = useRunWorkflow({ router });
+		const triggerNode = 'Chat Trigger';
+		const nodeData = mock<ITaskData>();
+		vi.mocked(workflowHelpers).getCurrentWorkflow.mockReturnValue(
+			mock<Workflow>({
+				getChildNodes: vi.fn().mockReturnValue([{ name: 'Child node', type: 'nodes.child' }]),
+			}),
+		);
+		vi.mocked(workflowHelpers).getWorkflowDataToSave.mockResolvedValue(
+			mock<IWorkflowData>({ nodes: [] }),
+		);
+
+		const { runWorkflow } = composable;
+
+		// ACT
+		await runWorkflow({ triggerNode, nodeData });
+
+		// ASSERT
+		expect(workflowsStore.runWorkflow).toHaveBeenCalledWith(
+			expect.objectContaining({
+				triggerToStartFrom: {
+					name: triggerNode,
+					data: nodeData,
+				},
+				startNodes: [
+					{
+						name: {
+							name: 'Child node',
+							type: 'nodes.child',
+						},
+						sourceData: null,
+					},
+				],
+			}),
+		);
+	});
+
+	it('should retrigger workflow from trigger node if rerunTriggerNode is set', async () => {
+		// ARRANGE
+		const composable = useRunWorkflow({ router });
+		const triggerNode = 'Chat Trigger';
+		const nodeData = mock<ITaskData>();
+		vi.mocked(workflowHelpers).getCurrentWorkflow.mockReturnValue(
+			mock<Workflow>({
+				getChildNodes: vi.fn().mockReturnValue([{ name: 'Child node', type: 'nodes.child' }]),
+			}),
+		);
+		vi.mocked(workflowHelpers).getWorkflowDataToSave.mockResolvedValue(
+			mock<IWorkflowData>({ nodes: [] }),
+		);
+
+		const { runWorkflow } = composable;
+
+		// ACT
+		await runWorkflow({ triggerNode, nodeData, rerunTriggerNode: true });
+
+		// ASSERT
+		expect(workflowsStore.runWorkflow).toHaveBeenCalledWith(
+			expect.objectContaining({
+				triggerToStartFrom: {
+					name: triggerNode,
+					data: nodeData,
+				},
+				startNodes: [],
+			}),
+		);
+	});
@@ -118,6 +118,7 @@ export function useRunWorkflow(useRunWorkflowOpts: { router: ReturnType<typeof u
 	async function runWorkflow(options: {
 		destinationNode?: string;
 		triggerNode?: string;
+		rerunTriggerNode?: boolean;
 		nodeData?: ITaskData;
 		source?: string;
 	}): Promise<IExecutionPushResponse | undefined> {
@@ -171,7 +172,8 @@ export function useRunWorkflow(useRunWorkflowOpts: { router: ReturnType<typeof u
 		) {
 			executedNode = options.destinationNode;
 			startNodeNames.push(options.destinationNode);
-		} else if (options.triggerNode && options.nodeData) {
+		} else if (options.triggerNode && options.nodeData && !options.rerunTriggerNode) {
+			// starts execution from downstream nodes of trigger node
 			startNodeNames.push(
 				...workflow.getChildNodes(options.triggerNode, NodeConnectionTypes.Main, 1),
 			);
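Note (not part of the diff): a hedged usage sketch of the two modes this branch now distinguishes, matching the expectations in the tests above; the trigger name and task data are placeholders.

import { useRouter } from 'vue-router';
import type { ITaskData } from 'n8n-workflow';
import { useRunWorkflow } from '@/composables/useRunWorkflow';

async function rerunEvaluation(lastRun: ITaskData) {
	const { runWorkflow } = useRunWorkflow({ router: useRouter() });

	// Without rerunTriggerNode, startNodes is filled from workflow.getChildNodes(),
	// so execution resumes from the trigger's downstream nodes.
	await runWorkflow({ triggerNode: 'eval-trigger', nodeData: lastRun });

	// With rerunTriggerNode, startNodes stays empty and the trigger node itself runs again,
	// which is how continueEvaluationLoop() fetches the next dataset row.
	await runWorkflow({ triggerNode: 'eval-trigger', nodeData: lastRun, rerunTriggerNode: true });
}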
@@ -20,7 +20,7 @@ import {
 	getSheet,
 } from '../utils/evaluationTriggerUtils';
 
-export let startingRow = 2;
+export const DEFAULT_STARTING_ROW = 2;
 
 export class EvaluationTrigger implements INodeType {
 	description: INodeTypeDescription = {
@@ -105,12 +105,7 @@ export class EvaluationTrigger implements INodeType {
 
 	methods = { loadOptions, listSearch };
 
-	async execute(this: IExecuteFunctions, startRow?: number): Promise<INodeExecutionData[][]> {
-		// We need to allow tests to reset the startingRow
-		if (startRow) {
-			startingRow = startRow;
-		}
-
+	async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
 		const inputData = this.getInputData();
 
 		const MAX_ROWS = 1000;
@@ -119,10 +114,17 @@ export class EvaluationTrigger implements INodeType {
 			? (this.getNodeParameter('maxRows', 0) as number) + 1
 			: MAX_ROWS;
 
+		const previousRunRowNumber = inputData?.[0]?.json?.row_number;
+		const previousRunRowsLeft = inputData?.[0]?.json?._rowsLeft;
+
+		const firstDataRow =
+			typeof previousRunRowNumber === 'number' && previousRunRowsLeft !== 0
+				? previousRunRowNumber + 1
+				: DEFAULT_STARTING_ROW;
 		const rangeOptions = {
 			rangeDefinition: 'specifyRange',
 			headerRow: 1,
-			firstDataRow: startingRow,
+			firstDataRow,
 		};
 
 		const googleSheetInstance = getGoogleSheet.call(this);
@@ -153,8 +155,6 @@ export class EvaluationTrigger implements INodeType {
 			const currentRowNumber = currentRow.json?.row_number as number;
 
 			if (currentRow === undefined) {
-				startingRow = 2;
-
 				throw new NodeOperationError(this.getNode(), 'No row found');
 			}
 
@@ -168,37 +168,23 @@ export class EvaluationTrigger implements INodeType {
 			currentRow.json._rowsLeft = rowsLeft;
 
-			startingRow = currentRowNumber + 1;
-
-			if (rowsLeft === 0) {
-				startingRow = 2;
-			}
-
 			return [[currentRow]];
 		} else {
-			const currentRow = allRows.find((row) => (row?.json?.row_number as number) === startingRow);
+			const currentRow = allRows.find((row) => (row?.json?.row_number as number) === firstDataRow);
 
 			const rowsLeft = await getRowsLeft.call(
 				this,
 				googleSheetInstance,
 				googleSheet.title,
-				`${googleSheet.title}!${startingRow}:${maxRows}`,
+				`${googleSheet.title}!${firstDataRow}:${maxRows}`,
 			);
 
 			if (currentRow === undefined) {
-				startingRow = 2;
-
 				throw new NodeOperationError(this.getNode(), 'No row found');
 			}
 
 			currentRow.json._rowsLeft = rowsLeft;
 
-			startingRow += 1;
-
-			if (rowsLeft === 0) {
-				startingRow = 2;
-			}
-
 			return [[currentRow]];
 		}
 	}
 
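Note (not part of the diff): the new row selection, isolated as a small sketch; a run with no input (a manual start) falls back to DEFAULT_STARTING_ROW, otherwise the node advances from the previous run's row_number unless _rowsLeft reached 0.

const DEFAULT_STARTING_ROW = 2; // row 1 holds the sheet headers

function resolveFirstDataRow(previous?: { json: { row_number?: unknown; _rowsLeft?: unknown } }): number {
	const previousRunRowNumber = previous?.json?.row_number;
	const previousRunRowsLeft = previous?.json?._rowsLeft;
	return typeof previousRunRowNumber === 'number' && previousRunRowsLeft !== 0
		? previousRunRowNumber + 1
		: DEFAULT_STARTING_ROW;
}

// resolveFirstDataRow(undefined)                                  -> 2 (manual run starts from the first data row)
// resolveFirstDataRow({ json: { row_number: 2, _rowsLeft: 1 } })  -> 3 (loop continues with the next row)
// resolveFirstDataRow({ json: { row_number: 5, _rowsLeft: 0 } })  -> 2 (dataset exhausted, position resets)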
@@ -2,7 +2,7 @@ import { mock } from 'jest-mock-extended';
 import type { IExecuteFunctions } from 'n8n-workflow';
 
 import { GoogleSheet } from '../../Google/Sheet/v2/helpers/GoogleSheet';
-import { EvaluationTrigger, startingRow } from '../EvaluationTrigger/EvaluationTrigger.node.ee';
+import { EvaluationTrigger } from '../EvaluationTrigger/EvaluationTrigger.node.ee';
 import * as utils from '../utils/evaluationTriggerUtils';
 
 describe('Evaluation Trigger Node', () => {
@@ -87,8 +87,108 @@ describe('Evaluation Trigger Node', () => {
 				},
 			],
 		]);
+	});
 
-		expect(startingRow).toBe(3);
+	test('should return the next row from google sheet', async () => {
+		mockExecuteFunctions.getInputData.mockReturnValue([
+			{
+				json: {
+					row_number: 2,
+					Header1: 'Value1',
+					Header2: 'Value2',
+					_rowsLeft: 1,
+				},
+				pairedItem: {
+					item: 0,
+					input: undefined,
+				},
+			},
+		]);
+		mockExecuteFunctions.getNodeParameter.mockImplementation(
+			(key: string, _: number, fallbackValue?: string | number | boolean | object) => {
+				const mockParams: { [key: string]: unknown } = {
+					options: {},
+					'filtersUI.values': [],
+					combineFilters: 'AND',
+					documentId: {
+						mode: 'id',
+						value: spreadsheetId,
+					},
+					sheetName,
+					sheetMode: 'id',
+				};
+				return mockParams[key] ?? fallbackValue;
+			},
+		);
+
+		const result = await new EvaluationTrigger().execute.call(mockExecuteFunctions);
+
+		expect(result).toEqual([
+			[
+				{
+					json: {
+						row_number: 3,
+						Header1: 'Value3',
+						Header2: 'Value4',
+						_rowsLeft: 0,
+					},
+					pairedItem: {
+						item: 0,
+					},
+				},
+			],
+		]);
 	});
 
+	test('should return the first row from google sheet if no rows left', async () => {
+		mockExecuteFunctions.getInputData.mockReturnValue([
+			{
+				json: {
+					row_number: 3,
+					Header1: 'Value3',
+					Header2: 'Value4',
+					_rowsLeft: 0,
+				},
+				pairedItem: {
+					item: 0,
+					input: undefined,
+				},
+			},
+		]);
+		mockExecuteFunctions.getNodeParameter.mockImplementation(
+			(key: string, _: number, fallbackValue?: string | number | boolean | object) => {
+				const mockParams: { [key: string]: unknown } = {
+					options: {},
+					'filtersUI.values': [],
+					combineFilters: 'AND',
+					documentId: {
+						mode: 'id',
+						value: spreadsheetId,
+					},
+					sheetName,
+					sheetMode: 'id',
+				};
+				return mockParams[key] ?? fallbackValue;
+			},
+		);
+
+		const result = await new EvaluationTrigger().execute.call(mockExecuteFunctions);
+
+		expect(result).toEqual([
+			[
+				{
+					json: {
+						row_number: 2,
+						Header1: 'Value1',
+						Header2: 'Value2',
+						_rowsLeft: 2,
+					},
+					pairedItem: {
+						item: 0,
+					},
+				},
+			],
+		]);
+	});
+
 	test('should return a single row from google sheet with limit', async () => {
@@ -111,7 +211,7 @@ describe('Evaluation Trigger Node', () => {
 			},
 		);
 
-		const result = await new EvaluationTrigger().execute.call(mockExecuteFunctions, 2);
+		const result = await new EvaluationTrigger().execute.call(mockExecuteFunctions);
 
 		expect(result).toEqual([
 			[
@@ -128,8 +228,6 @@ describe('Evaluation Trigger Node', () => {
 				},
 			],
 		]);
-
-		expect(startingRow).toBe(2);
 	});
 
 	test('should return the sheet with limits applied when test runner is enabled', async () => {
@@ -154,7 +252,7 @@ describe('Evaluation Trigger Node', () => {
 			},
 		);
 
-		const result = await new EvaluationTrigger().execute.call(mockExecuteFunctions, 2);
+		const result = await new EvaluationTrigger().execute.call(mockExecuteFunctions);
 
 		expect(result).toEqual([
 			[
@@ -180,8 +278,6 @@ describe('Evaluation Trigger Node', () => {
 				},
 			],
 		]);
-
-		expect(startingRow).toBe(2);
 	});
 });
 
@@ -240,7 +336,7 @@ describe('Evaluation Trigger Node', () => {
 
 		const evaluationTrigger = new EvaluationTrigger();
 
-		const result = await evaluationTrigger.execute.call(mockExecuteFunctions, 1);
+		const result = await evaluationTrigger.execute.call(mockExecuteFunctions);
 
 		expect(result).toEqual([
 			[
@@ -299,7 +395,7 @@ describe('Evaluation Trigger Node', () => {
 
 		const evaluationTrigger = new EvaluationTrigger();
 
-		const result = await evaluationTrigger.execute.call(mockExecuteFunctions, 1);
+		const result = await evaluationTrigger.execute.call(mockExecuteFunctions);
 
 		expect(result).toEqual([
 			[
@@ -14,7 +14,8 @@
 		"lint": "eslint nodes credentials utils test --quiet && node ./scripts/validate-load-options-methods.js",
 		"lintfix": "eslint nodes credentials utils test --fix",
 		"watch": "tsup --watch --tsconfig tsconfig.build.json --onSuccess \"pnpm copy-nodes-json && tsc-alias -p tsconfig.build.json && pnpm n8n-generate-metadata\"",
-		"test": "jest"
+		"test": "jest",
+		"test:dev": "jest --watch"
 	},
 	"files": [
 		"dist"