Mirror of https://github.com/Abdulazizzn/n8n-enterprise-unlocked.git, synced 2025-12-16 01:26:44 +00:00

feat: Send node execution finished and node execution data in separate events (no-changelog) (#18875)
Co-authored-by: Tomi Turtiainen <10324676+tomi@users.noreply.github.com>
@@ -76,8 +76,9 @@ export function clickOpenNdvAtRow(rowIndex: number) {
 }
 
 export function clickTriggerPartialExecutionAtRow(rowIndex: number) {
 	getLogEntries().eq(rowIndex).scrollIntoView();
-	getLogEntries().eq(rowIndex).find('[aria-label="Execute step"]').click();
+	getLogEntries().eq(rowIndex).realHover();
+	getLogEntries().eq(rowIndex).find('[aria-label="Execute step"]').click({ force: true });
 }
 
 export function setInputDisplayMode(mode: 'table' | 'ai' | 'json' | 'schema') {
@@ -117,10 +117,19 @@ export function runMockWorkflowExecution({
 			nodeName,
 			data: pick(nodeRunData, ['startTime', 'executionIndex', 'source', 'hints']),
 		});
+		const { data: _, ...taskData } = nodeRunData;
+		const itemCount = nodeRunData.data?.main?.[0]?.length ?? 0;
 		cy.push('nodeExecuteAfter', {
 			executionId,
 			nodeName,
-			data: nodeRunData,
+			data: taskData,
+			itemCount,
+		});
+		cy.push('nodeExecuteAfterData', {
+			executionId,
+			nodeName,
+			data: nodeRunData,
+			itemCount,
 		});
 	});
@@ -52,23 +52,36 @@ export type NodeExecuteBefore = {
 	};
 };
 
+/**
+ * Message sent after a node has finished executing that contains all of that node's data
+ * except for the output items, which are sent in the `NodeExecuteAfterData` message.
+ */
 export type NodeExecuteAfter = {
 	type: 'nodeExecuteAfter';
 	data: {
 		executionId: string;
 		nodeName: string;
-		data: ITaskData;
-		/**
-		 * When a worker relays updates about a manual execution to main, if the
-		 * payload size is above a limit, we send only a placeholder to the client.
-		 * Later we fetch the entire execution data and fill in any placeholders.
-		 *
-		 * When sending a placeholder, we also send the number of output items, so
-		 * the client knows ahead of time how many items are there, to prevent the
-		 * items count from jumping up when the execution finishes.
-		 */
-		itemCount?: number;
+		data: Omit<ITaskData, 'data'>;
+		itemCount: number;
+	};
+};
+
+/**
+ * Message sent after a node has finished executing that contains the entire output data
+ * of that node. This is sent immediately after `NodeExecuteAfter`.
+ */
+export type NodeExecuteAfterData = {
+	type: 'nodeExecuteAfterData';
+	data: {
+		executionId: string;
+		nodeName: string;
+		data: ITaskData;
+		itemCount: number;
 	};
 };
@@ -78,4 +91,5 @@ export type ExecutionPushMessage =
 	| ExecutionFinished
 	| ExecutionRecovered
 	| NodeExecuteBefore
-	| NodeExecuteAfter;
+	| NodeExecuteAfter
+	| NodeExecuteAfterData;
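Taken together, the new types split node completion into two messages: `nodeExecuteAfter` carries the task metadata plus `itemCount`, and `nodeExecuteAfterData` carries the full `ITaskData`. A minimal sketch of a client handler over the updated union; the two render helpers are hypothetical, not part of this change:

import type { ExecutionPushMessage } from '@n8n/api-types/push/execution';

// Hypothetical render helpers, shown only to illustrate the two-step flow.
declare function renderPlaceholders(nodeName: string, itemCount: number): void;
declare function renderOutput(nodeName: string, data: unknown): void;

function handleExecutionMessage(msg: ExecutionPushMessage) {
	switch (msg.type) {
		case 'nodeExecuteAfter':
			// Metadata only: enough to mark the node finished and reserve
			// space for `itemCount` output items.
			renderPlaceholders(msg.data.nodeName, msg.data.itemCount);
			break;
		case 'nodeExecuteAfterData':
			// Full ITaskData, sent immediately after `nodeExecuteAfter`.
			renderOutput(msg.data.nodeName, msg.data.data);
			break;
	}
}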
@@ -309,13 +309,56 @@ describe('Execution Lifecycle Hooks', () => {
 	});
 
 	describe('nodeExecuteAfter', () => {
-		it('should send nodeExecuteAfter push event', async () => {
-			await lifecycleHooks.runHook('nodeExecuteAfter', [nodeName, taskData, runExecutionData]);
-
-			expect(push.send).toHaveBeenCalledWith(
-				{ type: 'nodeExecuteAfter', data: { executionId, nodeName, data: taskData } },
-				pushRef,
-			);
+		it('should send nodeExecuteAfter and nodeExecuteAfterData push events', async () => {
+			const mockTaskData: ITaskData = {
+				startTime: 1,
+				executionTime: 1,
+				executionIndex: 0,
+				source: [],
+				data: {
+					main: [
+						[
+							{
+								json: { key: 'value' },
+								binary: {
+									data: {
+										id: '123',
+										data: '',
+										mimeType: 'text/plain',
+									},
+								},
+							},
+						],
+					],
+				},
+			};
+
+			await lifecycleHooks.runHook('nodeExecuteAfter', [
+				nodeName,
+				mockTaskData,
+				runExecutionData,
+			]);
+
+			const { data: _, ...taskDataWithoutData } = mockTaskData;
+
+			expect(push.send).toHaveBeenNthCalledWith(
+				1,
+				{
+					type: 'nodeExecuteAfter',
+					data: { executionId, nodeName, itemCount: 1, data: taskDataWithoutData },
+				},
+				pushRef,
+			);
+
+			expect(push.send).toHaveBeenNthCalledWith(
+				2,
+				{
+					type: 'nodeExecuteAfterData',
+					data: { executionId, nodeName, itemCount: 1, data: mockTaskData },
+				},
+				pushRef,
+				true,
+			);
 		});
 
 		it('should save execution progress when enabled', async () => {
@@ -185,7 +185,29 @@ function hookFunctionsPush(
 			workflowId: this.workflowData.id,
 		});
 
-		pushInstance.send({ type: 'nodeExecuteAfter', data: { executionId, nodeName, data } }, pushRef);
+		const itemCount = data.data?.main?.[0]?.length ?? 0;
+		const { data: _, ...taskData } = data;
+
+		pushInstance.send(
+			{ type: 'nodeExecuteAfter', data: { executionId, nodeName, itemCount, data: taskData } },
+			pushRef,
+		);
+
+		// We send the node execution data as a WS binary message to the FE,
+		// not because it's more efficient on the wire: the content is a JSON
+		// string, so text and binary frames end up the same on the wire. The
+		// reason is that the FE can then receive the data directly as an
+		// ArrayBuffer and pass it to a web worker for processing without
+		// extra copies.
+		const asBinary = true;
+		pushInstance.send(
+			{
+				type: 'nodeExecuteAfterData',
+				data: { executionId, nodeName, itemCount, data },
+			},
+			pushRef,
+			asBinary,
+		);
 	});
 	hooks.addHandler('workflowExecuteBefore', function (_workflow, data) {
 		const { executionId } = this;
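On the receiving side, a browser `WebSocket` delivers binary frames as `Blob` unless `binaryType` is set to `'arraybuffer'` (which a later hunk in this commit does). An `ArrayBuffer` is transferable, so it can be moved into a web worker without a copy. A minimal sketch under those assumptions; the URL and worker script are placeholders, not the actual n8n front-end code:

// Sketch only; URL and worker path are hypothetical.
const ws = new WebSocket('wss://example.invalid/push');
ws.binaryType = 'arraybuffer'; // binary frames arrive as ArrayBuffer, not Blob

const worker = new Worker('./push-parser.worker.js'); // hypothetical worker

ws.addEventListener('message', (event) => {
	if (event.data instanceof ArrayBuffer) {
		// The transfer list (second argument) moves the buffer into the
		// worker instead of structured-cloning it, so no extra copy is made.
		worker.postMessage(event.data, [event.data]);
	} else {
		handleTextMessage(event.data as string); // normal JSON text path
	}
});

declare function handleTextMessage(raw: string): void;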
@@ -75,7 +75,7 @@ describe('WebSocketPush', () => {
 		webSocketPush.add(pushRef2, userId, mockWebSocket2);
 		webSocketPush.sendToOne(pushMessage, pushRef1);
 
-		expect(mockWebSocket1.send).toHaveBeenCalledWith(expectedMsg);
+		expect(mockWebSocket1.send).toHaveBeenCalledWith(expectedMsg, { binary: false });
 		expect(mockWebSocket2.send).not.toHaveBeenCalled();
 	});
@@ -84,8 +84,8 @@ describe('WebSocketPush', () => {
 		webSocketPush.add(pushRef2, userId, mockWebSocket2);
 		webSocketPush.sendToAll(pushMessage);
 
-		expect(mockWebSocket1.send).toHaveBeenCalledWith(expectedMsg);
-		expect(mockWebSocket2.send).toHaveBeenCalledWith(expectedMsg);
+		expect(mockWebSocket1.send).toHaveBeenCalledWith(expectedMsg, { binary: false });
+		expect(mockWebSocket2.send).toHaveBeenCalledWith(expectedMsg, { binary: false });
 	});
 
 	it('pings all connections', () => {
@@ -103,8 +103,8 @@ describe('WebSocketPush', () => {
 		webSocketPush.add(pushRef2, userId, mockWebSocket2);
 		webSocketPush.sendToUsers(pushMessage, [userId]);
 
-		expect(mockWebSocket1.send).toHaveBeenCalledWith(expectedMsg);
-		expect(mockWebSocket2.send).toHaveBeenCalledWith(expectedMsg);
+		expect(mockWebSocket1.send).toHaveBeenCalledWith(expectedMsg, { binary: false });
+		expect(mockWebSocket2.send).toHaveBeenCalledWith(expectedMsg, { binary: false });
 	});
 
 	it('emits message event when connection receives data', async () => {
@@ -25,7 +25,11 @@ export abstract class AbstractPush<Connection> extends TypedEmitter<AbstractPush
 	protected userIdByPushRef: Record<string, string> = {};
 
 	protected abstract close(connection: Connection): void;
-	protected abstract sendToOneConnection(connection: Connection, data: string): void;
+	protected abstract sendToOneConnection(
+		connection: Connection,
+		data: string,
+		isBinary: boolean,
+	): void;
 	protected abstract ping(connection: Connection): void;
 
 	constructor(
@@ -69,7 +73,7 @@ export abstract class AbstractPush<Connection> extends TypedEmitter<AbstractPush
 		delete this.userIdByPushRef[pushRef];
 	}
 
-	private sendTo({ type, data }: PushMessage, pushRefs: string[]) {
+	private sendTo({ type, data }: PushMessage, pushRefs: string[], asBinary: boolean = false) {
 		this.logger.debug(`Pushed to frontend: ${type}`, {
 			dataType: type,
 			pushRefs: pushRefs.join(', '),
@@ -80,7 +84,7 @@ export abstract class AbstractPush<Connection> extends TypedEmitter<AbstractPush
 		for (const pushRef of pushRefs) {
 			const connection = this.connections[pushRef];
 			assert(connection);
-			this.sendToOneConnection(connection, stringifiedPayload);
+			this.sendToOneConnection(connection, stringifiedPayload, asBinary);
 		}
 	}
@@ -94,13 +98,13 @@ export abstract class AbstractPush<Connection> extends TypedEmitter<AbstractPush
 		this.sendTo(pushMsg, Object.keys(this.connections));
 	}
 
-	sendToOne(pushMsg: PushMessage, pushRef: string) {
+	sendToOne(pushMsg: PushMessage, pushRef: string, asBinary: boolean = false) {
 		if (this.connections[pushRef] === undefined) {
 			this.logger.debug(`The session "${pushRef}" is not registered.`, { pushRef });
 			return;
 		}
 
-		this.sendTo(pushMsg, [pushRef]);
+		this.sendTo(pushMsg, [pushRef], asBinary);
 	}
 
 	sendToUsers(pushMsg: PushMessage, userIds: Array<User['id']>) {
@@ -13,7 +13,6 @@ import { parse as parseUrl } from 'url';
 import { Server as WSServer } from 'ws';
 
 import { AuthService } from '@/auth/auth.service';
-import { TRIMMED_TASK_DATA_CONNECTIONS } from '@/constants';
 import { BadRequestError } from '@/errors/response-errors/bad-request.error';
 import { Publisher } from '@/scaling/pubsub/publisher.service';
 import { TypedEmitter } from '@/typed-emitter';
@@ -166,13 +165,18 @@ export class Push extends TypedEmitter<PushEvents> {
 		return this.backend.hasPushRef(pushRef);
 	}
 
-	send(pushMsg: PushMessage, pushRef: string) {
+	/**
+	 * Send a push message to a specific push ref.
+	 *
+	 * @param asBinary - Whether to send the message as binary frames or text frames
+	 */
+	send(pushMsg: PushMessage, pushRef: string, asBinary: boolean = false) {
 		if (this.shouldRelayViaPubSub(pushRef)) {
-			this.relayViaPubSub(pushMsg, pushRef);
+			this.relayViaPubSub(pushMsg, pushRef, asBinary);
 			return;
 		}
 
-		this.backend.sendToOne(pushMsg, pushRef);
+		this.backend.sendToOne(pushMsg, pushRef, asBinary);
 	}
 
 	sendToUsers(pushMsg: PushMessage, userIds: Array<User['id']>) {
@@ -208,9 +212,13 @@ export class Push extends TypedEmitter<PushEvents> {
 	}
 
 	@OnPubSubEvent('relay-execution-lifecycle-event', { instanceType: 'main' })
-	handleRelayExecutionLifecycleEvent({ pushRef, ...pushMsg }: PushMessage & { pushRef: string }) {
+	handleRelayExecutionLifecycleEvent({
+		pushRef,
+		asBinary,
+		...pushMsg
+	}: PushMessage & { asBinary: boolean; pushRef: string }) {
 		if (!this.hasPushRef(pushRef)) return;
 
-		this.send(pushMsg, pushRef);
+		this.send(pushMsg, pushRef, asBinary);
 	}
 
 	/**
@@ -219,38 +227,46 @@ export class Push extends TypedEmitter<PushEvents> {
 	 *
 	 * See {@link shouldRelayViaPubSub} for more details.
 	 */
-	private relayViaPubSub(pushMsg: PushMessage, pushRef: string) {
+	private relayViaPubSub(pushMsg: PushMessage, pushRef: string, asBinary: boolean = false) {
 		const eventSizeBytes = new TextEncoder().encode(JSON.stringify(pushMsg.data)).length;
 
 		if (eventSizeBytes <= MAX_PAYLOAD_SIZE_BYTES) {
 			void this.publisher.publishCommand({
 				command: 'relay-execution-lifecycle-event',
-				payload: { ...pushMsg, pushRef },
+				payload: { ...pushMsg, pushRef, asBinary },
 			});
 			return;
 		}
 
-		// too large for pubsub channel, trim it
-
-		const pushMsgCopy = deepCopy(pushMsg);
-
+		const { type } = pushMsg;
 		const toMb = (bytes: number) => (bytes / (1024 * 1024)).toFixed(0);
 		const eventMb = toMb(eventSizeBytes);
 		const maxMb = toMb(MAX_PAYLOAD_SIZE_BYTES);
-		const { type } = pushMsgCopy;
+
+		if (type === 'nodeExecuteAfterData') {
+			this.logger.warn(
+				`Size of "${type}" (${eventMb} MB) exceeds max size ${maxMb} MB. Skipping...`,
+			);
+			// In case of nodeExecuteAfterData, we omit the message entirely. We
+			// already include the number of items in the nodeExecuteAfter message,
+			// based on which the FE will construct placeholder data. The actual
+			// data is then fetched at the end of the execution.
+			return;
+		}
 
 		this.logger.warn(`Size of "${type}" (${eventMb} MB) exceeds max size ${maxMb} MB. Trimming...`);
 
-		if (type === 'nodeExecuteAfter') {
-			pushMsgCopy.data.itemCount = pushMsgCopy.data.data.data?.main[0]?.length ?? 1;
-			pushMsgCopy.data.data.data = TRIMMED_TASK_DATA_CONNECTIONS;
-		} else if (type === 'executionFinished') {
+		const pushMsgCopy = deepCopy(pushMsg);
+
+		if (pushMsgCopy.type === 'executionFinished') {
 			pushMsgCopy.data.rawData = ''; // prompt client to fetch from DB
 		}
 
 		void this.publisher.publishCommand({
 			command: 'relay-execution-lifecycle-event',
-			payload: { ...pushMsgCopy, pushRef },
+			payload: { ...pushMsgCopy, pushRef, asBinary },
 		});
 	}
 }
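The relay path above measures the serialized payload before publishing, and drops oversized `nodeExecuteAfterData` messages outright because the preceding `nodeExecuteAfter` already carried the item count. A standalone sketch of the same size gate; the 5 MB constant here is an assumption for illustration, n8n defines its own `MAX_PAYLOAD_SIZE_BYTES`:

// Sketch of the pub/sub size gate, under an assumed limit.
const MAX_PAYLOAD_SIZE_BYTES = 5 * 1024 * 1024; // assumption for illustration

function fitsPubSubChannel(payload: unknown): boolean {
	// TextEncoder counts UTF-8 bytes, which is what travels over the wire;
	// `JSON.stringify(payload).length` would count UTF-16 code units instead.
	const sizeBytes = new TextEncoder().encode(JSON.stringify(payload)).length;
	return sizeBytes <= MAX_PAYLOAD_SIZE_BYTES;
}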
@@ -68,8 +68,8 @@ export class WebSocketPush extends AbstractPush<WebSocket> {
 		connection.close();
 	}
 
-	protected sendToOneConnection(connection: WebSocket, data: string): void {
-		connection.send(data);
+	protected sendToOneConnection(connection: WebSocket, data: string, asBinary: boolean): void {
+		connection.send(data, { binary: asBinary });
 	}
 
 	protected ping(connection: WebSocket): void {
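For reference, the `ws` library's `send` accepts an options object whose `binary` flag sets the frame opcode independently of the payload's JavaScript type, so the same JSON string can go out as either kind of frame. A small sketch:

import { WebSocketServer, type WebSocket } from 'ws';

// Sketch: the same string sent as a text frame and as a binary frame.
const wss = new WebSocketServer({ port: 8080 });
wss.on('connection', (connection: WebSocket) => {
	const payload = JSON.stringify({ type: 'example', data: {} });
	connection.send(payload, { binary: false }); // text frame (opcode 0x1)
	connection.send(payload, { binary: true }); // binary frame (opcode 0x2)
});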
@@ -63,6 +63,7 @@ export type PubSubCommandMap = {
 	'relay-execution-lifecycle-event': PushMessage & {
 		pushRef: string;
+		asBinary: boolean;
 	};
 
 	'clear-test-webhooks': {
@@ -1917,8 +1917,8 @@
 	"runData.aiContentBlock.tokens": "{count} Tokens",
 	"runData.aiContentBlock.tokens.prompt": "Prompt:",
 	"runData.aiContentBlock.tokens.completion": "Completion:",
-	"runData.trimmedData.title": "Data not viewable yet",
-	"runData.trimmedData.message": "It will be available here once the execution has finished.",
+	"runData.trimmedData.title": "No data yet",
+	"runData.trimmedData.message": "Data will be available here once the execution has finished.",
 	"runData.trimmedData.loading": "Loading data",
 	"runData.panel.actions.collapse": "Collapse panel",
 	"runData.panel.actions.open": "Open panel",
@@ -2932,7 +2932,6 @@
 	"settings.ldap.confirmMessage.beforeSaveForm.confirmButtonText": "Yes, disable it",
 	"settings.ldap.confirmMessage.beforeSaveForm.headline": "Are you sure you want to disable LDAP login?",
 	"settings.ldap.confirmMessage.beforeSaveForm.message": "If you do so, all LDAP users will be converted to email users.",
 	"settings.ldap.disabled.title": "Available on the Enterprise plan",
 	"settings.ldap.disabled.description": "LDAP is available as a paid feature. Learn more about it.",
 	"settings.ldap.disabled.buttonText": "See plans",
@@ -16,12 +16,7 @@ import type {
 	Workflow,
 	NodeConnectionType,
 } from 'n8n-workflow';
-import {
-	parseErrorMetadata,
-	NodeConnectionTypes,
-	NodeHelpers,
-	TRIMMED_TASK_DATA_CONNECTIONS_KEY,
-} from 'n8n-workflow';
+import { parseErrorMetadata, NodeConnectionTypes, NodeHelpers } from 'n8n-workflow';
 import { computed, defineAsyncComponent, onBeforeUnmount, onMounted, ref, toRef, watch } from 'vue';
 
 import type { INodeUi, IRunDataDisplayMode, ITab, NodePanelType } from '@/Interface';
@@ -93,6 +88,7 @@ import { parseAiContent } from '@/utils/aiUtils';
 import { usePostHog } from '@/stores/posthog.store';
 import { I18nT } from 'vue-i18n';
 import RunDataBinary from '@/components/RunDataBinary.vue';
+import { hasTrimmedRunData } from '@/utils/executionUtils';
 
 const LazyRunDataTable = defineAsyncComponent(
 	async () => await import('@/components/RunDataTable.vue'),
@@ -299,10 +295,6 @@ const isArtificialRecoveredEventItem = computed(
 	() => rawInputData.value?.[0]?.json?.isArtificialRecoveredEventItem,
 );
 
-const isTrimmedManualExecutionDataItem = computed(
-	() => rawInputData.value?.[0]?.json?.[TRIMMED_TASK_DATA_CONNECTIONS_KEY],
-);
-
 const subworkflowExecutionError = computed(() => {
 	if (!node.value) return null;
 	return {
@@ -359,6 +351,10 @@ const dataCount = computed(() =>
 	getDataCount(props.runIndex, currentOutputIndex.value, connectionType.value),
 );
 
+const isTrimmedManualExecutionDataItem = computed(() =>
+	workflowRunData.value ? hasTrimmedRunData(workflowRunData.value) : false,
+);
+
 const unfilteredDataCount = computed(() =>
 	pinnedData.data.value ? pinnedData.data.value.length : rawInputData.value.length,
 );
@@ -673,9 +669,10 @@ watch(node, (newNode, prevNode) => {
 	init();
 });
 
-watch(hasNodeRun, () => {
-	if (props.paneType === 'output') setDisplayMode();
-	else {
+watch([hasNodeRun, isTrimmedManualExecutionDataItem], () => {
+	if (props.paneType === 'output') {
+		setDisplayMode();
+	} else {
 		// InputPanel relies on the outputIndex to check if we have data
 		outputIndex.value = determineInitialOutputIndex();
 	}
@@ -23,8 +23,6 @@ import {
 	clearPopupWindowState,
 	getExecutionErrorMessage,
 	getExecutionErrorToastConfiguration,
-	hasTrimmedData,
-	hasTrimmedItem,
 } from '@/utils/executionUtils';
 import { getTriggerNodeServiceName } from '@/utils/nodeTypesUtils';
 import type { ExecutionFinished } from '@n8n/api-types/push/execution';
@@ -206,25 +204,12 @@ export async function fetchExecutionData(
  * Returns the run execution data from the execution object in a normalized format
  */
 export function getRunExecutionData(execution: SimplifiedExecution): IRunExecutionData {
-	const workflowsStore = useWorkflowsStore();
-
-	const runExecutionData: IRunExecutionData = {
+	return {
 		...execution.data,
 		startData: execution.data?.startData,
 		resultData: execution.data?.resultData ?? { runData: {} },
 		executionData: execution.data?.executionData,
 	};
-
-	if (workflowsStore.workflowExecutionData?.workflowId === execution.workflowId) {
-		const activeRunData = workflowsStore.workflowExecutionData?.data?.resultData?.runData;
-		if (activeRunData) {
-			for (const key of Object.keys(activeRunData)) {
-				if (hasTrimmedItem(activeRunData[key])) continue;
-				runExecutionData.resultData.runData[key] = activeRunData[key];
-			}
-		}
-	}
-
-	return runExecutionData;
 }
 
 /**
@@ -433,14 +418,6 @@ export function setRunExecutionData(
 	const runDataExecutedErrorMessage = getRunDataExecutedErrorMessage(execution);
 	const workflowExecution = workflowsStore.getWorkflowExecution;
 
-	// It does not push the runData as it got already pushed with each
-	// node that did finish. For that reason copy in here the data
-	// which we already have. But if the run data in the store is trimmed,
-	// we skip copying so we use the full data from the final message.
-	if (workflowsStore.getWorkflowRunData && !hasTrimmedData(workflowsStore.getWorkflowRunData)) {
-		runExecutionData.resultData.runData = workflowsStore.getWorkflowRunData;
-	}
-
 	workflowsStore.executingNode.length = 0;
 
 	workflowsStore.setWorkflowExecutionData({
@@ -3,6 +3,7 @@ export * from './executionRecovered';
 export * from './executionStarted';
 export * from './nodeDescriptionUpdated';
 export * from './nodeExecuteAfter';
+export * from './nodeExecuteAfterData';
 export * from './nodeExecuteBefore';
 export * from './reloadNodeType';
 export * from './removeNodeType';
@@ -1,7 +1,9 @@
 import type { NodeExecuteAfter } from '@n8n/api-types/push/execution';
 import { useSchemaPreviewStore } from '@/stores/schemaPreview.store';
 import { useAssistantStore } from '@/stores/assistant.store';
 import { useWorkflowsStore } from '@/stores/workflows.store';
 import type { ITaskData } from 'n8n-workflow';
+import { TRIMMED_TASK_DATA_CONNECTIONS_KEY } from 'n8n-workflow';
+import type { PushPayload } from '@n8n/api-types';
 
 /**
  * Handles the 'nodeExecuteAfter' event, which happens after a node is executed.
@@ -9,26 +11,34 @@ import { useWorkflowsStore } from '@/stores/workflows.store';
 export async function nodeExecuteAfter({ data: pushData }: NodeExecuteAfter) {
 	const workflowsStore = useWorkflowsStore();
 	const assistantStore = useAssistantStore();
 	const schemaPreviewStore = useSchemaPreviewStore();
 
 	/**
-	 * When we receive a placeholder in `nodeExecuteAfter`, we fake the items
-	 * to be the same count as the data the placeholder is standing in for.
-	 * This prevents the items count from jumping up when the execution
-	 * finishes and the full data replaces the placeholder.
+	 * We trim the actual data returned from the node execution to avoid performance issues
+	 * when dealing with large datasets. Instead of storing the actual data, we initially store
+	 * a placeholder object indicating that the data has been trimmed, until the
+	 * `nodeExecuteAfterData` event comes in.
 	 */
-	if (
-		pushData.itemCount &&
-		pushData.data?.data?.main &&
-		Array.isArray(pushData.data.data.main[0]) &&
-		pushData.data.data.main[0].length < pushData.itemCount
-	) {
-		pushData.data.data.main[0]?.push(...new Array(pushData.itemCount - 1).fill({ json: {} }));
-	}
+	const placeholderOutputData: ITaskData['data'] = {
+		main: [],
+	};
+
+	if (typeof pushData.itemCount === 'number') {
+		const fillObject = { json: { [TRIMMED_TASK_DATA_CONNECTIONS_KEY]: true } };
+		const fillArray = new Array(pushData.itemCount).fill(fillObject);
+
+		placeholderOutputData.main = [fillArray];
+	}
 
-	workflowsStore.updateNodeExecutionData(pushData);
+	const pushDataWithPlaceholderOutputData: PushPayload<'nodeExecuteAfterData'> = {
+		...pushData,
+		data: {
+			...pushData.data,
+			data: placeholderOutputData,
+		},
+	};
+
+	workflowsStore.updateNodeExecutionData(pushDataWithPlaceholderOutputData);
 	workflowsStore.removeExecutingNode(pushData.nodeName);
 
 	void assistantStore.onNodeExecution(pushData);
 	void schemaPreviewStore.trackSchemaPreviewExecution(pushData);
 }
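The placeholder is simply `itemCount` marker items, so any item-count display stays stable before the real output lands. A toy illustration of the shape the store holds between the two events; the marker key is the real constant from `n8n-workflow`:

import { TRIMMED_TASK_DATA_CONNECTIONS_KEY } from 'n8n-workflow';

// Sketch: store contents between `nodeExecuteAfter` and
// `nodeExecuteAfterData` for a node that produced 3 items.
const itemCount = 3;
const placeholderItems = new Array(itemCount).fill({
	json: { [TRIMMED_TASK_DATA_CONNECTIONS_KEY]: true },
});
const placeholderOutput = { main: [placeholderItems] };

// The UI can already report `placeholderOutput.main[0].length === 3`,
// so the count does not jump when the real items replace the markers.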
@@ -0,0 +1,42 @@
+import { createTestingPinia } from '@pinia/testing';
+import { setActivePinia } from 'pinia';
+import { nodeExecuteAfterData } from './nodeExecuteAfterData';
+import { useWorkflowsStore } from '@/stores/workflows.store';
+import { mockedStore } from '@/__tests__/utils';
+import type { NodeExecuteAfterData } from '@n8n/api-types/push/execution';
+
+describe('nodeExecuteAfterData', () => {
+	beforeEach(() => {
+		const pinia = createTestingPinia({
+			stubActions: true,
+		});
+		setActivePinia(pinia);
+	});
+
+	it('should update node execution data with incoming payload', async () => {
+		const workflowsStore = mockedStore(useWorkflowsStore);
+
+		const event: NodeExecuteAfterData = {
+			type: 'nodeExecuteAfterData',
+			data: {
+				executionId: 'exec-1',
+				nodeName: 'Test Node',
+				itemCount: 1,
+				data: {
+					executionTime: 0,
+					startTime: 0,
+					executionIndex: 0,
+					source: [],
+					data: {
+						main: [[{ json: { foo: 'bar' } }]],
+					},
+				},
+			},
+		};
+
+		await nodeExecuteAfterData(event);
+
+		expect(workflowsStore.updateNodeExecutionData).toHaveBeenCalledTimes(1);
+		expect(workflowsStore.updateNodeExecutionData).toHaveBeenCalledWith(event.data);
+	});
+});
@@ -0,0 +1,15 @@
+import type { NodeExecuteAfterData } from '@n8n/api-types/push/execution';
+import { useSchemaPreviewStore } from '@/stores/schemaPreview.store';
+import { useWorkflowsStore } from '@/stores/workflows.store';
+
+/**
+ * Handles the 'nodeExecuteAfterData' event, which is sent after a node has executed and contains the resulting data.
+ */
+export async function nodeExecuteAfterData({ data: pushData }: NodeExecuteAfterData) {
+	const workflowsStore = useWorkflowsStore();
+	const schemaPreviewStore = useSchemaPreviewStore();
+
+	workflowsStore.updateNodeExecutionData(pushData);
+
+	void schemaPreviewStore.trackSchemaPreviewExecution(pushData);
+}
@@ -22,6 +22,7 @@ vi.mock('@/composables/usePushConnection/handlers', () => ({
 	nodeDescriptionUpdated: vi.fn(),
 	nodeExecuteBefore: vi.fn(),
 	nodeExecuteAfter: vi.fn(),
+	nodeExecuteAfterData: vi.fn(),
 	executionStarted: vi.fn(),
 	executionWaiting: vi.fn(),
 	sendWorkerStatusMessage: vi.fn(),
@@ -10,6 +10,7 @@ import {
 	nodeDescriptionUpdated,
 	nodeExecuteBefore,
 	nodeExecuteAfter,
+	nodeExecuteAfterData,
 	executionStarted,
 	sendWorkerStatusMessage,
 	sendConsoleMessage,
@@ -60,6 +61,8 @@ export function usePushConnection(options: { router: ReturnType<typeof useRouter
 			return await nodeExecuteBefore(event);
 		case 'nodeExecuteAfter':
 			return await nodeExecuteAfter(event);
+		case 'nodeExecuteAfterData':
+			return await nodeExecuteAfterData(event);
 		case 'executionStarted':
 			return await executionStarted(event);
 		case 'sendWorkerStatusMessage':
@@ -9,7 +9,7 @@ import { useWorkflowsStore } from '@/stores/workflows.store';
 import { computed, h, nextTick, ref } from 'vue';
 import {
 	aiAgentNode,
-	aiChatExecutionResponse,
+	aiChatExecutionResponse as aiChatExecutionResponseTemplate,
 	aiChatWorkflow,
 	aiManualExecutionResponse,
 	aiManualWorkflow,
@@ -62,6 +62,8 @@ describe('LogsPanel', () => {
 	let ndvStore: ReturnType<typeof mockedStore<typeof useNDVStore>>;
 	let uiStore: ReturnType<typeof mockedStore<typeof useUIStore>>;
 
+	let aiChatExecutionResponse: typeof aiChatExecutionResponseTemplate;
+
 	function render() {
 		const wrapper = renderComponent(LogsPanel, {
 			global: {
@@ -116,6 +118,8 @@ describe('LogsPanel', () => {
 		} as DOMRect);
 
 		localStorage.clear();
+
+		aiChatExecutionResponse = deepCopy(aiChatExecutionResponseTemplate);
 	});
 
 	afterEach(() => {
@@ -326,6 +330,7 @@ describe('LogsPanel', () => {
 		workflowsStore.updateNodeExecutionData({
 			nodeName: 'AI Agent',
 			executionId: '567',
+			itemCount: 1,
 			data: {
 				executionIndex: 0,
 				startTime: Date.parse('2025-04-20T12:34:51.000Z'),
@@ -459,7 +464,7 @@ describe('LogsPanel', () => {
 
 		// Create deep copy so that renaming doesn't affect other test cases
 		workflowsStore.setWorkflow(deepCopy(aiChatWorkflow));
-		workflowsStore.setWorkflowExecutionData(deepCopy(aiChatExecutionResponse));
+		workflowsStore.setWorkflowExecutionData(aiChatExecutionResponse);
 
 		const rendered = render();
@@ -541,7 +546,7 @@ describe('LogsPanel', () => {
 		const canvasOperations = useCanvasOperations();
 
 		workflowsStore.setWorkflow(deepCopy(aiChatWorkflow));
-		workflowsStore.setWorkflowExecutionData(deepCopy(aiChatExecutionResponse));
+		workflowsStore.setWorkflowExecutionData(aiChatExecutionResponse);
 
 		logsStore.toggleLogSelectionSync(true);
@@ -600,7 +605,10 @@ describe('LogsPanel', () => {
 
 		// Verify message and response
 		expect(await findByText('Hello AI!')).toBeInTheDocument();
-		workflowsStore.setWorkflowExecutionData({ ...aiChatExecutionResponse, status: 'success' });
+		workflowsStore.setWorkflowExecutionData({
+			...aiChatExecutionResponse,
+			status: 'success',
+		});
 		await waitFor(() => expect(getByText('AI response message')).toBeInTheDocument());
 
 		// Verify workflow execution
@@ -79,6 +79,7 @@ export const useWebSocketClient = <T>(options: UseWebSocketClientOptions<T>) =>
 		socket.value.addEventListener('message', onMessage);
 		socket.value.addEventListener('error', onError);
 		socket.value.addEventListener('close', onConnectionLost);
+		socket.value.binaryType = 'arraybuffer';
 	};
 
 	const reconnectTimer = useReconnectTimer({
@@ -60,14 +60,20 @@ export const usePushConnectionStore = defineStore(STORES.PUSH, () => {
 	 * Process a newly received message
 	 */
 	async function onMessage(data: unknown) {
-		let receivedData: PushMessage;
+		// The `nodeExecuteAfterData` message is sent as binary data
+		// to be handled by a web worker in the future.
+		if (data instanceof ArrayBuffer) {
+			data = new TextDecoder('utf-8').decode(new Uint8Array(data));
+		}
+
+		let parsedData: PushMessage;
 		try {
-			receivedData = JSON.parse(data as string);
+			parsedData = JSON.parse(data as string);
 		} catch (error) {
 			return;
 		}
 
-		onMessageReceivedHandlers.value.forEach((handler) => handler(receivedData));
+		onMessageReceivedHandlers.value.forEach((handler) => handler(parsedData));
 	}
 
 	const url = getConnectionUrl();
@@ -119,7 +119,7 @@ describe('schemaPreview.store', () => {
 			}),
 		);
 		await store.trackSchemaPreviewExecution(
-			mock<PushPayload<'nodeExecuteAfter'>>({
+			mock<PushPayload<'nodeExecuteAfterData'>>({
 				nodeName: 'Test',
 				data: {
 					executionStatus: 'success',
@@ -145,7 +145,7 @@ describe('schemaPreview.store', () => {
 		const store = useSchemaPreviewStore();
 		vi.mocked(useWorkflowsStore().getNodeByName).mockReturnValueOnce(mock<INode>());
 		await store.trackSchemaPreviewExecution(
-			mock<PushPayload<'nodeExecuteAfter'>>({
+			mock<PushPayload<'nodeExecuteAfterData'>>({
 				nodeName: 'Test',
 				data: {
 					executionStatus: 'success',
@@ -160,7 +160,7 @@ describe('schemaPreview.store', () => {
 	it('should not track failed executions', async () => {
 		const store = useSchemaPreviewStore();
 		await store.trackSchemaPreviewExecution(
-			mock<PushPayload<'nodeExecuteAfter'>>({
+			mock<PushPayload<'nodeExecuteAfterData'>>({
 				data: {
 					executionStatus: 'error',
 				},
@@ -48,7 +48,7 @@ export const useSchemaPreviewStore = defineStore('schemaPreview', () => {
 		}
 	}
 
-	async function trackSchemaPreviewExecution(pushEvent: PushPayload<'nodeExecuteAfter'>) {
+	async function trackSchemaPreviewExecution(pushEvent: PushPayload<'nodeExecuteAfterData'>) {
 		if (schemaPreviews.size === 0 || pushEvent.data.executionStatus !== 'success') {
 			return;
 		}
@@ -679,7 +679,17 @@ describe('useWorkflowsStore', () => {
 	});
 
 	describe('updateNodeExecutionData', () => {
-		const { successEvent, errorEvent, executionResponse } = generateMockExecutionEvents();
+		let successEvent: ReturnType<typeof generateMockExecutionEvents>['successEvent'];
+		let errorEvent: ReturnType<typeof generateMockExecutionEvents>['errorEvent'];
+		let executionResponse: ReturnType<typeof generateMockExecutionEvents>['executionResponse'];
+
+		beforeEach(() => {
+			const events = generateMockExecutionEvents();
+			successEvent = events.successEvent;
+			errorEvent = events.errorEvent;
+			executionResponse = events.executionResponse;
+		});
 
 		it('should throw error if not initialized', () => {
 			expect(() => workflowsStore.updateNodeExecutionData(successEvent)).toThrowError();
 		});
@@ -1420,6 +1430,7 @@ function generateMockExecutionEvents() {
 	const successEvent: PushPayload<'nodeExecuteAfter'> = {
 		executionId: '59',
 		nodeName: 'When clicking ‘Execute workflow’',
+		itemCount: 1,
 		data: {
 			hints: [],
 			startTime: 1727867966633,
@@ -1427,18 +1438,6 @@ function generateMockExecutionEvents() {
 			executionTime: 1,
 			source: [],
 			executionStatus: 'success',
-			data: {
-				main: [
-					[
-						{
-							json: {},
-							pairedItem: {
-								item: 0,
-							},
-						},
-					],
-				],
-			},
 		},
 	};
@@ -567,8 +567,8 @@ export const useWorkflowsStore = defineStore(STORES.WORKFLOWS, () => {
 			isArchived?: boolean;
 			parentFolderId?: string;
 		} = {},
-		includeFolders: boolean = false,
-		onlySharedWithMe: boolean = false,
+		includeFolders = false,
+		onlySharedWithMe = false,
 	): Promise<WorkflowListResource[]> {
 		const filter = { ...filters, projectId };
 		const options = {
@@ -1154,7 +1154,7 @@ export const useWorkflowsStore = defineStore(STORES.WORKFLOWS, () => {
 				connections[index].index === destinationData.index
 			) {
 				// Found the connection to remove
-				connections.splice(parseInt(index, 10), 1);
+				connections.splice(Number.parseInt(index, 10), 1);
 			}
 		}
@@ -1196,16 +1196,16 @@ export const useWorkflowsStore = defineStore(STORES.WORKFLOWS, () => {
 		for (sourceIndex of Object.keys(workflow.value.connections[sourceNode][type])) {
 			indexesToRemove.length = 0;
 			const connectionsToRemove =
-				workflow.value.connections[sourceNode][type][parseInt(sourceIndex, 10)];
+				workflow.value.connections[sourceNode][type][Number.parseInt(sourceIndex, 10)];
 			if (connectionsToRemove) {
 				for (connectionIndex of Object.keys(connectionsToRemove)) {
-					connectionData = connectionsToRemove[parseInt(connectionIndex, 10)];
+					connectionData = connectionsToRemove[Number.parseInt(connectionIndex, 10)];
 					if (connectionData.node === node.name) {
 						indexesToRemove.push(connectionIndex);
 					}
 				}
 				indexesToRemove.forEach((index) => {
-					connectionsToRemove.splice(parseInt(index, 10), 1);
+					connectionsToRemove.splice(Number.parseInt(index, 10), 1);
 				});
 			}
 		}
@@ -1560,7 +1560,7 @@ export const useWorkflowsStore = defineStore(STORES.WORKFLOWS, () => {
 		];
 	}
 
-	function updateNodeExecutionData(pushData: PushPayload<'nodeExecuteAfter'>): void {
+	function updateNodeExecutionData(pushData: PushPayload<'nodeExecuteAfterData'>): void {
 		if (!workflowExecutionData.value?.data) {
 			throw new Error('The "workflowExecutionData" is not initialized!');
 		}
@@ -1571,19 +1571,7 @@ export const useWorkflowsStore = defineStore(STORES.WORKFLOWS, () => {
 		if (!node) return;
 
 		if (workflowExecutionData.value.data.resultData.runData[nodeName] === undefined) {
-			workflowExecutionData.value = {
-				...workflowExecutionData.value,
-				data: {
-					...workflowExecutionData.value.data,
-					resultData: {
-						...workflowExecutionData.value.data.resultData,
-						runData: {
-							...workflowExecutionData.value.data.resultData.runData,
-							[nodeName]: [],
-						},
-					},
-				},
-			};
+			workflowExecutionData.value.data.resultData.runData[nodeName] = [];
 		}
 
 		const tasksData = workflowExecutionData.value.data!.resultData.runData[nodeName];
@@ -1611,7 +1599,7 @@ export const useWorkflowsStore = defineStore(STORES.WORKFLOWS, () => {
 			existingRunIndex > -1 && !hasWaitingItems ? existingRunIndex : tasksData.length - 1;
 		const status = tasksData[index]?.executionStatus ?? 'unknown';
 
-		if ('waiting' === status || 'running' === status) {
+		if ('waiting' === status || 'running' === status || tasksData[existingRunIndex]) {
 			tasksData.splice(index, 1, data);
 		} else {
 			tasksData.push(data);
@@ -1630,16 +1618,7 @@ export const useWorkflowsStore = defineStore(STORES.WORKFLOWS, () => {
 
 		const { [nodeName]: removedRunData, ...remainingRunData } =
 			workflowExecutionData.value.data.resultData.runData;
-		workflowExecutionData.value = {
-			...workflowExecutionData.value,
-			data: {
-				...workflowExecutionData.value.data,
-				resultData: {
-					...workflowExecutionData.value.data.resultData,
-					runData: remainingRunData,
-				},
-			},
-		};
+		workflowExecutionData.value.data.resultData.runData = remainingRunData;
 	}
 
 	function pinDataByNodeName(nodeName: string): INodeExecutionData[] | undefined {
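The spread chains deleted above rebuilt the entire `workflowExecutionData` object just to change one nested key; under Vue 3's proxy-based reactivity, mutating a nested property of a `ref`'s value triggers the same updates. A toy demonstration in plain Vue, not the n8n store:

import { ref, watchEffect } from 'vue';

// Toy shape standing in for workflowExecutionData.
const execData = ref<{ resultData: { runData: Record<string, unknown[]> } }>({
	resultData: { runData: {} },
});

watchEffect(() => {
	console.log('nodes with run data:', Object.keys(execData.value.resultData.runData));
});

// Nested mutation is tracked by Vue's proxies; there is no need to rebuild
// the object with spreads just to add or remove one key.
execData.value.resultData.runData['My Node'] = [];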
@@ -12,6 +12,7 @@ import type {
 	IRunData,
 	ExecutionError,
 	INodeTypeBaseDescription,
+	INodeExecutionData,
 } from 'n8n-workflow';
 import type {
 	ExecutionFilterType,
@@ -107,7 +108,7 @@ export const executionFilterToQueryFilter = (
 	return queryFilter;
 };
 
-let formPopupWindow: boolean = false;
+let formPopupWindow = false;
 
 export const openFormPopupWindow = (url: string) => {
 	if (!formPopupWindow) {
@@ -226,26 +227,42 @@ export const waitingNodeTooltip = (node: INodeUi | null | undefined) => {
 	return '';
 };
 
+/**
+ * Check whether node execution data contains a trimmed item.
+ */
+export function isTrimmedNodeExecutionData(data: INodeExecutionData[] | null) {
+	return data?.some((entry) => entry.json?.[TRIMMED_TASK_DATA_CONNECTIONS_KEY]);
+}
+
 /**
  * Check whether task data contains a trimmed item.
  *
  * In manual executions in scaling mode, the payload in push messages may be
  * arbitrarily large. To protect Redis as it relays run data from workers to
- * main process, we set a limit on payload size. If the payload is oversize,
+ * the main process, we set a limit on payload size. If the payload is oversized,
  * we replace it with a placeholder, which is later overridden on execution
 * finish, when the client receives the full data.
 */
-export function hasTrimmedItem(taskData: ITaskData[]) {
-	return taskData[0]?.data?.main?.[0]?.[0]?.json?.[TRIMMED_TASK_DATA_CONNECTIONS_KEY] ?? false;
+export function isTrimmedTaskData(taskData: ITaskData) {
+	return taskData.data?.main?.some((main) => isTrimmedNodeExecutionData(main));
 }
 
+/**
+ * Check whether task data contains a trimmed item.
+ *
+ * See {@link isTrimmedTaskData} for more details.
+ */
+export function hasTrimmedTaskData(taskData: ITaskData[]) {
+	return taskData.some(isTrimmedTaskData);
+}
+
 /**
  * Check whether run data contains any trimmed items.
  *
- * See {@link hasTrimmedItem} for more details.
+ * See {@link hasTrimmedTaskData} for more details.
 */
-export function hasTrimmedData(runData: IRunData) {
-	return Object.keys(runData).some((nodeName) => hasTrimmedItem(runData[nodeName]));
+export function hasTrimmedRunData(runData: IRunData) {
+	return Object.keys(runData).some((nodeName) => hasTrimmedTaskData(runData[nodeName]));
 }
 
 export function executionRetryMessage(executionStatus: ExecutionStatus):
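The renamed helpers mirror the nesting of the run data itself: items within a connection (`isTrimmedNodeExecutionData`), connections within a task (`isTrimmedTaskData`), tasks per node (`hasTrimmedTaskData`), and the whole run (`hasTrimmedRunData`). A small usage sketch with hand-built run data; the cast is only to keep the toy fixture short:

import type { IRunData } from 'n8n-workflow';
import { TRIMMED_TASK_DATA_CONNECTIONS_KEY } from 'n8n-workflow';
import { hasTrimmedRunData } from '@/utils/executionUtils';

// Sketch: run data in which one node still holds placeholder items.
const runData = {
	'HTTP Request': [
		{
			startTime: 0,
			executionIndex: 0,
			executionTime: 0,
			source: [],
			data: { main: [[{ json: { [TRIMMED_TASK_DATA_CONNECTIONS_KEY]: true } }]] },
		},
	],
} as unknown as IRunData;

// True: at least one item is a trimmed-data placeholder, so the client
// should fetch the full execution rather than trust the store copy.
hasTrimmedRunData(runData);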
@@ -29,6 +29,7 @@ test.describe('Logs', () => {
 		await expect(n8n.canvas.logsPanel.getLogEntries()).toHaveCount(0);
 
 		await n8n.canvas.clickExecuteWorkflowButton();
+
 		await expect(
 			n8n.canvas.logsPanel.getOverviewStatus().filter({ hasText: 'Running' }),
 		).toBeVisible();