Mirror of https://github.com/Abdulazizzn/n8n-enterprise-unlocked.git (synced 2025-12-17 18:12:04 +00:00)
feat(editor): Make logs applicable for all nodes (#14397)
Co-authored-by: कारतोफ्फेलस्क्रिप्ट™ <aditya@netroy.in>
@@ -33,8 +33,10 @@ const telemetry = useTelemetry();
const { rootStyles, height, chatWidth, onWindowResize, onResizeDebounced, onResizeChatDebounced } =
  useResize(container);

const { currentSessionId, messages, connectedNode, sendMessage, refreshSession, displayExecution } =
  useChatState(ref(false), onWindowResize);
const { currentSessionId, messages, sendMessage, refreshSession, displayExecution } = useChatState(
  ref(false),
  onWindowResize,
);
const isLogDetailsOpen = computed(() => selectedLogEntry.value !== undefined);

const { canPopOut, isPoppedOut, pipWindow } = usePiPWindow({
@@ -134,7 +136,6 @@ watch([panelState, height], ([state, h]) => {
    <LogsOverviewPanel
      :class="$style.logsOverview"
      :is-open="panelState !== LOGS_PANEL_STATE.CLOSED"
      :node="connectedNode"
      :selected="selectedLogEntry"
      @click-header="handleClickHeader"
      @select="handleSelectLogEntry"

@@ -8,13 +8,11 @@ import { N8nButton, N8nRadioButtons, N8nText, N8nTooltip } from '@n8n/design-sys
import { computed } from 'vue';
import { ElTree, type TreeNode as ElTreeNode } from 'element-plus';
import {
  createAiData,
  createLogEntries,
  getSubtreeTotalConsumedTokens,
  getTotalConsumedTokens,
  getTreeNodeData,
  type TreeNode,
} from '@/components/RunDataAi/utils';
import { type INodeUi } from '@/Interface';
import { upperFirst } from 'lodash-es';
import { useTelemetry } from '@/composables/useTelemetry';
import ConsumedTokenCountText from '@/components/CanvasChat/future/components/ConsumedTokenCountText.vue';
@@ -24,9 +22,8 @@ import { useRunWorkflow } from '@/composables/useRunWorkflow';
import { useNDVStore } from '@/stores/ndv.store';
import { useRouter } from 'vue-router';

const { node, isOpen, selected } = defineProps<{
const { isOpen, selected } = defineProps<{
  isOpen: boolean;
  node: INodeUi | null;
  selected?: LogEntryIdentity;
}>();

@@ -44,13 +41,10 @@ const nodeHelpers = useNodeHelpers();
const isClearExecutionButtonVisible = useClearExecutionButtonVisible();
const workflow = computed(() => workflowsStore.getCurrentWorkflow());
const executionTree = computed<TreeNode[]>(() =>
  node
    ? getTreeNodeData(
        node.name,
        workflow.value,
        createAiData(node.name, workflow.value, workflowsStore.getWorkflowResultDataByNodeName),
      )
    : [],
  createLogEntries(
    workflow.value,
    workflowsStore.workflowExecutionData?.data?.resultData.runData ?? {},
  ),
);
const isEmpty = computed(() => workflowsStore.workflowExecutionData === null);
const switchViewOptions = computed(() => [
@@ -272,6 +266,7 @@ async function handleTriggerPartialExecution(treeNode: TreeNode) {

.switchViewButtons {
  position: absolute;
  z-index: 10; /* higher than log entry rows background */
  right: 0;
  top: 0;
  margin: var(--spacing-2xs);

@@ -70,8 +70,12 @@ function isLastChild(level: number) {
  }

  const siblings = parent?.children ?? [];
  const lastSibling = siblings[siblings.length - 1];

  return data === siblings[siblings.length - 1];
  return (
    (data === undefined && lastSibling === undefined) ||
    (data?.node === lastSibling?.node && data?.runIndex === lastSibling?.runIndex)
  );
}
</script>

@@ -1,20 +1,20 @@
import { createTestNode, createTestWorkflowObject } from '@/__tests__/mocks';
import { createAiData, getTreeNodeData } from '@/components/RunDataAi/utils';
import { createAiData, createLogEntries, getTreeNodeData } from '@/components/RunDataAi/utils';
import { type ITaskData, NodeConnectionTypes } from 'n8n-workflow';

describe(getTreeNodeData, () => {
  function createTaskData(partialData: Partial<ITaskData>): ITaskData {
    return {
      startTime: 0,
      executionIndex: 0,
      executionTime: 1,
      source: [],
      executionStatus: 'success',
      data: { main: [[{ json: {} }]] },
      ...partialData,
    };
  }
function createTaskData(partialData: Partial<ITaskData>): ITaskData {
  return {
    startTime: 0,
    executionIndex: 0,
    executionTime: 1,
    source: [],
    executionStatus: 'success',
    data: { main: [[{ json: {} }]] },
    ...partialData,
  };
}

describe(getTreeNodeData, () => {
  it('should generate one node per execution', () => {
    const workflow = createTestWorkflowObject({
      nodes: [
@@ -101,7 +101,7 @@ describe(getTreeNodeData, () => {
    ).toEqual([
      {
        depth: 0,
        id: 'A',
        id: 'A:0',
        node: 'A',
        runIndex: 0,
        startTime: 0,
@@ -115,7 +115,7 @@ describe(getTreeNodeData, () => {
        children: [
          {
            depth: 1,
            id: 'B',
            id: 'B:0',
            node: 'B',
            runIndex: 0,
            startTime: Date.parse('2025-02-26T00:00:01.000Z'),
@@ -130,7 +130,7 @@ describe(getTreeNodeData, () => {
              {
                children: [],
                depth: 2,
                id: 'C',
                id: 'C:0',
                node: 'C',
                runIndex: 0,
                startTime: Date.parse('2025-02-26T00:00:02.000Z'),
@@ -146,7 +146,7 @@ describe(getTreeNodeData, () => {
          },
          {
            depth: 1,
            id: 'B',
            id: 'B:1',
            node: 'B',
            runIndex: 1,
            startTime: Date.parse('2025-02-26T00:00:03.000Z'),
@@ -161,7 +161,7 @@ describe(getTreeNodeData, () => {
              {
                children: [],
                depth: 2,
                id: 'C',
                id: 'C:1',
                node: 'C',
                runIndex: 1,
                startTime: Date.parse('2025-02-26T00:00:04.000Z'),
@@ -180,3 +180,82 @@ describe(getTreeNodeData, () => {
    ]);
  });
});

describe(createLogEntries, () => {
  it('should return root node log entries in ascending order of executionIndex', () => {
    const workflow = createTestWorkflowObject({
      nodes: [
        createTestNode({ name: 'A' }),
        createTestNode({ name: 'B' }),
        createTestNode({ name: 'C' }),
      ],
      connections: {
        B: { main: [[{ node: 'A', type: NodeConnectionTypes.Main, index: 0 }]] },
        C: { main: [[{ node: 'B', type: NodeConnectionTypes.Main, index: 0 }]] },
      },
    });

    expect(
      createLogEntries(workflow, {
        A: [
          createTaskData({ startTime: Date.parse('2025-04-04T00:00:00.000Z'), executionIndex: 0 }),
        ],
        B: [
          createTaskData({ startTime: Date.parse('2025-04-04T00:00:01.000Z'), executionIndex: 1 }),
        ],
        C: [
          createTaskData({ startTime: Date.parse('2025-04-04T00:00:02.000Z'), executionIndex: 3 }),
          createTaskData({ startTime: Date.parse('2025-04-04T00:00:03.000Z'), executionIndex: 2 }),
        ],
      }),
    ).toEqual([
      expect.objectContaining({ node: 'A', runIndex: 0 }),
      expect.objectContaining({ node: 'B', runIndex: 0 }),
      expect.objectContaining({ node: 'C', runIndex: 1 }),
      expect.objectContaining({ node: 'C', runIndex: 0 }),
    ]);
  });

  it('should return sub node log entries in ascending order of executionIndex', () => {
    const workflow = createTestWorkflowObject({
      nodes: [
        createTestNode({ name: 'A' }),
        createTestNode({ name: 'B' }),
        createTestNode({ name: 'C' }),
      ],
      connections: {
        A: { main: [[{ node: 'B', type: NodeConnectionTypes.Main, index: 0 }]] },
        C: {
          [NodeConnectionTypes.AiLanguageModel]: [
            [{ node: 'B', type: NodeConnectionTypes.AiLanguageModel, index: 0 }],
          ],
        },
      },
    });

    expect(
      createLogEntries(workflow, {
        A: [
          createTaskData({ startTime: Date.parse('2025-04-04T00:00:00.000Z'), executionIndex: 0 }),
        ],
        B: [
          createTaskData({ startTime: Date.parse('2025-04-04T00:00:01.000Z'), executionIndex: 1 }),
        ],
        C: [
          createTaskData({ startTime: Date.parse('2025-04-04T00:00:02.000Z'), executionIndex: 3 }),
          createTaskData({ startTime: Date.parse('2025-04-04T00:00:03.000Z'), executionIndex: 2 }),
        ],
      }),
    ).toEqual([
      expect.objectContaining({ node: 'A', runIndex: 0 }),
      expect.objectContaining({
        node: 'B',
        runIndex: 0,
        children: [
          expect.objectContaining({ node: 'C', runIndex: 1 }),
          expect.objectContaining({ node: 'C', runIndex: 0 }),
        ],
      }),
    ]);
  });
});

@@ -1,5 +1,6 @@
import { type LlmTokenUsageData, type IAiDataContent } from '@/Interface';
import {
  type IRunData,
  type INodeExecutionData,
  type ITaskData,
  type ITaskDataConnections,
@@ -28,16 +29,17 @@ function createNode(
  parent: TreeNode | undefined,
  nodeName: string,
  currentDepth: number,
  runIndex: number,
  r?: AIResult,
  children: TreeNode[] = [],
): TreeNode {
  return {
    parent,
    node: nodeName,
    id: nodeName,
    id: `${nodeName}:${runIndex}`,
    depth: currentDepth,
    startTime: r?.data?.metadata?.startTime ?? 0,
    runIndex: r?.runIndex ?? 0,
    runIndex,
    children,
    consumedTokens: getConsumedTokens(r?.data),
  };
@@ -47,8 +49,9 @@ export function getTreeNodeData(
  nodeName: string,
  workflow: Workflow,
  aiData: AIResult[] | undefined,
  runIndex?: number,
): TreeNode[] {
  return getTreeNodeDataRec(undefined, nodeName, 0, workflow, aiData, undefined);
  return getTreeNodeDataRec(undefined, nodeName, 0, workflow, aiData, runIndex);
}

function getTreeNodeDataRec(
@@ -66,32 +69,27 @@ function getTreeNodeDataRec(
    ) ?? [];

  if (!connections) {
    return resultData.map((d) => createNode(parent, nodeName, currentDepth, d));
    return resultData.map((d) => createNode(parent, nodeName, currentDepth, d.runIndex, d));
  }

  // Get the first level of children
  const connectedSubNodes = workflow.getParentNodes(nodeName, 'ALL_NON_MAIN', 1);

  const treeNode = createNode(parent, nodeName, currentDepth);
  const children = connectedSubNodes.flatMap((name) => {
    // Only include sub-nodes which have data
    return (
      aiData
        ?.filter(
          (data) => data.node === name && (runIndex === undefined || data.runIndex === runIndex),
        )
        .flatMap((data) =>
          getTreeNodeDataRec(treeNode, name, currentDepth + 1, workflow, aiData, data.runIndex),
        ) ?? []
    );
  });
  const treeNode = createNode(parent, nodeName, currentDepth, runIndex ?? 0);

  children.sort((a, b) => a.startTime - b.startTime);
  // Only include sub-nodes which have data
  const children = (aiData ?? []).flatMap((data) =>
    connectedSubNodes.includes(data.node) && (runIndex === undefined || data.runIndex === runIndex)
      ? getTreeNodeDataRec(treeNode, data.node, currentDepth + 1, workflow, aiData, data.runIndex)
      : [],
  );

  treeNode.children = children;

  if (resultData.length) {
    return resultData.map((r) => createNode(parent, nodeName, currentDepth, r, children));
    return resultData.map((r) =>
      createNode(parent, nodeName, currentDepth, r.runIndex, r, children),
    );
  }

  return [treeNode];
@@ -102,31 +100,27 @@ export function createAiData(
  workflow: Workflow,
  getWorkflowResultDataByNodeName: (nodeName: string) => ITaskData[] | null,
): AIResult[] {
  const result: AIResult[] = [];
  const connectedSubNodes = workflow.getParentNodes(nodeName, 'ALL_NON_MAIN');
  return workflow
    .getParentNodes(nodeName, 'ALL_NON_MAIN')
    .flatMap((node) =>
      (getWorkflowResultDataByNodeName(node) ?? []).map((task, index) => ({ node, task, index })),
    )
    .sort((a, b) => {
      // Sort the data by execution index or start time
      if (a.task.executionIndex !== undefined && b.task.executionIndex !== undefined) {
        return a.task.executionIndex - b.task.executionIndex;
      }

  connectedSubNodes.forEach((node) => {
    const nodeRunData = getWorkflowResultDataByNodeName(node) ?? [];
      const aTime = a.task.startTime ?? 0;
      const bTime = b.task.startTime ?? 0;

    nodeRunData.forEach((run, runIndex) => {
      const referenceData = {
        data: getReferencedData(run, false, true)[0],
        node,
        runIndex,
      };

      result.push(referenceData);
    });
  });

  // Sort the data by start time
  result.sort((a, b) => {
    const aTime = a.data?.metadata?.startTime ?? 0;
    const bTime = b.data?.metadata?.startTime ?? 0;
    return aTime - bTime;
  });

  return result;
      return aTime - bTime;
    })
    .map(({ node, task, index }) => ({
      data: getReferencedData(task, false, true)[0],
      node,
      runIndex: index,
    }));
}

export function getReferencedData(
@@ -231,3 +225,44 @@ export function formatTokenUsageCount(

  return usage.isEstimate ? `~${count}` : count.toLocaleString();
}

export function createLogEntries(workflow: Workflow, runData: IRunData) {
  const runs = Object.entries(runData)
    .filter(([nodeName]) => workflow.getChildNodes(nodeName, 'ALL_NON_MAIN').length === 0)
    .flatMap(([nodeName, taskData]) =>
      taskData.map((task, runIndex) => ({ nodeName, task, runIndex })),
    )
    .sort((a, b) => {
      if (a.task.executionIndex !== undefined && b.task.executionIndex !== undefined) {
        return a.task.executionIndex - b.task.executionIndex;
      }

      return a.nodeName === b.nodeName
        ? a.runIndex - b.runIndex
        : a.task.startTime - b.task.startTime;
    });

  return runs.flatMap(({ nodeName, runIndex, task }) => {
    if (workflow.getParentNodes(nodeName, 'ALL_NON_MAIN').length > 0) {
      return getTreeNodeData(
        nodeName,
        workflow,
        createAiData(nodeName, workflow, (node) => runData[node] ?? []),
        undefined,
      );
    }

    return getTreeNodeData(
      nodeName,
      workflow,
      [
        {
          data: getReferencedData(task, false, true)[0],
          node: nodeName,
          runIndex,
        },
      ],
      runIndex,
    );
  });
}

@@ -569,8 +569,7 @@ export function usePushConnection({ router }: { router: ReturnType<typeof useRou
      void useSchemaPreviewStore().trackSchemaPreviewExecution(pushData);
    } else if (receivedData.type === 'nodeExecuteBefore') {
      // A node started to be executed. Set it as executing.
      const pushData = receivedData.data;
      workflowsStore.addExecutingNode(pushData.nodeName);
      workflowsStore.setNodeExecuting(receivedData.data);
    } else if (receivedData.type === 'testWebhookDeleted') {
      // A test-webhook was deleted
      const pushData = receivedData.data;

@@ -1387,6 +1387,28 @@ export const useWorkflowsStore = defineStore(STORES.WORKFLOWS, () => {
    return testUrl;
  }

  function setNodeExecuting(pushData: PushPayload<'nodeExecuteBefore'>): void {
    addExecutingNode(pushData.nodeName);

    if (settingsStore.isNewLogsEnabled) {
      const node = getNodeByName(pushData.nodeName);

      if (!node || !workflowExecutionData.value?.data) {
        return;
      }

      if (workflowExecutionData.value.data.resultData.runData[pushData.nodeName] === undefined) {
        workflowExecutionData.value.data.resultData.runData[pushData.nodeName] = [];
      }

      workflowExecutionData.value.data.resultData.runData[pushData.nodeName].push({
        executionStatus: 'running',
        executionTime: 0,
        ...pushData.data,
      });
    }
  }

  function updateNodeExecutionData(pushData: PushPayload<'nodeExecuteAfter'>): void {
    if (!workflowExecutionData.value?.data) {
      throw new Error('The "workflowExecutionData" is not initialized!');
@@ -1424,7 +1446,9 @@ export const useWorkflowsStore = defineStore(STORES.WORKFLOWS, () => {
        openFormPopupWindow(testUrl);
      }
    } else {
      if (tasksData.length && tasksData[tasksData.length - 1].executionStatus === 'waiting') {
      const status = tasksData[tasksData.length - 1]?.executionStatus ?? 'unknown';

      if ('waiting' === status || (settingsStore.isNewLogsEnabled && 'running' === status)) {
        tasksData.splice(tasksData.length - 1, 1, data);
      } else {
        tasksData.push(data);
@@ -1785,7 +1809,7 @@ export const useWorkflowsStore = defineStore(STORES.WORKFLOWS, () => {
    makeNewWorkflowShareable,
    resetWorkflow,
    resetState,
    addExecutingNode,
    setNodeExecuting,
    removeExecutingNode,
    setWorkflowId,
    setUsedCredentials,