feat(editor): Make logs applicable for all nodes (#14397)
Co-authored-by: कारतोफ्फेलस्क्रिप्ट™ <aditya@netroy.in>
@@ -1,4 +1,4 @@
 describe('Logs', () => {
-  // TODO: the test can be written without AI nodes once https://linear.app/n8n/issue/SUG-22 is implemented
+  // TODO: the test can be written without AI nodes once https://linear.app/n8n/issue/SUG-39 is implemented
   it('should open NDV with the run index that corresponds to clicked log entry');
 });
@@ -33,8 +33,10 @@ const telemetry = useTelemetry();
 const { rootStyles, height, chatWidth, onWindowResize, onResizeDebounced, onResizeChatDebounced } =
   useResize(container);

-const { currentSessionId, messages, connectedNode, sendMessage, refreshSession, displayExecution } =
-  useChatState(ref(false), onWindowResize);
+const { currentSessionId, messages, sendMessage, refreshSession, displayExecution } = useChatState(
+  ref(false),
+  onWindowResize,
+);
 const isLogDetailsOpen = computed(() => selectedLogEntry.value !== undefined);

 const { canPopOut, isPoppedOut, pipWindow } = usePiPWindow({
@@ -134,7 +136,6 @@ watch([panelState, height], ([state, h]) => {
       <LogsOverviewPanel
         :class="$style.logsOverview"
         :is-open="panelState !== LOGS_PANEL_STATE.CLOSED"
-        :node="connectedNode"
         :selected="selectedLogEntry"
         @click-header="handleClickHeader"
         @select="handleSelectLogEntry"
@@ -8,13 +8,11 @@ import { N8nButton, N8nRadioButtons, N8nText, N8nTooltip } from '@n8n/design-sys
 import { computed } from 'vue';
 import { ElTree, type TreeNode as ElTreeNode } from 'element-plus';
 import {
-  createAiData,
+  createLogEntries,
   getSubtreeTotalConsumedTokens,
   getTotalConsumedTokens,
-  getTreeNodeData,
   type TreeNode,
 } from '@/components/RunDataAi/utils';
-import { type INodeUi } from '@/Interface';
 import { upperFirst } from 'lodash-es';
 import { useTelemetry } from '@/composables/useTelemetry';
 import ConsumedTokenCountText from '@/components/CanvasChat/future/components/ConsumedTokenCountText.vue';
@@ -24,9 +22,8 @@ import { useRunWorkflow } from '@/composables/useRunWorkflow';
 import { useNDVStore } from '@/stores/ndv.store';
 import { useRouter } from 'vue-router';

-const { node, isOpen, selected } = defineProps<{
+const { isOpen, selected } = defineProps<{
   isOpen: boolean;
-  node: INodeUi | null;
   selected?: LogEntryIdentity;
 }>();

@@ -44,13 +41,10 @@ const nodeHelpers = useNodeHelpers();
 const isClearExecutionButtonVisible = useClearExecutionButtonVisible();
 const workflow = computed(() => workflowsStore.getCurrentWorkflow());
 const executionTree = computed<TreeNode[]>(() =>
-  node
-    ? getTreeNodeData(
-        node.name,
-        workflow.value,
-        createAiData(node.name, workflow.value, workflowsStore.getWorkflowResultDataByNodeName),
-      )
-    : [],
+  createLogEntries(
+    workflow.value,
+    workflowsStore.workflowExecutionData?.data?.resultData.runData ?? {},
+  ),
 );
 const isEmpty = computed(() => workflowsStore.workflowExecutionData === null);
 const switchViewOptions = computed(() => [
@@ -272,6 +266,7 @@ async function handleTriggerPartialExecution(treeNode: TreeNode) {

 .switchViewButtons {
   position: absolute;
+  z-index: 10; /* higher than log entry rows background */
   right: 0;
   top: 0;
   margin: var(--spacing-2xs);
@@ -70,8 +70,12 @@ function isLastChild(level: number) {
   }

   const siblings = parent?.children ?? [];
+  const lastSibling = siblings[siblings.length - 1];

-  return data === siblings[siblings.length - 1];
+  return (
+    (data === undefined && lastSibling === undefined) ||
+    (data?.node === lastSibling?.node && data?.runIndex === lastSibling?.runIndex)
+  );
 }
 </script>

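The rewritten isLastChild above compares log entries by node name and run index instead of by object identity. A minimal standalone sketch of that comparison, assuming a simplified LogEntryIdentity shape rather than the exact n8n type:

type LogEntryIdentity = { node: string; runIndex: number };

function isSameEntry(a?: LogEntryIdentity, b?: LogEntryIdentity): boolean {
  // Both missing counts as a match; otherwise node name and run index must both agree.
  return (a === undefined && b === undefined) || (a?.node === b?.node && a?.runIndex === b?.runIndex);
}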
@@ -1,20 +1,20 @@
 import { createTestNode, createTestWorkflowObject } from '@/__tests__/mocks';
-import { createAiData, getTreeNodeData } from '@/components/RunDataAi/utils';
+import { createAiData, createLogEntries, getTreeNodeData } from '@/components/RunDataAi/utils';
 import { type ITaskData, NodeConnectionTypes } from 'n8n-workflow';

-describe(getTreeNodeData, () => {
-  function createTaskData(partialData: Partial<ITaskData>): ITaskData {
-    return {
-      startTime: 0,
-      executionIndex: 0,
-      executionTime: 1,
-      source: [],
-      executionStatus: 'success',
-      data: { main: [[{ json: {} }]] },
-      ...partialData,
-    };
-  }
+function createTaskData(partialData: Partial<ITaskData>): ITaskData {
+  return {
+    startTime: 0,
+    executionIndex: 0,
+    executionTime: 1,
+    source: [],
+    executionStatus: 'success',
+    data: { main: [[{ json: {} }]] },
+    ...partialData,
+  };
+}

+describe(getTreeNodeData, () => {
   it('should generate one node per execution', () => {
     const workflow = createTestWorkflowObject({
       nodes: [
@@ -101,7 +101,7 @@ describe(getTreeNodeData, () => {
     ).toEqual([
       {
         depth: 0,
-        id: 'A',
+        id: 'A:0',
         node: 'A',
         runIndex: 0,
         startTime: 0,
@@ -115,7 +115,7 @@ describe(getTreeNodeData, () => {
         children: [
           {
             depth: 1,
-            id: 'B',
+            id: 'B:0',
             node: 'B',
             runIndex: 0,
             startTime: Date.parse('2025-02-26T00:00:01.000Z'),
@@ -130,7 +130,7 @@ describe(getTreeNodeData, () => {
               {
                 children: [],
                 depth: 2,
-                id: 'C',
+                id: 'C:0',
                 node: 'C',
                 runIndex: 0,
                 startTime: Date.parse('2025-02-26T00:00:02.000Z'),
@@ -146,7 +146,7 @@ describe(getTreeNodeData, () => {
           },
           {
             depth: 1,
-            id: 'B',
+            id: 'B:1',
             node: 'B',
             runIndex: 1,
             startTime: Date.parse('2025-02-26T00:00:03.000Z'),
@@ -161,7 +161,7 @@ describe(getTreeNodeData, () => {
               {
                 children: [],
                 depth: 2,
-                id: 'C',
+                id: 'C:1',
                 node: 'C',
                 runIndex: 1,
                 startTime: Date.parse('2025-02-26T00:00:04.000Z'),
@@ -180,3 +180,82 @@ describe(getTreeNodeData, () => {
     ]);
   });
 });
+
+describe(createLogEntries, () => {
+  it('should return root node log entries in ascending order of executionIndex', () => {
+    const workflow = createTestWorkflowObject({
+      nodes: [
+        createTestNode({ name: 'A' }),
+        createTestNode({ name: 'B' }),
+        createTestNode({ name: 'C' }),
+      ],
+      connections: {
+        B: { main: [[{ node: 'A', type: NodeConnectionTypes.Main, index: 0 }]] },
+        C: { main: [[{ node: 'B', type: NodeConnectionTypes.Main, index: 0 }]] },
+      },
+    });
+
+    expect(
+      createLogEntries(workflow, {
+        A: [
+          createTaskData({ startTime: Date.parse('2025-04-04T00:00:00.000Z'), executionIndex: 0 }),
+        ],
+        B: [
+          createTaskData({ startTime: Date.parse('2025-04-04T00:00:01.000Z'), executionIndex: 1 }),
+        ],
+        C: [
+          createTaskData({ startTime: Date.parse('2025-04-04T00:00:02.000Z'), executionIndex: 3 }),
+          createTaskData({ startTime: Date.parse('2025-04-04T00:00:03.000Z'), executionIndex: 2 }),
+        ],
+      }),
+    ).toEqual([
+      expect.objectContaining({ node: 'A', runIndex: 0 }),
+      expect.objectContaining({ node: 'B', runIndex: 0 }),
+      expect.objectContaining({ node: 'C', runIndex: 1 }),
+      expect.objectContaining({ node: 'C', runIndex: 0 }),
+    ]);
+  });
+
+  it('should return sub node log entries in ascending order of executionIndex', () => {
+    const workflow = createTestWorkflowObject({
+      nodes: [
+        createTestNode({ name: 'A' }),
+        createTestNode({ name: 'B' }),
+        createTestNode({ name: 'C' }),
+      ],
+      connections: {
+        A: { main: [[{ node: 'B', type: NodeConnectionTypes.Main, index: 0 }]] },
+        C: {
+          [NodeConnectionTypes.AiLanguageModel]: [
+            [{ node: 'B', type: NodeConnectionTypes.AiLanguageModel, index: 0 }],
+          ],
+        },
+      },
+    });
+
+    expect(
+      createLogEntries(workflow, {
+        A: [
+          createTaskData({ startTime: Date.parse('2025-04-04T00:00:00.000Z'), executionIndex: 0 }),
+        ],
+        B: [
+          createTaskData({ startTime: Date.parse('2025-04-04T00:00:01.000Z'), executionIndex: 1 }),
+        ],
+        C: [
+          createTaskData({ startTime: Date.parse('2025-04-04T00:00:02.000Z'), executionIndex: 3 }),
+          createTaskData({ startTime: Date.parse('2025-04-04T00:00:03.000Z'), executionIndex: 2 }),
+        ],
+      }),
+    ).toEqual([
+      expect.objectContaining({ node: 'A', runIndex: 0 }),
+      expect.objectContaining({
+        node: 'B',
+        runIndex: 0,
+        children: [
+          expect.objectContaining({ node: 'C', runIndex: 1 }),
+          expect.objectContaining({ node: 'C', runIndex: 0 }),
+        ],
+      }),
+    ]);
+  });
+});
@@ -1,5 +1,6 @@
 import { type LlmTokenUsageData, type IAiDataContent } from '@/Interface';
 import {
+  type IRunData,
   type INodeExecutionData,
   type ITaskData,
   type ITaskDataConnections,
@@ -28,16 +29,17 @@ function createNode(
   parent: TreeNode | undefined,
   nodeName: string,
   currentDepth: number,
+  runIndex: number,
   r?: AIResult,
   children: TreeNode[] = [],
 ): TreeNode {
   return {
     parent,
     node: nodeName,
-    id: nodeName,
+    id: `${nodeName}:${runIndex}`,
     depth: currentDepth,
     startTime: r?.data?.metadata?.startTime ?? 0,
-    runIndex: r?.runIndex ?? 0,
+    runIndex,
     children,
     consumedTokens: getConsumedTokens(r?.data),
   };
@@ -47,8 +49,9 @@ export function getTreeNodeData(
   nodeName: string,
   workflow: Workflow,
   aiData: AIResult[] | undefined,
+  runIndex?: number,
 ): TreeNode[] {
-  return getTreeNodeDataRec(undefined, nodeName, 0, workflow, aiData, undefined);
+  return getTreeNodeDataRec(undefined, nodeName, 0, workflow, aiData, runIndex);
 }

 function getTreeNodeDataRec(
@@ -66,32 +69,27 @@ function getTreeNodeDataRec(
     ) ?? [];

   if (!connections) {
-    return resultData.map((d) => createNode(parent, nodeName, currentDepth, d));
+    return resultData.map((d) => createNode(parent, nodeName, currentDepth, d.runIndex, d));
   }

   // Get the first level of children
   const connectedSubNodes = workflow.getParentNodes(nodeName, 'ALL_NON_MAIN', 1);

-  const treeNode = createNode(parent, nodeName, currentDepth);
-  const children = connectedSubNodes.flatMap((name) => {
-    // Only include sub-nodes which have data
-    return (
-      aiData
-        ?.filter(
-          (data) => data.node === name && (runIndex === undefined || data.runIndex === runIndex),
-        )
-        .flatMap((data) =>
-          getTreeNodeDataRec(treeNode, name, currentDepth + 1, workflow, aiData, data.runIndex),
-        ) ?? []
-    );
-  });
+  const treeNode = createNode(parent, nodeName, currentDepth, runIndex ?? 0);

-  children.sort((a, b) => a.startTime - b.startTime);
+  // Only include sub-nodes which have data
+  const children = (aiData ?? []).flatMap((data) =>
+    connectedSubNodes.includes(data.node) && (runIndex === undefined || data.runIndex === runIndex)
+      ? getTreeNodeDataRec(treeNode, data.node, currentDepth + 1, workflow, aiData, data.runIndex)
+      : [],
+  );

   treeNode.children = children;

   if (resultData.length) {
-    return resultData.map((r) => createNode(parent, nodeName, currentDepth, r, children));
+    return resultData.map((r) =>
+      createNode(parent, nodeName, currentDepth, r.runIndex, r, children),
+    );
   }

   return [treeNode];
@@ -102,31 +100,27 @@ export function createAiData(
   workflow: Workflow,
   getWorkflowResultDataByNodeName: (nodeName: string) => ITaskData[] | null,
 ): AIResult[] {
-  const result: AIResult[] = [];
-  const connectedSubNodes = workflow.getParentNodes(nodeName, 'ALL_NON_MAIN');
+  return workflow
+    .getParentNodes(nodeName, 'ALL_NON_MAIN')
+    .flatMap((node) =>
+      (getWorkflowResultDataByNodeName(node) ?? []).map((task, index) => ({ node, task, index })),
+    )
+    .sort((a, b) => {
+      // Sort the data by execution index or start time
+      if (a.task.executionIndex !== undefined && b.task.executionIndex !== undefined) {
+        return a.task.executionIndex - b.task.executionIndex;
+      }

-  connectedSubNodes.forEach((node) => {
-    const nodeRunData = getWorkflowResultDataByNodeName(node) ?? [];
+      const aTime = a.task.startTime ?? 0;
+      const bTime = b.task.startTime ?? 0;

-    nodeRunData.forEach((run, runIndex) => {
-      const referenceData = {
-        data: getReferencedData(run, false, true)[0],
-        node,
-        runIndex,
-      };
-
-      result.push(referenceData);
-    });
-  });
-
-  // Sort the data by start time
-  result.sort((a, b) => {
-    const aTime = a.data?.metadata?.startTime ?? 0;
-    const bTime = b.data?.metadata?.startTime ?? 0;
-    return aTime - bTime;
-  });
-
-  return result;
+      return aTime - bTime;
+    })
+    .map(({ node, task, index }) => ({
+      data: getReferencedData(task, false, true)[0],
+      node,
+      runIndex: index,
+    }));
 }

 export function getReferencedData(
@@ -231,3 +225,44 @@ export function formatTokenUsageCount(

   return usage.isEstimate ? `~${count}` : count.toLocaleString();
 }
+
+export function createLogEntries(workflow: Workflow, runData: IRunData) {
+  const runs = Object.entries(runData)
+    .filter(([nodeName]) => workflow.getChildNodes(nodeName, 'ALL_NON_MAIN').length === 0)
+    .flatMap(([nodeName, taskData]) =>
+      taskData.map((task, runIndex) => ({ nodeName, task, runIndex })),
+    )
+    .sort((a, b) => {
+      if (a.task.executionIndex !== undefined && b.task.executionIndex !== undefined) {
+        return a.task.executionIndex - b.task.executionIndex;
+      }
+
+      return a.nodeName === b.nodeName
+        ? a.runIndex - b.runIndex
+        : a.task.startTime - b.task.startTime;
+    });
+
+  return runs.flatMap(({ nodeName, runIndex, task }) => {
+    if (workflow.getParentNodes(nodeName, 'ALL_NON_MAIN').length > 0) {
+      return getTreeNodeData(
+        nodeName,
+        workflow,
+        createAiData(nodeName, workflow, (node) => runData[node] ?? []),
+        undefined,
+      );
+    }
+
+    return getTreeNodeData(
+      nodeName,
+      workflow,
+      [
+        {
+          data: getReferencedData(task, false, true)[0],
+          node: nodeName,
+          runIndex,
+        },
+      ],
+      runIndex,
+    );
+  });
+}
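The new createLogEntries above flattens the run data into one list and orders it primarily by executionIndex, falling back to run order within a node and start time across nodes. A minimal sketch of that comparator in isolation, using an assumed Run shape rather than the real ITaskData type:

type Run = { nodeName: string; runIndex: number; startTime: number; executionIndex?: number };

function compareRuns(a: Run, b: Run): number {
  // Prefer the explicit executionIndex when both runs carry one.
  if (a.executionIndex !== undefined && b.executionIndex !== undefined) {
    return a.executionIndex - b.executionIndex;
  }
  // Fall back to run order for the same node, or start time across different nodes.
  return a.nodeName === b.nodeName ? a.runIndex - b.runIndex : a.startTime - b.startTime;
}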
@@ -569,8 +569,7 @@ export function usePushConnection({ router }: { router: ReturnType<typeof useRou
       void useSchemaPreviewStore().trackSchemaPreviewExecution(pushData);
     } else if (receivedData.type === 'nodeExecuteBefore') {
       // A node started to be executed. Set it as executing.
-      const pushData = receivedData.data;
-      workflowsStore.addExecutingNode(pushData.nodeName);
+      workflowsStore.setNodeExecuting(receivedData.data);
     } else if (receivedData.type === 'testWebhookDeleted') {
       // A test-webhook was deleted
       const pushData = receivedData.data;
@@ -1387,6 +1387,28 @@ export const useWorkflowsStore = defineStore(STORES.WORKFLOWS, () => {
     return testUrl;
   }

+  function setNodeExecuting(pushData: PushPayload<'nodeExecuteBefore'>): void {
+    addExecutingNode(pushData.nodeName);
+
+    if (settingsStore.isNewLogsEnabled) {
+      const node = getNodeByName(pushData.nodeName);
+
+      if (!node || !workflowExecutionData.value?.data) {
+        return;
+      }
+
+      if (workflowExecutionData.value.data.resultData.runData[pushData.nodeName] === undefined) {
+        workflowExecutionData.value.data.resultData.runData[pushData.nodeName] = [];
+      }
+
+      workflowExecutionData.value.data.resultData.runData[pushData.nodeName].push({
+        executionStatus: 'running',
+        executionTime: 0,
+        ...pushData.data,
+      });
+    }
+  }
+
   function updateNodeExecutionData(pushData: PushPayload<'nodeExecuteAfter'>): void {
     if (!workflowExecutionData.value?.data) {
       throw new Error('The "workflowExecutionData" is not initialized!');
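setNodeExecuting above appends a placeholder run with status 'running' so the logs panel can list a node as soon as it starts; updateNodeExecutionData later replaces that placeholder, as the next hunk shows. A minimal sketch of the same idea against a plain runData map, with simplified types that are assumptions rather than the store's real ones:

type TaskEntry = { executionStatus: string; executionTime: number; startTime?: number };

function markNodeExecuting(
  runData: Record<string, TaskEntry[]>,
  nodeName: string,
  data?: Partial<TaskEntry>,
): void {
  // Ensure the node has a run list, then push a 'running' placeholder entry.
  runData[nodeName] ??= [];
  runData[nodeName].push({ executionStatus: 'running', executionTime: 0, ...data });
}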
@@ -1424,7 +1446,9 @@
         openFormPopupWindow(testUrl);
       }
     } else {
-      if (tasksData.length && tasksData[tasksData.length - 1].executionStatus === 'waiting') {
+      const status = tasksData[tasksData.length - 1]?.executionStatus ?? 'unknown';
+
+      if ('waiting' === status || (settingsStore.isNewLogsEnabled && 'running' === status)) {
         tasksData.splice(tasksData.length - 1, 1, data);
       } else {
         tasksData.push(data);
@@ -1785,7 +1809,7 @@
     makeNewWorkflowShareable,
     resetWorkflow,
     resetState,
-    addExecutingNode,
+    setNodeExecuting,
     removeExecutingNode,
     setWorkflowId,
     setUsedCredentials,