Mirror of https://github.com/Abdulazizzn/n8n-enterprise-unlocked.git, synced 2025-12-16 17:46:45 +00:00
feat(editor): Show sub workflow runs in the log view (#15163)
@@ -256,6 +256,8 @@ export function createTestTaskData(partialData: Partial<ITaskData> = {}): ITaskD
}

export function createTestLogEntry(data: Partial<LogEntry> = {}): LogEntry {
const executionId = data.executionId ?? 'test-execution-id';

return {
node: createTestNode(),
runIndex: 0,
@@ -264,6 +266,9 @@ export function createTestLogEntry(data: Partial<LogEntry> = {}): LogEntry {
children: [],
consumedTokens: { completionTokens: 0, totalTokens: 0, promptTokens: 0, isEstimate: false },
depth: 0,
workflow: createTestWorkflowObject(),
executionId,
execution: createTestWorkflowExecutionResponse({ id: executionId }).data!,
...data,
};
}
@@ -1,4 +1,5 @@
import { createTestNode, createTestWorkflow, mockNodeTypeDescription } from '@/__tests__/mocks';
import type { LogTreeCreationContext } from '@/components/RunDataAi/utils';
import {
AGENT_NODE_TYPE,
AI_CATEGORY_AGENTS,
@@ -7,7 +8,26 @@ import {
MANUAL_TRIGGER_NODE_TYPE,
} from '@/constants';
import { type IExecutionResponse } from '@/Interface';
import { WorkflowOperationError } from 'n8n-workflow';
import { WorkflowOperationError, type IRunData, type Workflow } from 'n8n-workflow';

export function createTestLogTreeCreationContext(
workflow: Workflow,
runData: IRunData,
): LogTreeCreationContext {
return {
parent: undefined,
workflow,
workflows: {},
subWorkflowData: {},
executionId: 'test-execution-id',
depth: 0,
data: {
resultData: {
runData,
},
},
};
}

export const nodeTypes = [
mockNodeTypeDescription({
@@ -19,6 +19,7 @@ import { useNodeTypesStore } from '@/stores/nodeTypes.store';
import { LOGS_PANEL_STATE } from '../types/logs';
import { IN_PROGRESS_EXECUTION_ID } from '@/constants';
import { useCanvasOperations } from '@/composables/useCanvasOperations';
import { createTestTaskData } from '@/__tests__/mocks';

describe('LogsPanel', () => {
const VIEWPORT_HEIGHT = 800;
@@ -211,7 +212,9 @@ describe('LogsPanel', () => {
finished: false,
startedAt: new Date('2025-04-20T12:34:50.000Z'),
stoppedAt: undefined,
data: { resultData: { runData: {} } },
data: {
resultData: { runData: { Chat: [createTestTaskData()] } },
},
});

const rendered = render();
@@ -227,10 +230,15 @@ describe('LogsPanel', () => {
data: { executionIndex: 0, startTime: Date.parse('2025-04-20T12:34:51.000Z'), source: [] },
});

const treeItem = within(await rendered.findByRole('treeitem'));
const lastTreeItem = await waitFor(() => {
const items = rendered.getAllByRole('treeitem');

expect(treeItem.getByText('AI Agent')).toBeInTheDocument();
expect(treeItem.getByText('Running')).toBeInTheDocument();
expect(items).toHaveLength(2);
return within(items[1]);
});

expect(lastTreeItem.getByText('AI Agent')).toBeInTheDocument();
expect(lastTreeItem.getByText('Running')).toBeInTheDocument();

workflowsStore.updateNodeExecutionData({
nodeName: 'AI Agent',
@@ -243,11 +251,11 @@ describe('LogsPanel', () => {
executionStatus: 'success',
},
});
expect(await treeItem.findByText('AI Agent')).toBeInTheDocument();
expect(treeItem.getByText('Success in 33ms')).toBeInTheDocument();
expect(await lastTreeItem.findByText('AI Agent')).toBeInTheDocument();
expect(lastTreeItem.getByText('Success in 33ms')).toBeInTheDocument();

workflowsStore.setWorkflowExecutionData({
...aiChatExecutionResponse,
...workflowsStore.workflowExecutionData!,
id: '1234',
status: 'success',
finished: true,
@@ -47,11 +47,12 @@ const {
displayExecution,
} = useChatState(props.isReadOnly);

const { workflow, execution, hasChat, latestNodeNameById, resetExecutionData } = useExecutionData();
const { entries, execution, hasChat, latestNodeNameById, resetExecutionData, loadSubExecution } =
useExecutionData();

const manualLogEntrySelection = ref<LogEntrySelection>({ type: 'initial' });
const selectedLogEntry = computed(() =>
findSelectedLogEntry(manualLogEntrySelection.value, execution.value),
findSelectedLogEntry(manualLogEntrySelection.value as LogEntrySelection, entries.value),
);
const isLogDetailsOpen = computed(() => isOpen.value && selectedLogEntry.value !== undefined);
const isLogDetailsVisuallyOpen = computed(
@@ -66,16 +67,8 @@ const logsPanelActionsProps = computed<InstanceType<typeof LogsPanelActions>['$p
}));

function handleSelectLogEntry(selected: LogEntry | undefined) {
const workflowId = execution.value?.workflowData.id;

if (!workflowId) {
return;
}

manualLogEntrySelection.value =
selected === undefined
? { type: 'none', workflowId }
: { type: 'selected', workflowId, data: selected };
selected === undefined ? { type: 'none' } : { type: 'selected', id: selected.id };
}

function handleResizeOverviewPanelEnd() {
@@ -145,15 +138,17 @@ function handleResizeOverviewPanelEnd() {
:is-read-only="isReadOnly"
:is-compact="isLogDetailsVisuallyOpen"
:selected="selectedLogEntry"
:entries="entries"
:execution="execution"
:scroll-to-selection="
manualLogEntrySelection.type !== 'selected' ||
manualLogEntrySelection.data.id !== selectedLogEntry?.id
manualLogEntrySelection.id !== selectedLogEntry?.id
"
:latest-node-info="latestNodeNameById"
@click-header="onToggleOpen(true)"
@select="handleSelectLogEntry"
@clear-execution-data="resetExecutionData"
@load-sub-execution="loadSubExecution"
>
<template #actions>
<LogsPanelActions
@@ -164,14 +159,12 @@ function handleResizeOverviewPanelEnd() {
</LogsOverviewPanel>
</N8nResizeWrapper>
<LogsDetailsPanel
v-if="isLogDetailsVisuallyOpen && selectedLogEntry && workflow && execution"
v-if="isLogDetailsVisuallyOpen && selectedLogEntry"
:class="$style.logDetails"
:is-open="isOpen"
:log-entry="selectedLogEntry"
:workflow="workflow"
:execution="execution"
:window="pipWindow"
:latest-info="latestNodeNameById[selectedLogEntry.node.id]"
:latest-info="latestNodeNameById[selectedLogEntry.id]"
@click-header="onToggleOpen(true)"
>
<template #actions>
@@ -9,12 +9,12 @@ import {
createTestNode,
createTestTaskData,
createTestWorkflow,
createTestWorkflowExecutionResponse,
createTestWorkflowObject,
} from '@/__tests__/mocks';
import { mockedStore } from '@/__tests__/utils';
import { useSettingsStore } from '@/stores/settings.store';
import { type FrontendSettings } from '@n8n/api-types';
import type { LogEntry } from '@/components/RunDataAi/utils';

describe('LogDetailsPanel', () => {
let pinia: TestingPinia;
@@ -37,26 +37,24 @@ describe('LogDetailsPanel', () => {
source: [{ previousNode: 'Chat Trigger' }],
});

function render(props: Partial<InstanceType<typeof LogDetailsPanel>['$props']>) {
const mergedProps: InstanceType<typeof LogDetailsPanel>['$props'] = {
...props,
logEntry: props.logEntry ?? createTestLogEntry(),
workflow: props.workflow ?? createTestWorkflowObject(workflowData),
execution:
props.execution ??
createTestWorkflowExecutionResponse({
workflowData,
data: {
resultData: {
runData: { 'Chat Trigger': [chatNodeRunData], 'AI Agent': [aiNodeRunData] },
},
function createLogEntry(data: Partial<LogEntry> = {}) {
return createTestLogEntry({
workflow: createTestWorkflowObject(workflowData),
execution: {
resultData: {
runData: {
'Chat Trigger': [chatNodeRunData],
'AI Agent': [aiNodeRunData],
},
}),
isOpen: props.isOpen ?? true,
};
},
},
...data,
});
}

function render(props: InstanceType<typeof LogDetailsPanel>['$props']) {
const rendered = renderComponent(LogDetailsPanel, {
props: mergedProps,
props,
global: {
plugins: [
createRouter({
@@ -97,11 +95,7 @@ describe('LogDetailsPanel', () => {

const rendered = render({
isOpen: true,
logEntry: createTestLogEntry({
node: aiNode,
runIndex: 0,
runData: aiNodeRunData,
}),
logEntry: createLogEntry({ node: aiNode, runIndex: 0, runData: aiNodeRunData }),
});

const header = within(rendered.getByTestId('log-details-header'));
@@ -117,7 +111,7 @@ describe('LogDetailsPanel', () => {
it('should toggle input and output panel when the button is clicked', async () => {
const rendered = render({
isOpen: true,
logEntry: createTestLogEntry({ node: aiNode, runIndex: 0, runData: aiNodeRunData }),
logEntry: createLogEntry({ node: aiNode, runIndex: 0, runData: aiNodeRunData }),
});

const header = within(rendered.getByTestId('log-details-header'));
@@ -141,7 +135,7 @@ describe('LogDetailsPanel', () => {

const rendered = render({
isOpen: true,
logEntry: createTestLogEntry({ node: aiNode, runIndex: 0, runData: aiNodeRunData }),
logEntry: createLogEntry({ node: aiNode, runIndex: 0, runData: aiNodeRunData }),
});

await fireEvent.mouseDown(rendered.getByTestId('resize-handle'));
@@ -160,7 +154,7 @@ describe('LogDetailsPanel', () => {

const rendered = render({
isOpen: true,
logEntry: createTestLogEntry({ node: aiNode, runIndex: 0, runData: aiNodeRunData }),
logEntry: createLogEntry({ node: aiNode, runIndex: 0, runData: aiNodeRunData }),
});

await fireEvent.mouseDown(rendered.getByTestId('resize-handle'));
@@ -8,13 +8,11 @@ import NodeIcon from '@/components/NodeIcon.vue';
import { useI18n } from '@/composables/useI18n';
import { useTelemetry } from '@/composables/useTelemetry';
import { useNodeTypesStore } from '@/stores/nodeTypes.store';
import { type Workflow } from 'n8n-workflow';
import { type IExecutionResponse } from '@/Interface';
import NodeName from '@/components/CanvasChat/future/components/NodeName.vue';
import {
getSubtreeTotalConsumedTokens,
type LatestNodeInfo,
type LogEntry,
type LatestNodeInfo,
} from '@/components/RunDataAi/utils';
import { N8nButton, N8nResizeWrapper } from '@n8n/design-system';
import { useLocalStorage } from '@vueuse/core';
@@ -22,11 +20,9 @@ import { computed, useTemplateRef } from 'vue';

const MIN_IO_PANEL_WIDTH = 200;

const { isOpen, logEntry, workflow, execution, window, latestInfo } = defineProps<{
const { isOpen, logEntry, window, latestInfo } = defineProps<{
isOpen: boolean;
logEntry: LogEntry;
workflow: Workflow;
execution: IExecutionResponse;
window?: Window;
latestInfo?: LatestNodeInfo;
}>();
@@ -46,7 +42,7 @@ const content = useLocalStorage<LogDetailsContent>(
);

const type = computed(() => nodeTypeStore.getNodeType(logEntry.node.type));
const consumedTokens = computed(() => getSubtreeTotalConsumedTokens(logEntry));
const consumedTokens = computed(() => getSubtreeTotalConsumedTokens(logEntry, false));
const isTriggerNode = computed(() => type.value?.group.includes('trigger'));
const container = useTemplateRef<HTMLElement>('container');
const resizer = useResizablePanel('N8N_LOGS_INPUT_PANEL_WIDTH', {
@@ -169,8 +165,6 @@ function handleResizeEnd() {
pane-type="input"
:title="locale.baseText('logs.details.header.actions.input')"
:log-entry="logEntry"
:workflow="workflow"
:execution="execution"
/>
</N8nResizeWrapper>
<RunDataView
@@ -180,8 +174,6 @@ function handleResizeEnd() {
:class="$style.outputPanel"
:title="locale.baseText('logs.details.header.actions.output')"
:log-entry="logEntry"
:workflow="workflow"
:execution="execution"
/>
</div>
</div>
@@ -16,7 +16,7 @@ import {
import { usePushConnectionStore } from '@/stores/pushConnection.store';
import { useNDVStore } from '@/stores/ndv.store';
import { createTestWorkflowObject } from '@/__tests__/mocks';
import { createLogEntries } from '@/components/RunDataAi/utils';
import { createLogTree } from '@/components/RunDataAi/utils';

describe('LogsOverviewPanel', () => {
let pinia: TestingPinia;
@@ -30,14 +30,9 @@ describe('LogsOverviewPanel', () => {
isReadOnly: false,
isCompact: false,
scrollToSelection: false,
execution: {
...aiChatExecutionResponse,
tree: createLogEntries(
createTestWorkflowObject(aiChatWorkflow),
aiChatExecutionResponse.data?.resultData.runData ?? {},
),
},
entries: createLogTree(createTestWorkflowObject(aiChatWorkflow), aiChatExecutionResponse),
latestNodeInfo: {},
execution: aiChatExecutionResponse,
...props,
};

@@ -75,7 +70,7 @@ describe('LogsOverviewPanel', () => {
});

it('should render empty text if there is no execution', () => {
const rendered = render({ isOpen: true, execution: undefined });
const rendered = render({ isOpen: true, entries: [], execution: undefined });

expect(rendered.queryByTestId('logs-overview-empty')).toBeInTheDocument();
});
@@ -134,13 +129,7 @@ describe('LogsOverviewPanel', () => {

const rendered = render({
isOpen: true,
execution: {
...aiManualExecutionResponse,
tree: createLogEntries(
createTestWorkflowObject(aiManualWorkflow),
aiManualExecutionResponse.data?.resultData.runData ?? {},
),
},
entries: createLogTree(createTestWorkflowObject(aiManualWorkflow), aiManualExecutionResponse),
});
const aiAgentRow = (await rendered.findAllByRole('treeitem'))[0];
@@ -11,31 +11,44 @@ import { useNDVStore } from '@/stores/ndv.store';
import { useRouter } from 'vue-router';
import ExecutionSummary from '@/components/CanvasChat/future/components/ExecutionSummary.vue';
import {
type ExecutionLogViewData,
getDefaultCollapsedEntries,
flattenLogEntries,
getSubtreeTotalConsumedTokens,
getTotalConsumedTokens,
hasSubExecution,
type LatestNodeInfo,
type LogEntry,
getDepth,
} from '@/components/RunDataAi/utils';
import { useVirtualList } from '@vueuse/core';
import { ndvEventBus } from '@/event-bus';
import { type IExecutionResponse } from '@/Interface';

const { isOpen, isReadOnly, selected, isCompact, execution, latestNodeInfo, scrollToSelection } =
defineProps<{
isOpen: boolean;
selected?: LogEntry;
isReadOnly: boolean;
isCompact: boolean;
execution?: ExecutionLogViewData;
latestNodeInfo: Record<string, LatestNodeInfo>;
scrollToSelection: boolean;
}>();
const {
isOpen,
isReadOnly,
selected,
isCompact,
execution,
entries,
latestNodeInfo,
scrollToSelection,
} = defineProps<{
isOpen: boolean;
selected?: LogEntry;
isReadOnly: boolean;
isCompact: boolean;
entries: LogEntry[];
execution?: IExecutionResponse;
latestNodeInfo: Record<string, LatestNodeInfo>;
scrollToSelection: boolean;
}>();

const emit = defineEmits<{
clickHeader: [];
select: [LogEntry | undefined];
clearExecutionData: [];
loadSubExecution: [LogEntry];
}>();

defineSlots<{ actions: {} }>();
@@ -46,44 +59,62 @@ const router = useRouter();
const runWorkflow = useRunWorkflow({ router });
const ndvStore = useNDVStore();
const isClearExecutionButtonVisible = useClearExecutionButtonVisible();
const isEmpty = computed(() => execution === undefined);
const isEmpty = computed(() => entries.length === 0 || execution === undefined);
const switchViewOptions = computed(() => [
{ label: locale.baseText('logs.overview.header.switch.overview'), value: 'overview' as const },
{ label: locale.baseText('logs.overview.header.switch.details'), value: 'details' as const },
]);
const consumedTokens = computed(() =>
getTotalConsumedTokens(...(execution?.tree ?? []).map(getSubtreeTotalConsumedTokens)),
getTotalConsumedTokens(
...entries.map((entry) =>
getSubtreeTotalConsumedTokens(
entry,
false, // Exclude token usages from sub workflow which is loaded only after expanding the row
),
),
),
);
const collapsedEntries = ref<Record<string, boolean>>({});
const flatLogEntries = computed(() =>
flattenLogEntries(execution?.tree ?? [], collapsedEntries.value),
const shouldShowTokenCountColumn = computed(
() =>
consumedTokens.value.totalTokens > 0 ||
entries.some((entry) => getSubtreeTotalConsumedTokens(entry, true).totalTokens > 0),
);
const manuallyCollapsedEntries = ref<Record<string, boolean>>({});
const collapsedEntries = computed(() => ({
...getDefaultCollapsedEntries(entries),
...manuallyCollapsedEntries.value,
}));
const flatLogEntries = computed(() => flattenLogEntries(entries, collapsedEntries.value));
const virtualList = useVirtualList(flatLogEntries, { itemHeight: 32 });

function handleClickNode(clicked: LogEntry) {
if (selected?.node === clicked.node && selected?.runIndex === clicked.runIndex) {
if (selected?.id === clicked.id) {
emit('select', undefined);
return;
}

emit('select', clicked);

telemetry.track('User selected node in log view', {
node_type: clicked.node.type,
node_id: clicked.node.id,
execution_id: execution?.id,
workflow_id: execution?.workflowData.id,
subworkflow_depth: getDepth(clicked),
});
}

function handleSwitchView(value: 'overview' | 'details') {
emit(
'select',
value === 'overview' || (execution?.tree ?? []).length === 0 ? undefined : execution?.tree[0],
);
emit('select', value === 'overview' || entries.length === 0 ? undefined : entries[0]);
}

function handleToggleExpanded(treeNode: LogEntry) {
collapsedEntries.value[treeNode.id] = !collapsedEntries.value[treeNode.id];
async function handleToggleExpanded(treeNode: LogEntry) {
if (hasSubExecution(treeNode) && treeNode.children.length === 0) {
emit('loadSubExecution', treeNode);
return;
}

manuallyCollapsedEntries.value[treeNode.id] = !collapsedEntries.value[treeNode.id];
}

async function handleOpenNdv(treeNode: LogEntry) {
@@ -109,10 +140,10 @@ async function handleTriggerPartialExecution(treeNode: LogEntry) {

// Scroll selected row into view
watch(
() => (scrollToSelection ? selected : undefined),
async (entry) => {
if (entry) {
const index = flatLogEntries.value.findIndex((e) => e.id === entry.id);
() => (scrollToSelection ? selected?.id : undefined),
async (selectedId) => {
if (selectedId) {
const index = flatLogEntries.value.findIndex((e) => e.id === selectedId);

if (index >= 0) {
// Wait for the node to be added to the list, and then scroll
@@ -155,7 +186,7 @@ watch(
data-test-id="logs-overview-body"
>
<N8nText
v-if="isEmpty"
v-if="isEmpty || execution === undefined"
tag="p"
size="medium"
color="text-base"
@@ -166,7 +197,6 @@ watch(
</N8nText>
<template v-else>
<ExecutionSummary
v-if="execution"
data-test-id="logs-overview-status"
:class="$style.summary"
:status="execution.status"
@@ -184,13 +214,12 @@ watch(
:key="index"
:data="data"
:is-read-only="isReadOnly"
:is-selected="
data.node.name === selected?.node.name && data.runIndex === selected?.runIndex
"
:is-selected="data.id === selected?.id"
:is-compact="isCompact"
:should-show-consumed-tokens="consumedTokens.totalTokens > 0"
:should-show-token-count-column="shouldShowTokenCountColumn"
:latest-info="latestNodeInfo[data.node.id]"
:expanded="!collapsedEntries[data.id]"
:can-open-ndv="data.executionId === execution?.id"
@click.stop="handleClickNode(data)"
@toggle-expanded="handleToggleExpanded"
@open-ndv="handleOpenNdv"
@@ -18,10 +18,11 @@ const props = defineProps<{
data: LogEntry;
isSelected: boolean;
isReadOnly: boolean;
shouldShowConsumedTokens: boolean;
shouldShowTokenCountColumn: boolean;
isCompact: boolean;
latestInfo?: LatestNodeInfo;
expanded: boolean;
canOpenNdv: boolean;
}>();

const emit = defineEmits<{
@@ -50,7 +51,11 @@ const startedAtText = computed(() => {
});

const subtreeConsumedTokens = computed(() =>
props.shouldShowConsumedTokens ? getSubtreeTotalConsumedTokens(props.data) : undefined,
props.shouldShowTokenCountColumn ? getSubtreeTotalConsumedTokens(props.data, false) : undefined,
);

const hasChildren = computed(
() => props.data.children.length > 0 || !!props.data.runData.metadata?.subExecution,
);

function isLastChild(level: number) {
@@ -153,6 +158,10 @@ function isLastChild(level: number) {
size="medium"
icon="edit"
style="color: var(--color-text-base)"
:style="{
visibility: props.canOpenNdv ? '' : 'hidden',
color: 'var(--color-text-base)',
}"
:disabled="props.latestInfo?.deleted"
:class="$style.openNdvButton"
:aria-label="locale.baseText('logs.overview.body.open')"
@@ -173,12 +182,12 @@ function isLastChild(level: number) {
@click.stop="emit('triggerPartialExecution', props.data)"
/>
<N8nButton
v-if="!isCompact || props.data.children.length > 0"
v-if="!isCompact || hasChildren"
type="secondary"
size="small"
:square="true"
:style="{
visibility: props.data.children.length === 0 ? 'hidden' : '',
visibility: hasChildren ? '' : 'hidden',
color: 'var(--color-text-base)', // give higher specificity than the style from the component itself
}"
:class="$style.toggleButton"
@@ -2,19 +2,16 @@
import RunData from '@/components/RunData.vue';
import { type LogEntry } from '@/components/RunDataAi/utils';
import { useI18n } from '@/composables/useI18n';
import { type IRunDataDisplayMode, type IExecutionResponse, type NodePanelType } from '@/Interface';
import { type IRunDataDisplayMode, type NodePanelType } from '@/Interface';
import { useNDVStore } from '@/stores/ndv.store';
import { N8nLink, N8nText } from '@n8n/design-system';
import { type Workflow } from 'n8n-workflow';
import { computed, ref } from 'vue';
import { I18nT } from 'vue-i18n';

const { title, logEntry, paneType, workflow, execution } = defineProps<{
const { title, logEntry, paneType } = defineProps<{
title: string;
paneType: NodePanelType;
logEntry: LogEntry;
workflow: Workflow;
execution: IExecutionResponse;
}>();

const locale = useI18n();
@@ -30,7 +27,7 @@ const runDataProps = computed<
}

const source = logEntry.runData.source[0];
const node = source && workflow.getNode(source.previousNode);
const node = source && logEntry.workflow.getNode(source.previousNode);

if (!source || !node) {
return undefined;
@@ -59,8 +56,8 @@ function handleChangeDisplayMode(value: IRunDataDisplayMode) {
<RunData
v-if="runDataProps"
v-bind="runDataProps"
:workflow="workflow"
:workflow-execution="execution"
:workflow="logEntry.workflow"
:workflow-execution="logEntry.execution"
:too-much-data-title="locale.baseText('ndv.output.tooMuchData.title')"
:no-data-in-branch-message="locale.baseText('ndv.output.noOutputDataInBranch')"
:executing-message="locale.baseText('ndv.output.executing')"
@@ -0,0 +1,110 @@
import { setActivePinia } from 'pinia';
import { useExecutionData } from './useExecutionData';
import { waitFor } from '@testing-library/vue';
import { createTestingPinia } from '@pinia/testing';
import { mockedStore } from '@/__tests__/utils';
import { useWorkflowsStore } from '@/stores/workflows.store';
import { useNodeTypesStore } from '@/stores/nodeTypes.store';
import { nodeTypes } from '../../__test__/data';
import {
createTestNode,
createTestTaskData,
createTestWorkflow,
createTestWorkflowExecutionResponse,
} from '@/__tests__/mocks';
import type { IRunExecutionData } from 'n8n-workflow';
import { stringify } from 'flatted';
import { useToast } from '@/composables/useToast';

vi.mock('@/composables/useToast');

describe(useExecutionData, () => {
let workflowsStore: ReturnType<typeof mockedStore<typeof useWorkflowsStore>>;
let nodeTypeStore: ReturnType<typeof mockedStore<typeof useNodeTypesStore>>;

beforeEach(() => {
setActivePinia(createTestingPinia({ stubActions: false }));

workflowsStore = mockedStore(useWorkflowsStore);

nodeTypeStore = mockedStore(useNodeTypesStore);
nodeTypeStore.setNodeTypes(nodeTypes);
});

describe('loadSubExecution', () => {
beforeEach(() => {
workflowsStore.setWorkflowExecutionData(
createTestWorkflowExecutionResponse({
id: 'e0',
workflowData: createTestWorkflow({
id: 'w0',
nodes: [createTestNode({ name: 'A' }), createTestNode({ name: 'B' })],
connections: {
A: {
main: [[{ type: 'main', node: 'B', index: 0 }]],
},
},
}),
data: {
resultData: {
runData: {
A: [createTestTaskData()],
B: [
createTestTaskData({
metadata: { subExecution: { workflowId: 'w1', executionId: 'e1' } },
}),
],
},
},
},
}),
);
});

it('should add runs from sub execution to the entries', async () => {
workflowsStore.fetchExecutionDataById.mockResolvedValueOnce(
createTestWorkflowExecutionResponse({
id: 'e1',
data: stringify({
resultData: { runData: { C: [createTestTaskData()] } },
}) as unknown as IRunExecutionData, // Data is stringified in actual API response
workflowData: createTestWorkflow({ id: 'w1', nodes: [createTestNode({ name: 'C' })] }),
}),
);

const { loadSubExecution, entries } = useExecutionData();

expect(entries.value).toHaveLength(2);
expect(entries.value[1].children).toHaveLength(0);

await loadSubExecution(entries.value[1]);

await waitFor(() => {
expect(entries.value).toHaveLength(2);
expect(entries.value[1].children).toHaveLength(1);
expect(entries.value[1].children[0].node.name).toBe('C');
expect(entries.value[1].children[0].workflow.id).toBe('w1');
expect(entries.value[1].children[0].executionId).toBe('e1');
});
});

it('should show toast when failed to fetch execution data for sub execution', async () => {
const showErrorSpy = vi.fn();
const useToastMock = vi.mocked(useToast);

useToastMock.mockReturnValue({ showError: showErrorSpy } as unknown as ReturnType<
typeof useToastMock
>);

workflowsStore.fetchWorkflow.mockResolvedValueOnce(createTestWorkflow());
workflowsStore.fetchExecutionDataById.mockRejectedValueOnce(
new Error('test execution fetch fail'),
);

const { loadSubExecution, entries } = useExecutionData();

await loadSubExecution(entries.value[1]);
await waitFor(() => expect(showErrorSpy).toHaveBeenCalled());
});
});
});
@@ -1,22 +1,27 @@
import { watch, computed, ref } from 'vue';
import { isChatNode } from '../../utils';
import { type IExecutionResponse } from '@/Interface';
import { Workflow } from 'n8n-workflow';
import { Workflow, type IRunExecutionData } from 'n8n-workflow';
import { useWorkflowsStore } from '@/stores/workflows.store';
import { useNodeHelpers } from '@/composables/useNodeHelpers';
import { useThrottleFn } from '@vueuse/core';
import {
createLogEntries,
createLogTree,
deepToRaw,
type ExecutionLogViewData,
type LatestNodeInfo,
type LogEntry,
} from '@/components/RunDataAi/utils';
import { parse } from 'flatted';
import { useToast } from '@/composables/useToast';

export function useExecutionData() {
const nodeHelpers = useNodeHelpers();
const workflowsStore = useWorkflowsStore();
const toast = useToast();

const execData = ref<IExecutionResponse | undefined>();
const subWorkflowExecData = ref<Record<string, IRunExecutionData>>({});
const subWorkflows = ref<Record<string, Workflow>>({});

const workflow = computed(() =>
execData.value
@@ -46,17 +51,19 @@ export function useExecutionData() {
nodes.some(isChatNode),
),
);
const execution = computed<ExecutionLogViewData | undefined>(() => {
if (!execData.value || !workflow.value) {
return undefined;
const entries = computed<LogEntry[]>(() => {
if (!execData.value?.data || !workflow.value) {
return [];
}

return {
...execData.value,
tree: createLogEntries(workflow.value, execData.value.data?.resultData.runData ?? {}),
};
return createLogTree(
workflow.value,
execData.value,
subWorkflows.value,
subWorkflowExecData.value,
);
});
const updateInterval = computed(() => ((execution.value?.tree.length ?? 0) > 10 ? 300 : 0));
const updateInterval = computed(() => ((entries.value?.length ?? 0) > 10 ? 300 : 0));

function resetExecutionData() {
execData.value = undefined;
@@ -64,6 +71,34 @@ export function useExecutionData() {
nodeHelpers.updateNodesExecutionIssues();
}

async function loadSubExecution(logEntry: LogEntry) {
const executionId = logEntry.runData.metadata?.subExecution?.executionId;
const workflowId = logEntry.runData.metadata?.subExecution?.workflowId;

if (!execData.value?.data || !executionId || !workflowId) {
return;
}

try {
const subExecution = await workflowsStore.fetchExecutionDataById(executionId);
const data = subExecution?.data
? (parse(subExecution.data as unknown as string) as IRunExecutionData)
: undefined;

if (!data || !subExecution) {
throw Error('Data is missing');
}

subWorkflowExecData.value[executionId] = data;
subWorkflows.value[workflowId] = new Workflow({
...subExecution.workflowData,
nodeTypes: workflowsStore.getNodeTypes(),
});
} catch (e) {
toast.showError(e, 'Unable to load sub execution');
}
}

watch(
// Fields that should trigger update
[
@@ -73,9 +108,15 @@ export function useExecutionData() {
() => workflowsStore.workflowExecutionResultDataLastUpdate,
],
useThrottleFn(
() => {
([executionId], [previousExecutionId]) => {
// Create deep copy to disable reactivity
execData.value = deepToRaw(workflowsStore.workflowExecutionData ?? undefined);

if (executionId !== previousExecutionId) {
// Reset sub workflow data when top-level execution changes
subWorkflowExecData.value = {};
subWorkflows.value = {};
}
},
updateInterval,
true,
@@ -84,5 +125,12 @@ export function useExecutionData() {
{ immediate: true },
);

return { execution, workflow, hasChat, latestNodeNameById, resetExecutionData };
return {
execution: execData,
entries,
hasChat,
latestNodeNameById,
resetExecutionData,
loadSubExecution,
};
}
@@ -1,9 +1,7 @@
import { type LogEntry } from '@/components/RunDataAi/utils';

export type LogEntrySelection =
| { type: 'initial' }
| { type: 'selected'; workflowId: string; data: LogEntry }
| { type: 'none'; workflowId: string };
| { type: 'selected'; id: string }
| { type: 'none' };

export const LOGS_PANEL_STATE = {
CLOSED: 'closed',
@@ -25,7 +25,6 @@ import {
import { computed, defineAsyncComponent, onBeforeUnmount, onMounted, ref, toRef, watch } from 'vue';

import type {
IExecutionResponse,
INodeUi,
INodeUpdatePropertiesInformation,
IRunDataDisplayMode,
@@ -121,7 +120,7 @@ export type EnterEditModeArgs = {

type Props = {
workflow: Workflow;
workflowExecution?: IExecutionResponse;
workflowExecution?: IRunExecutionData;
runIndex: number;
tooMuchDataTitle: string;
executingMessage: string;
@@ -252,7 +251,7 @@ const isReadOnlyRoute = computed(() => route.meta.readOnlyCanvas === true);
const isWaitNodeWaiting = computed(() => {
return (
node.value?.name &&
workflowExecution.value?.data?.resultData?.runData?.[node.value?.name]?.[props.runIndex]
workflowExecution.value?.resultData?.runData?.[node.value?.name]?.[props.runIndex]
?.executionStatus === 'waiting'
);
});
@@ -339,13 +338,13 @@ const executionHints = computed(() => {
});

const workflowExecution = computed(
() => props.workflowExecution ?? workflowsStore.getWorkflowExecution ?? undefined,
() => props.workflowExecution ?? workflowsStore.getWorkflowExecution?.data ?? undefined,
);
const workflowRunData = computed(() => {
if (workflowExecution.value === undefined) {
return null;
}
const executionData: IRunExecutionData | undefined = workflowExecution.value?.data;
const executionData: IRunExecutionData | undefined = workflowExecution.value;
if (executionData?.resultData) {
return executionData.resultData.runData;
}
@@ -780,7 +779,7 @@ function getNodeHints(): NodeHint[] {

if (workflowNode) {
const nodeHints = nodeHelpers.getNodeHints(props.workflow, workflowNode, nodeType.value, {
runExecutionData: workflowExecution.value?.data ?? null,
runExecutionData: workflowExecution.value ?? null,
runIndex: props.runIndex,
connectionInputData: parentNodeOutputData.value,
});
@@ -8,9 +8,10 @@ import {
} from '@/__tests__/mocks';
import {
createAiData,
createLogEntries,
createLogTree,
deepToRaw,
findSelectedLogEntry,
getDefaultCollapsedEntries,
getTreeNodeData,
getTreeNodeDataV2,
} from '@/components/RunDataAi/utils';
@@ -23,6 +24,7 @@ import {
import { type LogEntrySelection } from '../CanvasChat/types/logs';
import { type IExecutionResponse } from '@/Interface';
import { isReactive, reactive } from 'vue';
import { createTestLogTreeCreationContext } from '../CanvasChat/__test__/data';

describe(getTreeNodeData, () => {
it('should generate one node per execution', () => {
@@ -537,10 +539,11 @@ describe(getTreeNodeData, () => {
describe(getTreeNodeDataV2, () => {
it('should generate one node per execution', () => {
const workflow = createTestWorkflowObject({
id: 'test-wf-id',
nodes: [
createTestNode({ name: 'A' }),
createTestNode({ name: 'B' }),
createTestNode({ name: 'C' }),
createTestNode({ name: 'A', id: 'test-node-id-a' }),
createTestNode({ name: 'B', id: 'test-node-id-b' }),
createTestNode({ name: 'C', id: 'test-node-id-c' }),
],
connections: {
B: { ai_tool: [[{ node: 'A', type: NodeConnectionTypes.AiTool, index: 0 }]] },
@@ -554,115 +557,66 @@ describe(getTreeNodeDataV2, () => {
const jsonB2 = { tokenUsage: { completionTokens: 4, promptTokens: 5, totalTokens: 6 } };
const jsonC1 = { tokenUsageEstimate: { completionTokens: 7, promptTokens: 8, totalTokens: 9 } };

expect(
getTreeNodeDataV2('A', createTestTaskData({}), workflow, {
A: [createTestTaskData({ startTime: Date.parse('2025-02-26T00:00:00.000Z') })],
B: [
createTestTaskData({
startTime: Date.parse('2025-02-26T00:00:01.000Z'),
data: { main: [[{ json: jsonB1 }]] },
}),
createTestTaskData({
startTime: Date.parse('2025-02-26T00:00:03.000Z'),
data: { main: [[{ json: jsonB2 }]] },
}),
],
C: [
createTestTaskData({
startTime: Date.parse('2025-02-26T00:00:02.000Z'),
data: { main: [[{ json: jsonC1 }]] },
}),
createTestTaskData({ startTime: Date.parse('2025-02-26T00:00:04.000Z') }),
],
}),
).toEqual([
{
depth: 0,
id: 'A:0',
node: expect.objectContaining({ name: 'A' }),
runIndex: 0,
runData: expect.objectContaining({ startTime: 0 }),
parent: undefined,
consumedTokens: {
completionTokens: 0,
promptTokens: 0,
totalTokens: 0,
isEstimate: false,
},
children: [
{
depth: 1,
id: 'B:0',
node: expect.objectContaining({ name: 'B' }),
runIndex: 0,
runData: expect.objectContaining({
startTime: Date.parse('2025-02-26T00:00:01.000Z'),
}),
parent: expect.objectContaining({ node: expect.objectContaining({ name: 'A' }) }),
consumedTokens: {
completionTokens: 1,
promptTokens: 2,
totalTokens: 3,
isEstimate: false,
},
children: [
{
children: [],
depth: 2,
id: 'C:0',
node: expect.objectContaining({ name: 'C' }),
runIndex: 0,
runData: expect.objectContaining({
startTime: Date.parse('2025-02-26T00:00:02.000Z'),
}),
parent: expect.objectContaining({ node: expect.objectContaining({ name: 'B' }) }),
consumedTokens: {
completionTokens: 7,
promptTokens: 8,
totalTokens: 9,
isEstimate: true,
},
},
],
},
{
depth: 1,
id: 'B:1',
node: expect.objectContaining({ name: 'B' }),
runIndex: 1,
runData: expect.objectContaining({
startTime: Date.parse('2025-02-26T00:00:03.000Z'),
}),
parent: expect.objectContaining({ node: expect.objectContaining({ name: 'A' }) }),
consumedTokens: {
completionTokens: 4,
promptTokens: 5,
totalTokens: 6,
isEstimate: false,
},
children: [
{
children: [],
depth: 2,
id: 'C:1',
node: expect.objectContaining({ name: 'C' }),
runIndex: 1,
runData: expect.objectContaining({
startTime: Date.parse('2025-02-26T00:00:04.000Z'),
}),
parent: expect.objectContaining({ node: expect.objectContaining({ name: 'B' }) }),
consumedTokens: {
completionTokens: 0,
promptTokens: 0,
totalTokens: 0,
isEstimate: false,
},
},
],
},
],
},
]);
const ctx = createTestLogTreeCreationContext(workflow, {
A: [createTestTaskData({ startTime: 1740528000000 })],
B: [
createTestTaskData({
startTime: 1740528000001,
data: { main: [[{ json: jsonB1 }]] },
}),
createTestTaskData({
startTime: 1740528000002,
data: { main: [[{ json: jsonB2 }]] },
}),
],
C: [
createTestTaskData({
startTime: 1740528000003,
data: { main: [[{ json: jsonC1 }]] },
}),
createTestTaskData({ startTime: 1740528000004 }),
],
});
const logTree = getTreeNodeDataV2('A', ctx.data.resultData.runData.A[0], undefined, ctx);
expect(logTree.length).toBe(1);

expect(logTree[0].id).toBe('test-wf-id:A:test-execution-id:0');
expect(logTree[0].depth).toBe(0);
expect(logTree[0].runIndex).toBe(0);
expect(logTree[0].parent).toBe(undefined);
expect(logTree[0].runData.startTime).toBe(1740528000000);
expect(logTree[0].children.length).toBe(2);

expect(logTree[0].children[0].id).toBe('test-wf-id:B:test-execution-id:0');
expect(logTree[0].children[0].depth).toBe(1);
expect(logTree[0].children[0].runIndex).toBe(0);
expect(logTree[0].children[0].parent?.node.name).toBe('A');
expect(logTree[0].children[0].runData.startTime).toBe(1740528000001);
expect(logTree[0].children[0].consumedTokens.isEstimate).toBe(false);
expect(logTree[0].children[0].consumedTokens.completionTokens).toBe(1);
expect(logTree[0].children[0].children.length).toBe(1);

expect(logTree[0].children[0].children[0].id).toBe('test-wf-id:C:test-execution-id:0');
expect(logTree[0].children[0].children[0].depth).toBe(2);
expect(logTree[0].children[0].children[0].runIndex).toBe(0);
expect(logTree[0].children[0].children[0].parent?.node.name).toBe('B');
expect(logTree[0].children[0].children[0].consumedTokens.isEstimate).toBe(true);
expect(logTree[0].children[0].children[0].consumedTokens.completionTokens).toBe(7);

expect(logTree[0].children[1].id).toBe('test-wf-id:B:test-execution-id:1');
expect(logTree[0].children[1].depth).toBe(1);
expect(logTree[0].children[1].runIndex).toBe(1);
expect(logTree[0].children[1].parent?.node.name).toBe('A');
expect(logTree[0].children[1].consumedTokens.isEstimate).toBe(false);
expect(logTree[0].children[1].consumedTokens.completionTokens).toBe(4);
expect(logTree[0].children[1].children.length).toBe(1);

expect(logTree[0].children[1].children[0].id).toBe('test-wf-id:C:test-execution-id:1');
expect(logTree[0].children[1].children[0].depth).toBe(2);
expect(logTree[0].children[1].children[0].runIndex).toBe(1);
expect(logTree[0].children[1].children[0].parent?.node.name).toBe('B');
expect(logTree[0].children[1].children[0].consumedTokens.completionTokens).toBe(0);
});

it('should filter node executions based on source node', () => {
@@ -713,13 +667,23 @@ describe(getTreeNodeDataV2, () => {
};

// Test for RootNode1 - should only show SharedSubNode with source RootNode1
const rootNode1Tree = getTreeNodeDataV2('RootNode1', runData.RootNode1[0], workflow, runData);
const rootNode1Tree = getTreeNodeDataV2(
'RootNode1',
runData.RootNode1[0],
undefined,
createTestLogTreeCreationContext(workflow, runData),
);
expect(rootNode1Tree[0].children.length).toBe(1);
expect(rootNode1Tree[0].children[0].node.name).toBe('SharedSubNode');
expect(rootNode1Tree[0].children[0].runIndex).toBe(0);

// Test for RootNode2 - should only show SharedSubNode with source RootNode2
const rootNode2Tree = getTreeNodeDataV2('RootNode2', runData.RootNode2[0], workflow, runData);
const rootNode2Tree = getTreeNodeDataV2(
'RootNode2',
runData.RootNode2[0],
undefined,
createTestLogTreeCreationContext(workflow, runData),
);
expect(rootNode2Tree[0].children.length).toBe(1);
expect(rootNode2Tree[0].children[0].node.name).toBe('SharedSubNode');
expect(rootNode2Tree[0].children[0].runIndex).toBe(1);
@@ -760,13 +724,23 @@ describe(getTreeNodeDataV2, () => {
};

// Test for run #1 of RootNode - should only show SubNode with source run index 0
const rootNode1Tree = getTreeNodeDataV2('RootNode', runData.RootNode[0], workflow, runData, 0);
const rootNode1Tree = getTreeNodeDataV2(
'RootNode',
runData.RootNode[0],
0,
createTestLogTreeCreationContext(workflow, runData),
);
expect(rootNode1Tree[0].children.length).toBe(1);
expect(rootNode1Tree[0].children[0].node.name).toBe('SubNode');
expect(rootNode1Tree[0].children[0].runIndex).toBe(0);

// Test for run #2 of RootNode - should only show SubNode with source run index 1
const rootNode2Tree = getTreeNodeDataV2('RootNode', runData.RootNode[1], workflow, runData, 1);
const rootNode2Tree = getTreeNodeDataV2(
'RootNode',
runData.RootNode[1],
1,
createTestLogTreeCreationContext(workflow, runData),
);
expect(rootNode2Tree[0].children.length).toBe(1);
expect(rootNode2Tree[0].children[0].node.name).toBe('SubNode');
expect(rootNode2Tree[0].children[0].runIndex).toBe(1);
@@ -801,7 +775,12 @@ describe(getTreeNodeDataV2, () => {
};

// Test for RootNode - should still show SubNode even without source info
const rootNodeTree = getTreeNodeDataV2('RootNode', runData.RootNode[0], workflow, runData);
const rootNodeTree = getTreeNodeDataV2(
'RootNode',
runData.RootNode[0],
undefined,
createTestLogTreeCreationContext(workflow, runData),
);
expect(rootNodeTree[0].children.length).toBe(1);
expect(rootNodeTree[0].children[0].node.name).toBe('SubNode');
expect(rootNodeTree[0].children[0].runIndex).toBe(0);
@@ -836,7 +815,12 @@ describe(getTreeNodeDataV2, () => {
};

// Test for RootNode - should still show SubNode even with empty source array
const rootNodeTree = getTreeNodeDataV2('RootNode', runData.RootNode[0], workflow, runData);
const rootNodeTree = getTreeNodeDataV2(
'RootNode',
runData.RootNode[0],
undefined,
createTestLogTreeCreationContext(workflow, runData),
);
expect(rootNodeTree[0].children.length).toBe(1);
expect(rootNodeTree[0].children[0].node.name).toBe('SubNode');
expect(rootNodeTree[0].children[0].runIndex).toBe(0);
@@ -858,7 +842,12 @@ describe(getTreeNodeDataV2, () => {
SubNode: [createTestTaskData({ executionIndex: 1, source: [null] })],
};

const rootNodeTree = getTreeNodeDataV2('RootNode', runData.RootNode[0], workflow, runData);
const rootNodeTree = getTreeNodeDataV2(
'RootNode',
runData.RootNode[0],
undefined,
createTestLogTreeCreationContext(workflow, runData),
);

expect(rootNodeTree[0].children.length).toBe(1);
expect(rootNodeTree[0].children[0].node.name).toBe('SubNode');
@@ -930,7 +919,12 @@ describe(getTreeNodeDataV2, () => {
};

// Test filtering for RootNode1
const rootNode1Tree = getTreeNodeDataV2('RootNode1', runData.RootNode1[0], workflow, runData);
const rootNode1Tree = getTreeNodeDataV2(
'RootNode1',
runData.RootNode1[0],
undefined,
createTestLogTreeCreationContext(workflow, runData),
);
expect(rootNode1Tree[0].children.length).toBe(1);
expect(rootNode1Tree[0].children[0].node.name).toBe('SharedSubNode');
expect(rootNode1Tree[0].children[0].runIndex).toBe(0);
@@ -939,7 +933,12 @@ describe(getTreeNodeDataV2, () => {
expect(rootNode1Tree[0].children[0].children[0].runIndex).toBe(0);

// Test filtering for RootNode2
const rootNode2Tree = getTreeNodeDataV2('RootNode2', runData.RootNode2[0], workflow, runData);
const rootNode2Tree = getTreeNodeDataV2(
'RootNode2',
runData.RootNode2[0],
undefined,
createTestLogTreeCreationContext(workflow, runData),
);
expect(rootNode2Tree[0].children.length).toBe(1);
expect(rootNode2Tree[0].children[0].node.name).toBe('SharedSubNode');
expect(rootNode2Tree[0].children[0].runIndex).toBe(1);
@@ -1020,7 +1019,12 @@ describe(getTreeNodeDataV2, () => {
};

// Test filtering for RootNode1 -> SubNodeA -> DeepNode
const rootNode1Tree = getTreeNodeDataV2('RootNode1', runData.RootNode1[0], workflow, runData);
const rootNode1Tree = getTreeNodeDataV2(
'RootNode1',
runData.RootNode1[0],
undefined,
createTestLogTreeCreationContext(workflow, runData),
);
expect(rootNode1Tree[0].children.length).toBe(1);
expect(rootNode1Tree[0].children[0].node.name).toBe('SubNodeA');
expect(rootNode1Tree[0].children[0].children.length).toBe(1);
@@ -1028,7 +1032,12 @@ describe(getTreeNodeDataV2, () => {
expect(rootNode1Tree[0].children[0].children[0].runIndex).toBe(0); // First DeepNode execution

// Test filtering for RootNode2 -> SubNodeB -> DeepNode
const rootNode2Tree = getTreeNodeDataV2('RootNode2', runData.RootNode2[0], workflow, runData);
const rootNode2Tree = getTreeNodeDataV2(
'RootNode2',
runData.RootNode2[0],
undefined,
createTestLogTreeCreationContext(workflow, runData),
);

expect(rootNode2Tree[0].children.length).toBe(1);
expect(rootNode2Tree[0].children[0].node.name).toBe('SubNodeB');
@@ -1042,13 +1051,10 @@ describe(getTreeNodeDataV2, () => {
describe(findSelectedLogEntry, () => {
function find(state: LogEntrySelection, response: IExecutionResponse) {
return findSelectedLogEntry(state, {
...response,
tree: createLogEntries(
createTestWorkflowObject(response.workflowData),
response.data?.resultData.runData ?? {},
),
});
return findSelectedLogEntry(
state,
createLogTree(createTestWorkflowObject(response.workflowData), response),
);
}

describe('when log is not manually selected', () => {
@@ -1193,11 +1199,11 @@ describe(findSelectedLogEntry, () => {

describe('when log is manually selected', () => {
it('should return manually selected log', () => {
const nodeA = createTestNode({ name: 'A' });
const response = createTestWorkflowExecutionResponse({
id: 'my-exec-id',
workflowData: createTestWorkflow({
id: 'test-wf-id',
nodes: [nodeA, createTestNode({ name: 'B' })],
nodes: [createTestNode({ name: 'A' }), createTestNode({ name: 'B' })],
}),
data: {
resultData: {
@@ -1209,14 +1215,7 @@ describe(findSelectedLogEntry, () => {
},
});

const result = find(
{
type: 'selected',
workflowId: 'test-wf-id',
data: createTestLogEntry({ node: nodeA, runIndex: 0 }),
},
response,
);
const result = find({ type: 'selected', id: 'test-wf-id:A:my-exec-id:0' }, response);

expect(result).toEqual(
expect.objectContaining({ node: expect.objectContaining({ name: 'A' }), runIndex: 0 }),
@@ -1225,7 +1224,7 @@ describe(findSelectedLogEntry, () => {
});
});

describe(createLogEntries, () => {
describe(createLogTree, () => {
it('should return root node log entries in ascending order of executionIndex', () => {
const workflow = createTestWorkflowObject({
nodes: [
@@ -1238,33 +1237,38 @@ describe(createLogEntries, () => {
C: { main: [[{ node: 'B', type: NodeConnectionTypes.Main, index: 0 }]] },
},
});
const execution = createTestWorkflowExecutionResponse({
data: {
resultData: {
runData: {
A: [
createTestTaskData({
startTime: Date.parse('2025-04-04T00:00:00.000Z'),
executionIndex: 0,
}),
],
B: [
createTestTaskData({
startTime: Date.parse('2025-04-04T00:00:01.000Z'),
executionIndex: 1,
}),
],
C: [
createTestTaskData({
startTime: Date.parse('2025-04-04T00:00:02.000Z'),
executionIndex: 3,
}),
createTestTaskData({
startTime: Date.parse('2025-04-04T00:00:03.000Z'),
executionIndex: 2,
}),
],
},
},
},
});

expect(
createLogEntries(workflow, {
A: [
createTestTaskData({
startTime: Date.parse('2025-04-04T00:00:00.000Z'),
executionIndex: 0,
}),
],
B: [
createTestTaskData({
startTime: Date.parse('2025-04-04T00:00:01.000Z'),
executionIndex: 1,
}),
],
C: [
createTestTaskData({
startTime: Date.parse('2025-04-04T00:00:02.000Z'),
executionIndex: 3,
}),
createTestTaskData({
startTime: Date.parse('2025-04-04T00:00:03.000Z'),
executionIndex: 2,
}),
],
}),
).toEqual([
expect(createLogTree(workflow, execution)).toEqual([
expect.objectContaining({ node: expect.objectContaining({ name: 'A' }), runIndex: 0 }),
expect.objectContaining({ node: expect.objectContaining({ name: 'B' }), runIndex: 0 }),
expect.objectContaining({ node: expect.objectContaining({ name: 'C' }), runIndex: 1 }),
@@ -1290,30 +1294,39 @@ describe(createLogEntries, () => {
});
expect(
createLogEntries(workflow, {
A: [
createTestTaskData({
startTime: Date.parse('2025-04-04T00:00:00.000Z'),
executionIndex: 0,
}),
],
B: [
createTestTaskData({
startTime: Date.parse('2025-04-04T00:00:01.000Z'),
executionIndex: 1,
}),
],
C: [
createTestTaskData({
startTime: Date.parse('2025-04-04T00:00:02.000Z'),
executionIndex: 3,
}),
createTestTaskData({
startTime: Date.parse('2025-04-04T00:00:03.000Z'),
executionIndex: 2,
}),
],
}),
createLogTree(
workflow,
createTestWorkflowExecutionResponse({
data: {
resultData: {
runData: {
A: [
createTestTaskData({
startTime: Date.parse('2025-04-04T00:00:00.000Z'),
executionIndex: 0,
}),
],
B: [
createTestTaskData({
startTime: Date.parse('2025-04-04T00:00:01.000Z'),
executionIndex: 1,
}),
],
C: [
createTestTaskData({
startTime: Date.parse('2025-04-04T00:00:02.000Z'),
executionIndex: 3,
}),
createTestTaskData({
startTime: Date.parse('2025-04-04T00:00:03.000Z'),
executionIndex: 2,
}),
],
},
},
},
}),
),
).toEqual([
expect.objectContaining({ node: expect.objectContaining({ name: 'A' }), runIndex: 0 }),
expect.objectContaining({
@@ -1334,10 +1347,90 @@ describe(createLogEntries, () => {
A: { main: [[{ node: 'B', type: NodeConnectionTypes.Main, index: 0 }]] },
},
});
const response = createTestWorkflowExecutionResponse({
data: {
resultData: {
runData: {
A: [createTestTaskData()],
B: [createTestTaskData()],
},
},
},
});

expect(
createLogEntries(workflow, { A: [createTestTaskData()], B: [createTestTaskData()] }),
).toEqual([expect.objectContaining({ node: expect.objectContaining({ name: 'A' }) })]);
expect(createLogTree(workflow, response)).toEqual([
expect.objectContaining({ node: expect.objectContaining({ name: 'A' }) }),
]);
});

it('should include runs of a sub execution', () => {
const workflow = createTestWorkflowObject({
id: 'root-workflow-id',
nodes: [createTestNode({ name: 'A' }), createTestNode({ name: 'B' })],
connections: {
A: { main: [[{ node: 'B', type: NodeConnectionTypes.Main, index: 0 }]] },
},
});
const subWorkflow = createTestWorkflowObject({
id: 'sub-workflow-id',
nodes: [createTestNode({ name: 'C' })],
});
const rootExecutionData = createTestWorkflowExecutionResponse({
id: 'root-exec-id',
data: {
resultData: {
runData: {
A: [createTestTaskData()],
B: [
createTestTaskData({
metadata: {
subExecution: { workflowId: 'sub-workflow-id', executionId: 'sub-exec-id' },
},
}),
],
},
},
},
});
const subExecutionData = {
resultData: { runData: { C: [createTestTaskData(), createTestTaskData()] } },
};
const logs = createLogTree(
workflow,
rootExecutionData,
{ 'sub-workflow-id': subWorkflow },
{ 'sub-exec-id': subExecutionData },
);

expect(logs).toHaveLength(2);

expect(logs[0].node.name).toBe('A');
expect(logs[0].depth).toBe(0);
expect(logs[0].workflow).toBe(workflow);
expect(logs[0].execution).toBe(rootExecutionData.data);
expect(logs[0].executionId).toBe('root-exec-id');
expect(logs[0].children).toHaveLength(0);

expect(logs[1].node.name).toBe('B');
expect(logs[1].depth).toBe(0);
expect(logs[1].workflow).toBe(workflow);
expect(logs[1].execution).toBe(rootExecutionData.data);
expect(logs[1].executionId).toBe('root-exec-id');
expect(logs[1].children).toHaveLength(2);

expect(logs[1].children[0].node.name).toBe('C');
expect(logs[1].children[0].depth).toBe(1);
expect(logs[1].children[0].workflow).toBe(subWorkflow);
expect(logs[1].children[0].execution).toBe(subExecutionData);
expect(logs[1].children[0].executionId).toBe('sub-exec-id');
expect(logs[1].children[0].children).toHaveLength(0);

expect(logs[1].children[1].node.name).toBe('C');
expect(logs[1].children[1].depth).toBe(1);
expect(logs[1].children[1].workflow).toBe(subWorkflow);
expect(logs[1].children[1].execution).toBe(subExecutionData);
expect(logs[1].children[1].executionId).toBe('sub-exec-id');
expect(logs[1].children[1].children).toHaveLength(0);
|
||||
});
|
||||
});

@@ -1363,3 +1456,42 @@ describe(deepToRaw, () => {
expect(isReactive(raw.bazz)).toBe(false);
});
});

describe(getDefaultCollapsedEntries, () => {
it('should recursively find logs for runs with a sub execution and has no child logs', () => {
const entries = [
// Has sub execution and has no children
createTestLogEntry({
id: 'l0',
runData: createTestTaskData({
metadata: { subExecution: { workflowId: 'w0', executionId: 'e0' } },
}),
children: [],
}),
// Has no sub execution
createTestLogEntry({ id: 'l1' }),
// Has sub execution and has children
createTestLogEntry({
id: 'l2',
runData: createTestTaskData({
metadata: { subExecution: { workflowId: 'w0', executionId: 'e0' } },
}),
children: [
// Has no sub execution - nested
createTestLogEntry({ id: 'l3' }),
// Has sub execution and has no children - nested
createTestLogEntry({
id: 'l4',
runData: createTestTaskData({
metadata: {
subExecution: { workflowId: 'w0', executionId: 'e0' },
},
}),
}),
],
}),
];

expect(getDefaultCollapsedEntries(entries)).toEqual({ l0: true, l4: true });
});
});

@@ -6,12 +6,12 @@ import {
} from '@/Interface';
import {
AGENT_LANGCHAIN_NODE_TYPE,
type IRunData,
type INodeExecutionData,
type ITaskData,
type ITaskDataConnections,
type NodeConnectionType,
type Workflow,
type IRunExecutionData,
} from 'n8n-workflow';
import { type LogEntrySelection } from '../CanvasChat/types/logs';
import { isProxy, isReactive, isRef, toRaw } from 'vue';
@@ -244,10 +244,6 @@ export function formatTokenUsageCount(
return usage.isEstimate ? `~${count}` : count.toLocaleString();
}

export interface ExecutionLogViewData extends IExecutionResponse {
tree: LogEntry[];
}

export interface LogEntry {
parent?: LogEntry;
node: INodeUi;
@@ -257,6 +253,19 @@ export interface LogEntry {
runIndex: number;
runData: ITaskData;
consumedTokens: LlmTokenUsageData;
workflow: Workflow;
executionId: string;
execution: IRunExecutionData;
}

export interface LogTreeCreationContext {
parent: LogEntry | undefined;
depth: number;
workflow: Workflow;
executionId: string;
data: IRunExecutionData;
workflows: Record<string, Workflow>;
subWorkflowData: Record<string, IRunExecutionData>;
}

export interface LatestNodeInfo {
@@ -288,87 +297,117 @@ function getConsumedTokensV2(task: ITaskData): LlmTokenUsageData {
}

function createNodeV2(
parent: LogEntry | undefined,
node: INodeUi,
currentDepth: number,
context: LogTreeCreationContext,
runIndex: number,
runData: ITaskData,
children: LogEntry[] = [],
): LogEntry {
return {
parent,
parent: context.parent,
node,
id: `${node.name}:${runIndex}`,
depth: currentDepth,
id: `${context.workflow.id}:${node.name}:${context.executionId}:${runIndex}`,
depth: context.depth,
runIndex,
runData,
children,
consumedTokens: getConsumedTokensV2(runData),
workflow: context.workflow,
executionId: context.executionId,
execution: context.data,
};
}
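
// A minimal sketch of the id scheme above, using the fixtures from the sub execution test in
// this change: the first run of node "B" in workflow "root-workflow-id" during execution
// "root-exec-id" would get the id
//   'root-workflow-id:B:root-exec-id:0'
// so the same node name no longer collides across workflows, or between a root execution and
// its sub executions, as it could with the previous `${node.name}:${runIndex}` form.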

export function getTreeNodeDataV2(
nodeName: string,
runData: ITaskData,
workflow: Workflow,
data: IRunData,
runIndex?: number,
runIndex: number | undefined,
context: LogTreeCreationContext,
): LogEntry[] {
const node = workflow.getNode(nodeName);
const node = context.workflow.getNode(nodeName);

return node ? getTreeNodeDataRecV2(undefined, node, runData, 0, workflow, data, runIndex) : [];
return node ? getTreeNodeDataRecV2(node, runData, context, runIndex) : [];
}

function getChildNodes(
treeNode: LogEntry,
node: INodeUi,
runIndex: number | undefined,
context: LogTreeCreationContext,
) {
if (hasSubExecution(treeNode)) {
const workflowId = treeNode.runData.metadata?.subExecution?.workflowId;
const executionId = treeNode.runData.metadata?.subExecution?.executionId;
const workflow = workflowId ? context.workflows[workflowId] : undefined;
const subWorkflowRunData = executionId ? context.subWorkflowData[executionId] : undefined;

if (!workflow || !subWorkflowRunData || !executionId) {
return [];
}

return createLogTreeRec({
...context,
parent: treeNode,
depth: context.depth + 1,
workflow,
executionId,
data: subWorkflowRunData,
});
}

// Get the first level of children
const connectedSubNodes = context.workflow.getParentNodes(node.name, 'ALL_NON_MAIN', 1);

return connectedSubNodes.flatMap((subNodeName) =>
(context.data.resultData.runData[subNodeName] ?? []).flatMap((t, index) => {
// At root depth, filter out node executions that weren't triggered by this node
// This prevents showing duplicate executions when a sub-node is connected to multiple parents
// Only filter nodes that have source information with valid previousNode references
const isMatched =
context.depth === 0 && t.source.some((source) => source !== null)
? t.source.some(
(source) =>
source?.previousNode === node.name &&
(runIndex === undefined || source.previousNodeRun === runIndex),
)
: runIndex === undefined || index === runIndex;

if (!isMatched) {
return [];
}

const subNode = context.workflow.getNode(subNodeName);

return subNode
? getTreeNodeDataRecV2(
subNode,
t,
{ ...context, depth: context.depth + 1, parent: treeNode },
index,
)
: [];
}),
);
}

function getTreeNodeDataRecV2(
parent: LogEntry | undefined,
node: INodeUi,
runData: ITaskData,
currentDepth: number,
workflow: Workflow,
data: IRunData,
context: LogTreeCreationContext,
runIndex: number | undefined,
): LogEntry[] {
// Get the first level of children
const connectedSubNodes = workflow.getParentNodes(node.name, 'ALL_NON_MAIN', 1);
const treeNode = createNodeV2(parent, node, currentDepth, runIndex ?? 0, runData);
const treeNode = createNodeV2(node, context, runIndex ?? 0, runData);
const children = getChildNodes(treeNode, node, runIndex, context).sort((a, b) => {
// Sort the data by execution index or start time
if (a.runData.executionIndex !== undefined && b.runData.executionIndex !== undefined) {
return a.runData.executionIndex - b.runData.executionIndex;
}

const children = connectedSubNodes
.flatMap((subNodeName) =>
(data[subNodeName] ?? []).flatMap((t, index) => {
// At root depth, filter out node executions that weren't triggered by this node
// This prevents showing duplicate executions when a sub-node is connected to multiple parents
// Only filter nodes that have source information with valid previousNode references
const isMatched =
currentDepth === 0 && t.source.some((source) => source !== null)
? t.source.some(
(source) =>
source?.previousNode === node.name &&
(runIndex === undefined || source.previousNodeRun === runIndex),
)
: runIndex === undefined || index === runIndex;
const aTime = a.runData.startTime ?? 0;
const bTime = b.runData.startTime ?? 0;

if (!isMatched) {
return [];
}

const subNode = workflow.getNode(subNodeName);

return subNode
? getTreeNodeDataRecV2(treeNode, subNode, t, currentDepth + 1, workflow, data, index)
: [];
}),
)
.sort((a, b) => {
// Sort the data by execution index or start time
if (a.runData.executionIndex !== undefined && b.runData.executionIndex !== undefined) {
return a.runData.executionIndex - b.runData.executionIndex;
}

const aTime = a.runData.startTime ?? 0;
const bTime = b.runData.startTime ?? 0;

return aTime - bTime;
});
return aTime - bTime;
});

treeNode.children = children;

@@ -379,35 +418,39 @@ export function getTotalConsumedTokens(...usage: LlmTokenUsageData[]): LlmTokenU
return usage.reduce(addTokenUsageData, emptyTokenUsageData);
}

export function getSubtreeTotalConsumedTokens(treeNode: LogEntry): LlmTokenUsageData {
return getTotalConsumedTokens(
treeNode.consumedTokens,
...treeNode.children.map(getSubtreeTotalConsumedTokens),
);
export function getSubtreeTotalConsumedTokens(
treeNode: LogEntry,
includeSubWorkflow: boolean,
): LlmTokenUsageData {
const executionId = treeNode.executionId;

function calculate(currentNode: LogEntry): LlmTokenUsageData {
if (!includeSubWorkflow && currentNode.executionId !== executionId) {
return emptyTokenUsageData;
}

return getTotalConsumedTokens(
currentNode.consumedTokens,
...currentNode.children.map(calculate),
);
}

return calculate(treeNode);
}
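
// A short sketch of the new includeSubWorkflow flag, assuming an entry shaped like the "B"
// entry from the sub execution test (the entry belongs to 'root-exec-id', its children to
// 'sub-exec-id'):
//   getSubtreeTotalConsumedTokens(entry, true)  // sums "B" plus both "C" child runs
//   getSubtreeTotalConsumedTokens(entry, false) // stops at the execution boundary: "B" only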

function findLogEntryToAutoSelectRec(
data: ExecutionLogViewData,
subTree: LogEntry[],
depth: number,
): LogEntry | undefined {
function findLogEntryToAutoSelectRec(subTree: LogEntry[], depth: number): LogEntry | undefined {
for (const entry of subTree) {
const taskData = data.data?.resultData.runData[entry.node.name]?.[entry.runIndex];

if (taskData?.error) {
if (entry.runData?.error) {
return entry;
}

const childAutoSelect = findLogEntryToAutoSelectRec(data, entry.children, depth + 1);
const childAutoSelect = findLogEntryToAutoSelectRec(entry.children, depth + 1);

if (childAutoSelect) {
return childAutoSelect;
}

if (
data.workflowData.nodes.find((n) => n.name === entry.node.name)?.type ===
AGENT_LANGCHAIN_NODE_TYPE
) {
if (entry.node.type === AGENT_LANGCHAIN_NODE_TYPE) {
return entry;
}
}
@@ -415,11 +458,28 @@ function findLogEntryToAutoSelectRec(
return depth === 0 ? subTree[0] : undefined;
}

export function createLogEntries(workflow: Workflow, runData: IRunData) {
const runs = Object.entries(runData)
export function createLogTree(
workflow: Workflow,
response: IExecutionResponse,
workflows: Record<string, Workflow> = {},
subWorkflowData: Record<string, IRunExecutionData> = {},
) {
return createLogTreeRec({
parent: undefined,
depth: 0,
executionId: response.id,
workflow,
workflows,
data: response.data ?? { resultData: { runData: {} } },
subWorkflowData,
});
}
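
// A minimal usage sketch (the names below are illustrative, not part of this module): the two
// optional maps are keyed by workflow id and by execution id respectively, matching the lookups
// in getChildNodes.
//   const tree = createLogTree(
//     workflow,
//     executionResponse,
//     { [subWorkflow.id]: subWorkflow },
//     { [subExecutionId]: subExecutionData },
//   );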

function createLogTreeRec(context: LogTreeCreationContext) {
const runs = Object.entries(context.data.resultData.runData)
.flatMap(([nodeName, taskData]) =>
workflow.getChildNodes(nodeName, 'ALL_NON_MAIN').length > 0 ||
workflow.getNode(nodeName)?.disabled
context.workflow.getChildNodes(nodeName, 'ALL_NON_MAIN').length > 0 ||
context.workflow.getNode(nodeName)?.disabled
? [] // skip sub nodes and disabled nodes
: taskData.map((task, runIndex) => ({
nodeName,
@@ -439,37 +499,45 @@ export function createLogEntries(workflow: Workflow, runData: IRunData) {
});

return runs.flatMap(({ nodeName, runIndex, task, nodeHasMultipleRuns }) =>
getTreeNodeDataV2(
nodeName,
task,
workflow,
runData,
nodeHasMultipleRuns ? runIndex : undefined,
),
getTreeNodeDataV2(nodeName, task, nodeHasMultipleRuns ? runIndex : undefined, context),
);
}

export function includesLogEntry(log: LogEntry, logs: LogEntry[]): boolean {
return logs.some(
(l) =>
(l.node.name === log.node.name && log.runIndex === l.runIndex) ||
includesLogEntry(log, l.children),
);
export function findLogEntryRec(id: string, entries: LogEntry[]): LogEntry | undefined {
for (const entry of entries) {
if (entry.id === id) {
return entry;
}

const child = findLogEntryRec(id, entry.children);

if (child) {
return child;
}
}

return undefined;
}

export function findSelectedLogEntry(
state: LogEntrySelection,
execution?: ExecutionLogViewData,
selection: LogEntrySelection,
entries: LogEntry[],
): LogEntry | undefined {
return state.type === 'initial' ||
state.workflowId !== execution?.workflowData.id ||
(state.type === 'selected' && !includesLogEntry(state.data, execution.tree))
? execution
? findLogEntryToAutoSelectRec(execution, execution.tree, 0)
: undefined
: state.type === 'none'
? undefined
: state.data;
switch (selection.type) {
case 'initial':
return findLogEntryToAutoSelectRec(entries, 0);
case 'none':
return undefined;
case 'selected': {
const entry = findLogEntryRec(selection.id, entries);

if (entry) {
return entry;
}

return findLogEntryToAutoSelectRec(entries, 0);
}
}
}

// eslint-disable-next-line @typescript-eslint/no-explicit-any
@@ -526,3 +594,37 @@ export function flattenLogEntries(

return ret;
}

export function hasSubExecution(entry: LogEntry): boolean {
return !!entry.runData.metadata?.subExecution;
}

export function getDefaultCollapsedEntries(entries: LogEntry[]): Record<string, boolean> {
const ret: Record<string, boolean> = {};

function collect(children: LogEntry[]) {
for (const entry of children) {
if (hasSubExecution(entry) && entry.children.length === 0) {
ret[entry.id] = true;
}

collect(entry.children);
}
}

collect(entries);

return ret;
}
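
// A small worked example, mirroring the getDefaultCollapsedEntries test in this change:
// entries 'l0' and 'l4' reference a sub execution but have no resolved child entries, so the
// result is { l0: true, l4: true }, presumably so those rows start collapsed in the log view;
// entries with children or without a sub execution ('l1', 'l2', 'l3') are left out.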

export function getDepth(entry: LogEntry): number {
let depth = 0;
let currentEntry = entry;

while (currentEntry.parent !== undefined) {
currentEntry = currentEntry.parent;
depth++;
}

return depth;
}

@@ -30,7 +30,6 @@ import type {

import type {
ICredentialsResponse,
IExecutionResponse,
INodeUi,
INodeUpdatePropertiesInformation,
NodePanelType,
@@ -543,12 +542,12 @@ export function useNodeHelpers() {
}
}

function getNodeTaskData(nodeName: string, runIndex = 0, execution?: IExecutionResponse) {
function getNodeTaskData(nodeName: string, runIndex = 0, execution?: IRunExecutionData) {
return getAllNodeTaskData(nodeName, execution)?.[runIndex] ?? null;
}

function getAllNodeTaskData(nodeName: string, execution?: IExecutionResponse) {
const runData = execution?.data?.resultData.runData ?? workflowsStore.getWorkflowRunData;
function getAllNodeTaskData(nodeName: string, execution?: IRunExecutionData) {
const runData = execution?.resultData.runData ?? workflowsStore.getWorkflowRunData;

return runData?.[nodeName] ?? null;
}
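
// With the switch from IExecutionResponse to IRunExecutionData, a caller can pass a log
// entry's `execution` field directly (illustrative):
//   getNodeTaskData(entry.node.name, entry.runIndex, entry.execution)
// When no execution is given, these helpers still fall back to the workflow store's run data.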
@@ -580,7 +579,7 @@ export function useNodeHelpers() {
outputIndex = 0,
paneType: NodePanelType = 'output',
connectionType: NodeConnectionType = NodeConnectionTypes.Main,
execution?: IExecutionResponse,
execution?: IRunExecutionData,
): INodeExecutionData[] {
if (!node) return [];
const taskData = getNodeTaskData(node.name, runIndex, execution);