refactor(editor): Clean up feature flag for the log view (#15606)

Author: Suguru Inoue
Date: 2025-06-10 10:15:22 +02:00
Committed by: GitHub
Parent: 25567f6f0e
Commit: d68a776e5c
69 changed files with 2402 additions and 3323 deletions

View File

@@ -9,10 +9,6 @@ import Workflow_loop from '../fixtures/Workflow_loop.json';
import Workflow_wait_for_webhook from '../fixtures/Workflow_wait_for_webhook.json';
describe('Logs', () => {
beforeEach(() => {
cy.overrideSettings({ logsView: { enabled: true } });
});
it('should populate logs as manual execution progresses', () => {
workflow.navigateToNewWorkflowPage();
workflow.pasteWorkflow(Workflow_loop);

View File

@@ -163,9 +163,6 @@ export interface FrontendSettings {
folders: {
enabled: boolean;
};
logsView: {
enabled: boolean;
};
banners: {
dismissed: string[];
};

View File

@@ -285,13 +285,4 @@ export const schema = {
env: 'N8N_PROXY_HOPS',
doc: 'Number of reverse-proxies n8n is running behind',
},
logs_view: {
enabled: {
format: Boolean,
default: true,
env: 'N8N_ENABLE_LOGS_VIEW',
doc: 'Temporary env variable to enable logs view',
},
},
};

View File

@@ -253,9 +253,6 @@ export class FrontendService {
dashboard: false,
dateRanges: [],
},
logsView: {
enabled: false,
},
evaluation: {
quota: this.licenseState.getMaxWorkflowsWithEvaluations(),
},
@@ -396,8 +393,6 @@ export class FrontendService {
this.settings.folders.enabled = this.license.isFoldersEnabled();
this.settings.logsView.enabled = config.get('logs_view.enabled');
// Refresh evaluation settings
this.settings.evaluation.quota = this.licenseState.getMaxWorkflowsWithEvaluations();
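For context, a minimal sketch of the wiring these hunks remove, assuming only what the hunks themselves show: the schema entry maps N8N_ENABLE_LOGS_VIEW to the config key 'logs_view.enabled', and the frontend service copied that value onto the settings payload sent to the browser (service wiring simplified):

```ts
// Simplified sketch, not the actual service code; names follow the hunks above.
interface FrontendSettingsSlice {
	logsView: { enabled: boolean };
}

declare const config: { get(key: 'logs_view.enabled'): boolean };

function applyLogsViewFlag(settings: FrontendSettingsSlice): void {
	// Mirrors `this.settings.logsView.enabled = config.get('logs_view.enabled')` from the removed hunk.
	settings.logsView.enabled = config.get('logs_view.enabled');
}
```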

View File

@@ -157,9 +157,6 @@ export const defaultSettings: FrontendSettings = {
{ key: 'year', licensed: false, granularity: 'week' },
],
},
logsView: {
enabled: false,
},
evaluation: {
quota: 0,
},

View File

@@ -30,7 +30,6 @@ import {
import type { IExecutionResponse, INodeUi, IWorkflowDb } from '@/Interface';
import { CanvasNodeRenderType } from '@/types';
import type { FrontendSettings } from '@n8n/api-types';
import { type LogEntry } from '@/components/RunDataAi/utils';
export const mockNode = ({
id = uuid(),
@@ -255,24 +254,6 @@ export function createTestTaskData(partialData: Partial<ITaskData> = {}): ITaskD
};
}
export function createTestLogEntry(data: Partial<LogEntry> = {}): LogEntry {
const executionId = data.executionId ?? 'test-execution-id';
return {
node: createTestNode(),
runIndex: 0,
runData: createTestTaskData({}),
id: uuid(),
children: [],
consumedTokens: { completionTokens: 0, totalTokens: 0, promptTokens: 0, isEstimate: false },
depth: 0,
workflow: createTestWorkflowObject(),
executionId,
execution: createTestWorkflowExecutionResponse({ id: executionId }).data!,
...data,
};
}
export function createTestWorkflowExecutionResponse(
data: Partial<IExecutionResponse> = {},
): IExecutionResponse {

View File

@@ -1,588 +0,0 @@
import { setActivePinia } from 'pinia';
import { createTestingPinia } from '@pinia/testing';
import { waitFor } from '@testing-library/vue';
import { userEvent } from '@testing-library/user-event';
import { createRouter, createWebHistory } from 'vue-router';
import { computed, ref } from 'vue';
import type { INodeTypeDescription } from 'n8n-workflow';
import { NodeConnectionTypes } from 'n8n-workflow';
import CanvasChat from './CanvasChat.vue';
import { createComponentRenderer } from '@/__tests__/render';
import { createTestWorkflowObject } from '@/__tests__/mocks';
import { mockedStore } from '@/__tests__/utils';
import { STORES } from '@n8n/stores';
import { ChatOptionsSymbol, ChatSymbol } from '@n8n/chat/constants';
import { chatEventBus } from '@n8n/chat/event-buses';
import { useWorkflowsStore } from '@/stores/workflows.store';
import * as useChatMessaging from './composables/useChatMessaging';
import * as useChatTrigger from './composables/useChatTrigger';
import { useToast } from '@/composables/useToast';
import type { IExecutionResponse, INodeUi } from '@/Interface';
import type { ChatMessage } from '@n8n/chat/types';
import { useNodeTypesStore } from '@/stores/nodeTypes.store';
import { LOGS_PANEL_STATE } from './types/logs';
import { useLogsStore } from '@/stores/logs.store';
vi.mock('@/composables/useToast', () => {
const showMessage = vi.fn();
const showError = vi.fn();
return {
useToast: () => {
return {
showMessage,
showError,
clearAllStickyNotifications: vi.fn(),
};
},
};
});
vi.mock('@/stores/pushConnection.store', () => ({
usePushConnectionStore: vi.fn().mockReturnValue({
isConnected: true,
}),
}));
// Test data
const mockNodes: INodeUi[] = [
{
parameters: {
options: {
allowFileUploads: true,
},
},
id: 'chat-trigger-id',
name: 'When chat message received',
type: '@n8n/n8n-nodes-langchain.chatTrigger',
typeVersion: 1.1,
position: [740, 860],
webhookId: 'webhook-id',
},
{
parameters: {},
id: 'agent-id',
name: 'AI Agent',
type: '@n8n/n8n-nodes-langchain.agent',
typeVersion: 1.7,
position: [960, 860],
},
];
const mockNodeTypes: INodeTypeDescription[] = [
{
displayName: 'AI Agent',
name: '@n8n/n8n-nodes-langchain.agent',
properties: [],
defaults: {
name: 'AI Agent',
},
inputs: [NodeConnectionTypes.Main],
outputs: [NodeConnectionTypes.Main],
version: 0,
group: [],
description: '',
codex: {
subcategories: {
AI: ['Agents'],
},
},
},
];
const mockConnections = {
'When chat message received': {
main: [
[
{
node: 'AI Agent',
type: NodeConnectionTypes.Main,
index: 0,
},
],
],
},
};
const mockWorkflowExecution = {
data: {
resultData: {
runData: {
'AI Agent': [
{
data: {
main: [[{ json: { output: 'AI response message' } }]],
},
},
],
},
lastNodeExecuted: 'AI Agent',
},
},
};
const router = createRouter({
history: createWebHistory(),
routes: [],
});
describe('CanvasChat', () => {
const renderComponent = createComponentRenderer(CanvasChat, {
global: {
provide: {
[ChatSymbol as symbol]: {},
[ChatOptionsSymbol as symbol]: {},
},
plugins: [router],
},
});
let workflowsStore: ReturnType<typeof mockedStore<typeof useWorkflowsStore>>;
let logsStore: ReturnType<typeof mockedStore<typeof useLogsStore>>;
let nodeTypeStore: ReturnType<typeof mockedStore<typeof useNodeTypesStore>>;
beforeEach(() => {
const pinia = createTestingPinia({
initialState: {
[STORES.WORKFLOWS]: {
workflow: {
nodes: mockNodes,
connections: mockConnections,
},
},
[STORES.UI]: {
chatPanelOpen: true,
},
},
});
setActivePinia(pinia);
workflowsStore = mockedStore(useWorkflowsStore);
logsStore = mockedStore(useLogsStore);
nodeTypeStore = mockedStore(useNodeTypesStore);
// Setup default mocks
workflowsStore.getCurrentWorkflow.mockReturnValue(
createTestWorkflowObject({
nodes: mockNodes,
connections: mockConnections,
}),
);
workflowsStore.getNodeByName.mockImplementation((name) => {
const matchedNode = mockNodes.find((node) => node.name === name) ?? null;
return matchedNode;
});
logsStore.isOpen = true;
workflowsStore.getWorkflowExecution = mockWorkflowExecution as unknown as IExecutionResponse;
workflowsStore.getPastChatMessages = ['Previous message 1', 'Previous message 2'];
logsStore.state = LOGS_PANEL_STATE.ATTACHED;
nodeTypeStore.getNodeType = vi.fn().mockImplementation((nodeTypeName) => {
return mockNodeTypes.find((node) => node.name === nodeTypeName) ?? null;
});
workflowsStore.runWorkflow.mockResolvedValue({ executionId: 'test-execution-id' });
});
afterEach(() => {
vi.clearAllMocks();
});
describe('rendering', () => {
it('should render chat when panel is open', () => {
const { getByTestId } = renderComponent();
expect(getByTestId('canvas-chat')).toBeInTheDocument();
});
it('should not render chat when panel is closed', async () => {
logsStore.state = LOGS_PANEL_STATE.CLOSED;
const { queryByTestId } = renderComponent();
await waitFor(() => {
expect(queryByTestId('canvas-chat')).not.toBeInTheDocument();
});
});
it('should show correct input placeholder', async () => {
const { findByTestId } = renderComponent();
expect(await findByTestId('chat-input')).toBeInTheDocument();
});
});
describe('message handling', () => {
beforeEach(() => {
vi.spyOn(chatEventBus, 'emit');
workflowsStore.runWorkflow.mockResolvedValue({ executionId: 'test-execution-id' });
});
it('should send message and show response', async () => {
const { findByTestId, findByText } = renderComponent();
// Send message
const input = await findByTestId('chat-input');
await userEvent.type(input, 'Hello AI!');
await userEvent.keyboard('{Enter}');
// Verify message and response
expect(await findByText('Hello AI!')).toBeInTheDocument();
await waitFor(async () => {
workflowsStore.getWorkflowExecution = {
...(mockWorkflowExecution as unknown as IExecutionResponse),
status: 'success',
};
expect(await findByText('AI response message')).toBeInTheDocument();
});
// Verify workflow execution
expect(workflowsStore.runWorkflow).toHaveBeenCalledWith(
expect.objectContaining({
runData: undefined,
triggerToStartFrom: {
name: 'When chat message received',
data: {
data: {
main: [
[
{
json: {
action: 'sendMessage',
chatInput: 'Hello AI!',
sessionId: expect.any(String),
},
},
],
],
},
executionIndex: 0,
executionStatus: 'success',
executionTime: 0,
source: [null],
startTime: expect.any(Number),
},
},
}),
);
});
it('should show loading state during message processing', async () => {
const { findByTestId, queryByTestId } = renderComponent();
// Send message
const input = await findByTestId('chat-input');
await userEvent.type(input, 'Test message');
// Since runWorkflow is mocked to resolve immediately, isWorkflowRunning would be false from the start,
// which means the loading state would never get a chance to appear.
// We force isWorkflowRunning to true for the first run.
workflowsStore.isWorkflowRunning = true;
await userEvent.keyboard('{Enter}');
await waitFor(() => expect(queryByTestId('chat-message-typing')).toBeInTheDocument());
workflowsStore.isWorkflowRunning = false;
workflowsStore.getWorkflowExecution = {
...(mockWorkflowExecution as unknown as IExecutionResponse),
status: 'success',
};
await waitFor(() => expect(queryByTestId('chat-message-typing')).not.toBeInTheDocument());
});
it('should handle workflow execution errors', async () => {
workflowsStore.runWorkflow.mockRejectedValueOnce(new Error());
const { findByTestId } = renderComponent();
const input = await findByTestId('chat-input');
await userEvent.type(input, 'Hello AI!');
await userEvent.keyboard('{Enter}');
const toast = useToast();
expect(toast.showError).toHaveBeenCalledWith(new Error(), 'Problem running workflow');
});
});
describe('session management', () => {
const mockMessages: ChatMessage[] = [
{
id: '1',
text: 'Existing message',
sender: 'user',
},
];
beforeEach(() => {
vi.spyOn(useChatMessaging, 'useChatMessaging').mockImplementation(({ messages }) => {
messages.value.push(...mockMessages);
return {
sendMessage: vi.fn(),
previousMessageIndex: ref(0),
isLoading: computed(() => false),
};
});
});
it('should allow copying session ID', async () => {
const clipboardSpy = vi.fn();
document.execCommand = clipboardSpy;
const { getByTestId } = renderComponent();
await userEvent.click(getByTestId('chat-session-id'));
const toast = useToast();
expect(clipboardSpy).toHaveBeenCalledWith('copy');
expect(toast.showMessage).toHaveBeenCalledWith({
message: '',
title: 'Copied to clipboard',
type: 'success',
});
});
it('should refresh session when messages exist', async () => {
const { getByTestId } = renderComponent();
const originalSessionId = getByTestId('chat-session-id').textContent;
await userEvent.click(getByTestId('refresh-session-button'));
expect(getByTestId('chat-session-id').textContent).not.toEqual(originalSessionId);
});
});
describe('resize functionality', () => {
it('should handle panel resizing', async () => {
const { container } = renderComponent();
const resizeWrapper = container.querySelector('.resizeWrapper');
if (!resizeWrapper) throw new Error('Resize wrapper not found');
await userEvent.pointer([
{ target: resizeWrapper, coords: { clientX: 0, clientY: 0 } },
{ coords: { clientX: 0, clientY: 100 } },
]);
expect(logsStore.setHeight).toHaveBeenCalled();
});
it('should persist resize dimensions', () => {
const mockStorage = {
getItem: vi.fn(),
setItem: vi.fn(),
};
Object.defineProperty(window, 'localStorage', { value: mockStorage });
renderComponent();
expect(mockStorage.getItem).toHaveBeenCalledWith('N8N_CANVAS_CHAT_HEIGHT');
expect(mockStorage.getItem).toHaveBeenCalledWith('N8N_CANVAS_CHAT_WIDTH');
});
});
describe('file handling', () => {
beforeEach(() => {
vi.spyOn(useChatMessaging, 'useChatMessaging').mockReturnValue({
sendMessage: vi.fn(),
previousMessageIndex: ref(0),
isLoading: computed(() => false),
});
logsStore.state = LOGS_PANEL_STATE.ATTACHED;
workflowsStore.allowFileUploads = true;
});
it('should enable file uploads when allowed by chat trigger node', async () => {
const allowFileUploads = ref(true);
const original = useChatTrigger.useChatTrigger;
vi.spyOn(useChatTrigger, 'useChatTrigger').mockImplementation((...args) => ({
...original(...args),
allowFileUploads: computed(() => allowFileUploads.value),
}));
const { getByTestId } = renderComponent();
const chatPanel = getByTestId('canvas-chat');
expect(chatPanel).toBeInTheDocument();
const fileInput = getByTestId('chat-attach-file-button');
expect(fileInput).toBeInTheDocument();
allowFileUploads.value = false;
await waitFor(() => {
expect(fileInput).not.toBeInTheDocument();
});
});
});
describe('message history handling', () => {
it('should properly navigate through message history with wrap-around', async () => {
const messages = ['Message 1', 'Message 2', 'Message 3'];
workflowsStore.getPastChatMessages = messages;
const { findByTestId } = renderComponent();
const input = await findByTestId('chat-input');
// First up should show most recent message
await userEvent.keyboard('{ArrowUp}');
expect(input).toHaveValue('Message 3');
// Second up should show second most recent
await userEvent.keyboard('{ArrowUp}');
expect(input).toHaveValue('Message 2');
// Third up should show oldest message
await userEvent.keyboard('{ArrowUp}');
expect(input).toHaveValue('Message 1');
// Fourth up should wrap around to most recent
await userEvent.keyboard('{ArrowUp}');
expect(input).toHaveValue('Message 3');
// Down arrow should go in reverse
await userEvent.keyboard('{ArrowDown}');
expect(input).toHaveValue('Message 1');
});
it('should reset message history navigation on new input', async () => {
workflowsStore.getPastChatMessages = ['Message 1', 'Message 2'];
const { findByTestId } = renderComponent();
const input = await findByTestId('chat-input');
// Navigate to oldest message
await userEvent.keyboard('{ArrowUp}'); // Most recent
await userEvent.keyboard('{ArrowUp}'); // Oldest
expect(input).toHaveValue('Message 1');
await userEvent.type(input, 'New message');
await userEvent.keyboard('{Enter}');
await userEvent.keyboard('{ArrowUp}');
expect(input).toHaveValue('Message 2');
});
});
describe('message reuse and repost', () => {
const sendMessageSpy = vi.fn();
beforeEach(() => {
const mockMessages: ChatMessage[] = [
{
id: '1',
text: 'Original message',
sender: 'user',
},
{
id: '2',
text: 'AI response',
sender: 'bot',
},
];
vi.spyOn(useChatMessaging, 'useChatMessaging').mockImplementation(({ messages }) => {
messages.value.push(...mockMessages);
return {
sendMessage: sendMessageSpy,
previousMessageIndex: ref(0),
isLoading: computed(() => false),
};
});
workflowsStore.messages = mockMessages;
});
it('should repost user message with new execution', async () => {
const { findByTestId } = renderComponent();
const repostButton = await findByTestId('repost-message-button');
await userEvent.click(repostButton);
expect(sendMessageSpy).toHaveBeenCalledWith('Original message');
expect.objectContaining({
runData: expect.objectContaining({
'When chat message received': expect.arrayContaining([
expect.objectContaining({
data: expect.objectContaining({
main: expect.arrayContaining([
expect.arrayContaining([
expect.objectContaining({
json: expect.objectContaining({
chatInput: 'Original message',
}),
}),
]),
]),
}),
}),
]),
}),
});
});
it('should show message options only for appropriate messages', async () => {
const { findByText, container } = renderComponent();
await findByText('Original message');
const userMessage = container.querySelector('.chat-message-from-user');
expect(
userMessage?.querySelector('[data-test-id="repost-message-button"]'),
).toBeInTheDocument();
expect(
userMessage?.querySelector('[data-test-id="reuse-message-button"]'),
).toBeInTheDocument();
await findByText('AI response');
const botMessage = container.querySelector('.chat-message-from-bot');
expect(
botMessage?.querySelector('[data-test-id="repost-message-button"]'),
).not.toBeInTheDocument();
expect(
botMessage?.querySelector('[data-test-id="reuse-message-button"]'),
).not.toBeInTheDocument();
});
});
describe('panel state synchronization', () => {
it('should update canvas height when chat or logs panel state changes', async () => {
renderComponent();
// Toggle logs panel
logsStore.isOpen = true;
await waitFor(() => {
expect(logsStore.setHeight).toHaveBeenCalled();
});
// Close chat panel
logsStore.state = LOGS_PANEL_STATE.CLOSED;
await waitFor(() => {
expect(logsStore.setHeight).toHaveBeenCalledWith(0);
});
});
it('should preserve panel state across component remounts', async () => {
const { unmount, rerender } = renderComponent();
// Set initial state
logsStore.state = LOGS_PANEL_STATE.ATTACHED;
logsStore.isOpen = true;
// Unmount and remount
unmount();
await rerender({});
expect(logsStore.state).toBe(LOGS_PANEL_STATE.ATTACHED);
expect(logsStore.isOpen).toBe(true);
});
});
describe('keyboard shortcuts', () => {
it('should handle Enter key with modifier to start new line', async () => {
const { findByTestId } = renderComponent();
const input = await findByTestId('chat-input');
await userEvent.type(input, 'Line 1');
await userEvent.keyboard('{Shift>}{Enter}{/Shift}');
await userEvent.type(input, 'Line 2');
expect(input).toHaveValue('Line 1\nLine 2');
});
});
});

View File

@@ -1,251 +0,0 @@
<script setup lang="ts">
import { computed, ref, watchEffect, useTemplateRef, watch } from 'vue';
// Components
import ChatMessagesPanel from './components/ChatMessagesPanel.vue';
import ChatLogsPanel from './components/ChatLogsPanel.vue';
// Composables
import { useResize } from './composables/useResize';
// Types
import { useWorkflowsStore } from '@/stores/workflows.store';
import { usePiPWindow } from '@/components/CanvasChat/composables/usePiPWindow';
import { N8nResizeWrapper } from '@n8n/design-system';
import { useTelemetry } from '@/composables/useTelemetry';
import { useChatState } from '@/components/CanvasChat/composables/useChatState';
import { LOGS_PANEL_STATE } from '@/components/CanvasChat/types/logs';
import { useLogsStore } from '@/stores/logs.store';
const workflowsStore = useWorkflowsStore();
const logsStore = useLogsStore();
// Component state
const container = ref<HTMLElement>();
const pipContainer = useTemplateRef('pipContainer');
const pipContent = useTemplateRef('pipContent');
// Computed properties
const workflow = computed(() => workflowsStore.getCurrentWorkflow());
const chatPanelState = computed(() => logsStore.state);
const resultData = computed(() => workflowsStore.getWorkflowRunData);
const telemetry = useTelemetry();
const {
height,
chatWidth,
rootStyles,
logsWidth,
onResizeDebounced,
onResizeChatDebounced,
onWindowResize,
} = useResize(container);
const { canPopOut, isPoppedOut, pipWindow } = usePiPWindow({
initialHeight: 400,
initialWidth: window.document.body.offsetWidth * 0.8,
container: pipContainer,
content: pipContent,
shouldPopOut: computed(() => chatPanelState.value === LOGS_PANEL_STATE.FLOATING),
onRequestClose: () => {
if (chatPanelState.value === LOGS_PANEL_STATE.CLOSED) {
return;
}
telemetry.track('User toggled log view', { new_state: 'attached' });
logsStore.setPreferPoppedOut(false);
},
});
const {
currentSessionId,
messages,
chatTriggerNode,
connectedNode,
previousChatMessages,
sendMessage,
refreshSession,
displayExecution,
} = useChatState(false);
// Expose internal state for testing
defineExpose({
messages,
currentSessionId,
workflow,
});
const closePanel = () => {
logsStore.toggleOpen(false);
};
function onPopOut() {
telemetry.track('User toggled log view', { new_state: 'floating' });
logsStore.toggleOpen(true);
logsStore.setPreferPoppedOut(true);
}
// Watchers
watchEffect(() => {
logsStore.setHeight(chatPanelState.value === LOGS_PANEL_STATE.ATTACHED ? height.value : 0);
});
watch(
chatPanelState,
(state) => {
if (state !== LOGS_PANEL_STATE.CLOSED) {
setTimeout(() => {
onWindowResize?.();
}, 0);
}
},
{ immediate: true },
);
</script>
<template>
<div ref="pipContainer">
<div ref="pipContent" :class="$style.pipContent">
<N8nResizeWrapper
v-if="chatTriggerNode"
:is-resizing-enabled="!isPoppedOut && chatPanelState === LOGS_PANEL_STATE.ATTACHED"
:supported-directions="['top']"
:class="[$style.resizeWrapper, chatPanelState === LOGS_PANEL_STATE.CLOSED && $style.empty]"
:height="height"
:style="rootStyles"
@resize="onResizeDebounced"
>
<div ref="container" :class="[$style.container, 'ignore-key-press-canvas']" tabindex="0">
<div v-if="chatPanelState !== LOGS_PANEL_STATE.CLOSED" :class="$style.chatResizer">
<N8nResizeWrapper
:supported-directions="['right']"
:width="chatWidth"
:class="$style.chat"
:window="pipWindow"
@resize="onResizeChatDebounced"
>
<div :class="$style.inner">
<ChatMessagesPanel
data-test-id="canvas-chat"
:messages="messages"
:session-id="currentSessionId"
:past-chat-messages="previousChatMessages"
:show-close-button="!isPoppedOut && !connectedNode"
@close="closePanel"
@refresh-session="refreshSession"
@display-execution="displayExecution"
@send-message="sendMessage"
/>
</div>
</N8nResizeWrapper>
<div v-if="connectedNode" :class="$style.logs">
<ChatLogsPanel
:key="`${resultData?.length ?? messages?.length}`"
:workflow="workflow"
data-test-id="canvas-chat-logs"
:node="connectedNode"
:slim="logsWidth < 700"
>
<template #actions>
<n8n-icon-button
v-if="canPopOut && !isPoppedOut"
icon="pop-out"
type="secondary"
size="medium"
@click="onPopOut"
/>
<n8n-icon-button
v-if="!isPoppedOut"
outline
icon="times"
type="secondary"
size="medium"
@click="closePanel"
/>
</template>
</ChatLogsPanel>
</div>
</div>
</div>
</N8nResizeWrapper>
</div>
</div>
</template>
<style lang="scss" module>
@media all and (display-mode: picture-in-picture) {
.resizeWrapper {
height: 100% !important;
max-height: 100vh !important;
}
}
.pipContent {
height: 100%;
}
.resizeWrapper {
height: var(--panel-height);
min-height: 4rem;
max-height: 90vh;
flex-basis: content;
border-top: 1px solid var(--color-foreground-base);
&.empty {
height: auto;
min-height: 0;
flex-basis: 0;
}
}
.container {
width: 100%;
height: 100%;
display: flex;
flex-direction: column;
overflow: hidden;
}
.chatResizer {
display: flex;
width: 100%;
height: 100%;
max-width: 100%;
}
.footer {
border-top: 1px solid var(--color-foreground-base);
width: 100%;
background-color: var(--color-background-light);
display: flex;
padding: var(--spacing-2xs);
gap: var(--spacing-2xs);
}
.chat {
width: var(--chat-width);
flex-shrink: 0;
border-right: 1px solid var(--color-foreground-base);
max-width: 100%;
&:only-child {
width: 100%;
}
}
.inner {
display: flex;
flex-direction: column;
overflow: hidden;
height: 100%;
width: 100%;
}
.logs {
flex-grow: 1;
flex-shrink: 1;
background-color: var(--color-background-light);
}
</style>

View File

@@ -1,11 +0,0 @@
<script setup lang="ts">
import LogsPanel from '@/components/CanvasChat/future/LogsPanel.vue';
import { useSettingsStore } from '@/stores/settings.store';
const { isNewLogsEnabled } = useSettingsStore();
</script>
<template>
<LogsPanel v-if="isNewLogsEnabled" />
<CanvasChat v-else />
</template>

View File

@@ -1,89 +0,0 @@
<script setup lang="ts">
import type { INode, Workflow } from 'n8n-workflow';
import RunDataAi from '@/components/RunDataAi/RunDataAi.vue';
import { useI18n } from '@n8n/i18n';
defineProps<{
node: INode | null;
slim?: boolean;
workflow: Workflow;
}>();
defineSlots<{ actions: {} }>();
const locale = useI18n();
</script>
<template>
<div :class="$style.logsWrapper" data-test-id="lm-chat-logs">
<header :class="$style.logsHeader">
<div class="meta">
{{ locale.baseText('chat.window.logs') }}
<span v-if="node">
{{
locale.baseText('chat.window.logsFromNode', { interpolate: { nodeName: node.name } })
}}
</span>
</div>
<div :class="$style.actions">
<slot name="actions"></slot>
</div>
</header>
<div :class="$style.logs">
<RunDataAi
v-if="node"
:class="$style.runData"
:node="node"
:workflow="workflow"
:slim="slim"
/>
</div>
</div>
</template>
<style lang="scss" module>
.logsHeader {
font-size: var(--font-size-s);
font-weight: var(--font-weight-bold);
height: 2.6875rem;
line-height: 18px;
text-align: left;
border-bottom: 1px solid var(--color-foreground-base);
padding: var(--spacing-xs);
background-color: var(--color-foreground-xlight);
display: flex;
justify-content: space-between;
align-items: center;
span {
font-weight: var(--font-weight-regular);
}
}
.logsWrapper {
--node-icon-color: var(--color-text-base);
height: 100%;
overflow: hidden;
width: 100%;
display: flex;
flex-direction: column;
}
.logsTitle {
margin: 0 var(--spacing-s) var(--spacing-s);
}
.logs {
padding: var(--spacing-s) 0;
flex-grow: 1;
overflow: auto;
}
.actions {
display: flex;
align-items: center;
button {
border: none;
}
}
</style>

View File

@@ -1,115 +0,0 @@
import type { ComputedRef } from 'vue';
import { computed } from 'vue';
import {
CHAIN_SUMMARIZATION_LANGCHAIN_NODE_TYPE,
NodeConnectionTypes,
NodeHelpers,
} from 'n8n-workflow';
import type { INodeTypeDescription, Workflow, INodeParameters } from 'n8n-workflow';
import {
AI_CATEGORY_AGENTS,
AI_CATEGORY_CHAINS,
AI_CODE_NODE_TYPE,
AI_SUBCATEGORY,
} from '@/constants';
import type { INodeUi } from '@/Interface';
import { isChatNode } from '@/components/CanvasChat/utils';
export interface ChatTriggerDependencies {
getNodeByName: (name: string) => INodeUi | null;
getNodeType: (type: string, version: number) => INodeTypeDescription | null;
workflow: ComputedRef<Workflow>;
}
export function useChatTrigger({ getNodeByName, getNodeType, workflow }: ChatTriggerDependencies) {
const chatTriggerNode = computed(
() => Object.values(workflow.value.nodes).find(isChatNode) ?? null,
);
const allowFileUploads = computed(() => {
return (
(chatTriggerNode.value?.parameters?.options as INodeParameters)?.allowFileUploads === true
);
});
const allowedFilesMimeTypes = computed(() => {
return (
(
chatTriggerNode.value?.parameters?.options as INodeParameters
)?.allowedFilesMimeTypes?.toString() ?? ''
);
});
/** Resolves the AI node (agent or chain) connected to the chat trigger */
const connectedNode = computed(() => {
const triggerNode = chatTriggerNode.value;
if (!triggerNode) {
return null;
}
const chatChildren = workflow.value.getChildNodes(triggerNode.name);
const chatRootNode = chatChildren
.reverse()
.map((nodeName: string) => getNodeByName(nodeName))
.filter((n): n is INodeUi => n !== null)
// Reverse the nodes so the last node's logs are matched first
.reverse()
.find((storeNode: INodeUi): boolean => {
// Skip summarization nodes
if (storeNode.type === CHAIN_SUMMARIZATION_LANGCHAIN_NODE_TYPE) return false;
const nodeType = getNodeType(storeNode.type, storeNode.typeVersion);
if (!nodeType) return false;
// Check if node is an AI agent or chain based on its metadata
const isAgent =
nodeType.codex?.subcategories?.[AI_SUBCATEGORY]?.includes(AI_CATEGORY_AGENTS);
const isChain =
nodeType.codex?.subcategories?.[AI_SUBCATEGORY]?.includes(AI_CATEGORY_CHAINS);
// Handle custom AI Langchain Code nodes that could act as chains or agents
let isCustomChainOrAgent = false;
if (nodeType.name === AI_CODE_NODE_TYPE) {
// Get node connection types for inputs and outputs
const inputs = NodeHelpers.getNodeInputs(workflow.value, storeNode, nodeType);
const inputTypes = NodeHelpers.getConnectionTypes(inputs);
const outputs = NodeHelpers.getNodeOutputs(workflow.value, storeNode, nodeType);
const outputTypes = NodeHelpers.getConnectionTypes(outputs);
// Validate if node has required AI connection types
if (
inputTypes.includes(NodeConnectionTypes.AiLanguageModel) &&
inputTypes.includes(NodeConnectionTypes.Main) &&
outputTypes.includes(NodeConnectionTypes.Main)
) {
isCustomChainOrAgent = true;
}
}
// Skip if node is not an AI component
if (!isAgent && !isChain && !isCustomChainOrAgent) return false;
// Check if this node is connected to the trigger node
const parentNodes = workflow.value.getParentNodes(storeNode.name);
const isChatChild = parentNodes.some(
(parentNodeName) => parentNodeName === triggerNode.name,
);
// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
const result = Boolean(isChatChild && (isAgent || isChain || isCustomChainOrAgent));
return result;
});
return chatRootNode ?? null;
});
return {
allowFileUploads,
allowedFilesMimeTypes,
chatTriggerNode,
connectedNode,
};
}
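A hedged usage sketch of this composable; the store accessors are assumptions based on the test file earlier in this commit, not part of the change itself:

```ts
// Illustrative wiring only; store accessor names are assumed from the tests above.
import { computed } from 'vue';
import { useChatTrigger } from '@/components/CanvasChat/composables/useChatTrigger';
import { useWorkflowsStore } from '@/stores/workflows.store';
import { useNodeTypesStore } from '@/stores/nodeTypes.store';

const workflowsStore = useWorkflowsStore();
const nodeTypesStore = useNodeTypesStore();

const { chatTriggerNode, connectedNode, allowFileUploads } = useChatTrigger({
	getNodeByName: (name) => workflowsStore.getNodeByName(name),
	getNodeType: (type, version) => nodeTypesStore.getNodeType(type, version),
	workflow: computed(() => workflowsStore.getCurrentWorkflow()),
});
// connectedNode resolves to the closest agent/chain downstream of the chat trigger, or null.
```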

View File

@@ -1,138 +0,0 @@
import type { Ref } from 'vue';
import { ref, computed, onMounted, onBeforeUnmount, watchEffect } from 'vue';
import { useDebounce } from '@/composables/useDebounce';
import type { IChatResizeStyles } from '../types/chat';
import { useStorage } from '@/composables/useStorage';
import { type ResizeData } from '@n8n/design-system';
export const LOCAL_STORAGE_PANEL_HEIGHT = 'N8N_CANVAS_CHAT_HEIGHT';
export const LOCAL_STORAGE_PANEL_WIDTH = 'N8N_CANVAS_CHAT_WIDTH';
export const LOCAL_STORAGE_OVERVIEW_PANEL_WIDTH = 'N8N_LOGS_OVERVIEW_PANEL_WIDTH';
// Percentage of container width for chat panel constraints
const MAX_WIDTH_PERCENTAGE = 0.8;
const MIN_WIDTH_PERCENTAGE = 0.3;
// Percentage of window height for panel constraints
const MIN_HEIGHT_PERCENTAGE = 0.3;
const MAX_HEIGHT_PERCENTAGE = 0.75;
export function useResize(container: Ref<HTMLElement | undefined>) {
const storage = {
height: useStorage(LOCAL_STORAGE_PANEL_HEIGHT),
width: useStorage(LOCAL_STORAGE_PANEL_WIDTH),
};
const dimensions = {
container: ref(0), // Container width
minHeight: ref(0),
maxHeight: ref(0),
chat: ref(0), // Chat panel width
logs: ref(0),
height: ref(0),
};
/** Computed styles for root element based on current dimensions */
const rootStyles = computed<IChatResizeStyles>(() => ({
'--panel-height': `${dimensions.height.value}px`,
'--chat-width': `${dimensions.chat.value}px`,
}));
const panelToContainerRatio = computed(() => {
const chatRatio = dimensions.chat.value / dimensions.container.value;
const containerRatio = dimensions.container.value / window.screen.width;
return {
chat: chatRatio.toFixed(2),
logs: (1 - chatRatio).toFixed(2),
container: containerRatio.toFixed(2),
};
});
/**
* Constrains height to min/max bounds and updates panel height
*/
function onResize(newHeight: number) {
const { minHeight, maxHeight } = dimensions;
dimensions.height.value = Math.min(Math.max(newHeight, minHeight.value), maxHeight.value);
}
function onResizeDebounced(data: ResizeData) {
void useDebounce().callDebounced(onResize, { debounceTime: 10, trailing: true }, data.height);
}
/**
* Constrains chat width to min/max percentage of container width
*/
function onResizeChat(width: number) {
const containerWidth = dimensions.container.value;
const maxWidth = containerWidth * MAX_WIDTH_PERCENTAGE;
const minWidth = containerWidth * MIN_WIDTH_PERCENTAGE;
dimensions.chat.value = Math.min(Math.max(width, minWidth), maxWidth);
dimensions.logs.value = dimensions.container.value - dimensions.chat.value;
}
function onResizeChatDebounced(data: ResizeData) {
void useDebounce().callDebounced(
onResizeChat,
{ debounceTime: 10, trailing: true },
data.width,
);
}
/**
* Initializes dimensions from localStorage if available
*/
function restorePersistedDimensions() {
const persistedHeight = parseInt(storage.height.value ?? '0', 10);
const persistedWidth = parseInt(storage.width.value ?? '0', 10);
if (persistedHeight) onResize(persistedHeight);
if (persistedWidth) onResizeChat(persistedWidth);
}
/**
* Updates container width and height constraints on window resize
*/
function onWindowResize() {
if (!container.value) return;
// Update container width and adjust chat panel if needed
dimensions.container.value = container.value.getBoundingClientRect().width;
onResizeChat(dimensions.chat.value);
// Update height constraints and adjust panel height if needed
dimensions.minHeight.value = window.innerHeight * MIN_HEIGHT_PERCENTAGE;
dimensions.maxHeight.value = window.innerHeight * MAX_HEIGHT_PERCENTAGE;
onResize(dimensions.height.value);
}
// Persist dimensions to localStorage when they change
watchEffect(() => {
const { chat, height } = dimensions;
if (chat.value > 0) storage.width.value = chat.value.toString();
if (height.value > 0) storage.height.value = height.value.toString();
});
// Initialize dimensions when container is available
watchEffect(() => {
if (container.value) {
onWindowResize();
restorePersistedDimensions();
}
});
// Window resize handling
onMounted(() => window.addEventListener('resize', onWindowResize));
onBeforeUnmount(() => window.removeEventListener('resize', onWindowResize));
return {
height: dimensions.height,
chatWidth: dimensions.chat,
logsWidth: dimensions.logs,
rootStyles,
onWindowResize,
onResizeDebounced,
onResizeChatDebounced,
panelToContainerRatio,
};
}
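A worked example of the height clamp above, with illustrative numbers only:

```ts
// Illustrative numbers; mirrors the clamping in onResize / onWindowResize above.
const windowHeight = 1000;
const minHeight = windowHeight * 0.3;  // MIN_HEIGHT_PERCENTAGE, i.e. 300
const maxHeight = windowHeight * 0.75; // MAX_HEIGHT_PERCENTAGE, i.e. 750
const requested = 900;
const stored = Math.min(Math.max(requested, minHeight), maxHeight); // clamped to 750
```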

View File

@@ -1,22 +0,0 @@
export interface LangChainMessage {
id: string[];
kwargs: {
content: string;
};
}
export interface MemoryOutput {
action: string;
chatHistory?: LangChainMessage[];
}
export interface IChatMessageResponse {
executionId?: string;
success: boolean;
error?: Error;
}
export interface IChatResizeStyles {
'--panel-height': string;
'--chat-width': string;
}

View File

@@ -1,21 +0,0 @@
export type LogEntrySelection =
| { type: 'initial' }
| { type: 'selected'; id: string }
| { type: 'none' };
export const LOGS_PANEL_STATE = {
CLOSED: 'closed',
ATTACHED: 'attached',
FLOATING: 'floating',
} as const;
export type LogsPanelState = (typeof LOGS_PANEL_STATE)[keyof typeof LOGS_PANEL_STATE];
export const LOG_DETAILS_PANEL_STATE = {
INPUT: 'input',
OUTPUT: 'output',
BOTH: 'both',
} as const;
export type LogDetailsPanelState =
(typeof LOG_DETAILS_PANEL_STATE)[keyof typeof LOG_DETAILS_PANEL_STATE];

View File

@@ -1,48 +0,0 @@
import { createTestNode, createTestTaskData, createTestWorkflow } from '@/__tests__/mocks';
import { restoreChatHistory } from '@/components/CanvasChat/utils';
import { AGENT_NODE_TYPE, CHAT_TRIGGER_NODE_TYPE } from '@/constants';
import { NodeConnectionTypes } from 'n8n-workflow';
describe(restoreChatHistory, () => {
it('should return extracted chat input and bot message from workflow execution data', () => {
expect(
restoreChatHistory({
id: 'test-exec-id',
workflowData: createTestWorkflow({
nodes: [
createTestNode({ name: 'A', type: CHAT_TRIGGER_NODE_TYPE }),
createTestNode({ name: 'B', type: AGENT_NODE_TYPE }),
],
}),
data: {
resultData: {
lastNodeExecuted: 'B',
runData: {
A: [
createTestTaskData({
startTime: Date.parse('2025-04-20T00:00:01.000Z'),
data: { [NodeConnectionTypes.Main]: [[{ json: { chatInput: 'test input' } }]] },
}),
],
B: [
createTestTaskData({
startTime: Date.parse('2025-04-20T00:00:02.000Z'),
executionTime: 999,
data: { [NodeConnectionTypes.Main]: [[{ json: { output: 'test output' } }]] },
}),
],
},
},
},
finished: true,
mode: 'manual',
status: 'success',
startedAt: '2025-04-20T00:00:00.000Z',
createdAt: '2025-04-20T00:00:00.000Z',
}),
).toEqual([
{ id: expect.any(String), sender: 'user', text: 'test input' },
{ id: 'test-exec-id', sender: 'bot', text: 'test output' },
]);
});
});

View File

@@ -1,123 +0,0 @@
import { CHAT_TRIGGER_NODE_TYPE, MANUAL_CHAT_TRIGGER_NODE_TYPE } from '@/constants';
import { type IExecutionResponse, type INodeUi, type IWorkflowDb } from '@/Interface';
import { type ChatMessage } from '@n8n/chat/types';
import get from 'lodash/get';
import isEmpty from 'lodash/isEmpty';
import { NodeConnectionTypes, type IDataObject, type IRunExecutionData } from 'n8n-workflow';
import { v4 as uuid } from 'uuid';
export function isChatNode(node: INodeUi) {
return [CHAT_TRIGGER_NODE_TYPE, MANUAL_CHAT_TRIGGER_NODE_TYPE].includes(node.type);
}
export function getInputKey(node: INodeUi): string {
if (node.type === MANUAL_CHAT_TRIGGER_NODE_TYPE && node.typeVersion < 1.1) {
return 'input';
}
if (node.type === CHAT_TRIGGER_NODE_TYPE) {
return 'chatInput';
}
return 'chatInput';
}
function extractChatInput(
workflow: IWorkflowDb,
resultData: IRunExecutionData['resultData'],
): ChatMessage | undefined {
const chatTrigger = workflow.nodes.find(isChatNode);
if (chatTrigger === undefined) {
return undefined;
}
const inputKey = getInputKey(chatTrigger);
const runData = (resultData.runData[chatTrigger.name] ?? [])[0];
const message = runData?.data?.[NodeConnectionTypes.Main]?.[0]?.[0]?.json?.[inputKey];
if (runData === undefined || typeof message !== 'string') {
return undefined;
}
return {
text: message,
sender: 'user',
id: uuid(),
};
}
export function extractBotResponse(
resultData: IRunExecutionData['resultData'],
executionId: string,
emptyText?: string,
): ChatMessage | undefined {
const lastNodeExecuted = resultData.lastNodeExecuted;
if (!lastNodeExecuted) return undefined;
const nodeResponseDataArray = get(resultData.runData, lastNodeExecuted) ?? [];
const nodeResponseData = nodeResponseDataArray[nodeResponseDataArray.length - 1];
let responseMessage: string;
if (get(nodeResponseData, 'error')) {
responseMessage = '[ERROR: ' + get(nodeResponseData, 'error.message') + ']';
} else {
const responseData = get(nodeResponseData, 'data.main[0][0].json');
const text = extractResponseText(responseData) ?? emptyText;
if (!text) {
return undefined;
}
responseMessage = text;
}
return {
text: responseMessage,
sender: 'bot',
id: executionId ?? uuid(),
};
}
/** Extracts response message from workflow output */
function extractResponseText(responseData?: IDataObject): string | undefined {
if (!responseData || isEmpty(responseData)) {
return undefined;
}
// Paths where the response message might be located
const paths = ['output', 'text', 'response.text'];
const matchedPath = paths.find((path) => get(responseData, path));
if (!matchedPath) return JSON.stringify(responseData, null, 2);
const matchedOutput = get(responseData, matchedPath);
if (typeof matchedOutput === 'object') {
return '```json\n' + JSON.stringify(matchedOutput, null, 2) + '\n```';
}
return matchedOutput?.toString() ?? '';
}
export function restoreChatHistory(
workflowExecutionData: IExecutionResponse | null,
emptyText?: string,
): ChatMessage[] {
if (!workflowExecutionData?.data) {
return [];
}
const userMessage = extractChatInput(
workflowExecutionData.workflowData,
workflowExecutionData.data.resultData,
);
const botMessage = extractBotResponse(
workflowExecutionData.data.resultData,
workflowExecutionData.id,
emptyText,
);
return [...(userMessage ? [userMessage] : []), ...(botMessage ? [botMessage] : [])];
}
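To make the response-path lookup concrete, a few input/output pairs for the module-private extractResponseText helper above; the behavior is inferred from the code shown, not stated anywhere in the commit:

```ts
// Inferred behavior of extractResponseText (illustrative only):
// { output: 'hello' }           -> 'hello'             (first matching path wins)
// { response: { text: 'hi' } }  -> 'hi'                (nested path resolved via lodash get)
// { text: { a: 1 } }            -> a fenced JSON rendering of { "a": 1 } (object outputs are stringified)
// { foo: 1 }                    -> '{\n  "foo": 1\n}'  (no matching path: whole payload pretty-printed)
// {}                            -> undefined           (empty payload)
```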

View File

@@ -1,7 +1,7 @@
<script setup lang="ts">
import { formatTokenUsageCount } from '@/components/RunDataAi/utils';
import { useI18n } from '@n8n/i18n';
import { type LlmTokenUsageData } from '@/Interface';
import { formatTokenUsageCount } from '@/utils/aiUtils';
import { N8nText } from '@n8n/design-system';
const { consumedTokens } = defineProps<{ consumedTokens: LlmTokenUsageData }>();

View File

@@ -7,9 +7,10 @@ import { computed } from 'vue';
import NodeIcon from '@/components/NodeIcon.vue';
import AiRunContentBlock from './AiRunContentBlock.vue';
import { useI18n } from '@n8n/i18n';
import { formatTokenUsageCount, getConsumedTokens } from '@/components/RunDataAi/utils';
import { getConsumedTokens } from '@/components/RunDataAi/utils';
import ConsumedTokensDetails from '@/components/ConsumedTokensDetails.vue';
import ViewSubExecution from '../ViewSubExecution.vue';
import { formatTokenUsageCount } from '@/utils/aiUtils';
interface RunMeta {
startTimeMs: number;

View File

@@ -1,21 +1,12 @@
import { type LlmTokenUsageData, type IAiDataContent } from '@/Interface';
import { addTokenUsageData, emptyTokenUsageData } from '@/utils/aiUtils';
import {
type LlmTokenUsageData,
type IAiDataContent,
type INodeUi,
type IExecutionResponse,
} from '@/Interface';
import {
AGENT_LANGCHAIN_NODE_TYPE,
type INodeExecutionData,
type ITaskData,
type ITaskDataConnections,
type NodeConnectionType,
type Workflow,
type ITaskStartedData,
type IRunExecutionData,
} from 'n8n-workflow';
import { type LogEntrySelection } from '../CanvasChat/types/logs';
import { isProxy, isReactive, isRef, toRaw } from 'vue';
export interface AIResult {
node: string;
@@ -193,22 +184,6 @@ export function getReferencedData(
return returnData;
}
const emptyTokenUsageData: LlmTokenUsageData = {
completionTokens: 0,
promptTokens: 0,
totalTokens: 0,
isEstimate: false,
};
function addTokenUsageData(one: LlmTokenUsageData, another: LlmTokenUsageData): LlmTokenUsageData {
return {
completionTokens: one.completionTokens + another.completionTokens,
promptTokens: one.promptTokens + another.promptTokens,
totalTokens: one.totalTokens + another.totalTokens,
isEstimate: one.isEstimate || another.isEstimate,
};
}
export function getConsumedTokens(outputRun: IAiDataContent | undefined): LlmTokenUsageData {
if (!outputRun?.data) {
return emptyTokenUsageData;
@@ -230,458 +205,3 @@ export function getConsumedTokens(outputRun: IAiDataContent | undefined): LlmTok
return tokenUsage;
}
export function formatTokenUsageCount(
usage: LlmTokenUsageData,
field: 'total' | 'prompt' | 'completion',
) {
const count =
field === 'total'
? usage.totalTokens
: field === 'completion'
? usage.completionTokens
: usage.promptTokens;
return usage.isEstimate ? `~${count}` : count.toLocaleString();
}
export interface LogEntry {
parent?: LogEntry;
node: INodeUi;
id: string;
children: LogEntry[];
depth: number;
runIndex: number;
runData: ITaskData;
consumedTokens: LlmTokenUsageData;
workflow: Workflow;
executionId: string;
execution: IRunExecutionData;
}
export interface LogTreeCreationContext {
parent: LogEntry | undefined;
depth: number;
workflow: Workflow;
executionId: string;
data: IRunExecutionData;
workflows: Record<string, Workflow>;
subWorkflowData: Record<string, IRunExecutionData>;
}
export interface LatestNodeInfo {
disabled: boolean;
deleted: boolean;
name: string;
}
function getConsumedTokensV2(task: ITaskData): LlmTokenUsageData {
if (!task.data) {
return emptyTokenUsageData;
}
const tokenUsage = Object.values(task.data)
.flat()
.flat()
.reduce<LlmTokenUsageData>((acc, curr) => {
const tokenUsageData = curr?.json?.tokenUsage ?? curr?.json?.tokenUsageEstimate;
if (!tokenUsageData) return acc;
return addTokenUsageData(acc, {
...(tokenUsageData as Omit<LlmTokenUsageData, 'isEstimate'>),
isEstimate: !!curr?.json.tokenUsageEstimate,
});
}, emptyTokenUsageData);
return tokenUsage;
}
function createNodeV2(
node: INodeUi,
context: LogTreeCreationContext,
runIndex: number,
runData: ITaskData,
children: LogEntry[] = [],
): LogEntry {
return {
parent: context.parent,
node,
id: `${context.workflow.id}:${node.name}:${context.executionId}:${runIndex}`,
depth: context.depth,
runIndex,
runData,
children,
consumedTokens: getConsumedTokensV2(runData),
workflow: context.workflow,
executionId: context.executionId,
execution: context.data,
};
}
export function getTreeNodeDataV2(
nodeName: string,
runData: ITaskData,
runIndex: number | undefined,
context: LogTreeCreationContext,
): LogEntry[] {
const node = context.workflow.getNode(nodeName);
return node ? getTreeNodeDataRecV2(node, runData, context, runIndex) : [];
}
function getChildNodes(
treeNode: LogEntry,
node: INodeUi,
runIndex: number | undefined,
context: LogTreeCreationContext,
) {
if (hasSubExecution(treeNode)) {
const workflowId = treeNode.runData.metadata?.subExecution?.workflowId;
const executionId = treeNode.runData.metadata?.subExecution?.executionId;
const workflow = workflowId ? context.workflows[workflowId] : undefined;
const subWorkflowRunData = executionId ? context.subWorkflowData[executionId] : undefined;
if (!workflow || !subWorkflowRunData || !executionId) {
return [];
}
return createLogTreeRec({
...context,
parent: treeNode,
depth: context.depth + 1,
workflow,
executionId,
data: subWorkflowRunData,
});
}
// Get the first level of children
const connectedSubNodes = context.workflow.getParentNodes(node.name, 'ALL_NON_MAIN', 1);
const isExecutionRoot =
treeNode.parent === undefined || treeNode.executionId !== treeNode.parent.executionId;
return connectedSubNodes.flatMap((subNodeName) =>
(context.data.resultData.runData[subNodeName] ?? []).flatMap((t, index) => {
// At root depth, filter out node executions that weren't triggered by this node
// This prevents showing duplicate executions when a sub-node is connected to multiple parents
// Only filter nodes that have source information with valid previousNode references
const isMatched =
isExecutionRoot && t.source.some((source) => source !== null)
? t.source.some(
(source) =>
source?.previousNode === node.name &&
(runIndex === undefined || source.previousNodeRun === runIndex),
)
: runIndex === undefined || index === runIndex;
if (!isMatched) {
return [];
}
const subNode = context.workflow.getNode(subNodeName);
return subNode
? getTreeNodeDataRecV2(
subNode,
t,
{ ...context, depth: context.depth + 1, parent: treeNode },
index,
)
: [];
}),
);
}
function getTreeNodeDataRecV2(
node: INodeUi,
runData: ITaskData,
context: LogTreeCreationContext,
runIndex: number | undefined,
): LogEntry[] {
const treeNode = createNodeV2(node, context, runIndex ?? 0, runData);
const children = getChildNodes(treeNode, node, runIndex, context).sort(sortLogEntries);
treeNode.children = children;
return [treeNode];
}
export function getTotalConsumedTokens(...usage: LlmTokenUsageData[]): LlmTokenUsageData {
return usage.reduce(addTokenUsageData, emptyTokenUsageData);
}
export function getSubtreeTotalConsumedTokens(
treeNode: LogEntry,
includeSubWorkflow: boolean,
): LlmTokenUsageData {
const executionId = treeNode.executionId;
function calculate(currentNode: LogEntry): LlmTokenUsageData {
if (!includeSubWorkflow && currentNode.executionId !== executionId) {
return emptyTokenUsageData;
}
return getTotalConsumedTokens(
currentNode.consumedTokens,
...currentNode.children.map(calculate),
);
}
return calculate(treeNode);
}
function findLogEntryToAutoSelectRec(subTree: LogEntry[], depth: number): LogEntry | undefined {
for (const entry of subTree) {
if (entry.runData?.error) {
return entry;
}
const childAutoSelect = findLogEntryToAutoSelectRec(entry.children, depth + 1);
if (childAutoSelect) {
return childAutoSelect;
}
if (entry.node.type === AGENT_LANGCHAIN_NODE_TYPE) {
return entry;
}
}
return depth === 0 ? subTree[0] : undefined;
}
export function createLogTree(
workflow: Workflow,
response: IExecutionResponse,
workflows: Record<string, Workflow> = {},
subWorkflowData: Record<string, IRunExecutionData> = {},
) {
return createLogTreeRec({
parent: undefined,
depth: 0,
executionId: response.id,
workflow,
workflows,
data: response.data ?? { resultData: { runData: {} } },
subWorkflowData,
});
}
function createLogTreeRec(context: LogTreeCreationContext) {
const runs = Object.entries(context.data.resultData.runData)
.flatMap(([nodeName, taskData]) =>
context.workflow.getChildNodes(nodeName, 'ALL_NON_MAIN').length > 0 ||
context.workflow.getNode(nodeName)?.disabled
? [] // skip sub nodes and disabled nodes
: taskData.map((task, runIndex) => ({
nodeName,
runData: task,
runIndex,
nodeHasMultipleRuns: taskData.length > 1,
})),
)
.sort(sortLogEntries);
return runs.flatMap(({ nodeName, runIndex, runData, nodeHasMultipleRuns }) =>
getTreeNodeDataV2(nodeName, runData, nodeHasMultipleRuns ? runIndex : undefined, context),
);
}
export function findLogEntryRec(
isMatched: (entry: LogEntry) => boolean,
entries: LogEntry[],
): LogEntry | undefined {
for (const entry of entries) {
if (isMatched(entry)) {
return entry;
}
const child = findLogEntryRec(isMatched, entry.children);
if (child) {
return child;
}
}
return undefined;
}
export function findSelectedLogEntry(
selection: LogEntrySelection,
entries: LogEntry[],
): LogEntry | undefined {
switch (selection.type) {
case 'initial':
return findLogEntryToAutoSelectRec(entries, 0);
case 'none':
return undefined;
case 'selected': {
const entry = findLogEntryRec((e) => e.id === selection.id, entries);
if (entry) {
return entry;
}
return findLogEntryToAutoSelectRec(entries, 0);
}
}
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export function deepToRaw<T>(sourceObj: T): T {
const seen = new WeakMap();
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const objectIterator = (input: any): any => {
if (seen.has(input)) {
return input;
}
if (input !== null && typeof input === 'object') {
seen.set(input, true);
}
if (Array.isArray(input)) {
return input.map((item) => objectIterator(item));
}
if (isRef(input) || isReactive(input) || isProxy(input)) {
return objectIterator(toRaw(input));
}
if (
input !== null &&
typeof input === 'object' &&
Object.getPrototypeOf(input) === Object.prototype
) {
return Object.keys(input).reduce((acc, key) => {
acc[key as keyof typeof acc] = objectIterator(input[key]);
return acc;
}, {} as T);
}
return input;
};
return objectIterator(sourceObj);
}
export function flattenLogEntries(
entries: LogEntry[],
collapsedEntryIds: Record<string, boolean>,
ret: LogEntry[] = [],
): LogEntry[] {
for (const entry of entries) {
ret.push(entry);
if (!collapsedEntryIds[entry.id]) {
flattenLogEntries(entry.children, collapsedEntryIds, ret);
}
}
return ret;
}
export function getEntryAtRelativeIndex(
entries: LogEntry[],
id: string,
relativeIndex: number,
): LogEntry | undefined {
const offset = entries.findIndex((e) => e.id === id);
return offset === -1 ? undefined : entries[offset + relativeIndex];
}
function sortLogEntries<T extends { runData: ITaskData }>(a: T, b: T) {
// Sort primarily by startTime; fall back to execution index only when startTime values are equal,
// because the execution index is reset to 0 when an execution is waited and is therefore not necessarily unique
if (a.runData.startTime === b.runData.startTime) {
return a.runData.executionIndex - b.runData.executionIndex;
}
return a.runData.startTime - b.runData.startTime;
}
export function mergeStartData(
startData: { [nodeName: string]: ITaskStartedData[] },
response: IExecutionResponse,
): IExecutionResponse {
if (!response.data) {
return response;
}
const nodeNames = [
...new Set(
Object.keys(startData).concat(Object.keys(response.data.resultData.runData)),
).values(),
];
const runData = Object.fromEntries(
nodeNames.map<[string, ITaskData[]]>((nodeName) => {
const tasks = response.data?.resultData.runData[nodeName] ?? [];
const mergedTasks = tasks.concat(
(startData[nodeName] ?? [])
.filter((task) =>
// To remove duplicate runs, we check start time in addition to execution index
// because nodes such as Wait and Form emit multiple websocket events with
// different execution index for a single run
tasks.every(
(t) => t.startTime < task.startTime && t.executionIndex !== task.executionIndex,
),
)
.map<ITaskData>((task) => ({
...task,
executionTime: 0,
executionStatus: 'running',
})),
);
return [nodeName, mergedTasks];
}),
);
return {
...response,
data: {
...response.data,
resultData: {
...response.data.resultData,
runData,
},
},
};
}
export function hasSubExecution(entry: LogEntry): boolean {
return !!entry.runData.metadata?.subExecution;
}
export function getDefaultCollapsedEntries(entries: LogEntry[]): Record<string, boolean> {
const ret: Record<string, boolean> = {};
function collect(children: LogEntry[]) {
for (const entry of children) {
if (hasSubExecution(entry) && entry.children.length === 0) {
ret[entry.id] = true;
}
collect(entry.children);
}
}
collect(entries);
return ret;
}
export function getDepth(entry: LogEntry): number {
let depth = 0;
let currentEntry = entry;
while (currentEntry.parent !== undefined) {
currentEntry = currentEntry.parent;
depth++;
}
return depth;
}
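A small usage sketch of the flatten and lookup helpers above; createTestLogEntry is the test factory added elsewhere in this commit, so this is illustrative only:

```ts
// Imports omitted; flattenLogEntries and getEntryAtRelativeIndex are defined in the file above,
// and createTestLogEntry is the factory added elsewhere in this commit.
const child = createTestLogEntry({ id: 'run:child' });
const root = createTestLogEntry({ id: 'run:root', children: [child] });

flattenLogEntries([root], {}).map((e) => e.id);                   // ['run:root', 'run:child'] (depth-first)
flattenLogEntries([root], { 'run:root': true }).map((e) => e.id); // ['run:root'] (collapsed entry hides children)

const flat = flattenLogEntries([root], {});
getEntryAtRelativeIndex(flat, 'run:root', 1)?.id; // 'run:child', the next visible row
getEntryAtRelativeIndex(flat, 'run:child', 1);    // undefined, past the end of the list
```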

View File

@@ -2,10 +2,6 @@ import { ref } from 'vue';
import { useViewportAutoAdjust } from './useViewportAutoAdjust';
import { waitFor } from '@testing-library/vue';
vi.mock('@/stores/settings.store', () => ({
useSettingsStore: vi.fn(() => ({ isNewLogsEnabled: true })),
}));
describe(useViewportAutoAdjust, () => {
afterAll(() => {
vi.clearAllMocks();

View File

@@ -1,4 +1,3 @@
import { useSettingsStore } from '@/stores/settings.store';
import type { Rect, SetViewport, ViewportTransform } from '@vue-flow/core';
import { type Ref, ref, watch } from 'vue';
@@ -10,48 +9,44 @@ export function useViewportAutoAdjust(
viewport: Ref<ViewportTransform>,
setViewport: SetViewport,
) {
const settingsStore = useSettingsStore();
const canvasRect = ref<Rect>();
if (settingsStore.isNewLogsEnabled) {
const canvasRect = ref<Rect>();
watch(
viewportRef,
(vp, _, onCleanUp) => {
if (!vp) {
return;
}
const resizeObserver = new ResizeObserver((entries) => {
const entry = entries[0];
if (entry) {
canvasRect.value = entry.contentRect;
}
});
canvasRect.value = {
x: vp.offsetLeft,
y: vp.offsetTop,
width: vp.offsetWidth,
height: vp.offsetHeight,
};
resizeObserver.observe(vp);
onCleanUp(() => resizeObserver.disconnect());
},
{ immediate: true },
);
watch(canvasRect, async (newRect, oldRect) => {
if (!newRect || !oldRect) {
watch(
viewportRef,
(vp, _, onCleanUp) => {
if (!vp) {
return;
}
await setViewport({
x: viewport.value.x + (newRect.width - oldRect.width) / 2,
y: viewport.value.y + (newRect.height - oldRect.height) / 2,
zoom: viewport.value.zoom,
const resizeObserver = new ResizeObserver((entries) => {
const entry = entries[0];
if (entry) {
canvasRect.value = entry.contentRect;
}
});
canvasRect.value = {
x: vp.offsetLeft,
y: vp.offsetTop,
width: vp.offsetWidth,
height: vp.offsetHeight,
};
resizeObserver.observe(vp);
onCleanUp(() => resizeObserver.disconnect());
},
{ immediate: true },
);
watch(canvasRect, async (newRect, oldRect) => {
if (!newRect || !oldRect) {
return;
}
await setViewport({
x: viewport.value.x + (newRect.width - oldRect.width) / 2,
y: viewport.value.y + (newRect.height - oldRect.height) / 2,
zoom: viewport.value.zoom,
});
}
});
}

View File

@@ -6,7 +6,7 @@ import { type ActionDropdownItem, N8nActionDropdown, N8nButton, N8nText } from '
import { useI18n } from '@n8n/i18n';
import { type INodeTypeDescription } from 'n8n-workflow';
import { computed } from 'vue';
import { isChatNode } from '@/components/CanvasChat/utils';
import { isChatNode } from '@/utils/aiUtils';
const emit = defineEmits<{
mouseenter: [event: MouseEvent];

View File

@@ -108,8 +108,8 @@ import { isPresent } from '../utils/typesUtils';
import { useProjectsStore } from '@/stores/projects.store';
import type { CanvasLayoutEvent } from './useCanvasLayout';
import { chatEventBus } from '@n8n/chat/event-buses';
import { isChatNode } from '@/components/CanvasChat/utils';
import { useLogsStore } from '@/stores/logs.store';
import { isChatNode } from '@/utils/aiUtils';
import cloneDeep from 'lodash/cloneDeep';
type AddNodeData = Partial<INodeUi> & {

View File

@@ -4,7 +4,7 @@ import { computed, type MaybeRef, ref, unref, watch } from 'vue';
type GetSize = number | ((containerSize: number) => number);
interface UseResizerV2Options {
interface UseResizablePanelOptions {
/**
* Container element against which the relative size is calculated (doesn't necessarily have to be the DOM parent node)
*/
@@ -52,7 +52,7 @@ export function useResizablePanel(
position = 'left',
allowCollapse,
allowFullSize,
}: UseResizerV2Options,
}: UseResizablePanelOptions,
) {
const containerSize = ref(0);
const persistedSize = useLocalStorage(localStorageKey, -1, { writeDefaults: false });

View File

@@ -4,10 +4,10 @@ import { h, defineComponent } from 'vue';
import { useToast } from './useToast';
import { mockedStore } from '@/__tests__/utils';
import { useSettingsStore } from '@/stores/settings.store';
import { useLogsStore } from '@/stores/logs.store';
import { useNDVStore } from '@/stores/ndv.store';
import { useUIStore } from '@/stores/ui.store';
import { EDITABLE_CANVAS_VIEWS, VIEWS } from '@/constants';
import { useLogsStore } from '@/stores/logs.store';
describe('useToast', () => {
let toast: ReturnType<typeof useToast>;

View File

@@ -13,7 +13,7 @@ import { useStyles } from './useStyles';
import { useSettingsStore } from '@/stores/settings.store';
import { useNDVStore } from '@/stores/ndv.store';
import { useLogsStore } from '@/stores/logs.store';
import { LOGS_PANEL_STATE } from '@/components/CanvasChat/types/logs';
import { LOGS_PANEL_STATE } from '@/features/logs/logs.constants';
export interface NotificationErrorWithNodeAndDescription extends ApplicationError {
node: {

View File

@@ -8,9 +8,8 @@ import { useNpsSurveyStore } from '@/stores/npsSurvey.store';
import { useWorkflowsStore } from '@/stores/workflows.store';
import type { IWorkflowDataUpdate } from '@/Interface';
import { mockedStore } from '@/__tests__/utils';
import { createTestNode, createTestWorkflow } from '@/__tests__/mocks';
import { createTestNode, createTestWorkflow, mockNodeTypeDescription } from '@/__tests__/mocks';
import { CHAT_TRIGGER_NODE_TYPE } from 'n8n-workflow';
import { nodeTypes } from '@/components/CanvasChat/__test__/data';
import { useNodeTypesStore } from '@/stores/nodeTypes.store';
const modalConfirmSpy = vi.fn();
@@ -82,7 +81,13 @@ describe('useWorkflowSaving', () => {
workflowsStore = mockedStore(useWorkflowsStore);
nodeTypesStore = mockedStore(useNodeTypesStore);
nodeTypesStore.setNodeTypes(nodeTypes);
nodeTypesStore.setNodeTypes([
mockNodeTypeDescription({
name: CHAT_TRIGGER_NODE_TYPE,
version: 1,
group: ['trigger'],
}),
]);
});
describe('promptSaveUnsavedWorkflowChanges', () => {

View File

@@ -1,5 +1,4 @@
import { createTestNode, createTestWorkflow, mockNodeTypeDescription } from '@/__tests__/mocks';
import type { LogTreeCreationContext } from '@/components/RunDataAi/utils';
import {
AGENT_NODE_TYPE,
AI_CATEGORY_AGENTS,
@@ -9,6 +8,7 @@ import {
} from '@/constants';
import { type IExecutionResponse } from '@/Interface';
import { WorkflowOperationError, type IRunData, type Workflow } from 'n8n-workflow';
import type { LogTreeCreationContext } from '../logs.types';
export function createTestLogTreeCreationContext(
workflow: Workflow,
@@ -87,6 +87,7 @@ export const aiChatExecutionResponse: IExecutionResponse = {
status: 'success',
data: {
resultData: {
lastNodeExecuted: 'AI Agent',
runData: {
'AI Agent': [
{
@@ -95,7 +96,9 @@ export const aiChatExecutionResponse: IExecutionResponse = {
executionIndex: 0,
executionTime: 1778,
source: [],
data: {},
data: {
main: [[{ json: { output: 'AI response message' } }]],
},
},
],
'AI Model': [

View File

@@ -0,0 +1,26 @@
import {
createTestNode,
createTestTaskData,
createTestWorkflowExecutionResponse,
createTestWorkflowObject,
} from '@/__tests__/mocks';
import type { LogEntry } from '../logs.types';
import { v4 as uuid } from 'uuid';
export function createTestLogEntry(data: Partial<LogEntry> = {}): LogEntry {
const executionId = data.executionId ?? 'test-execution-id';
return {
node: createTestNode(),
runIndex: 0,
runData: createTestTaskData({}),
id: uuid(),
children: [],
consumedTokens: { completionTokens: 0, totalTokens: 0, promptTokens: 0, isEstimate: false },
depth: 0,
workflow: createTestWorkflowObject(),
executionId,
execution: createTestWorkflowExecutionResponse({ id: executionId }).data!,
...data,
};
}

View File

@@ -7,12 +7,11 @@ import MessageOptionAction from './MessageOptionAction.vue';
import { chatEventBus } from '@n8n/chat/event-buses';
import type { ArrowKeyDownPayload } from '@n8n/chat/components/Input.vue';
import ChatInput from '@n8n/chat/components/Input.vue';
import { watch, computed, ref } from 'vue';
import { computed, ref } from 'vue';
import { useClipboard } from '@/composables/useClipboard';
import { useToast } from '@/composables/useToast';
import LogsPanelHeader from '@/components/CanvasChat/future/components/LogsPanelHeader.vue';
import LogsPanelHeader from '@/features/logs/components/LogsPanelHeader.vue';
import { N8nButton, N8nIconButton, N8nTooltip } from '@n8n/design-system';
import { useSettingsStore } from '@/stores/settings.store';
interface Props {
pastChatMessages: string[];
@@ -21,13 +20,11 @@ interface Props {
showCloseButton?: boolean;
isOpen?: boolean;
isReadOnly?: boolean;
isNewLogsEnabled?: boolean;
}
const props = withDefaults(defineProps<Props>(), {
isOpen: true,
isReadOnly: false,
isNewLogsEnabled: false,
});
const emit = defineEmits<{
@@ -41,7 +38,6 @@ const emit = defineEmits<{
const clipboard = useClipboard();
const locale = useI18n();
const toast = useToast();
const settingsStore = useSettingsStore();
const previousMessageIndex = ref(0);
@@ -138,18 +134,6 @@ async function copySessionId() {
type: 'success',
});
}
watch(
() => props.isOpen,
(isOpen) => {
if (isOpen && !settingsStore.isNewLogsEnabled) {
setTimeout(() => {
chatEventBus.emit('focusInput');
}, 0);
}
},
{ immediate: true },
);
</script>
<template>
@@ -160,7 +144,6 @@ watch(
tabindex="0"
>
<LogsPanelHeader
v-if="isNewLogsEnabled"
data-test-id="chat-header"
:title="locale.baseText('chat.window.title')"
@click="emit('clickHeader')"
@@ -199,49 +182,11 @@ watch(
</N8nTooltip>
</template>
</LogsPanelHeader>
<header v-else :class="$style.chatHeader">
<span :class="$style.chatTitle">{{ locale.baseText('chat.window.title') }}</span>
<div :class="$style.session">
<span>{{ locale.baseText('chat.window.session.title') }}</span>
<N8nTooltip placement="left">
<template #content>
{{ sessionId }}
</template>
<span
:class="[$style.sessionId, clipboard.isSupported.value ? $style.copyable : '']"
data-test-id="chat-session-id"
@click="clipboard.isSupported.value ? copySessionId() : null"
>{{ sessionId }}</span
>
</N8nTooltip>
<N8nIconButton
:class="$style.headerButton"
data-test-id="refresh-session-button"
outline
type="secondary"
size="mini"
icon="undo"
:title="locale.baseText('chat.window.session.reset')"
@click="onRefreshSession"
/>
<N8nIconButton
v-if="showCloseButton"
:class="$style.headerButton"
outline
type="secondary"
size="mini"
icon="times"
@click="emit('close')"
/>
</div>
</header>
<main v-if="isOpen" :class="$style.chatBody">
<main v-if="isOpen" :class="$style.chatBody" data-test-id="canvas-chat-body">
<MessagesList
:messages="messages"
:class="$style.messages"
:empty-text="
isNewLogsEnabled ? locale.baseText('chat.window.chat.emptyChatMessage.v2') : undefined
"
:empty-text="locale.baseText('chat.window.chat.emptyChatMessage.v2')"
>
<template #beforeMessage="{ message }">
<MessageOptionTooltip

View File

@@ -1,14 +1,12 @@
<script setup lang="ts">
import LogsPanel from '@/components/CanvasChat/future/LogsPanel.vue';
import { useSettingsStore } from '@/stores/settings.store';
import LogsPanel from '@/features/logs/components/LogsPanel.vue';
import { useWorkflowsStore } from '@/stores/workflows.store';
import { computed } from 'vue';
const { isNewLogsEnabled } = useSettingsStore();
const workflowsStore = useWorkflowsStore();
const hasExecutionData = computed(() => workflowsStore.workflowExecutionData);
</script>
<template>
<LogsPanel v-if="isNewLogsEnabled && hasExecutionData" :is-read-only="true" />
<LogsPanel v-if="hasExecutionData" :is-read-only="true" />
</template>

View File

@@ -5,7 +5,6 @@ import { createRouter, createWebHistory } from 'vue-router';
import { createTestingPinia, type TestingPinia } from '@pinia/testing';
import { h } from 'vue';
import {
createTestLogEntry,
createTestNode,
createTestTaskData,
createTestWorkflow,
@@ -14,8 +13,9 @@ import {
import { mockedStore } from '@/__tests__/utils';
import { useSettingsStore } from '@/stores/settings.store';
import { type FrontendSettings } from '@n8n/api-types';
import { LOG_DETAILS_PANEL_STATE } from '../../types/logs';
import type { LogEntry } from '@/components/RunDataAi/utils';
import { LOG_DETAILS_PANEL_STATE } from '@/features/logs/logs.constants';
import type { LogEntry } from '../logs.types';
import { createTestLogEntry } from '../__test__/mocks';
describe('LogDetailsPanel', () => {
let pinia: TestingPinia;

View File

@@ -1,24 +1,22 @@
<script setup lang="ts">
import LogsViewExecutionSummary from '@/components/CanvasChat/future/components/LogsViewExecutionSummary.vue';
import LogsPanelHeader from '@/components/CanvasChat/future/components/LogsPanelHeader.vue';
import LogsViewRunData from '@/components/CanvasChat/future/components/LogsViewRunData.vue';
import LogsViewExecutionSummary from '@/features/logs/components/LogsViewExecutionSummary.vue';
import LogsPanelHeader from '@/features/logs/components/LogsPanelHeader.vue';
import LogsViewRunData from '@/features/logs/components/LogsViewRunData.vue';
import { useResizablePanel } from '@/composables/useResizablePanel';
import {
LOG_DETAILS_PANEL_STATE,
type LatestNodeInfo,
type LogEntry,
type LogDetailsPanelState,
} from '@/components/CanvasChat/types/logs';
} from '@/features/logs/logs.types';
import NodeIcon from '@/components/NodeIcon.vue';
import { useI18n } from '@n8n/i18n';
import { useNodeTypesStore } from '@/stores/nodeTypes.store';
import LogsViewNodeName from '@/components/CanvasChat/future/components/LogsViewNodeName.vue';
import {
getSubtreeTotalConsumedTokens,
type LogEntry,
type LatestNodeInfo,
} from '@/components/RunDataAi/utils';
import LogsViewNodeName from '@/features/logs/components/LogsViewNodeName.vue';
import { N8nButton, N8nResizeWrapper } from '@n8n/design-system';
import { computed, useTemplateRef } from 'vue';
import KeyboardShortcutTooltip from '@/components/KeyboardShortcutTooltip.vue';
import { getSubtreeTotalConsumedTokens } from '@/features/logs/logs.utils';
import { LOG_DETAILS_PANEL_STATE } from '@/features/logs/logs.constants';
const MIN_IO_PANEL_WIDTH = 200;

View File

@@ -12,10 +12,10 @@ import {
aiChatWorkflow,
aiManualExecutionResponse,
aiManualWorkflow,
} from '../../__test__/data';
} from '../__test__/data';
import { usePushConnectionStore } from '@/stores/pushConnection.store';
import { createTestWorkflowObject } from '@/__tests__/mocks';
import { createLogTree, flattenLogEntries } from '@/components/RunDataAi/utils';
import { createLogTree, flattenLogEntries } from '../logs.utils';
describe('LogsOverviewPanel', () => {
let pinia: TestingPinia;

View File

@@ -1,22 +1,21 @@
<script setup lang="ts">
import LogsPanelHeader from '@/components/CanvasChat/future/components/LogsPanelHeader.vue';
import { useClearExecutionButtonVisible } from '@/composables/useClearExecutionButtonVisible';
import LogsPanelHeader from '@/features/logs/components/LogsPanelHeader.vue';
import { useClearExecutionButtonVisible } from '@/features/logs/composables/useClearExecutionButtonVisible';
import { useI18n } from '@n8n/i18n';
import { N8nButton, N8nRadioButtons, N8nText, N8nTooltip } from '@n8n/design-system';
import { computed, nextTick, toRef, watch } from 'vue';
import LogsOverviewRow from '@/components/CanvasChat/future/components/LogsOverviewRow.vue';
import LogsOverviewRow from '@/features/logs/components/LogsOverviewRow.vue';
import { useRunWorkflow } from '@/composables/useRunWorkflow';
import { useRouter } from 'vue-router';
import LogsViewExecutionSummary from '@/components/CanvasChat/future/components/LogsViewExecutionSummary.vue';
import LogsViewExecutionSummary from '@/features/logs/components/LogsViewExecutionSummary.vue';
import {
getSubtreeTotalConsumedTokens,
getTotalConsumedTokens,
hasSubExecution,
type LatestNodeInfo,
type LogEntry,
} from '@/components/RunDataAi/utils';
} from '@/features/logs/logs.utils';
import { useVirtualList } from '@vueuse/core';
import { type IExecutionResponse } from '@/Interface';
import type { LatestNodeInfo, LogEntry } from '@/features/logs/logs.types';
const {
isOpen,

View File

@@ -2,18 +2,15 @@
import { computed, nextTick, useTemplateRef, watch } from 'vue';
import { N8nButton, N8nIcon, N8nIconButton, N8nText } from '@n8n/design-system';
import { useNodeTypesStore } from '@/stores/nodeTypes.store';
import LogsViewConsumedTokenCountText from '@/features/logs/components/LogsViewConsumedTokenCountText.vue';
import upperFirst from 'lodash/upperFirst';
import { useI18n } from '@n8n/i18n';
import LogsViewConsumedTokenCountText from '@/components/CanvasChat/future/components/LogsViewConsumedTokenCountText.vue';
import { I18nT } from 'vue-i18n';
import { toDayMonth, toTime } from '@/utils/formatters/dateFormatter';
import LogsViewNodeName from '@/components/CanvasChat/future/components/LogsViewNodeName.vue';
import {
getSubtreeTotalConsumedTokens,
type LatestNodeInfo,
type LogEntry,
} from '@/components/RunDataAi/utils';
import LogsViewNodeName from '@/features/logs/components/LogsViewNodeName.vue';
import { getSubtreeTotalConsumedTokens } from '@/features/logs/logs.utils';
import { useTimestamp } from '@vueuse/core';
import type { LatestNodeInfo, LogEntry } from '@/features/logs/logs.types';
const props = defineProps<{
data: LogEntry;

View File

@@ -1,22 +1,21 @@
import { renderComponent } from '@/__tests__/render';
import { fireEvent, waitFor, within } from '@testing-library/vue';
import { mockedStore } from '@/__tests__/utils';
import LogsPanel from '@/components/CanvasChat/future/LogsPanel.vue';
import { useSettingsStore } from '@/stores/settings.store';
import LogsPanel from '@/features/logs/components/LogsPanel.vue';
import { createTestingPinia, type TestingPinia } from '@pinia/testing';
import { setActivePinia } from 'pinia';
import { createRouter, createWebHistory } from 'vue-router';
import { useWorkflowsStore } from '@/stores/workflows.store';
import { h, nextTick } from 'vue';
import { computed, h, nextTick, ref } from 'vue';
import {
aiAgentNode,
aiChatExecutionResponse,
aiChatWorkflow,
aiManualWorkflow,
chatTriggerNode,
nodeTypes,
} from '../__test__/data';
import { useNodeTypesStore } from '@/stores/nodeTypes.store';
import { LOGS_PANEL_STATE } from '../types/logs';
import { IN_PROGRESS_EXECUTION_ID } from '@/constants';
import { useCanvasOperations } from '@/composables/useCanvasOperations';
import { useNDVStore } from '@/stores/ndv.store';
@@ -24,12 +23,38 @@ import { deepCopy } from 'n8n-workflow';
import { createTestTaskData } from '@/__tests__/mocks';
import { useLogsStore } from '@/stores/logs.store';
import { useUIStore } from '@/stores/ui.store';
import { LOGS_PANEL_STATE } from '../logs.constants';
import { ChatOptionsSymbol, ChatSymbol } from '@n8n/chat/constants';
import { userEvent } from '@testing-library/user-event';
import type { ChatMessage } from '@n8n/chat/types';
import * as useChatMessaging from '@/features/logs/composables/useChatMessaging';
import { chatEventBus } from '@n8n/chat/event-buses';
import { useToast } from '@/composables/useToast';
vi.mock('@/composables/useToast', () => {
const showMessage = vi.fn();
const showError = vi.fn();
return {
useToast: () => {
return {
showMessage,
showError,
clearAllStickyNotifications: vi.fn(),
};
},
};
});
vi.mock('@/stores/pushConnection.store', () => ({
usePushConnectionStore: vi.fn().mockReturnValue({
isConnected: true,
}),
}));
describe('LogsPanel', () => {
const VIEWPORT_HEIGHT = 800;
let pinia: TestingPinia;
let settingsStore: ReturnType<typeof mockedStore<typeof useSettingsStore>>;
let workflowsStore: ReturnType<typeof mockedStore<typeof useWorkflowsStore>>;
let nodeTypeStore: ReturnType<typeof mockedStore<typeof useNodeTypesStore>>;
let logsStore: ReturnType<typeof mockedStore<typeof useLogsStore>>;
@@ -39,6 +64,10 @@ describe('LogsPanel', () => {
function render() {
return renderComponent(LogsPanel, {
global: {
provide: {
[ChatSymbol as symbol]: {},
[ChatOptionsSymbol as symbol]: {},
},
plugins: [
createRouter({
history: createWebHistory(),
@@ -55,9 +84,6 @@ describe('LogsPanel', () => {
setActivePinia(pinia);
settingsStore = mockedStore(useSettingsStore);
settingsStore.isNewLogsEnabled = true;
workflowsStore = mockedStore(useWorkflowsStore);
workflowsStore.setWorkflowExecutionData(null);
@@ -83,6 +109,10 @@ describe('LogsPanel', () => {
} as DOMRect);
});
afterEach(() => {
vi.clearAllMocks();
});
it('should render collapsed panel by default', async () => {
const rendered = render();
@@ -432,4 +462,318 @@ describe('LogsPanel', () => {
expect(await findByRole('treeitem', { selected: true })).toHaveTextContent(/AI Model/);
});
});
describe('chat', () => {
beforeEach(() => {
logsStore.toggleOpen(true);
workflowsStore.setWorkflow(aiChatWorkflow);
});
describe('rendering', () => {
it('should render chat when panel is open', () => {
const { getByTestId } = render();
expect(getByTestId('canvas-chat-body')).toBeInTheDocument();
});
it('should not render chat when panel is closed', async () => {
const { queryByTestId } = render();
logsStore.toggleOpen(false);
await waitFor(() => expect(queryByTestId('canvas-chat-body')).not.toBeInTheDocument());
});
it('should show correct input placeholder', async () => {
const { findByTestId } = render();
expect(await findByTestId('chat-input')).toBeInTheDocument();
});
});
describe('message handling', () => {
beforeEach(() => {
vi.spyOn(chatEventBus, 'emit');
workflowsStore.runWorkflow.mockResolvedValue({ executionId: 'test-execution-id' });
});
it('should send message and show response', async () => {
const { findByTestId, findByText, getByText } = render();
// Send message
const input = await findByTestId('chat-input');
await userEvent.type(input, 'Hello AI!');
await userEvent.keyboard('{Enter}');
// Verify message and response
expect(await findByText('Hello AI!')).toBeInTheDocument();
workflowsStore.setWorkflowExecutionData({ ...aiChatExecutionResponse, status: 'success' });
await waitFor(() => expect(getByText('AI response message')).toBeInTheDocument());
// Verify workflow execution
expect(workflowsStore.runWorkflow).toHaveBeenCalledWith(
expect.objectContaining({
runData: undefined,
triggerToStartFrom: {
name: 'Chat',
data: {
data: {
main: [
[
{
json: {
action: 'sendMessage',
chatInput: 'Hello AI!',
sessionId: expect.any(String),
},
},
],
],
},
executionIndex: 0,
executionStatus: 'success',
executionTime: 0,
source: [null],
startTime: expect.any(Number),
},
},
}),
);
});
it('should show loading state during message processing', async () => {
const { findByTestId, queryByTestId } = render();
// Send message
const input = await findByTestId('chat-input');
await userEvent.type(input, 'Test message');
await userEvent.keyboard('{Enter}');
await waitFor(() => expect(queryByTestId('chat-message-typing')).toBeInTheDocument());
workflowsStore.setActiveExecutionId(undefined);
workflowsStore.setWorkflowExecutionData({ ...aiChatExecutionResponse, status: 'success' });
await waitFor(() => expect(queryByTestId('chat-message-typing')).not.toBeInTheDocument());
});
it('should handle workflow execution errors', async () => {
workflowsStore.runWorkflow.mockRejectedValueOnce(new Error());
const { findByTestId } = render();
const input = await findByTestId('chat-input');
await userEvent.type(input, 'Hello AI!');
await userEvent.keyboard('{Enter}');
const toast = useToast();
expect(toast.showError).toHaveBeenCalledWith(new Error(), 'Problem running workflow');
});
});
describe('session management', () => {
const mockMessages: ChatMessage[] = [
{
id: '1',
text: 'Existing message',
sender: 'user',
},
];
beforeEach(() => {
vi.spyOn(useChatMessaging, 'useChatMessaging').mockImplementation(({ messages }) => {
messages.value.push(...mockMessages);
return {
sendMessage: vi.fn(),
previousMessageIndex: ref(0),
isLoading: computed(() => false),
};
});
});
it('should allow copying session ID', async () => {
const clipboardSpy = vi.fn();
document.execCommand = clipboardSpy;
const { getByTestId } = render();
await userEvent.click(getByTestId('chat-session-id'));
const toast = useToast();
expect(clipboardSpy).toHaveBeenCalledWith('copy');
expect(toast.showMessage).toHaveBeenCalledWith({
message: '',
title: 'Copied to clipboard',
type: 'success',
});
});
it('should refresh session when messages exist', async () => {
const { getByTestId } = render();
const originalSessionId = getByTestId('chat-session-id').textContent;
await userEvent.click(getByTestId('refresh-session-button'));
expect(getByTestId('chat-session-id').textContent).not.toEqual(originalSessionId);
});
});
describe('file handling', () => {
beforeEach(() => {
vi.spyOn(useChatMessaging, 'useChatMessaging').mockReturnValue({
sendMessage: vi.fn(),
previousMessageIndex: ref(0),
isLoading: computed(() => false),
});
logsStore.state = LOGS_PANEL_STATE.ATTACHED;
workflowsStore.allowFileUploads = true;
});
it('should enable file uploads when allowed by chat trigger node', async () => {
workflowsStore.setNodes(aiChatWorkflow.nodes);
workflowsStore.setNodeParameters({
name: chatTriggerNode.name,
value: { options: { allowFileUploads: true } },
});
const { getByTestId, queryByTestId } = render();
expect(getByTestId('canvas-chat')).toBeInTheDocument();
expect(getByTestId('chat-attach-file-button')).toBeInTheDocument();
workflowsStore.setNodeParameters({
name: chatTriggerNode.name,
value: { options: { allowFileUploads: false } },
});
await waitFor(() =>
expect(queryByTestId('chat-attach-file-button')).not.toBeInTheDocument(),
);
});
});
describe('message history handling', () => {
it('should properly navigate through message history with wrap-around', async () => {
workflowsStore.resetChatMessages();
workflowsStore.appendChatMessage('Message 1');
workflowsStore.appendChatMessage('Message 2');
workflowsStore.appendChatMessage('Message 3');
const { findByTestId } = render();
const input = await findByTestId('chat-input');
chatEventBus.emit('focusInput');
// First up should show most recent message
await userEvent.keyboard('{ArrowUp}');
expect(input).toHaveValue('Message 3');
// Second up should show second most recent
await userEvent.keyboard('{ArrowUp}');
expect(input).toHaveValue('Message 2');
// Third up should show oldest message
await userEvent.keyboard('{ArrowUp}');
expect(input).toHaveValue('Message 1');
// Fourth up should wrap around to most recent
await userEvent.keyboard('{ArrowUp}');
expect(input).toHaveValue('Message 3');
// Down arrow should go in reverse
await userEvent.keyboard('{ArrowDown}');
expect(input).toHaveValue('Message 1');
});
it('should reset message history navigation on new input', async () => {
workflowsStore.resetChatMessages();
workflowsStore.appendChatMessage('Message 1');
workflowsStore.appendChatMessage('Message 2');
const { findByTestId } = render();
const input = await findByTestId('chat-input');
chatEventBus.emit('focusInput');
// Navigate to oldest message
await userEvent.keyboard('{ArrowUp}'); // Most recent
await userEvent.keyboard('{ArrowUp}'); // Oldest
expect(input).toHaveValue('Message 1');
await userEvent.type(input, 'New message');
await userEvent.keyboard('{Enter}');
await userEvent.keyboard('{ArrowUp}');
expect(input).toHaveValue('Message 2');
});
});
describe('message reuse and repost', () => {
const sendMessageSpy = vi.fn();
beforeEach(() => {
const mockMessages: ChatMessage[] = [
{
id: '1',
text: 'Original message',
sender: 'user',
},
{
id: '2',
text: 'AI response',
sender: 'bot',
},
];
vi.spyOn(useChatMessaging, 'useChatMessaging').mockImplementation(({ messages }) => {
messages.value.push(...mockMessages);
return {
sendMessage: sendMessageSpy,
previousMessageIndex: ref(0),
isLoading: computed(() => false),
};
});
});
it('should repost user message with new execution', async () => {
const { findByTestId } = render();
const repostButton = await findByTestId('repost-message-button');
await userEvent.click(repostButton);
expect(sendMessageSpy).toHaveBeenCalledWith('Original message');
});
it('should show message options only for appropriate messages', async () => {
const { findByText, container } = render();
await findByText('Original message');
const userMessage = container.querySelector('.chat-message-from-user');
expect(
userMessage?.querySelector('[data-test-id="repost-message-button"]'),
).toBeInTheDocument();
expect(
userMessage?.querySelector('[data-test-id="reuse-message-button"]'),
).toBeInTheDocument();
await findByText('AI response');
const botMessage = container.querySelector('.chat-message-from-bot');
expect(
botMessage?.querySelector('[data-test-id="repost-message-button"]'),
).not.toBeInTheDocument();
expect(
botMessage?.querySelector('[data-test-id="reuse-message-button"]'),
).not.toBeInTheDocument();
});
});
describe('keyboard shortcuts', () => {
it('should handle Enter key with modifier to start new line', async () => {
const { findByTestId } = render();
const input = await findByTestId('chat-input');
await userEvent.type(input, 'Line 1');
await userEvent.keyboard('{Shift>}{Enter}{/Shift}');
await userEvent.type(input, 'Line 2');
expect(input).toHaveValue('Line 1\nLine 2');
});
});
});
});

View File

@@ -1,19 +1,19 @@
<script setup lang="ts">
import { nextTick, computed, useTemplateRef } from 'vue';
import { N8nResizeWrapper } from '@n8n/design-system';
import { useChatState } from '@/components/CanvasChat/composables/useChatState';
import LogsOverviewPanel from '@/components/CanvasChat/future/components/LogsOverviewPanel.vue';
import ChatMessagesPanel from '@/components/CanvasChat/components/ChatMessagesPanel.vue';
import LogsDetailsPanel from '@/components/CanvasChat/future/components/LogDetailsPanel.vue';
import LogsPanelActions from '@/components/CanvasChat/future/components/LogsPanelActions.vue';
import { useLogsPanelLayout } from '@/components/CanvasChat/future/composables/useLogsPanelLayout';
import { useLogsExecutionData } from '@/components/CanvasChat/future/composables/useLogsExecutionData';
import { type LogEntry } from '@/components/RunDataAi/utils';
import { useChatState } from '@/features/logs/composables/useChatState';
import LogsOverviewPanel from '@/features/logs/components/LogsOverviewPanel.vue';
import ChatMessagesPanel from '@/features/logs/components/ChatMessagesPanel.vue';
import LogsDetailsPanel from '@/features/logs/components/LogDetailsPanel.vue';
import LogsPanelActions from '@/features/logs/components/LogsPanelActions.vue';
import { useLogsExecutionData } from '@/features/logs/composables/useLogsExecutionData';
import { useNDVStore } from '@/stores/ndv.store';
import { ndvEventBus } from '@/event-bus';
import { useLogsSelection } from '@/components/CanvasChat/future/composables/useLogsSelection';
import { useLogsTreeExpand } from '@/components/CanvasChat/future/composables/useLogsTreeExpand';
import { useLogsSelection } from '@/features/logs/composables/useLogsSelection';
import { useLogsTreeExpand } from '@/features/logs/composables/useLogsTreeExpand';
import { type LogEntry } from '@/features/logs/logs.types';
import { useLogsStore } from '@/stores/logs.store';
import { useLogsPanelLayout } from '@/features/logs/composables/useLogsPanelLayout';
const props = withDefaults(defineProps<{ isReadOnly?: boolean }>(), { isReadOnly: false });

View File

@@ -1,7 +1,7 @@
<script setup lang="ts">
import { formatTokenUsageCount } from '@/components/RunDataAi/utils';
import { useI18n } from '@n8n/i18n';
import { type LlmTokenUsageData } from '@/Interface';
import { formatTokenUsageCount } from '@/utils/aiUtils';
import { N8nTooltip } from '@n8n/design-system';
const { consumedTokens } = defineProps<{ consumedTokens: LlmTokenUsageData }>();

View File

@@ -1,5 +1,5 @@
<script setup lang="ts">
import LogsViewConsumedTokenCountText from '@/components/CanvasChat/future/components/LogsViewConsumedTokenCountText.vue';
import LogsViewConsumedTokenCountText from '@/features/logs/components/LogsViewConsumedTokenCountText.vue';
import { useI18n } from '@n8n/i18n';
import { type LlmTokenUsageData } from '@/Interface';
import { N8nText } from '@n8n/design-system';

View File

@@ -1,6 +1,6 @@
<script setup lang="ts">
import RunData from '@/components/RunData.vue';
import { type LogEntry } from '@/components/RunDataAi/utils';
import { type LogEntry } from '@/features/logs/logs.types';
import { useI18n } from '@n8n/i18n';
import { type IRunDataDisplayMode, type NodePanelType } from '@/Interface';
import { useNDVStore } from '@/stores/ndv.store';

View File

@@ -17,7 +17,7 @@ import { usePinnedData } from '@/composables/usePinnedData';
import { MODAL_CONFIRM } from '@/constants';
import { useI18n } from '@n8n/i18n';
import type { IExecutionPushResponse, INodeUi } from '@/Interface';
import { extractBotResponse, getInputKey } from '@/components/CanvasChat/utils';
import { extractBotResponse, getInputKey } from '@/features/logs/logs.utils';
export type RunWorkflowChatPayload = {
triggerNode: string;

View File

@@ -1,30 +1,26 @@
import type { RunWorkflowChatPayload } from '@/components/CanvasChat/composables/useChatMessaging';
import { useChatMessaging } from '@/components/CanvasChat/composables/useChatMessaging';
import { useChatTrigger } from '@/components/CanvasChat/composables/useChatTrigger';
import type { RunWorkflowChatPayload } from '@/features/logs/composables/useChatMessaging';
import { useChatMessaging } from '@/features/logs/composables/useChatMessaging';
import { useI18n } from '@n8n/i18n';
import { useNodeHelpers } from '@/composables/useNodeHelpers';
import { useRunWorkflow } from '@/composables/useRunWorkflow';
import { VIEWS } from '@/constants';
import { type INodeUi } from '@/Interface';
import { useNodeTypesStore } from '@/stores/nodeTypes.store';
import { useWorkflowsStore } from '@/stores/workflows.store';
import { ChatOptionsSymbol, ChatSymbol } from '@n8n/chat/constants';
import { chatEventBus } from '@n8n/chat/event-buses';
import type { Chat, ChatMessage, ChatOptions } from '@n8n/chat/types';
import { type INode } from 'n8n-workflow';
import { v4 as uuid } from 'uuid';
import type { Ref } from 'vue';
import { computed, provide, ref, watch } from 'vue';
import { useRouter } from 'vue-router';
import { restoreChatHistory } from '@/components/CanvasChat/utils';
import { useLogsStore } from '@/stores/logs.store';
import { restoreChatHistory } from '@/features/logs/logs.utils';
import type { INodeParameters } from 'n8n-workflow';
import { isChatNode } from '@/utils/aiUtils';
interface ChatState {
currentSessionId: Ref<string>;
messages: Ref<ChatMessage[]>;
previousChatMessages: Ref<string[]>;
chatTriggerNode: Ref<INodeUi | null>;
connectedNode: Ref<INode | null>;
sendMessage: (message: string, files?: File[]) => Promise<void>;
refreshSession: () => void;
displayExecution: (executionId: string) => void;
@@ -33,7 +29,6 @@ interface ChatState {
export function useChatState(isReadOnly: boolean): ChatState {
const locale = useI18n();
const workflowsStore = useWorkflowsStore();
const nodeTypesStore = useNodeTypesStore();
const logsStore = useLogsStore();
const router = useRouter();
const nodeHelpers = useNodeHelpers();
@@ -43,15 +38,19 @@ export function useChatState(isReadOnly: boolean): ChatState {
const currentSessionId = ref<string>(uuid().replace(/-/g, ''));
const previousChatMessages = computed(() => workflowsStore.getPastChatMessages);
const workflow = computed(() => workflowsStore.getCurrentWorkflow());
// Initialize features with injected dependencies
const { chatTriggerNode, connectedNode, allowFileUploads, allowedFilesMimeTypes } =
useChatTrigger({
workflow,
getNodeByName: workflowsStore.getNodeByName,
getNodeType: nodeTypesStore.getNodeType,
});
const chatTriggerNode = computed(
() => Object.values(workflowsStore.allNodes).find(isChatNode) ?? null,
);
const allowFileUploads = computed(
() =>
(chatTriggerNode.value?.parameters?.options as INodeParameters)?.allowFileUploads === true,
);
const allowedFilesMimeTypes = computed(
() =>
(
chatTriggerNode.value?.parameters?.options as INodeParameters
)?.allowedFilesMimeTypes?.toString() ?? '',
);
const { sendMessage, isLoading } = useChatMessaging({
chatTrigger: chatTriggerNode,
@@ -176,7 +175,7 @@ export function useChatState(isReadOnly: boolean): ChatState {
function displayExecution(executionId: string) {
const route = router.resolve({
name: VIEWS.EXECUTION_PREVIEW,
params: { name: workflow.value.id, executionId },
params: { name: workflowsStore.workflowId, executionId },
});
window.open(route.href, '_blank');
}
@@ -185,8 +184,6 @@ export function useChatState(isReadOnly: boolean): ChatState {
currentSessionId,
messages: computed(() => (isReadOnly ? restoredChatMessages.value : messages.value)),
previousChatMessages,
chatTriggerNode,
connectedNode,
sendMessage,
refreshSession,
displayExecution,

View File

@@ -2,8 +2,8 @@ import { START_NODE_TYPE } from '@/constants';
import { useSourceControlStore } from '@/stores/sourceControl.store';
import { useWorkflowsStore } from '@/stores/workflows.store';
import { computed } from 'vue';
import { useCanvasOperations } from '@/composables/useCanvasOperations';
import { useRoute } from 'vue-router';
import { useCanvasOperations } from './useCanvasOperations';
import { useNodeTypesStore } from '@/stores/nodeTypes.store';
export function useClearExecutionButtonVisible() {
@@ -28,6 +28,6 @@ export function useClearExecutionButtonVisible() {
!isReadOnlyEnvironment.value &&
!isWorkflowRunning.value &&
!allTriggerNodesDisabled.value &&
workflowExecutionData.value,
!!workflowExecutionData.value,
);
}

View File

@@ -5,7 +5,7 @@ import { createTestingPinia } from '@pinia/testing';
import { mockedStore } from '@/__tests__/utils';
import { useWorkflowsStore } from '@/stores/workflows.store';
import { useNodeTypesStore } from '@/stores/nodeTypes.store';
import { nodeTypes } from '../../__test__/data';
import { nodeTypes } from '../__test__/data';
import {
createTestNode,
createTestTaskData,

View File

@@ -1,19 +1,14 @@
import { watch, computed, ref } from 'vue';
import { isChatNode } from '../../utils';
import { type IExecutionResponse } from '@/Interface';
import { Workflow, type IRunExecutionData } from 'n8n-workflow';
import { useWorkflowsStore } from '@/stores/workflows.store';
import { useNodeHelpers } from '@/composables/useNodeHelpers';
import { useThrottleFn } from '@vueuse/core';
import {
createLogTree,
deepToRaw,
mergeStartData,
type LatestNodeInfo,
type LogEntry,
} from '@/components/RunDataAi/utils';
import { createLogTree, deepToRaw, mergeStartData } from '@/features/logs/logs.utils';
import { parse } from 'flatted';
import { useToast } from '@/composables/useToast';
import type { LatestNodeInfo, LogEntry } from '../logs.types';
import { isChatNode } from '@/utils/aiUtils';
export function useLogsExecutionData() {
const nodeHelpers = useNodeHelpers();

View File

@@ -1,15 +1,15 @@
import { computed, type ShallowRef } from 'vue';
import { useTelemetry } from '@/composables/useTelemetry';
import { watch } from 'vue';
import { useLogsStore } from '@/stores/logs.store';
import { useResizablePanel } from '@/composables/useResizablePanel';
import { usePiPWindow } from '@/features/logs/composables/usePiPWindow';
import {
LOGS_PANEL_STATE,
LOCAL_STORAGE_OVERVIEW_PANEL_WIDTH,
LOCAL_STORAGE_PANEL_HEIGHT,
LOCAL_STORAGE_PANEL_WIDTH,
} from '../../composables/useResize';
import { LOGS_PANEL_STATE } from '../../types/logs';
import { usePiPWindow } from '../../composables/usePiPWindow';
import { useTelemetry } from '@/composables/useTelemetry';
import { watch } from 'vue';
import { useResizablePanel } from '../../../../composables/useResizablePanel';
import { useLogsStore } from '@/stores/logs.store';
} from '@/features/logs/logs.constants';
export function useLogsPanelLayout(
pipContainer: Readonly<ShallowRef<HTMLElement | null>>,

View File

@@ -1,11 +1,10 @@
import type { LogEntrySelection } from '@/components/CanvasChat/types/logs';
import type { LogEntry, LogEntrySelection } from '@/features/logs/logs.types';
import {
findLogEntryRec,
findSelectedLogEntry,
getDepth,
getEntryAtRelativeIndex,
type LogEntry,
} from '@/components/RunDataAi/utils';
} from '@/features/logs/logs.utils';
import { useTelemetry } from '@/composables/useTelemetry';
import { canvasEventBus } from '@/event-bus/canvas';
import type { IExecutionResponse } from '@/Interface';

View File

@@ -1,5 +1,6 @@
import { flattenLogEntries, type LogEntry } from '@/components/RunDataAi/utils';
import { flattenLogEntries } from '@/features/logs/logs.utils';
import { computed, ref, type ComputedRef } from 'vue';
import type { LogEntry } from '../logs.types';
export function useLogsTreeExpand(entries: ComputedRef<LogEntry[]>) {
const collapsedEntries = ref<Record<string, boolean>>({});

View File

@@ -0,0 +1,17 @@
export const LOCAL_STORAGE_PANEL_HEIGHT = 'N8N_CANVAS_CHAT_HEIGHT';
export const LOCAL_STORAGE_PANEL_WIDTH = 'N8N_CANVAS_CHAT_WIDTH';
export const LOCAL_STORAGE_OVERVIEW_PANEL_WIDTH = 'N8N_LOGS_OVERVIEW_PANEL_WIDTH';
export const LOGS_PANEL_STATE = {
CLOSED: 'closed',
ATTACHED: 'attached',
FLOATING: 'floating',
} as const;
export const LOG_DETAILS_PANEL_STATE = {
INPUT: 'input',
OUTPUT: 'output',
BOTH: 'both',
} as const;

View File

@@ -0,0 +1,43 @@
import type { LOG_DETAILS_PANEL_STATE, LOGS_PANEL_STATE } from '@/features/logs/logs.constants';
import type { INodeUi, LlmTokenUsageData } from '@/Interface';
import type { IRunExecutionData, ITaskData, Workflow } from 'n8n-workflow';
export interface LogEntry {
parent?: LogEntry;
node: INodeUi;
id: string;
children: LogEntry[];
depth: number;
runIndex: number;
runData: ITaskData;
consumedTokens: LlmTokenUsageData;
workflow: Workflow;
executionId: string;
execution: IRunExecutionData;
}
export interface LogTreeCreationContext {
parent: LogEntry | undefined;
depth: number;
workflow: Workflow;
executionId: string;
data: IRunExecutionData;
workflows: Record<string, Workflow>;
subWorkflowData: Record<string, IRunExecutionData>;
}
export interface LatestNodeInfo {
disabled: boolean;
deleted: boolean;
name: string;
}
export type LogEntrySelection =
| { type: 'initial' }
| { type: 'selected'; id: string }
| { type: 'none' };
export type LogsPanelState = (typeof LOGS_PANEL_STATE)[keyof typeof LOGS_PANEL_STATE];
export type LogDetailsPanelState =
(typeof LOG_DETAILS_PANEL_STATE)[keyof typeof LOG_DETAILS_PANEL_STATE];

File diff suppressed because it is too large

View File

@@ -0,0 +1,541 @@
import type { IExecutionResponse, INodeUi, LlmTokenUsageData, IWorkflowDb } from '@/Interface';
import { addTokenUsageData, emptyTokenUsageData, isChatNode } from '@/utils/aiUtils';
import {
NodeConnectionTypes,
type IDataObject,
AGENT_LANGCHAIN_NODE_TYPE,
type IRunExecutionData,
type ITaskData,
type ITaskStartedData,
type Workflow,
} from 'n8n-workflow';
import type { LogEntry, LogEntrySelection, LogTreeCreationContext } from './logs.types';
import { isProxy, isReactive, isRef, toRaw } from 'vue';
import { CHAT_TRIGGER_NODE_TYPE, MANUAL_CHAT_TRIGGER_NODE_TYPE } from '@/constants';
import { type ChatMessage } from '@n8n/chat/types';
import get from 'lodash-es/get';
import isEmpty from 'lodash-es/isEmpty';
import { v4 as uuid } from 'uuid';
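// Sums the LLM token usage reported in a task's output items (tokenUsage / tokenUsageEstimate),
// returning zeros when no usage data is present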
function getConsumedTokens(task: ITaskData): LlmTokenUsageData {
if (!task.data) {
return emptyTokenUsageData;
}
const tokenUsage = Object.values(task.data)
.flat()
.flat()
.reduce<LlmTokenUsageData>((acc, curr) => {
const tokenUsageData = curr?.json?.tokenUsage ?? curr?.json?.tokenUsageEstimate;
if (!tokenUsageData) return acc;
return addTokenUsageData(acc, {
...(tokenUsageData as Omit<LlmTokenUsageData, 'isEstimate'>),
isEstimate: !!curr?.json.tokenUsageEstimate,
});
}, emptyTokenUsageData);
return tokenUsage;
}
function createNode(
node: INodeUi,
context: LogTreeCreationContext,
runIndex: number,
runData: ITaskData,
children: LogEntry[] = [],
): LogEntry {
return {
parent: context.parent,
node,
id: `${context.workflow.id}:${node.name}:${context.executionId}:${runIndex}`,
depth: context.depth,
runIndex,
runData,
children,
consumedTokens: getConsumedTokens(runData),
workflow: context.workflow,
executionId: context.executionId,
execution: context.data,
};
}
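// Builds the log subtree for a single run of the named node, or an empty list if the node is not in the workflow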
export function getTreeNodeData(
nodeName: string,
runData: ITaskData,
runIndex: number | undefined,
context: LogTreeCreationContext,
): LogEntry[] {
const node = context.workflow.getNode(nodeName);
return node ? getTreeNodeDataRec(node, runData, context, runIndex) : [];
}
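// Resolves the children of a log entry: either the log tree of a referenced sub-execution
// or the matching runs of directly connected sub-nodes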
function getChildNodes(
treeNode: LogEntry,
node: INodeUi,
runIndex: number | undefined,
context: LogTreeCreationContext,
) {
if (hasSubExecution(treeNode)) {
const workflowId = treeNode.runData.metadata?.subExecution?.workflowId;
const executionId = treeNode.runData.metadata?.subExecution?.executionId;
const workflow = workflowId ? context.workflows[workflowId] : undefined;
const subWorkflowRunData = executionId ? context.subWorkflowData[executionId] : undefined;
if (!workflow || !subWorkflowRunData || !executionId) {
return [];
}
return createLogTreeRec({
...context,
parent: treeNode,
depth: context.depth + 1,
workflow,
executionId,
data: subWorkflowRunData,
});
}
// Get directly connected sub-nodes, which become this entry's children in the log tree
const connectedSubNodes = context.workflow.getParentNodes(node.name, 'ALL_NON_MAIN', 1);
const isExecutionRoot =
treeNode.parent === undefined || treeNode.executionId !== treeNode.parent.executionId;
return connectedSubNodes.flatMap((subNodeName) =>
(context.data.resultData.runData[subNodeName] ?? []).flatMap((t, index) => {
// At root depth, filter out node executions that weren't triggered by this node
// This prevents showing duplicate executions when a sub-node is connected to multiple parents
// Only filter nodes that have source information with valid previousNode references
const isMatched =
isExecutionRoot && t.source.some((source) => source !== null)
? t.source.some(
(source) =>
source?.previousNode === node.name &&
(runIndex === undefined || source.previousNodeRun === runIndex),
)
: runIndex === undefined || index === runIndex;
if (!isMatched) {
return [];
}
const subNode = context.workflow.getNode(subNodeName);
return subNode
? getTreeNodeDataRec(
subNode,
t,
{ ...context, depth: context.depth + 1, parent: treeNode },
index,
)
: [];
}),
);
}
function getTreeNodeDataRec(
node: INodeUi,
runData: ITaskData,
context: LogTreeCreationContext,
runIndex: number | undefined,
): LogEntry[] {
const treeNode = createNode(node, context, runIndex ?? 0, runData);
const children = getChildNodes(treeNode, node, runIndex, context).sort(sortLogEntries);
treeNode.children = children;
return [treeNode];
}
export function getTotalConsumedTokens(...usage: LlmTokenUsageData[]): LlmTokenUsageData {
return usage.reduce(addTokenUsageData, emptyTokenUsageData);
}
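// Sums token usage for an entry and all of its descendants, optionally excluding descendants
// that belong to a different (sub-workflow) execution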
export function getSubtreeTotalConsumedTokens(
treeNode: LogEntry,
includeSubWorkflow: boolean,
): LlmTokenUsageData {
const executionId = treeNode.executionId;
function calculate(currentNode: LogEntry): LlmTokenUsageData {
if (!includeSubWorkflow && currentNode.executionId !== executionId) {
return emptyTokenUsageData;
}
return getTotalConsumedTokens(
currentNode.consumedTokens,
...currentNode.children.map(calculate),
);
}
return calculate(treeNode);
}
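// Chooses the entry to auto-select: scans depth-first for an errored entry or an AI Agent node,
// falling back to the first root entry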
function findLogEntryToAutoSelectRec(subTree: LogEntry[], depth: number): LogEntry | undefined {
for (const entry of subTree) {
if (entry.runData?.error) {
return entry;
}
const childAutoSelect = findLogEntryToAutoSelectRec(entry.children, depth + 1);
if (childAutoSelect) {
return childAutoSelect;
}
if (entry.node.type === AGENT_LANGCHAIN_NODE_TYPE) {
return entry;
}
}
return depth === 0 ? subTree[0] : undefined;
}
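// Builds the full log tree for an execution, including entries for loaded sub-workflow executions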
export function createLogTree(
workflow: Workflow,
response: IExecutionResponse,
workflows: Record<string, Workflow> = {},
subWorkflowData: Record<string, IRunExecutionData> = {},
) {
return createLogTreeRec({
parent: undefined,
depth: 0,
executionId: response.id,
workflow,
workflows,
data: response.data ?? { resultData: { runData: {} } },
subWorkflowData,
});
}
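// Builds the entries for one execution level: one entry per run of each root-level node, ordered by start time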
function createLogTreeRec(context: LogTreeCreationContext) {
const runs = Object.entries(context.data.resultData.runData)
.flatMap(([nodeName, taskData]) =>
context.workflow.getChildNodes(nodeName, 'ALL_NON_MAIN').length > 0 ||
context.workflow.getNode(nodeName)?.disabled
? [] // skip sub nodes and disabled nodes
: taskData.map((task, runIndex) => ({
nodeName,
runData: task,
runIndex,
nodeHasMultipleRuns: taskData.length > 1,
})),
)
.sort(sortLogEntries);
return runs.flatMap(({ nodeName, runIndex, runData, nodeHasMultipleRuns }) =>
getTreeNodeData(nodeName, runData, nodeHasMultipleRuns ? runIndex : undefined, context),
);
}
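// Depth-first search for the first log entry matching the predicate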
export function findLogEntryRec(
isMatched: (entry: LogEntry) => boolean,
entries: LogEntry[],
): LogEntry | undefined {
for (const entry of entries) {
if (isMatched(entry)) {
return entry;
}
const child = findLogEntryRec(isMatched, entry.children);
if (child) {
return child;
}
}
return undefined;
}
export function findSelectedLogEntry(
selection: LogEntrySelection,
entries: LogEntry[],
): LogEntry | undefined {
switch (selection.type) {
case 'initial':
return findLogEntryToAutoSelectRec(entries, 0);
case 'none':
return undefined;
case 'selected': {
const entry = findLogEntryRec((e) => e.id === selection.id, entries);
if (entry) {
return entry;
}
return findLogEntryToAutoSelectRec(entries, 0);
}
}
}
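// Recursively unwraps Vue refs, reactive objects, and proxies into plain raw values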
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export function deepToRaw<T>(sourceObj: T): T {
const seen = new WeakMap();
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const objectIterator = (input: any): any => {
if (seen.has(input)) {
return input;
}
if (input !== null && typeof input === 'object') {
seen.set(input, true);
}
if (Array.isArray(input)) {
return input.map((item) => objectIterator(item));
}
if (isRef(input) || isReactive(input) || isProxy(input)) {
return objectIterator(toRaw(input));
}
if (
input !== null &&
typeof input === 'object' &&
Object.getPrototypeOf(input) === Object.prototype
) {
return Object.keys(input).reduce((acc, key) => {
acc[key as keyof typeof acc] = objectIterator(input[key]);
return acc;
}, {} as T);
}
return input;
};
return objectIterator(sourceObj);
}
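// Flattens the log tree into a flat list in tree order, skipping the children of collapsed entries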
export function flattenLogEntries(
entries: LogEntry[],
collapsedEntryIds: Record<string, boolean>,
ret: LogEntry[] = [],
): LogEntry[] {
for (const entry of entries) {
ret.push(entry);
if (!collapsedEntryIds[entry.id]) {
flattenLogEntries(entry.children, collapsedEntryIds, ret);
}
}
return ret;
}
export function getEntryAtRelativeIndex(
entries: LogEntry[],
id: string,
relativeIndex: number,
): LogEntry | undefined {
const offset = entries.findIndex((e) => e.id === id);
return offset === -1 ? undefined : entries[offset + relativeIndex];
}
function sortLogEntries<T extends { runData: ITaskData }>(a: T, b: T) {
// Sort by start time, and rely on execution index only when start times are equal,
// because the execution index is reset to 0 when an execution is waited and is therefore not necessarily unique
if (a.runData.startTime === b.runData.startTime) {
return a.runData.executionIndex - b.runData.executionIndex;
}
return a.runData.startTime - b.runData.startTime;
}
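// Merges task-started events into the execution response as placeholder 'running' tasks,
// skipping entries that appear to duplicate runs already present in the run data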
export function mergeStartData(
startData: { [nodeName: string]: ITaskStartedData[] },
response: IExecutionResponse,
): IExecutionResponse {
if (!response.data) {
return response;
}
const nodeNames = [
...new Set(
Object.keys(startData).concat(Object.keys(response.data.resultData.runData)),
).values(),
];
const runData = Object.fromEntries(
nodeNames.map<[string, ITaskData[]]>((nodeName) => {
const tasks = response.data?.resultData.runData[nodeName] ?? [];
const mergedTasks = tasks.concat(
(startData[nodeName] ?? [])
.filter((task) =>
// To remove duplicate runs, we check start time in addition to execution index,
// because nodes such as Wait and Form emit multiple websocket events with
// different execution indexes for a single run
tasks.every(
(t) => t.startTime < task.startTime && t.executionIndex !== task.executionIndex,
),
)
.map<ITaskData>((task) => ({
...task,
executionTime: 0,
executionStatus: 'running',
})),
);
return [nodeName, mergedTasks];
}),
);
return {
...response,
data: {
...response.data,
resultData: {
...response.data.resultData,
runData,
},
},
};
}
export function hasSubExecution(entry: LogEntry): boolean {
return !!entry.runData.metadata?.subExecution;
}
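// Collapses by default entries that reference a sub-execution whose logs have not been loaded yet (no children)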
export function getDefaultCollapsedEntries(entries: LogEntry[]): Record<string, boolean> {
const ret: Record<string, boolean> = {};
function collect(children: LogEntry[]) {
for (const entry of children) {
if (hasSubExecution(entry) && entry.children.length === 0) {
ret[entry.id] = true;
}
collect(entry.children);
}
}
collect(entries);
return ret;
}
export function getDepth(entry: LogEntry): number {
let depth = 0;
let currentEntry = entry;
while (currentEntry.parent !== undefined) {
currentEntry = currentEntry.parent;
depth++;
}
return depth;
}
export function getInputKey(node: INodeUi): string {
if (node.type === MANUAL_CHAT_TRIGGER_NODE_TYPE && node.typeVersion < 1.1) {
return 'input';
}
if (node.type === CHAT_TRIGGER_NODE_TYPE) {
return 'chatInput';
}
return 'chatInput';
}
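// Extracts the user's chat message from the chat trigger node's first run, if any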
function extractChatInput(
workflow: IWorkflowDb,
resultData: IRunExecutionData['resultData'],
): ChatMessage | undefined {
const chatTrigger = workflow.nodes.find(isChatNode);
if (chatTrigger === undefined) {
return undefined;
}
const inputKey = getInputKey(chatTrigger);
const runData = (resultData.runData[chatTrigger.name] ?? [])[0];
const message = runData?.data?.[NodeConnectionTypes.Main]?.[0]?.[0]?.json?.[inputKey];
if (runData === undefined || typeof message !== 'string') {
return undefined;
}
return {
text: message,
sender: 'user',
id: uuid(),
};
}
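// Builds the bot's chat message from the last executed node's output, or from its error message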
export function extractBotResponse(
resultData: IRunExecutionData['resultData'],
executionId: string,
emptyText?: string,
): ChatMessage | undefined {
const lastNodeExecuted = resultData.lastNodeExecuted;
if (!lastNodeExecuted) return undefined;
const nodeResponseDataArray = get(resultData.runData, lastNodeExecuted) ?? [];
const nodeResponseData = nodeResponseDataArray[nodeResponseDataArray.length - 1];
let responseMessage: string;
if (get(nodeResponseData, 'error')) {
responseMessage = '[ERROR: ' + get(nodeResponseData, 'error.message') + ']';
} else {
const responseData = get(nodeResponseData, 'data.main[0][0].json');
const text = extractResponseText(responseData) ?? emptyText;
if (!text) {
return undefined;
}
responseMessage = text;
}
return {
text: responseMessage,
sender: 'bot',
id: executionId ?? uuid(),
};
}
/** Extracts response message from workflow output */
function extractResponseText(responseData?: IDataObject): string | undefined {
if (!responseData || isEmpty(responseData)) {
return undefined;
}
// Paths where the response message might be located
const paths = ['output', 'text', 'response.text'];
const matchedPath = paths.find((path) => get(responseData, path));
if (!matchedPath) return JSON.stringify(responseData, null, 2);
const matchedOutput = get(responseData, matchedPath);
if (typeof matchedOutput === 'object') {
return '```json\n' + JSON.stringify(matchedOutput, null, 2) + '\n```';
}
return matchedOutput?.toString() ?? '';
}
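// Rebuilds the chat history (user message and bot response) from a stored execution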
export function restoreChatHistory(
workflowExecutionData: IExecutionResponse | null,
emptyText?: string,
): ChatMessage[] {
if (!workflowExecutionData?.data) {
return [];
}
const userMessage = extractChatInput(
workflowExecutionData.workflowData,
workflowExecutionData.data.resultData,
);
const botMessage = extractBotResponse(
workflowExecutionData.data.resultData,
workflowExecutionData.id,
emptyText,
);
return [...(userMessage ? [userMessage] : []), ...(botMessage ? [botMessage] : [])];
}

View File

@@ -27,9 +27,8 @@ const ErrorView = async () => await import('./views/ErrorView.vue');
const ForgotMyPasswordView = async () => await import('./views/ForgotMyPasswordView.vue');
const MainHeader = async () => await import('@/components/MainHeader/MainHeader.vue');
const MainSidebar = async () => await import('@/components/MainSidebar.vue');
const CanvasChatSwitch = async () => await import('@/components/CanvasChat/CanvasChatSwitch.vue');
const DemoFooter = async () =>
await import('@/components/CanvasChat/future/components/DemoFooter.vue');
const LogsPanel = async () => await import('@/features/logs/components/LogsPanel.vue');
const DemoFooter = async () => await import('@/features/logs/components/DemoFooter.vue');
const NodeView = async () => await import('@/views/NodeView.vue');
const WorkflowExecutionsView = async () => await import('@/views/WorkflowExecutionsView.vue');
const WorkflowExecutionsLandingPage = async () =>
@@ -343,7 +342,7 @@ export const routes: RouteRecordRaw[] = [
default: NodeView,
header: MainHeader,
sidebar: MainSidebar,
footer: CanvasChatSwitch,
footer: LogsPanel,
},
meta: {
nodeView: true,
@@ -377,7 +376,7 @@ export const routes: RouteRecordRaw[] = [
default: NodeView,
header: MainHeader,
sidebar: MainSidebar,
footer: CanvasChatSwitch,
footer: LogsPanel,
},
meta: {
nodeView: true,

View File

@@ -1,8 +1,4 @@
import {
LOG_DETAILS_PANEL_STATE,
LOGS_PANEL_STATE,
type LogDetailsPanelState,
} from '@/components/CanvasChat/types/logs';
import { type LogDetailsPanelState } from '@/features/logs/logs.types';
import { useTelemetry } from '@/composables/useTelemetry';
import {
LOCAL_STORAGE_LOGS_PANEL_DETAILS_PANEL,
@@ -12,6 +8,7 @@ import {
import { useLocalStorage } from '@vueuse/core';
import { defineStore } from 'pinia';
import { computed, ref } from 'vue';
import { LOG_DETAILS_PANEL_STATE, LOGS_PANEL_STATE } from '@/features/logs/logs.constants';
export const useLogsStore = defineStore('logs', () => {
const isOpen = useLocalStorage(LOCAL_STORAGE_LOGS_PANEL_OPEN, false);

View File

@@ -192,8 +192,6 @@ export const useSettingsStore = defineStore(STORES.SETTINGS, () => {
const isDevRelease = computed(() => settings.value.releaseChannel === 'dev');
const isNewLogsEnabled = computed(() => !!settings.value.logsView?.enabled);
const setSettings = (newSettings: FrontendSettings) => {
settings.value = newSettings;
userManagement.value = newSettings.userManagement;
@@ -456,7 +454,6 @@ export const useSettingsStore = defineStore(STORES.SETTINGS, () => {
isAskAiEnabled,
isAiCreditsEnabled,
aiCreditsQuota,
isNewLogsEnabled,
experimental__minZoomNodeSettingsInCanvas,
reset,
testLdapConnection,

View File

@@ -737,11 +737,6 @@ describe('useWorkflowsStore', () => {
});
});
it('should replace existing placeholder task data in new log view', () => {
settingsStore.settings = {
logsView: {
enabled: true,
},
} as FrontendSettings;
const successEventWithExecutionIndex = deepCopy(successEvent);
successEventWithExecutionIndex.data.executionIndex = 1;

View File

@@ -99,8 +99,7 @@ import { useUsersStore } from '@/stores/users.store';
import { updateCurrentUserSettings } from '@/api/users';
import { useExecutingNode } from '@/composables/useExecutingNode';
import type { NodeExecuteBefore } from '@n8n/api-types/push/execution';
import { useLogsStore } from './logs.store';
import { isChatNode } from '@/components/CanvasChat/utils';
import { isChatNode } from '@/utils/aiUtils';
const defaults: Omit<IWorkflowDb, 'id'> & { settings: NonNullable<IWorkflowDb['settings']> } = {
name: '',
@@ -135,7 +134,6 @@ export const useWorkflowsStore = defineStore(STORES.WORKFLOWS, () => {
const rootStore = useRootStore();
const nodeHelpers = useNodeHelpers();
const usersStore = useUsersStore();
const logsStore = useLogsStore();
const nodeTypesStore = useNodeTypesStore();
const version = computed(() => settingsStore.partialExecutionVersion);
@@ -1373,11 +1371,6 @@ export const useWorkflowsStore = defineStore(STORES.WORKFLOWS, () => {
const { [node.name]: removedNodeMetadata, ...remainingNodeMetadata } = nodeMetadata.value;
nodeMetadata.value = remainingNodeMetadata;
// If chat trigger node is removed, close chat
if (node.type === CHAT_TRIGGER_NODE_TYPE && !settingsStore.isNewLogsEnabled) {
logsStore.toggleOpen(false);
}
if (workflow.value.pinData && workflow.value.pinData.hasOwnProperty(node.name)) {
const { [node.name]: removedPinData, ...remainingPinData } = workflow.value.pinData;
workflow.value = {
@@ -1611,7 +1604,7 @@ export const useWorkflowsStore = defineStore(STORES.WORKFLOWS, () => {
existingRunIndex > -1 && !hasWaitingItems ? existingRunIndex : tasksData.length - 1;
const status = tasksData[index]?.executionStatus ?? 'unknown';
if ('waiting' === status || (settingsStore.isNewLogsEnabled && 'running' === status)) {
if ('waiting' === status || 'running' === status) {
tasksData.splice(index, 1, data);
} else {
tasksData.push(data);

View File

@@ -1,4 +1,5 @@
import { parseAiContent } from '@/utils/aiUtils';
import type { LlmTokenUsageData } from '@/Interface';
import { addTokenUsageData, formatTokenUsageCount, parseAiContent } from '@/utils/aiUtils';
import { NodeConnectionTypes } from 'n8n-workflow';
describe(parseAiContent, () => {
@@ -79,3 +80,56 @@ describe(parseAiContent, () => {
]);
});
});
describe(addTokenUsageData, () => {
it('should return sum of consumed tokens', () => {
expect(
addTokenUsageData(
{ completionTokens: 1, promptTokens: 100, totalTokens: 1000, isEstimate: false },
{ completionTokens: 0, promptTokens: 1, totalTokens: 2, isEstimate: false },
),
).toEqual({ completionTokens: 1, promptTokens: 101, totalTokens: 1002, isEstimate: false });
});
it('should set isEstimate to true if either of the arguments is an estimation', () => {
const usageData = { completionTokens: 0, promptTokens: 0, totalTokens: 0, isEstimate: false };
expect(addTokenUsageData(usageData, usageData)).toEqual({
...usageData,
isEstimate: false,
});
expect(addTokenUsageData({ ...usageData, isEstimate: true }, usageData)).toEqual({
...usageData,
isEstimate: true,
});
expect(addTokenUsageData(usageData, { ...usageData, isEstimate: true })).toEqual({
...usageData,
isEstimate: true,
});
expect(
addTokenUsageData({ ...usageData, isEstimate: true }, { ...usageData, isEstimate: true }),
).toEqual({
...usageData,
isEstimate: true,
});
});
});
describe(formatTokenUsageCount, () => {
const usageData: LlmTokenUsageData = {
completionTokens: 11,
promptTokens: 22,
totalTokens: 33,
isEstimate: false,
};
it('should return the number of specified field', () => {
expect(formatTokenUsageCount(usageData, 'completion')).toBe('11');
expect(formatTokenUsageCount(usageData, 'prompt')).toBe('22');
expect(formatTokenUsageCount(usageData, 'total')).toBe('33');
});
it('should prepend "~" if the usage data is an estimation', () => {
expect(formatTokenUsageCount({ ...usageData, isEstimate: true }, 'total')).toBe('~33');
});
});

View File

@@ -1,3 +1,5 @@
import { CHAT_TRIGGER_NODE_TYPE, MANUAL_CHAT_TRIGGER_NODE_TYPE } from '@/constants';
import type { INodeUi, LlmTokenUsageData } from '@/Interface';
import type { IDataObject, INodeExecutionData, NodeConnectionType } from 'n8n-workflow';
import { isObjectEmpty, NodeConnectionTypes } from 'n8n-workflow';
@@ -240,3 +242,40 @@ export function parseAiContent(
.filter((c): c is IDataObject => c !== undefined)
.map((c) => ({ raw: c, parsedContent: parser(c) }));
}
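// Zero usage record; a convenient initial value when summing usage with addTokenUsageData.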
export const emptyTokenUsageData: LlmTokenUsageData = {
completionTokens: 0,
promptTokens: 0,
totalTokens: 0,
isEstimate: false,
};
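// Adds two usage records field by field; the result is marked as an estimate if either input is.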
export function addTokenUsageData(
one: LlmTokenUsageData,
another: LlmTokenUsageData,
): LlmTokenUsageData {
return {
completionTokens: one.completionTokens + another.completionTokens,
promptTokens: one.promptTokens + another.promptTokens,
totalTokens: one.totalTokens + another.totalTokens,
isEstimate: one.isEstimate || another.isEstimate,
};
}
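// Formats the selected token count for display, prefixed with '~' when the value is only an estimate.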
export function formatTokenUsageCount(
usage: LlmTokenUsageData,
field: 'total' | 'prompt' | 'completion',
) {
const count =
field === 'total'
? usage.totalTokens
: field === 'completion'
? usage.completionTokens
: usage.promptTokens;
return usage.isEstimate ? `~${count}` : count.toLocaleString();
}
export function isChatNode(node: INodeUi) {
return [CHAT_TRIGGER_NODE_TYPE, MANUAL_CHAT_TRIGGER_NODE_TYPE].includes(node.type);
}
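For illustration, a minimal sketch of how the helpers above might compose; the `usages` array below is hypothetical sample data, not part of this change:
import type { LlmTokenUsageData } from '@/Interface';
import {
	addTokenUsageData,
	emptyTokenUsageData,
	formatTokenUsageCount,
} from '@/utils/aiUtils';
// Hypothetical per-run usage records (illustrative values only).
const usages: LlmTokenUsageData[] = [
	{ completionTokens: 10, promptTokens: 40, totalTokens: 50, isEstimate: false },
	{ completionTokens: 5, promptTokens: 20, totalTokens: 25, isEstimate: true },
];
// Fold the per-run records into one total; isEstimate propagates if any input is estimated.
const total = usages.reduce(addTokenUsageData, emptyTokenUsageData);
// Logs '~75' here, because one of the inputs was an estimate.
console.log(formatTokenUsageCount(total, 'total'));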

View File

@@ -110,7 +110,6 @@ import { useNDVStore } from '@/stores/ndv.store';
import { getBounds, getNodesWithNormalizedPosition, getNodeViewTab } from '@/utils/nodeViewUtils';
import CanvasStopCurrentExecutionButton from '@/components/canvas/elements/buttons/CanvasStopCurrentExecutionButton.vue';
import CanvasStopWaitingForWebhookButton from '@/components/canvas/elements/buttons/CanvasStopWaitingForWebhookButton.vue';
import CanvasClearExecutionDataButton from '@/components/canvas/elements/buttons/CanvasClearExecutionDataButton.vue';
import { nodeViewEventBus } from '@/event-bus';
import { tryToParseNumber } from '@/utils/typesUtils';
import { useTemplatesStore } from '@/stores/templates.store';
@@ -124,7 +123,6 @@ import { createCanvasConnectionHandleString } from '@/utils/canvasUtils';
import { isValidNodeConnectionType } from '@/utils/typeGuards';
import { getEasyAiWorkflowJson } from '@/utils/easyAiWorkflowUtils';
import type { CanvasLayoutEvent } from '@/composables/useCanvasLayout';
import { useClearExecutionButtonVisible } from '@/composables/useClearExecutionButtonVisible';
import { useWorkflowSaving } from '@/composables/useWorkflowSaving';
import { useBuilderStore } from '@/stores/builder.store';
import { useFoldersStore } from '@/stores/folders.store';
@@ -134,6 +132,7 @@ import { useAgentRequestStore } from '@n8n/stores/useAgentRequestStore';
import { needsAgentInput } from '@/utils/nodes/nodeTransforms';
import { useLogsStore } from '@/stores/logs.store';
import { canvasEventBus } from '@/event-bus/canvas';
import CanvasChatButton from '@/components/canvas/elements/buttons/CanvasChatButton.vue';
defineOptions({
name: 'NodeView',
@@ -1239,8 +1238,6 @@ const isStopWaitingForWebhookButtonVisible = computed(
() => isWorkflowRunning.value && isExecutionWaitingForWebhook.value,
);
const isClearExecutionButtonVisible = useClearExecutionButtonVisible();
async function onRunWorkflowToNode(id: string) {
const node = workflowsStore.getNodeById(id);
if (!node) return;
@@ -1369,11 +1366,6 @@ async function onStopWaitingForWebhook() {
await stopWaitingForWebhook();
}
async function onClearExecutionData() {
workflowsStore.workflowExecutionData = null;
nodeHelpers.updateNodesExecutionIssues();
}
function onRunWorkflowButtonMouseEnter() {
nodeViewEventBus.emit('runWorkflowButton:mouseenter');
}
@@ -2062,10 +2054,6 @@ onBeforeUnmount(() => {
v-if="isStopWaitingForWebhookButtonVisible"
@click="onStopWaitingForWebhook"
/>
<CanvasClearExecutionDataButton
v-if="isClearExecutionButtonVisible && !settingsStore.isNewLogsEnabled"
@click="onClearExecutionData"
/>
</div>
<N8nCallout