Mirror of https://github.com/Abdulazizzn/n8n-enterprise-unlocked.git (synced 2025-12-19 11:01:15 +00:00)
feat: AI Workflow Builder agent (no-changelog) (#17423)
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
@@ -81,3 +81,18 @@ export async function claimFreeAiCredits(
		projectId,
	} as IDataObject);
}

export async function getAiSessions(
	ctx: IRestApiContext,
	workflowId?: string,
): Promise<{
	sessions: Array<{
		sessionId: string;
		messages: ChatRequest.MessageResponse[];
		lastUpdated: string;
	}>;
}> {
	return await makeRestApiRequest(ctx, 'POST', '/ai/sessions', {
		workflowId,
	} as IDataObject);
}
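An illustrative call of the new wrapper (not part of the commit; rootStore.restApiContext and the workflow id are assumed from the surrounding editor-ui stores):

// Hedged usage sketch: load previous builder sessions for the current workflow.
const { sessions } = await getAiSessions(rootStore.restApiContext, workflowsStore.workflowId);
const latest = sessions[0]; // assumes the most recent session is returned first
if (latest) {
	console.log(latest.sessionId, latest.lastUpdated, latest.messages.length);
}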
@@ -11,6 +11,7 @@ import AskAssistantBuild from './AskAssistantBuild.vue';
import { useBuilderStore } from '@/stores/builder.store';
import { mockedStore } from '@/__tests__/utils';
import { STORES } from '@n8n/stores';
import { useWorkflowsStore } from '@/stores/workflows.store';

vi.mock('@/event-bus', () => ({
nodeViewEventBus: {
@@ -34,11 +35,45 @@ vi.mock('@n8n/i18n', async (importOriginal) => ({
}),
}));

vi.mock('vue-router', () => {
const params = {};
const push = vi.fn();
const replace = vi.fn();
const resolve = vi.fn().mockImplementation(() => ({ href: '' }));
return {
useRoute: () => ({
params,
}),
useRouter: () => ({
push,
replace,
resolve,
}),
RouterLink: vi.fn(),
};
});

vi.mock('@/composables/useWorkflowSaving', () => ({
useWorkflowSaving: vi.fn().mockReturnValue({
getCurrentWorkflow: vi.fn(),
saveCurrentWorkflow: vi.fn(),
getWorkflowDataToSave: vi.fn(),
setDocumentTitle: vi.fn(),
executeData: vi.fn(),
getNodeTypes: vi.fn().mockReturnValue([]),
}),
}));

const workflowPrompt = 'Create a workflow';
describe('AskAssistantBuild', () => {
const sessionId = faker.string.uuid();
const renderComponent = createComponentRenderer(AskAssistantBuild);
let builderStore: ReturnType<typeof mockedStore<typeof useBuilderStore>>;
let workflowsStore: ReturnType<typeof mockedStore<typeof useWorkflowsStore>>;

beforeAll(() => {
Element.prototype.scrollTo = vi.fn(() => {});
});

beforeEach(() => {
vi.clearAllMocks();
@@ -57,13 +92,21 @@ describe('AskAssistantBuild', () => {

setActivePinia(pinia);
builderStore = mockedStore(useBuilderStore);
workflowsStore = mockedStore(useWorkflowsStore);

// Mock action implementations
builderStore.initBuilderChat = vi.fn();
builderStore.sendChatMessage = vi.fn();
builderStore.resetBuilderChat = vi.fn();
builderStore.addAssistantMessages = vi.fn();
builderStore.$onAction = vi.fn().mockReturnValue(vi.fn());
builderStore.applyWorkflowUpdate = vi
.fn()
.mockReturnValue({ success: true, workflowData: {}, newNodeIds: [] });
builderStore.getWorkflowSnapshot = vi.fn().mockReturnValue('{}');
builderStore.workflowMessages = [];
builderStore.toolMessages = [];
builderStore.workflowPrompt = workflowPrompt;

workflowsStore.workflowId = 'abc123';
});

describe('rendering', () => {
@@ -76,7 +119,7 @@ describe('AskAssistantBuild', () => {
renderComponent();

// Basic verification that no methods were called on mount
expect(builderStore.initBuilderChat).not.toHaveBeenCalled();
expect(builderStore.sendChatMessage).not.toHaveBeenCalled();
expect(builderStore.addAssistantMessages).not.toHaveBeenCalled();
});
});
@@ -97,106 +140,175 @@ describe('AskAssistantBuild', () => {

await flushPromises();

expect(builderStore.initBuilderChat).toHaveBeenCalledWith(testMessage, 'chat');
expect(builderStore.sendChatMessage).toHaveBeenCalledWith({ text: testMessage });
});
});

describe('feedback handling', () => {
const workflowJson = '{"nodes": [], "connections": {}}';
beforeEach(() => {
builderStore.chatMessages = [
{
id: faker.string.uuid(),
role: 'assistant',
type: 'workflow-generated',
read: true,
codeSnippet: workflowJson,
},
{
id: faker.string.uuid(),
role: 'assistant',
type: 'rate-workflow',
read: true,
content: '',
},
];
});

it('should track feedback when user rates the workflow positively', async () => {
const { findByTestId } = renderComponent();
describe('when workflow-updated message exists', () => {
beforeEach(() => {
// Use $patch to ensure reactivity
builderStore.$patch({
chatMessages: [
{
id: faker.string.uuid(),
role: 'assistant',
type: 'workflow-updated',
read: true,
codeSnippet: workflowJson,
},
{
id: faker.string.uuid(),
role: 'assistant',
type: 'text',
content: 'Wat',
read: true,
showRating: true,
ratingStyle: 'regular',
},
],
});
});

// Find thumbs up button in RateWorkflowMessage component
const thumbsUpButton = await findByTestId('message-thumbs-up-button');
thumbsUpButton.click();
it('should track feedback when user rates the workflow positively', async () => {
// Render component after setting up the store state
const { findByTestId } = renderComponent();

await flushPromises();
await flushPromises();

expect(trackMock).toHaveBeenCalledWith('User rated workflow generation', {
helpful: true,
prompt: 'Create a workflow',
workflow_json: workflowJson,
// Find thumbs up button in RateWorkflowMessage component
const thumbsUpButton = await findByTestId('message-thumbs-up-button');
await fireEvent.click(thumbsUpButton);

await flushPromises();

expect(trackMock).toHaveBeenCalledWith('User rated workflow generation', {
helpful: true,
workflow_id: 'abc123',
});
});

it('should track feedback when user rates the workflow negatively', async () => {
const { findByTestId } = renderComponent();

await flushPromises();

// Find thumbs down button in RateWorkflowMessage component
const thumbsDownButton = await findByTestId('message-thumbs-down-button');
await fireEvent.click(thumbsDownButton);

await flushPromises();

expect(trackMock).toHaveBeenCalledWith('User rated workflow generation', {
helpful: false,
workflow_id: 'abc123',
});
});

it('should track text feedback when submitted', async () => {
const { findByTestId } = renderComponent();

const feedbackText = 'This workflow is great but could be improved';

// Click thumbs down to show feedback form
const thumbsDownButton = await findByTestId('message-thumbs-down-button');
thumbsDownButton.click();

await flushPromises();

// Type feedback and submit
const feedbackInput = await findByTestId('message-feedback-input');
await fireEvent.update(feedbackInput, feedbackText);

const submitButton = await findByTestId('message-submit-feedback-button');
submitButton.click();

await flushPromises();

expect(trackMock).toHaveBeenCalledWith(
'User submitted workflow generation feedback',
expect.objectContaining({
feedback: feedbackText,
workflow_id: 'abc123',
}),
);
});
});

it('should track feedback when user rates the workflow negatively', async () => {
const { findByTestId } = renderComponent();
describe('when no workflow-updated message exists', () => {
beforeEach(() => {
builderStore.$patch({
chatMessages: [
{
id: faker.string.uuid(),
role: 'assistant',
type: 'text',
content: 'This is just an informational message',
read: true,
showRating: false,
},
],
});
});

// Find thumbs down button in RateWorkflowMessage component
const thumbsDownButton = await findByTestId('message-thumbs-down-button');
thumbsDownButton.click();
it('should not show rating buttons when no workflow update occurred', async () => {
const { queryAllByTestId } = renderComponent();

await flushPromises();
await flushPromises();

expect(trackMock).toHaveBeenCalledWith('User rated workflow generation', {
helpful: false,
prompt: 'Create a workflow',
workflow_json: workflowJson,
// Rating buttons should not be present
expect(queryAllByTestId('message-thumbs-up-button')).toHaveLength(0);
expect(queryAllByTestId('message-thumbs-down-button')).toHaveLength(0);
});
});

it('should track text feedback when submitted', async () => {
const { findByTestId } = renderComponent();
describe('when tools are still running', () => {
beforeEach(() => {
builderStore.$patch({
chatMessages: [
{
id: faker.string.uuid(),
role: 'assistant',
type: 'tool',
toolName: 'add_nodes',
status: 'running',
updates: [],
read: true,
},
{
id: faker.string.uuid(),
role: 'assistant',
type: 'workflow-updated',
read: true,
codeSnippet: workflowJson,
},
{
id: faker.string.uuid(),
role: 'assistant',
type: 'text',
content: 'Working on your workflow...',
read: true,
showRating: true,
ratingStyle: 'minimal',
},
],
});
});

const feedbackText = 'This workflow is great but could be improved';
it('should show minimal rating style when tools are still running', async () => {
const { findByTestId } = renderComponent();

// Click thumbs down to show feedback form
const thumbsDownButton = await findByTestId('message-thumbs-down-button');
thumbsDownButton.click();
await flushPromises();

await flushPromises();
// Check that rating buttons exist but in minimal style
const thumbsUpButton = await findByTestId('message-thumbs-up-button');
expect(thumbsUpButton).toBeInTheDocument();

// Type feedback and submit
const feedbackInput = await findByTestId('message-feedback-input');
await fireEvent.update(feedbackInput, feedbackText);

const submitButton = await findByTestId('message-submit-feedback-button');
submitButton.click();

await flushPromises();

expect(trackMock).toHaveBeenCalledWith(
'User submitted workflow generation feedback',
expect.objectContaining({
feedback: feedbackText,
prompt: 'Create a workflow',
workflow_json: workflowJson,
}),
);
});
});

describe('new workflow generation', () => {
it('should unsubscribe from store actions on unmount', async () => {
const unsubscribeMock = vi.fn();
builderStore.$onAction = vi.fn().mockReturnValue(unsubscribeMock);

const { unmount } = renderComponent();

// Unmount component
unmount();

// Should unsubscribe when component is unmounted
expect(unsubscribeMock).toHaveBeenCalled();
// The minimal style should have icon-only buttons (no label)
expect(thumbsUpButton.textContent).toBe('');
});
});
});
});
@@ -1,14 +1,16 @@
<script lang="ts" setup>
import { useBuilderStore } from '@/stores/builder.store';
import { useUsersStore } from '@/stores/users.store';
import { computed, watch, ref, onBeforeUnmount } from 'vue';
import { computed, watch, ref } from 'vue';
import AskAssistantChat from '@n8n/design-system/components/AskAssistantChat/AskAssistantChat.vue';
import { useTelemetry } from '@/composables/useTelemetry';
import type { WorkflowDataUpdate } from '@n8n/rest-api-client/api/workflows';
import { nodeViewEventBus } from '@/event-bus';
import { v4 as uuid } from 'uuid';
import { useI18n } from '@n8n/i18n';
import { STICKY_NODE_TYPE } from '@/constants';
import { useWorkflowsStore } from '@/stores/workflows.store';
import { useRoute, useRouter } from 'vue-router';
import { useWorkflowSaving } from '@/composables/useWorkflowSaving';
import type { RatingFeedback } from '@n8n/design-system/types/assistant';
import { isWorkflowUpdatedMessage } from '@n8n/design-system/types/assistant';
import { nodeViewEventBus } from '@/event-bus';

const emit = defineEmits<{
close: [];
@@ -17,151 +19,104 @@ const emit = defineEmits<{
const builderStore = useBuilderStore();
const usersStore = useUsersStore();
const telemetry = useTelemetry();
const workflowsStore = useWorkflowsStore();
const i18n = useI18n();
const helpful = ref(false);
const generationStartTime = ref(0);
const route = useRoute();
const router = useRouter();
const workflowSaver = useWorkflowSaving({ router });

// Track processed workflow updates
const processedWorkflowUpdates = ref(new Set<string>());
const trackedTools = ref(new Set<string>());

const user = computed(() => ({
firstName: usersStore.currentUser?.firstName ?? '',
lastName: usersStore.currentUser?.lastName ?? '',
}));

const workflowGenerated = ref(false);
const loadingMessage = computed(() => builderStore.assistantThinkingMessage);
const generatedWorkflowJson = computed(
() => builderStore.chatMessages.find((msg) => msg.type === 'workflow-generated')?.codeSnippet,
);
const currentRoute = computed(() => route.name);

async function onUserMessage(content: string) {
// If there is no current session running, initialize the support chat session
await builderStore.initBuilderChat(content, 'chat');
}
const isNewWorkflow = workflowsStore.isNewWorkflow;

function fixWorkflowStickiesPosition(workflowData: WorkflowDataUpdate): WorkflowDataUpdate {
const STICKY_WIDTH = 480;
const HEADERS_HEIGHT = 40;
const NEW_LINE_HEIGHT = 20;
const CHARACTER_WIDTH = 65;
const NODE_WIDTH = 100;
const stickyNodes = workflowData.nodes?.filter((node) => node.type === STICKY_NODE_TYPE);
const nonStickyNodes = workflowData.nodes?.filter((node) => node.type !== STICKY_NODE_TYPE);

const fixedStickies = stickyNodes?.map((node, index) => {
const content = node.parameters.content?.toString() ?? '';
const newLines = content.match(/\n/g) ?? [];
// Match any markdown heading from # to ###### at the start of a line
const headings = content.match(/^#{1,6} /gm) ?? [];
const headingHeight = headings.length * HEADERS_HEIGHT;
const newLinesHeight = newLines.length * NEW_LINE_HEIGHT;
const contentHeight = (content.length / CHARACTER_WIDTH) * NEW_LINE_HEIGHT;
const height = Math.ceil(headingHeight + newLinesHeight + contentHeight) + NEW_LINE_HEIGHT;

const firstNode = nonStickyNodes?.[0];
const xPos = (firstNode?.position[0] ?? 0) + index * (STICKY_WIDTH + NODE_WIDTH);
return {
...node,
parameters: {
...node.parameters,
height,
width: STICKY_WIDTH,
},
position: [xPos, -1 * (height + 50)] as [number, number],
};
});

return {
...workflowData,
nodes: [...(nonStickyNodes ?? []), ...(fixedStickies ?? [])],
};
}
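As a rough worked example of the sticky-height heuristic above (illustrative numbers only, not part of the commit):

// A sticky with 1 markdown heading, 4 newlines and 260 characters of content:
// height = Math.ceil(1 * 40 + 4 * 20 + (260 / 65) * 20) + 20 = 220
// so the sticky is sized 480 x 220 and positioned above the first non-sticky node.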
function onInsertWorkflow(code: string) {
let workflowData: WorkflowDataUpdate;
try {
workflowData = JSON.parse(code);
} catch (error) {
console.error('Error parsing workflow data', error);
return;
// Save the workflow to get workflow ID which is used for session
if (isNewWorkflow) {
await workflowSaver.saveCurrentWorkflow();
}

telemetry.track('Workflow generated from prompt', {
prompt: builderStore.workflowPrompt,
latency: new Date().getTime() - generationStartTime.value,
workflow_json: generatedWorkflowJson.value,
});

nodeViewEventBus.emit('importWorkflowData', {
data: fixWorkflowStickiesPosition(workflowData),
tidyUp: true,
});
workflowGenerated.value = true;
builderStore.addAssistantMessages(
[
{
type: 'rate-workflow',
content: i18n.baseText('aiAssistant.builder.feedbackPrompt'),
role: 'assistant',
},
],
uuid(),
);
}

function onNewWorkflow() {
builderStore.resetBuilderChat();
workflowGenerated.value = false;
helpful.value = false;
generationStartTime.value = new Date().getTime();
}

function onThumbsUp() {
helpful.value = true;
telemetry.track('User rated workflow generation', {
helpful: helpful.value,
prompt: builderStore.workflowPrompt,
workflow_json: generatedWorkflowJson.value,
});
}

function onThumbsDown() {
helpful.value = false;
telemetry.track('User rated workflow generation', {
helpful: helpful.value,
prompt: builderStore.workflowPrompt,
workflow_json: generatedWorkflowJson.value,
});
}

function onSubmitFeedback(feedback: string) {
telemetry.track('User submitted workflow generation feedback', {
helpful: helpful.value,
feedback,
prompt: builderStore.workflowPrompt,
workflow_json: generatedWorkflowJson.value,
});
builderStore.sendChatMessage({ text: content });
}

// Watch for workflow updates and apply them
watch(
() => builderStore.chatMessages,
() => builderStore.workflowMessages,
(messages) => {
if (workflowGenerated.value) return;
messages
.filter((msg) => {
return msg.id && !processedWorkflowUpdates.value.has(msg.id);
})
.forEach((msg) => {
if (msg.id && isWorkflowUpdatedMessage(msg)) {
processedWorkflowUpdates.value.add(msg.id);

const workflowGeneratedMessage = messages.find((msg) => msg.type === 'workflow-generated');
if (workflowGeneratedMessage) {
onInsertWorkflow(workflowGeneratedMessage.codeSnippet);
}
const currentWorkflowJson = builderStore.getWorkflowSnapshot();
const result = builderStore.applyWorkflowUpdate(msg.codeSnippet);

if (result.success) {
// Import the updated workflow
nodeViewEventBus.emit('importWorkflowData', {
data: result.workflowData,
tidyUp: true,
nodesIdsToTidyUp: result.newNodeIds,
regenerateIds: false,
});
// Track tool usage for telemetry
const newToolMessages = builderStore.toolMessages.filter(
(toolMsg) =>
toolMsg.status !== 'running' &&
toolMsg.toolCallId &&
!trackedTools.value.has(toolMsg.toolCallId),
);

newToolMessages.forEach((toolMsg) => trackedTools.value.add(toolMsg.toolCallId ?? ''));

telemetry.track('Workflow modified by builder', {
tools_called: newToolMessages.map((toolMsg) => toolMsg.toolName),
start_workflow_json: currentWorkflowJson,
end_workflow_json: msg.codeSnippet,
workflow_id: workflowsStore.workflowId,
});
}
}
});
},
{ deep: true },
);

const unsubscribe = builderStore.$onAction(({ name }) => {
if (name === 'initBuilderChat') {
onNewWorkflow();
}
});
function onNewWorkflow() {
builderStore.resetBuilderChat();
processedWorkflowUpdates.value.clear();
trackedTools.value.clear();
}

onBeforeUnmount(() => {
unsubscribe();
function onFeedback(feedback: RatingFeedback) {
if (feedback.rating) {
telemetry.track('User rated workflow generation', {
helpful: feedback.rating === 'up',
workflow_id: workflowsStore.workflowId,
});
}
if (feedback.feedback) {
telemetry.track('User submitted workflow generation feedback', {
feedback: feedback.feedback,
workflow_id: workflowsStore.workflowId,
});
}
}

// Reset on route change
watch(currentRoute, () => {
onNewWorkflow();
});
</script>

@@ -172,16 +127,13 @@ onBeforeUnmount(() => {
:messages="builderStore.chatMessages"
:streaming="builderStore.streaming"
:loading-message="loadingMessage"
:session-id="builderStore.currentSessionId"
:mode="i18n.baseText('aiAssistant.builder.mode')"
:title="'n8n AI'"
:scroll-on-new-message="true"
:placeholder="i18n.baseText('aiAssistant.builder.placeholder')"
@close="emit('close')"
@message="onUserMessage"
@thumbs-up="onThumbsUp"
@thumbs-down="onThumbsDown"
@submit-feedback="onSubmitFeedback"
@insert-workflow="onInsertWorkflow"
@feedback="onFeedback"
>
<template #header>
<slot name="header" />
@@ -191,21 +143,6 @@ onBeforeUnmount(() => {
i18n.baseText('aiAssistant.builder.placeholder')
}}</n8n-text>
</template>
<template v-if="workflowGenerated" #inputPlaceholder>
<div :class="$style.newWorkflowButtonWrapper">
<n8n-button
type="secondary"
size="small"
:class="$style.newWorkflowButton"
@click="onNewWorkflow"
>
{{ i18n.baseText('aiAssistant.builder.generateNew') }}
</n8n-button>
<n8n-text :class="$style.newWorkflowText">
{{ i18n.baseText('aiAssistant.builder.newWorkflowNotice') }}
</n8n-text>
</div>
</template>
</AskAssistantChat>
</div>
</template>
@@ -28,7 +28,7 @@ function onResizeDebounced(data: { direction: string; x: number; width: number }
function toggleAssistantMode() {
isBuildMode.value = !isBuildMode.value;
if (isBuildMode.value) {
builderStore.openChat();
void builderStore.openChat();
} else {
assistantStore.openChat();
}
@@ -50,7 +50,7 @@ const unsubscribeAssistantStore = assistantStore.$onAction(({ name }) => {
const unsubscribeBuilderStore = builderStore.$onAction(({ name }) => {
// When assistant is opened from error or credentials help
// switch from build mode to chat mode
if (name === 'initBuilderChat') {
if (name === 'sendChatMessage') {
isBuildMode.value = true;
}
});
@@ -714,7 +714,11 @@ async function onContextMenuAction(action: ContextMenuAction, nodeIds: string[])
}
}

async function onTidyUp(payload: { source: CanvasLayoutSource }) {
async function onTidyUp(payload: { source: CanvasLayoutSource; nodeIdsFilter?: string[] }) {
if (payload.nodeIdsFilter && payload.nodeIdsFilter.length > 0) {
clearSelectedNodes();
addSelectedNodes(payload.nodeIdsFilter.map(findNode).filter(isPresent));
}
const applyOnSelection = selectedNodes.value.length > 1;
const target = applyOnSelection ? 'selection' : 'all';
const result = layout(target);
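A hedged sketch of how the extended payload might be invoked (the CanvasLayoutSource value and node ids below are hypothetical, not taken from the diff):

// Tidy up only the nodes the builder just inserted by pre-selecting them first.
void onTidyUp({
	source: 'canvas-button', // hypothetical CanvasLayoutSource value
	nodeIdsFilter: ['node-id-1', 'node-id-2'], // hypothetical ids of newly added nodes
});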
@@ -3,15 +3,21 @@ import { ref, computed } from 'vue';
import { useI18n } from '@n8n/i18n';
import { useCanvasNode } from '@/composables/useCanvasNode';
import { useBuilderStore } from '@/stores/builder.store';
import { useRouter } from 'vue-router';
import { useWorkflowSaving } from '@/composables/useWorkflowSaving';
import { useWorkflowsStore } from '@/stores/workflows.store';

const emit = defineEmits<{
delete: [id: string];
}>();
const i18n = useI18n();
const router = useRouter();

const { id } = useCanvasNode();
const builderStore = useBuilderStore();
const workflowsStore = useWorkflowsStore();

const workflowSaver = useWorkflowSaving({ router });
const isPromptVisible = ref(true);
const isFocused = ref(false);

@@ -19,9 +25,17 @@ const prompt = ref('');
const hasContent = computed(() => prompt.value.trim().length > 0);

async function onSubmit() {
builderStore.openChat();
const isNewWorkflow = workflowsStore.isNewWorkflow;

// Save the workflow to get workflow ID which is used for session
if (isNewWorkflow) {
await workflowSaver.saveCurrentWorkflow();
}
// Here we need to await for chat to open and session to be loaded
await builderStore.openChat();
emit('delete', id.value);
await builderStore.initBuilderChat(prompt.value, 'canvas');

builderStore.sendChatMessage({ text: prompt.value, source: 'canvas' });
isPromptVisible.value = false;
}
</script>
@@ -180,7 +180,7 @@ export const useAIAssistantHelpers = () => {
* @param nodeNames The names of the nodes to get the schema for
* @returns An array of NodeExecutionSchema objects
*/
function getNodesSchemas(nodeNames: string[]) {
function getNodesSchemas(nodeNames: string[], excludeValues?: boolean) {
const schemas: ChatRequest.NodeExecutionSchema[] = [];
for (const name of nodeNames) {
const node = workflowsStore.getNodeByName(name);
@@ -188,7 +188,10 @@ export const useAIAssistantHelpers = () => {
continue;
}
const { getSchemaForExecutionData, getInputDataWithPinned } = useDataSchema();
const schema = getSchemaForExecutionData(executionDataToJson(getInputDataWithPinned(node)));
const schema = getSchemaForExecutionData(
executionDataToJson(getInputDataWithPinned(node)),
excludeValues,
);
schemas.push({
nodeName: node.name,
schema,
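A minimal sketch of calling the updated helper with the new flag (the node names are hypothetical; passing true is assumed to strip example values so only the structure is sent, mirroring the builderHelpers call later in this diff):

const assistantHelpers = useAIAssistantHelpers();
// Structure-only schemas for the builder context payload.
const schemas = assistantHelpers.getNodesSchemas(['Webhook', 'Edit Fields'], true);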
File diff suppressed because it is too large
@@ -0,0 +1,354 @@
import type { ChatUI } from '@n8n/design-system/types/assistant';
import type { ChatRequest } from '@/types/assistant.types';
import { useI18n } from '@n8n/i18n';
import { isTextMessage, isWorkflowUpdatedMessage, isToolMessage } from '@/types/assistant.types';

export interface MessageProcessingResult {
messages: ChatUI.AssistantMessage[];
thinkingMessage?: string;
shouldClearThinking: boolean;
}

export function useBuilderMessages() {
const locale = useI18n();

/**
* Apply rating logic to messages - only show rating on the last AI text message after workflow-updated
* when no tools are running
*/
function applyRatingLogic(messages: ChatUI.AssistantMessage[]): ChatUI.AssistantMessage[] {
// Check if any tools are still running
const hasRunningTools = messages.some(
(m) => m.type === 'tool' && (m as ChatUI.ToolMessage).status === 'running',
);

// Don't apply rating if tools are still running
if (hasRunningTools) {
// Remove any existing ratings
return messages.map((message) => {
if (message.type === 'text' && 'showRating' in message) {
// Pick all properties except showRating and ratingStyle
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { showRating, ratingStyle, ...cleanMessage } = message as ChatUI.TextMessage & {
showRating?: boolean;
ratingStyle?: string;
};
return cleanMessage;
}
return message;
});
}

// Find the index of the last workflow-updated message
let lastWorkflowUpdateIndex = -1;
for (let i = messages.length - 1; i >= 0; i--) {
if (messages[i].type === 'workflow-updated') {
lastWorkflowUpdateIndex = i;
break;
}
}

// If no workflow-updated, return messages as-is
if (lastWorkflowUpdateIndex === -1) {
return messages;
}

// Find the last assistant text message after workflow-updated
let lastAssistantTextIndex = -1;
for (let i = messages.length - 1; i >= 0; i--) {
if (
messages[i].type === 'text' &&
messages[i].role === 'assistant' &&
i > lastWorkflowUpdateIndex
) {
lastAssistantTextIndex = i;
break;
}
}

// Apply rating only to the last assistant text message after workflow-updated
return messages.map((message, index) => {
if (
message.type === 'text' &&
message.role === 'assistant' &&
index === lastAssistantTextIndex
) {
return {
...message,
showRating: true,
ratingStyle: 'regular',
};
}
// Remove any existing rating from other messages
if (message.type === 'text' && 'showRating' in message) {
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { showRating, ratingStyle, ...cleanMessage } = message as ChatUI.TextMessage & {
showRating?: boolean;
ratingStyle?: string;
};
return cleanMessage;
}
return message;
});
}

/**
* Process a single message and add it to the messages array
*/
function processSingleMessage(
messages: ChatUI.AssistantMessage[],
msg: ChatRequest.MessageResponse,
messageId: string,
): boolean {
let shouldClearThinking = false;

if (isTextMessage(msg)) {
messages.push({
id: messageId,
role: 'assistant',
type: 'text',
content: msg.text,
read: false,
} as ChatUI.AssistantMessage);
shouldClearThinking = true;
} else if (isWorkflowUpdatedMessage(msg)) {
messages.push({
...msg,
id: messageId,
read: false,
} as ChatUI.AssistantMessage);
// Don't clear thinking for workflow updates - they're just state changes
} else if (isToolMessage(msg)) {
processToolMessage(messages, msg, messageId);
} else if ('type' in msg && msg.type === 'error' && 'content' in msg) {
// Handle error messages from the API
// API sends error messages with type: 'error' and content field
messages.push({
id: messageId,
role: 'assistant',
type: 'error',
content: msg.content,
read: false,
});
shouldClearThinking = true;
}

return shouldClearThinking;
}

/**
* Process a tool message - either update existing or add new
*/
function processToolMessage(
messages: ChatUI.AssistantMessage[],
msg: ChatRequest.ToolMessage,
messageId: string,
): void {
// Use toolCallId as the message ID for consistency across updates
const toolMessageId = msg.toolCallId || messageId;

// Check if we already have this tool message
const existingIndex = msg.toolCallId
? messages.findIndex(
(m) => m.type === 'tool' && (m as ChatUI.ToolMessage).toolCallId === msg.toolCallId,
)
: -1;

if (existingIndex !== -1) {
// Update existing tool message - merge updates array
const existing = messages[existingIndex] as ChatUI.ToolMessage;
const toolMessage: ChatUI.ToolMessage = {
...existing,
status: msg.status,
updates: [...(existing.updates || []), ...(msg.updates || [])],
};
messages[existingIndex] = toolMessage as ChatUI.AssistantMessage;
} else {
// Add new tool message
const toolMessage: ChatUI.AssistantMessage = {
id: toolMessageId,
role: 'assistant',
type: 'tool',
toolName: msg.toolName,
toolCallId: msg.toolCallId,
status: msg.status,
updates: msg.updates || [],
read: false,
};
messages.push(toolMessage);
}
}

/**
* Determine the thinking message based on tool states
*/
function determineThinkingMessage(messages: ChatUI.AssistantMessage[]): string | undefined {
// Check ALL messages to determine state
const allToolMessages = messages.filter(
(msg): msg is ChatUI.ToolMessage => msg.type === 'tool',
);
const hasAnyRunningTools = allToolMessages.some((msg) => msg.status === 'running');
const hasCompletedTools = allToolMessages.some((msg) => msg.status === 'completed');

// Find the last completed tool message
let lastCompletedToolIndex = -1;
for (let i = messages.length - 1; i >= 0; i--) {
const msg = messages[i];
if (msg.type === 'tool' && (msg as ChatUI.ToolMessage).status === 'completed') {
lastCompletedToolIndex = i;
break;
}
}

// Check if there's any text message after the last completed tool
// Note: workflow-updated messages shouldn't count as they're just canvas state updates
let hasTextAfterTools = false;
if (lastCompletedToolIndex !== -1) {
for (let i = lastCompletedToolIndex + 1; i < messages.length; i++) {
const msg = messages[i];
if (msg.type === 'text') {
hasTextAfterTools = true;
break;
}
}
}

// - If any tools are running, show "Running tools..."
// - If all tools are done and no text response yet, show "Processing results..."
// - Otherwise, clear the thinking message
if (hasAnyRunningTools) {
return locale.baseText('aiAssistant.thinkingSteps.runningTools');
} else if (hasCompletedTools && !hasTextAfterTools) {
return locale.baseText('aiAssistant.thinkingSteps.processingResults');
}

return undefined;
}

function processAssistantMessages(
currentMessages: ChatUI.AssistantMessage[],
newMessages: ChatRequest.MessageResponse[],
baseId: string,
): MessageProcessingResult {
const mutableMessages = [...currentMessages];
let shouldClearThinking = false;

newMessages.forEach((msg, index) => {
// Generate unique ID for each message in the batch
const messageId = `${baseId}-${index}`;
const clearThinking = processSingleMessage(mutableMessages, msg, messageId);
shouldClearThinking = shouldClearThinking || clearThinking;
});

const thinkingMessage = determineThinkingMessage(mutableMessages);

// Apply rating logic only to messages after workflow-updated
const finalMessages = applyRatingLogic(mutableMessages);

return {
messages: finalMessages,
thinkingMessage,
shouldClearThinking: shouldClearThinking && mutableMessages.length > currentMessages.length,
};
}

function createUserMessage(content: string, id: string): ChatUI.AssistantMessage {
return {
id,
role: 'user',
type: 'text',
content,
read: true,
} as ChatUI.AssistantMessage;
}

function createErrorMessage(
content: string,
id: string,
retry?: () => Promise<void>,
): ChatUI.AssistantMessage {
return {
id,
role: 'assistant',
type: 'error',
content,
retry,
read: false,
} as ChatUI.AssistantMessage;
}

function clearMessages(): ChatUI.AssistantMessage[] {
return [];
}

function addMessages(
currentMessages: ChatUI.AssistantMessage[],
newMessages: ChatUI.AssistantMessage[],
): ChatUI.AssistantMessage[] {
return [...currentMessages, ...newMessages];
}

function mapAssistantMessageToUI(
message: ChatRequest.MessageResponse,
id: string,
): ChatUI.AssistantMessage {
// Handle specific message types using type guards
if (isTextMessage(message)) {
return {
id,
role: message.role ?? 'assistant',
type: 'text',
content: message.text,
read: false,
} as ChatUI.AssistantMessage;
}

if (isWorkflowUpdatedMessage(message)) {
return {
...message,
id,
read: false,
} as ChatUI.AssistantMessage;
}

if (isToolMessage(message)) {
return {
id,
role: 'assistant',
type: 'tool',
toolName: message.toolName,
toolCallId: message.toolCallId,
status: message.status,
updates: message.updates || [],
read: false,
} as ChatUI.AssistantMessage;
}

// Handle event messages
if ('type' in message && message.type === 'event') {
return {
...message,
id,
read: false,
} as ChatUI.AssistantMessage;
}

// Default fallback
return {
id,
role: 'assistant',
type: 'text',
content: locale.baseText('aiAssistant.thinkingSteps.thinking'),
read: false,
} as ChatUI.AssistantMessage;
}

return {
processAssistantMessages,
createUserMessage,
createErrorMessage,
clearMessages,
addMessages,
mapAssistantMessageToUI,
};
}
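A minimal usage sketch of the composable above (wiring assumed; chatMessages, assistantThinkingMessage, and incomingBatch are hypothetical refs/variables, and in the commit the real integration lives in builder.store.ts):

const { processAssistantMessages } = useBuilderMessages();
// Merge a streamed batch of assistant messages into the current chat state.
const result = processAssistantMessages(chatMessages.value, incomingBatch, generateMessageId());
chatMessages.value = result.messages;
assistantThinkingMessage.value = result.thinkingMessage; // may be undefined, which clears the indicator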
@@ -1835,10 +1835,12 @@ export function useCanvasOperations() {
trackBulk = true,
trackHistory = true,
viewport,
regenerateIds = true,
}: {
importTags?: boolean;
trackBulk?: boolean;
trackHistory?: boolean;
regenerateIds?: boolean;
viewport?: ViewportBoundaries;
} = {},
): Promise<WorkflowDataUpdate> {
@@ -1884,8 +1886,10 @@ export function useCanvasOperations() {
// Set all new ids when pasting/importing workflows
if (node.id) {
const previousId = node.id;
const newId = nodeHelpers.assignNodeId(node);
nodeIdMap[newId] = previousId;
if (regenerateIds) {
const newId = nodeHelpers.assignNodeId(node);
nodeIdMap[newId] = previousId;
}
} else {
nodeHelpers.assignNodeId(node);
}
@@ -738,7 +738,7 @@ export const NDV_UI_OVERHAUL_EXPERIMENT = {
};

export const WORKFLOW_BUILDER_EXPERIMENT = {
name: '30_workflow_builder',
name: '036_workflow_builder_agent',
control: 'control',
variant: 'variant',
};
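A hedged sketch of how the renamed experiment flag is typically checked (the posthog store method name is an assumption, not confirmed by this diff):

const posthogStore = usePostHog();
// True when the current user is in the builder-agent variant of the experiment.
const isBuilderVariant =
	posthogStore.getVariant(WORKFLOW_BUILDER_EXPERIMENT.name) === WORKFLOW_BUILDER_EXPERIMENT.variant;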
packages/frontend/editor-ui/src/helpers/builderHelpers.ts (new file, +56 lines)
@@ -0,0 +1,56 @@
import type { ChatRequest } from '@/types/assistant.types';
import { useAIAssistantHelpers } from '@/composables/useAIAssistantHelpers';
import type { IRunExecutionData, NodeExecutionSchema } from 'n8n-workflow';
import type { IWorkflowDb } from '@/Interface';

export function generateMessageId(): string {
return `${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
}

export function createBuilderPayload(
text: string,
options: {
quickReplyType?: string;
executionData?: IRunExecutionData['resultData'];
workflow?: IWorkflowDb;
nodesForSchema?: string[];
} = {},
): ChatRequest.UserChatMessage {
const assistantHelpers = useAIAssistantHelpers();
const workflowContext: {
currentWorkflow?: Partial<IWorkflowDb>;
executionData?: IRunExecutionData['resultData'];
executionSchema?: NodeExecutionSchema[];
} = {};

if (options.workflow) {
workflowContext.currentWorkflow = {
...assistantHelpers.simplifyWorkflowForAssistant(options.workflow),
id: options.workflow.id,
};
}

if (options.executionData) {
workflowContext.executionData = assistantHelpers.simplifyResultData(options.executionData);
}

if (options.nodesForSchema?.length) {
workflowContext.executionSchema = assistantHelpers.getNodesSchemas(
options.nodesForSchema,
true,
);
}

return {
role: 'user',
type: 'message',
text,
quickReplyType: options.quickReplyType,
workflowContext,
};
}

export function shouldShowChat(routeName: string): boolean {
const ENABLED_VIEWS = ['workflow', 'workflowExecution'];
return ENABLED_VIEWS.includes(routeName);
}
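An illustrative combination of the helpers above (call-site details such as the workflow object and node name are assumptions, not taken from the commit):

// Build a user chat payload with workflow context before sending it to the builder API.
const payload = createBuilderPayload('Add a Slack node after the trigger', {
	workflow: workflowsStore.workflow, // assumed current workflow object
	nodesForSchema: ['Webhook'], // hypothetical node name
});
const messageId = generateMessageId();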
@@ -30,6 +30,7 @@ import { useUIStore } from './ui.store';
import AiUpdatedCodeMessage from '@/components/AiUpdatedCodeMessage.vue';
import { useCredentialsStore } from './credentials.store';
import { useAIAssistantHelpers } from '@/composables/useAIAssistantHelpers';
import { useBuilderStore } from './builder.store';

export const MAX_CHAT_WIDTH = 425;
export const MIN_CHAT_WIDTH = 300;
@@ -62,6 +63,7 @@ export const useAssistantStore = defineStore(STORES.ASSISTANT, () => {
const locale = useI18n();
const telemetry = useTelemetry();
const assistantHelpers = useAIAssistantHelpers();
const builderStore = useBuilderStore();

const suggestions = ref<{
[suggestionId: string]: {
@@ -170,6 +172,11 @@ export const useAssistantStore = defineStore(STORES.ASSISTANT, () => {
if (chatWindowOpen.value) {
closeChat();
} else {
if (builderStore.isAIBuilderEnabled) {
// If builder is enabled, open it instead of assistant
void builderStore.openChat();
return;
}
openChat();
}
}
@@ -180,7 +187,7 @@ export const useAssistantStore = defineStore(STORES.ASSISTANT, () => {
(msg) => !(msg.id === id && msg.role === 'assistant'),
);
assistantThinkingMessage.value = undefined;
newMessages.forEach((msg) => {
(newMessages ?? []).forEach((msg) => {
if (msg.type === 'message') {
messages.push({
id,
@@ -1,7 +1,6 @@
|
||||
import { describe, it, expect, beforeEach } from 'vitest';
|
||||
import { describe, it, expect, beforeEach, vi } from 'vitest';
|
||||
import { setActivePinia, createPinia } from 'pinia';
|
||||
import { ENABLED_VIEWS, useBuilderStore } from '@/stores/builder.store';
|
||||
import type { ChatRequest } from '@/types/assistant.types';
|
||||
import { usePostHog } from './posthog.store';
|
||||
import { useSettingsStore } from '@/stores/settings.store';
|
||||
import { defaultSettings } from '../__tests__/defaults';
|
||||
@@ -69,6 +68,12 @@ describe('AI Builder store', () => {
|
||||
track.mockReset();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.clearAllMocks();
|
||||
vi.clearAllTimers();
|
||||
vi.useRealTimers();
|
||||
});
|
||||
|
||||
it('initializes with default values', () => {
|
||||
const builderStore = useBuilderStore();
|
||||
|
||||
@@ -99,10 +104,10 @@ describe('AI Builder store', () => {
|
||||
expect(builderStore.chatWidth).toBe(MAX_CHAT_WIDTH);
|
||||
});
|
||||
|
||||
it('should open chat window', () => {
|
||||
it('should open chat window', async () => {
|
||||
const builderStore = useBuilderStore();
|
||||
|
||||
builderStore.openChat();
|
||||
await builderStore.openChat();
|
||||
expect(builderStore.chatWindowOpen).toBe(true);
|
||||
});
|
||||
|
||||
@@ -113,101 +118,236 @@ describe('AI Builder store', () => {
|
||||
expect(builderStore.chatWindowOpen).toBe(false);
|
||||
});
|
||||
|
||||
it('can add a simple assistant message', () => {
|
||||
it('can process a simple assistant message through API', async () => {
|
||||
const builderStore = useBuilderStore();
|
||||
|
||||
const message: ChatRequest.MessageResponse = {
|
||||
type: 'message',
|
||||
role: 'assistant',
|
||||
text: 'Hello!',
|
||||
};
|
||||
builderStore.addAssistantMessages([message], '1');
|
||||
expect(builderStore.chatMessages.length).toBe(1);
|
||||
expect(builderStore.chatMessages[0]).toEqual({
|
||||
id: '1',
|
||||
apiSpy.mockImplementationOnce((_ctx, _payload, onMessage, onDone) => {
|
||||
onMessage({
|
||||
messages: [
|
||||
{
|
||||
type: 'message',
|
||||
role: 'assistant',
|
||||
text: 'Hello!',
|
||||
},
|
||||
],
|
||||
sessionId: 'test-session',
|
||||
});
|
||||
onDone();
|
||||
});
|
||||
|
||||
builderStore.sendChatMessage({ text: 'Hi' });
|
||||
await vi.waitFor(() => expect(builderStore.chatMessages.length).toBe(2));
|
||||
expect(builderStore.chatMessages[0].role).toBe('user');
|
||||
expect(builderStore.chatMessages[1]).toMatchObject({
|
||||
type: 'text',
|
||||
role: 'assistant',
|
||||
content: 'Hello!',
|
||||
quickReplies: undefined,
|
||||
read: true, // Builder messages are always read
|
||||
read: false,
|
||||
});
|
||||
});
|
||||
|
||||
it('can add a workflow step message', () => {
|
||||
it('can process a workflow-updated message through API', async () => {
|
||||
const builderStore = useBuilderStore();
|
||||
|
||||
const message: ChatRequest.MessageResponse = {
|
||||
type: 'workflow-step',
|
||||
role: 'assistant',
|
||||
steps: ['Step 1', 'Step 2'],
|
||||
};
|
||||
builderStore.addAssistantMessages([message], '1');
|
||||
expect(builderStore.chatMessages.length).toBe(1);
|
||||
expect(builderStore.chatMessages[0]).toEqual({
|
||||
id: '1',
|
||||
type: 'workflow-step',
|
||||
role: 'assistant',
|
||||
steps: ['Step 1', 'Step 2'],
|
||||
read: true,
|
||||
apiSpy.mockImplementationOnce((_ctx, _payload, onMessage, onDone) => {
|
||||
onMessage({
|
||||
messages: [
|
||||
{
|
||||
type: 'workflow-updated',
|
||||
role: 'assistant',
|
||||
codeSnippet: '{"nodes":[],"connections":[]}',
|
||||
},
|
||||
],
|
||||
sessionId: 'test-session',
|
||||
});
|
||||
onDone();
|
||||
});
|
||||
});
|
||||
|
||||
it('can add a workflow-generated message', () => {
|
||||
const builderStore = useBuilderStore();
|
||||
|
||||
const message: ChatRequest.MessageResponse = {
|
||||
type: 'workflow-generated',
|
||||
builderStore.sendChatMessage({ text: 'Create workflow' });
|
||||
await vi.waitFor(() => expect(builderStore.chatMessages.length).toBe(2));
|
||||
expect(builderStore.chatMessages[1]).toMatchObject({
|
||||
type: 'workflow-updated',
|
||||
role: 'assistant',
|
||||
codeSnippet: '{"nodes":[],"connections":[]}',
|
||||
};
|
||||
builderStore.addAssistantMessages([message], '1');
|
||||
expect(builderStore.chatMessages.length).toBe(1);
|
||||
expect(builderStore.chatMessages[0]).toEqual({
|
||||
id: '1',
|
||||
type: 'workflow-generated',
|
||||
role: 'assistant',
|
||||
codeSnippet: '{"nodes":[],"connections":[]}',
|
||||
read: true,
|
||||
read: false,
|
||||
});
|
||||
|
||||
// Verify workflow messages are accessible via computed property
|
||||
expect(builderStore.workflowMessages.length).toBe(1);
|
||||
});
|
||||
|
||||
it('can add a rate-workflow message', () => {
|
||||
it('should show processing results message when tools complete', async () => {
|
||||
vi.useFakeTimers();
|
||||
const builderStore = useBuilderStore();
|
||||
|
||||
const message: ChatRequest.MessageResponse = {
|
||||
type: 'rate-workflow',
|
||||
role: 'assistant',
|
||||
content: 'How was the workflow?',
|
||||
};
|
||||
builderStore.addAssistantMessages([message], '1');
|
||||
expect(builderStore.chatMessages.length).toBe(1);
|
||||
expect(builderStore.chatMessages[0]).toEqual({
|
||||
id: '1',
|
||||
type: 'rate-workflow',
|
||||
role: 'assistant',
|
||||
content: 'How was the workflow?',
|
||||
read: true,
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let onMessageCallback: any;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let onDoneCallback: any;
|
||||
|
||||
apiSpy.mockImplementationOnce((_ctx, _payload, onMessage, onDone) => {
|
||||
onMessageCallback = onMessage;
|
||||
onDoneCallback = onDone;
|
||||
});
|
||||
});
|
||||
|
||||
it('should reset builder chat session', () => {
|
||||
const builderStore = useBuilderStore();
|
||||
builderStore.sendChatMessage({ text: 'Add nodes and connect them' });
|
||||
|
||||
const message: ChatRequest.MessageResponse = {
|
||||
type: 'message',
|
||||
role: 'assistant',
|
||||
text: 'Hello!',
|
||||
quickReplies: [
|
||||
{ text: 'Yes', type: 'text' },
|
||||
{ text: 'No', type: 'text' },
|
||||
// Initially shows "Thinking..." from prepareForStreaming
|
||||
expect(builderStore.assistantThinkingMessage).toBe('Thinking...');
|
||||
|
||||
// First tool starts
|
||||
onMessageCallback({
|
||||
messages: [
|
||||
{
|
||||
type: 'tool',
|
||||
role: 'assistant',
|
||||
toolName: 'add_nodes',
|
||||
toolCallId: 'call-1',
|
||||
status: 'running',
|
||||
updates: [{ type: 'input', data: {} }],
|
||||
},
|
||||
],
|
||||
};
|
||||
builderStore.addAssistantMessages([message], '1');
|
||||
expect(builderStore.chatMessages.length).toBe(1);
|
||||
});
|
||||
|
||||
// Should show "Running tools..."
|
||||
expect(builderStore.assistantThinkingMessage).toBe('Running tools...');
|
||||
|
||||
// Second tool starts (different toolCallId)
|
||||
onMessageCallback({
|
||||
messages: [
|
||||
{
|
||||
type: 'tool',
|
||||
role: 'assistant',
|
||||
toolName: 'connect_nodes',
|
||||
toolCallId: 'call-2',
|
||||
status: 'running',
|
||||
updates: [{ type: 'input', data: {} }],
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
// Still showing "Running tools..." with multiple tools
|
||||
expect(builderStore.assistantThinkingMessage).toBe('Running tools...');
|
||||
|
||||
// First tool completes
|
||||
onMessageCallback({
|
||||
messages: [
|
||||
{
|
||||
type: 'tool',
|
||||
role: 'assistant',
|
||||
toolName: 'add_nodes',
|
||||
toolCallId: 'call-1',
|
||||
status: 'completed',
|
||||
updates: [{ type: 'output', data: { success: true } }],
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
// Still "Running tools..." because second tool is still running
|
||||
expect(builderStore.assistantThinkingMessage).toBe('Running tools...');
|
||||
|
||||
// Second tool completes
|
||||
onMessageCallback({
|
||||
messages: [
|
||||
{
|
||||
type: 'tool',
|
||||
role: 'assistant',
|
||||
toolName: 'connect_nodes',
|
||||
toolCallId: 'call-2',
|
||||
status: 'completed',
|
||||
updates: [{ type: 'output', data: { success: true } }],
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
// Now should show "Processing results..." because all tools completed
|
||||
expect(builderStore.assistantThinkingMessage).toBe('Processing results...');
|
||||
|
||||
// Call onDone to stop streaming
|
||||
onDoneCallback();
|
||||
|
||||
// Message should persist after streaming ends
|
||||
expect(builderStore.streaming).toBe(false);
|
||||
expect(builderStore.assistantThinkingMessage).toBe('Processing results...');
|
||||
|
||||
vi.useRealTimers();
|
||||
});
|
||||
|
||||
it('should keep processing message when workflow-updated arrives', async () => {
|
||||
const builderStore = useBuilderStore();
|
||||
|
||||
apiSpy.mockImplementationOnce((_ctx, _payload, onMessage, onDone) => {
|
||||
// Tool completes
|
||||
onMessage({
|
||||
messages: [
|
||||
{
|
||||
type: 'tool',
|
||||
role: 'assistant',
|
||||
toolName: 'add_nodes',
|
||||
toolCallId: 'call-1',
|
||||
status: 'completed',
|
||||
updates: [{ type: 'output', data: { success: true } }],
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
// Workflow update arrives
|
||||
onMessage({
|
||||
messages: [
|
||||
{
|
||||
type: 'workflow-updated',
|
||||
role: 'assistant',
|
||||
codeSnippet: '{"nodes": [], "connections": {}}',
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
// Call onDone to stop streaming
|
||||
onDone();
|
||||
});
|
||||
|
||||
builderStore.sendChatMessage({ text: 'Add a node' });
|
||||
|
||||
// Should show "Processing results..." when tool completes
|
||||
await vi.waitFor(() =>
|
||||
expect(builderStore.assistantThinkingMessage).toBe('Processing results...'),
|
||||
);
|
||||
|
||||
// Should still show "Processing results..." after workflow-updated
|
||||
await vi.waitFor(() => expect(builderStore.chatMessages).toHaveLength(3)); // user + tool + workflow
|
||||
expect(builderStore.assistantThinkingMessage).toBe('Processing results...');
|
||||
|
||||
// Verify streaming has ended
|
||||
expect(builderStore.streaming).toBe(false);
|
||||
});
|
||||
|
||||
it('should reset builder chat session', async () => {
|
||||
const builderStore = useBuilderStore();
|
||||
|
||||
apiSpy.mockImplementationOnce((_ctx, _payload, onMessage, onDone) => {
|
||||
onMessage({
|
||||
messages: [
|
||||
{
|
||||
type: 'message',
|
||||
role: 'assistant',
|
||||
text: 'Hello!',
|
||||
quickReplies: [
|
||||
{ text: 'Yes', type: 'text' },
|
||||
{ text: 'No', type: 'text' },
|
||||
],
|
||||
},
|
||||
],
|
||||
sessionId: 'test-session',
|
||||
});
|
||||
onDone();
|
||||
});
|
||||
|
||||
builderStore.sendChatMessage({ text: 'Hi' });
|
||||
await vi.waitFor(() => expect(builderStore.chatMessages.length).toBe(2));
|
||||
|
||||
builderStore.resetBuilderChat();
|
||||
expect(builderStore.chatMessages).toEqual([]);
|
||||
expect(builderStore.currentSessionId).toBeUndefined();
|
||||
expect(builderStore.assistantThinkingMessage).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should not show builder if disabled in settings', () => {
|
||||
@@ -259,13 +399,13 @@ describe('AI Builder store', () => {
|
||||
onDone();
|
||||
});
|
||||
|
||||
await builderStore.initBuilderChat('I want to build a workflow', 'chat');
|
||||
builderStore.sendChatMessage({ text: 'I want to build a workflow' });
|
||||
await vi.waitFor(() => expect(builderStore.chatMessages.length).toBe(2));
|
||||
|
||||
expect(apiSpy).toHaveBeenCalled();
|
||||
expect(builderStore.currentSessionId).toEqual(mockSessionId);
|
||||
expect(builderStore.chatMessages.length).toBe(2); // user message + assistant response
|
||||
expect(builderStore.chatMessages[0].role).toBe('user');
|
||||
expect(builderStore.chatMessages[1].role).toBe('assistant');
|
||||
expect(builderStore.streaming).toBe(false);
|
||||
});

it('should send a follow-up message in an existing session', async () => {
@@ -302,18 +442,15 @@ describe('AI Builder store', () => {
onDone();
});

await builderStore.initBuilderChat('I want to build a workflow', 'chat');

// Should be 2 messages now (user question + assistant response)
expect(builderStore.chatMessages.length).toBe(2);
builderStore.sendChatMessage({ text: 'I want to build a workflow' });
await vi.waitFor(() => expect(builderStore.chatMessages.length).toBe(2));

// Send a follow-up message
await builderStore.sendMessage({ text: 'Generate a workflow for me' });
builderStore.sendChatMessage({ text: 'Generate a workflow for me' });
await vi.waitFor(() => expect(builderStore.chatMessages.length).toBe(4));

const thirdMessage = builderStore.chatMessages[2] as ChatUI.TextMessage;
const fourthMessage = builderStore.chatMessages[3] as ChatUI.TextMessage;
// Should be 4 messages now (2 initial + user follow-up + assistant response)
expect(builderStore.chatMessages.length).toBe(4);
expect(thirdMessage.role).toBe('user');
expect(thirdMessage.type).toBe('text');
expect(thirdMessage.content).toBe('Generate a workflow for me');
@@ -330,10 +467,8 @@ describe('AI Builder store', () => {
onError(new Error('An API error occurred'));
});

await builderStore.initBuilderChat('I want to build a workflow', 'chat');

// Should have user message + error message
expect(builderStore.chatMessages.length).toBe(2);
builderStore.sendChatMessage({ text: 'I want to build a workflow' });
await vi.waitFor(() => expect(builderStore.chatMessages.length).toBe(2));
expect(builderStore.chatMessages[0].role).toBe('user');
expect(builderStore.chatMessages[1].type).toBe('error');

@@ -341,6 +476,9 @@ describe('AI Builder store', () => {
const errorMessage = builderStore.chatMessages[1] as ChatUI.ErrorMessage;
expect(errorMessage.retry).toBeDefined();

// Verify streaming state was reset
expect(builderStore.streaming).toBe(false);

// Set up a successful response for the retry
apiSpy.mockImplementationOnce((_ctx, _payload, onMessage, onDone) => {
onMessage({
@@ -357,10 +495,10 @@ describe('AI Builder store', () => {
});

// Retry the failed request
await errorMessage.retry?.();

// Should now have just the user message and success message
expect(builderStore.chatMessages.length).toBe(2);
if (errorMessage.retry) {
void errorMessage.retry();
await vi.waitFor(() => expect(builderStore.chatMessages.length).toBe(2));
}
expect(builderStore.chatMessages[0].role).toBe('user');
expect(builderStore.chatMessages[1].type).toBe('text');
expect((builderStore.chatMessages[1] as ChatUI.TextMessage).content).toBe(

@@ -1,4 +1,3 @@
import { chatWithBuilder } from '@/api/ai';
import type { VIEWS } from '@/constants';
import {
ASK_AI_SLIDE_OUT_DURATION_MS,
@@ -6,12 +5,10 @@ import {
WORKFLOW_BUILDER_EXPERIMENT,
} from '@/constants';
import { STORES } from '@n8n/stores';
import type { ChatRequest } from '@/types/assistant.types';
import type { ChatUI } from '@n8n/design-system/types/assistant';
import { isToolMessage, isWorkflowUpdatedMessage } from '@n8n/design-system/types/assistant';
import { defineStore } from 'pinia';
import { computed, ref, watch } from 'vue';
import { useRootStore } from '@n8n/stores/useRootStore';
import { useUsersStore } from './users.store';
import { computed, ref } from 'vue';
import { useRoute } from 'vue-router';
import { useSettingsStore } from './settings.store';
import { assert } from '@n8n/utils/assert';
@@ -19,8 +16,16 @@ import { useI18n } from '@n8n/i18n';
import { useTelemetry } from '@/composables/useTelemetry';
import { useUIStore } from './ui.store';
import { usePostHog } from './posthog.store';
import { useNodeTypesStore } from './nodeTypes.store';
import { DEFAULT_CHAT_WIDTH, MAX_CHAT_WIDTH, MIN_CHAT_WIDTH } from './assistant.store';
import { useWorkflowsStore } from './workflows.store';
import { useBuilderMessages } from '@/composables/useBuilderMessages';
import { chatWithBuilder, getAiSessions } from '@/api/ai';
import { generateMessageId, createBuilderPayload } from '@/helpers/builderHelpers';
import { useRootStore } from '@n8n/stores/useRootStore';
import type { WorkflowDataUpdate } from '@n8n/rest-api-client/api/workflows';
import pick from 'lodash/pick';
import { jsonParse } from 'n8n-workflow';
import { useToast } from '@/composables/useToast';

export const ENABLED_VIEWS = [...EDITABLE_CANVAS_VIEWS];

@@ -30,19 +35,26 @@ export const useBuilderStore = defineStore(STORES.BUILDER, () => {
const chatMessages = ref<ChatUI.AssistantMessage[]>([]);
const chatWindowOpen = ref<boolean>(false);
const streaming = ref<boolean>(false);
const currentSessionId = ref<string | undefined>();
const assistantThinkingMessage = ref<string | undefined>();

// Store dependencies
const settings = useSettingsStore();
const rootStore = useRootStore();
const usersStore = useUsersStore();
const workflowsStore = useWorkflowsStore();
const uiStore = useUIStore();
const route = useRoute();
const locale = useI18n();
const telemetry = useTelemetry();
const posthogStore = usePostHog();
const nodeTypesStore = useNodeTypesStore();

// Composables
const {
processAssistantMessages,
createUserMessage,
createErrorMessage,
clearMessages,
mapAssistantMessageToUI,
} = useBuilderMessages();

// Computed properties
const isAssistantEnabled = computed(() => settings.isAiAssistantEnabled);
@@ -71,25 +83,40 @@ export const useBuilderStore = defineStore(STORES.BUILDER, () => {
);
});

// No need to track unread messages in the AI Builder
const unreadCount = computed(() => 0);
const toolMessages = computed(() => chatMessages.value.filter(isToolMessage));

const workflowMessages = computed(() => chatMessages.value.filter(isWorkflowUpdatedMessage));

// Chat management functions
/**
* Resets the entire chat session to initial state.
* Called when user navigates away from workflow or explicitly requests a new workflow.
* Note: Does not persist the cleared state - sessions can still be reloaded via loadSessions().
*/
function resetBuilderChat() {
clearMessages();
currentSessionId.value = undefined;
chatMessages.value = clearMessages();
assistantThinkingMessage.value = undefined;
}

function openChat() {
/**
* Opens the chat panel and adjusts the canvas viewport to make room.
*/
async function openChat() {
chatWindowOpen.value = true;
chatMessages.value = chatMessages.value.map((msg) => ({ ...msg, read: true }));
chatMessages.value = [];
uiStore.appGridDimensions = {
...uiStore.appGridDimensions,
width: window.innerWidth - chatWidth.value,
};
await loadSessions();
}

/**
* Closes the chat panel with a delayed viewport restoration.
* The delay (ASK_AI_SLIDE_OUT_DURATION_MS + 50ms) ensures the slide-out animation
* completes before expanding the canvas, preventing visual jarring.
* Messages remain in memory.
*/
function closeChat() {
chatWindowOpen.value = false;
// Looks smoother if we wait for slide animation to finish before updating the grid width
@@ -106,236 +133,244 @@ export const useBuilderStore = defineStore(STORES.BUILDER, () => {
}, ASK_AI_SLIDE_OUT_DURATION_MS + 50);
}
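
The hunk header above elides the middle of closeChat, so only its tail is visible. Based on the doc comment and the names already used in this file (uiStore.appGridDimensions, ASK_AI_SLIDE_OUT_DURATION_MS), the delayed restore plausibly looks like the sketch below; the exact body is not part of this diff, so treat it as illustrative only:

// Illustrative sketch - restores the canvas width after the slide-out animation finishes.
setTimeout(() => {
	uiStore.appGridDimensions = {
		...uiStore.appGridDimensions,
		width: window.innerWidth, // assumption: full width is reclaimed once the panel is gone
	};
}, ASK_AI_SLIDE_OUT_DURATION_MS + 50);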

function clearMessages() {
chatMessages.value = [];
}

/**
* Updates chat panel width with enforced boundaries.
* Width is clamped between MIN_CHAT_WIDTH (330px) and MAX_CHAT_WIDTH (650px)
* to ensure usability on various screen sizes.
*/
function updateWindowWidth(width: number) {
chatWidth.value = Math.min(Math.max(width, MIN_CHAT_WIDTH), MAX_CHAT_WIDTH);
}
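
To make the clamping behaviour concrete, a minimal usage sketch (the 330px/650px values come from the comment above; the import path matches the one used in this commit's tests):

import { useBuilderStore } from '@/stores/builder.store';

const builder = useBuilderStore();
builder.updateWindowWidth(200); // below MIN_CHAT_WIDTH  -> chatWidth becomes 330
builder.updateWindowWidth(900); // above MAX_CHAT_WIDTH  -> chatWidth becomes 650
builder.updateWindowWidth(480); // within bounds         -> chatWidth becomes 480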

// Message handling functions
function addAssistantMessages(newMessages: ChatRequest.MessageResponse[], id: string) {
const read = true; // Always mark as read in builder
const messages = [...chatMessages.value].filter(
(msg) => !(msg.id === id && msg.role === 'assistant'),
);
assistantThinkingMessage.value = undefined;

newMessages.forEach((msg) => {
if (msg.type === 'message') {
messages.push({
id,
type: 'text',
role: 'assistant',
content: msg.text,
quickReplies: msg.quickReplies,
codeSnippet: msg.codeSnippet,
read,
});
} else if (msg.type === 'workflow-step' && 'steps' in msg) {
messages.push({
id,
type: 'workflow-step',
role: 'assistant',
steps: msg.steps,
read,
});
} else if (msg.type === 'prompt-validation' && !msg.isWorkflowPrompt) {
messages.push({
id,
role: 'assistant',
type: 'error',
content: locale.baseText('aiAssistant.builder.invalidPrompt'),
read: true,
});
} else if (msg.type === 'workflow-node' && 'nodes' in msg) {
const mappedNodes = msg.nodes.map(
(node) => nodeTypesStore.getNodeType(node)?.displayName ?? node,
);
messages.push({
id,
type: 'workflow-node',
role: 'assistant',
nodes: mappedNodes,
read,
});
} else if (msg.type === 'workflow-composed' && 'nodes' in msg) {
messages.push({
id,
type: 'workflow-composed',
role: 'assistant',
nodes: msg.nodes,
read,
});
} else if (msg.type === 'workflow-generated' && 'codeSnippet' in msg) {
messages.push({
id,
type: 'workflow-generated',
role: 'assistant',
codeSnippet: msg.codeSnippet,
read,
});
} else if (msg.type === 'rate-workflow') {
messages.push({
id,
type: 'rate-workflow',
role: 'assistant',
content: msg.content,
read,
});
}
});
chatMessages.value = messages;
}

function addAssistantError(content: string, id: string, retry?: () => Promise<void>) {
chatMessages.value.push({
id,
role: 'assistant',
type: 'error',
content,
read: true,
retry,
});
}

function addLoadingAssistantMessage(message: string) {
assistantThinkingMessage.value = message;
}

function addUserMessage(content: string, id: string) {
chatMessages.value.push({
id,
role: 'user',
type: 'text',
content,
read: true,
});
}

function stopStreaming() {
streaming.value = false;
}

// Error handling
/**
* Handles streaming errors by creating an error message with optional retry capability.
* Cleans up streaming state and removes the thinking indicator.
* The retry function, if provided, will remove the error message before retrying.
* Tracks error telemetry
*/
function handleServiceError(e: unknown, id: string, retry?: () => Promise<void>) {
assert(e instanceof Error);

stopStreaming();
assistantThinkingMessage.value = undefined;
addAssistantError(

const errorMessage = createErrorMessage(
locale.baseText('aiAssistant.serviceError.message', { interpolate: { message: e.message } }),
id,
retry,
);
chatMessages.value = [...chatMessages.value, errorMessage];

telemetry.track('Workflow generation errored', {
error: e.message,
prompt: workflowPrompt.value,
workflow_id: workflowsStore.workflowId,
});
}

// API interaction
function getRandomId() {
return `${Math.floor(Math.random() * 100000000)}`;
// Helper functions
/**
* Prepares UI for incoming streaming response.
* Adds user message immediately for visual feedback, shows thinking indicator,
* and ensures chat is open. Called before initiating API request to minimize
* perceived latency.
*/
function prepareForStreaming(userMessage: string, messageId: string) {
const userMsg = createUserMessage(userMessage, messageId);
chatMessages.value = [...chatMessages.value, userMsg];
addLoadingAssistantMessage(locale.baseText('aiAssistant.thinkingSteps.thinking'));
streaming.value = true;
}

function onEachStreamingMessage(response: ChatRequest.ResponsePayload, id: string) {
if (response.sessionId && !currentSessionId.value) {
currentSessionId.value = response.sessionId;
telemetry.track('Assistant session started', {
chat_session_id: currentSessionId.value,
task: 'workflow-generation',
});
} else if (currentSessionId.value !== response.sessionId) {
// Ignore messages from other sessions
return;
}
addAssistantMessages(response.messages, id);
}

function onDoneStreaming() {
stopStreaming();
/**
* Creates a retry function that removes the associated error message before retrying.
* This ensures the chat doesn't accumulate multiple error messages for the same failure.
* The messageId parameter refers to the error message to remove, not the original user message.
*/
function createRetryHandler(messageId: string, retryFn: () => Promise<void>) {
return async () => {
// Remove the error message before retrying
chatMessages.value = chatMessages.value.filter((msg) => msg.id !== messageId);
await retryFn();
};
}
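
A short usage sketch of createRetryHandler, mirroring how sendChatMessage wires it up later in this diff (the message text is a hypothetical placeholder):

// The handler first drops the error message with this id, then re-issues the original request.
const failedMessageId = generateMessageId();
const retry = createRetryHandler(failedMessageId, async () => sendChatMessage({ text: 'Build it again' }));

// Later, typically triggered from the error message's retry action in the chat UI:
void retry(); // filters out msg.id === failedMessageId, then calls sendChatMessage again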

// Core API functions
async function initBuilderChat(userMessage: string, source: 'chat' | 'canvas') {
telemetry.track('User submitted workflow prompt', {
source,
prompt: userMessage,
});
resetBuilderChat();
const id = getRandomId();

addUserMessage(userMessage, id);
addLoadingAssistantMessage(locale.baseText('aiAssistant.thinkingSteps.thinking'));
openChat();
streaming.value = true;

const payload: ChatRequest.InitBuilderChat = {
role: 'user',
type: 'init-builder-chat',
user: {
firstName: usersStore.currentUser?.firstName ?? '',
},
question: userMessage,
};

chatWithBuilder(
rootStore.restApiContext,
{
payload,
},
(msg) => onEachStreamingMessage(msg, id),
() => onDoneStreaming(),
(e) => handleServiceError(e, id, async () => await initBuilderChat(userMessage, 'chat')),
);
}

async function sendMessage(
chatMessage: Pick<ChatRequest.UserChatMessage, 'text' | 'quickReplyType'>,
) {
/**
* Sends a message to the AI builder service and handles the streaming response.
* Prevents concurrent requests by checking streaming state.
* Captures workflow state before sending for comparison in telemetry.
* Creates a retry handler that preserves the original message context.
* Note: This function is NOT async - streaming happens via callbacks.
*/
function sendChatMessage(options: {
text: string;
source?: 'chat' | 'canvas';
quickReplyType?: string;
}) {
if (streaming.value) {
return;
}

const id = getRandomId();
const { text, source = 'chat', quickReplyType } = options;
const messageId = generateMessageId();

const retry = async () => {
chatMessages.value = chatMessages.value.filter((msg) => msg.id !== id);
await sendMessage(chatMessage);
};
const currentWorkflowJson = getWorkflowSnapshot();
telemetry.track('User submitted builder message', {
source,
message: text,
start_workflow_json: currentWorkflowJson,
workflow_id: workflowsStore.workflowId,
});

prepareForStreaming(text, messageId);

const executionResult = workflowsStore.workflowExecutionData?.data?.resultData;
const payload = createBuilderPayload(text, {
quickReplyType,
workflow: workflowsStore.workflow,
executionData: executionResult,
nodesForSchema: Object.keys(workflowsStore.nodesByName),
});
const retry = createRetryHandler(messageId, async () => sendChatMessage(options));

try {
addUserMessage(chatMessage.text, id);
addLoadingAssistantMessage(locale.baseText('aiAssistant.thinkingSteps.thinking'));

streaming.value = true;
assert(currentSessionId.value);

chatWithBuilder(
rootStore.restApiContext,
{
payload: {
role: 'user',
type: 'message',
text: chatMessage.text,
quickReplyType: chatMessage.quickReplyType,
},
sessionId: currentSessionId.value,
{ payload },
(response) => {
const result = processAssistantMessages(
chatMessages.value,
response.messages,
generateMessageId(),
);
chatMessages.value = result.messages;

if (result.shouldClearThinking) {
assistantThinkingMessage.value = undefined;
}

if (result.thinkingMessage) {
assistantThinkingMessage.value = result.thinkingMessage;
}
},
(msg) => onEachStreamingMessage(msg, id),
() => onDoneStreaming(),
(e) => handleServiceError(e, id, retry),
() => stopStreaming(),
(e) => handleServiceError(e, messageId, retry),
);
} catch (e: unknown) {
// in case of assert
handleServiceError(e, id, retry);
handleServiceError(e, messageId, retry);
}
}
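
For context, a minimal sketch of how a component might drive the store (import path as used by this commit's tests); sendChatMessage returns immediately and results arrive through the streaming callbacks, so the UI watches reactive state rather than awaiting a promise:

import { useBuilderStore } from '@/stores/builder.store';

const builder = useBuilderStore();

// Fire-and-forget: guarded internally, so a second call while streaming is a no-op.
builder.sendChatMessage({ text: 'Create a workflow that posts Slack alerts', source: 'canvas' });

// The component reacts to builder.streaming, builder.assistantThinkingMessage
// and builder.chatMessages as the streamed response is processed.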
// Reset on route change
watch(route, () => {
resetBuilderChat();
});

/**
* Loads the most recent chat session for the current workflow.
* Only loads if a workflow ID exists (not for new unsaved workflows).
* Replaces current chat messages entirely - does NOT merge with existing messages.
* Sessions are ordered by recency, so sessions[0] is always the latest.
* Silently fails and returns empty array on error to prevent UI disruption.
*/
async function loadSessions() {
const workflowId = workflowsStore.workflowId;
if (!workflowId) {
return [];
}

try {
const response = await getAiSessions(rootStore.restApiContext, workflowId);
const sessions = response.sessions || [];

// Load the most recent session if available
if (sessions.length > 0) {
const latestSession = sessions[0];

// Clear existing messages
chatMessages.value = clearMessages();

// Convert and add messages from the session
const convertedMessages = latestSession.messages
.map((msg) => {
const id = generateMessageId();
return mapAssistantMessageToUI(msg, id);
})
// Do not include wf updated messages from session
.filter((msg) => msg.type !== 'workflow-updated');

chatMessages.value = convertedMessages;
}

return sessions;
} catch (error) {
console.error('Failed to load AI sessions:', error);
return [];
}
}
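
A sketch of the openChat → loadSessions flow for a saved workflow; only the behaviour visible in this diff is assumed (sessions[0] is the latest, its messages are mapped via mapAssistantMessageToUI, and 'workflow-updated' entries are dropped):

import { useBuilderStore } from '@/stores/builder.store';

const builder = useBuilderStore();

// e.g. inside an async setup() or onMounted handler:
await builder.openChat();                    // resizes the canvas grid, then calls loadSessions()
const sessions = await builder.loadSessions(); // [] for unsaved workflows or on request failure
// builder.chatMessages now contains the restored messages of sessions[0], if any.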

function captureCurrentWorkflowState() {
const nodePositions = new Map<string, [number, number]>();
const existingNodeIds = new Set<string>();

workflowsStore.allNodes.forEach((node) => {
nodePositions.set(node.id, [...node.position]);
existingNodeIds.add(node.id);
});

return {
nodePositions,
existingNodeIds,
currentWorkflowJson: JSON.stringify(pick(workflowsStore.workflow, ['nodes', 'connections'])),
};
}

function applyWorkflowUpdate(workflowJson: string) {
let workflowData: WorkflowDataUpdate;
try {
workflowData = jsonParse<WorkflowDataUpdate>(workflowJson);
} catch (error) {
useToast().showMessage({
type: 'error',
title: locale.baseText('aiAssistant.builder.workflowParsingError.title'),
message: locale.baseText('aiAssistant.builder.workflowParsingError.content'),
});
return { success: false, error };
}

// Capture current state before clearing
const { nodePositions } = captureCurrentWorkflowState();

// Clear existing workflow
workflowsStore.removeAllConnections({ setStateDirty: false });
workflowsStore.removeAllNodes({ setStateDirty: false, removePinData: true });

// Restore positions for nodes that still exist and identify new nodes
const nodesIdsToTidyUp: string[] = [];
if (workflowData.nodes) {
workflowData.nodes = workflowData.nodes.map((node) => {
const savedPosition = nodePositions.get(node.id);
if (savedPosition) {
return { ...node, position: savedPosition };
} else {
// This is a new node, add it to the tidy up list
nodesIdsToTidyUp.push(node.id);
}
return node;
});
}

return { success: true, workflowData, newNodeIds: nodesIdsToTidyUp };
}
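
A usage sketch of applyWorkflowUpdate; the JSON string here is a hypothetical placeholder for what the builder streams back:

import { useBuilderStore } from '@/stores/builder.store';

const builder = useBuilderStore();
const generatedWorkflowJson = '{"nodes":[],"connections":{}}'; // placeholder for the streamed result

const result = builder.applyWorkflowUpdate(generatedWorkflowJson);
if (result.success && result.workflowData) {
	// result.newNodeIds lists nodes that had no previously saved position; the canvas can pass
	// them to the 'tidyUp' event as nodeIdsFilter so only the newly added nodes are re-laid out,
	// while existing nodes keep the positions restored above.
}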

function getWorkflowSnapshot() {
return JSON.stringify(pick(workflowsStore.workflow, ['nodes', 'connections']));
}

// Public API
return {
@@ -344,24 +379,24 @@ export const useBuilderStore = defineStore(STORES.BUILDER, () => {
canShowAssistantButtonsOnCanvas,
chatWidth,
chatMessages,
unreadCount,
streaming,
isAssistantOpen,
canShowAssistant,
currentSessionId,
assistantThinkingMessage,
chatWindowOpen,
isAIBuilderEnabled,
workflowPrompt,
toolMessages,
workflowMessages,

// Methods
updateWindowWidth,
closeChat,
openChat,
resetBuilderChat,
initBuilderChat,
sendMessage,
addAssistantMessages,
handleServiceError,
sendChatMessage,
loadSessions,
applyWorkflowUpdate,
getWorkflowSnapshot,
};
});

@@ -10,6 +10,7 @@ import type {
IRunExecutionData,
ITaskData,
} from 'n8n-workflow';
import type { ChatUI } from '@n8n/design-system/types/assistant';

export namespace ChatRequest {
export interface NodeExecutionSchema {
@@ -63,17 +64,6 @@ export namespace ChatRequest {
question: string;
}

export interface InitBuilderChat {
role: 'user';
type: 'init-builder-chat';
user: {
firstName: string;
};
context?: UserContext & WorkflowContext;
workflowContext?: WorkflowContext;
question: string;
}

export interface InitCredHelp {
role: 'user';
type: 'init-cred-help';
@@ -127,118 +117,70 @@ export namespace ChatRequest {

export type RequestPayload =
| {
payload: InitErrorHelper | InitSupportChat | InitCredHelp | InitBuilderChat;
payload: InitErrorHelper | InitSupportChat | InitCredHelp;
}
| {
payload: EventRequestPayload | UserChatMessage;
sessionId: string;
sessionId?: string;
};

interface CodeDiffMessage {
role: 'assistant';
type: 'code-diff';
description?: string;
codeDiff?: string;
suggestionId: string;
solution_count: number;
// Re-export types from design-system for backward compatibility
export type ToolMessage = ChatUI.ToolMessage;

// API-specific types that extend UI types
export interface CodeDiffMessage extends ChatUI.CodeDiffMessage {
solution_count?: number;
quickReplies?: ChatUI.QuickReply[];
}

interface QuickReplyOption {
text: string;
type: string;
isFeedback?: boolean;
}

interface AssistantChatMessage {
role: 'assistant';
type: 'message';
text: string;
step?: 'n8n_documentation' | 'n8n_forum';
codeSnippet?: string;
}

interface AssistantSummaryMessage {
role: 'assistant';
type: 'summary';
title: string;
content: string;
}

interface EndSessionMessage {
role: 'assistant';
type: 'event';
eventName: 'end-session';
}

interface AgentChatMessage {
role: 'assistant';
type: 'agent-suggestion';
title: string;
text: string;
}

interface AgentThinkingStep {
export interface AgentThinkingStep {
role: 'assistant';
type: 'intermediate-step';
text: string;
step: string;
}

interface WorkflowStepMessage {
role: 'assistant';
type: 'workflow-step';
steps: string[];
// API-specific types that extend UI types
export interface TextMessage {
role: 'assistant' | 'user';
type: 'message'; // API uses 'message' instead of 'text'
text: string;
step?: 'n8n_documentation' | 'n8n_forum';
codeSnippet?: string;
quickReplies?: ChatUI.QuickReply[];
}

interface WorkflowNodeMessage {
export interface SummaryMessage {
role: 'assistant';
type: 'workflow-node';
nodes: string[];
}

interface WorkflowPromptValidationMessage {
role: 'assistant';
type: 'prompt-validation';
isWorkflowPrompt: boolean;
}
interface WorkflowComposedMessage {
role: 'assistant';
type: 'workflow-composed';
nodes: Array<{
parameters: Record<string, unknown>;
type: string;
name: string;
position: [number, number];
}>;
}
interface WorkflowGeneratedMessage {
role: 'assistant';
type: 'workflow-generated';
codeSnippet: string;
}
interface RateWorkflowMessage {
role: 'assistant';
type: 'rate-workflow';
type: 'summary'; // API uses 'summary' instead of 'block'
title: string;
content: string;
}

export interface AgentSuggestionMessage {
role: 'assistant';
type: 'agent-suggestion';
title: string;
text: string; // API uses text instead of content
suggestionId?: string;
}

// API-only types

export type MessageResponse =
| ((
| AssistantChatMessage
| TextMessage
| CodeDiffMessage
| AssistantSummaryMessage
| AgentChatMessage
| SummaryMessage
| AgentSuggestionMessage
| AgentThinkingStep
| WorkflowStepMessage
| WorkflowNodeMessage
| WorkflowComposedMessage
| WorkflowPromptValidationMessage
| WorkflowGeneratedMessage
| RateWorkflowMessage
| ChatUI.WorkflowUpdatedMessage
| ToolMessage
| ChatUI.ErrorMessage
) & {
quickReplies?: QuickReplyOption[];
quickReplies?: ChatUI.QuickReply[];
})
| EndSessionMessage;
| ChatUI.EndSessionMessage;

export interface ResponsePayload {
sessionId?: string;
@@ -279,3 +221,48 @@ export namespace AskAiRequest {
forNode: 'code' | 'transform';
}
}

// Type guards for ChatRequest messages
export function isTextMessage(msg: ChatRequest.MessageResponse): msg is ChatRequest.TextMessage {
return 'type' in msg && msg.type === 'message' && 'text' in msg;
}

export function isSummaryMessage(
msg: ChatRequest.MessageResponse,
): msg is ChatRequest.SummaryMessage {
return 'type' in msg && msg.type === 'summary' && 'title' in msg && 'content' in msg;
}

export function isAgentSuggestionMessage(
msg: ChatRequest.MessageResponse,
): msg is ChatRequest.AgentSuggestionMessage {
return 'type' in msg && msg.type === 'agent-suggestion' && 'title' in msg && 'text' in msg;
}

export function isAgentThinkingMessage(
msg: ChatRequest.MessageResponse,
): msg is ChatRequest.AgentThinkingStep {
return 'type' in msg && msg.type === 'intermediate-step' && 'step' in msg;
}

export function isCodeDiffMessage(
msg: ChatRequest.MessageResponse,
): msg is ChatRequest.CodeDiffMessage {
return 'type' in msg && msg.type === 'code-diff' && 'codeDiff' in msg;
}

export function isWorkflowUpdatedMessage(
msg: ChatRequest.MessageResponse,
): msg is ChatUI.WorkflowUpdatedMessage {
return 'type' in msg && msg.type === 'workflow-updated' && 'codeSnippet' in msg;
}

export function isToolMessage(msg: ChatRequest.MessageResponse): msg is ChatRequest.ToolMessage {
return 'type' in msg && msg.type === 'tool' && 'toolName' in msg && 'status' in msg;
}

export function isEndSessionMessage(
msg: ChatRequest.MessageResponse,
): msg is ChatUI.EndSessionMessage {
return 'type' in msg && msg.type === 'event' && msg.eventName === 'end-session';
}
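
A small sketch of how these guards narrow a streamed ChatRequest.MessageResponse; it assumes it lives in the same module so the guards above are in scope, and the toolName/status field names are taken from the tool guard's checks:

// Sketch: turn any streamed message into a short human-readable label.
export function describeMessage(msg: ChatRequest.MessageResponse): string {
	if (isTextMessage(msg)) return `text: ${msg.text}`;
	if (isToolMessage(msg)) return `tool ${msg.toolName} (${msg.status})`;
	if (isWorkflowUpdatedMessage(msg)) return 'workflow updated';
	if (isEndSessionMessage(msg)) return 'session ended';
	return 'type' in msg ? `unhandled message type: ${msg.type}` : 'unknown message';
}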

@@ -181,7 +181,7 @@ export type CanvasEventBusEvents = {
action: keyof CanvasNodeEventBusEvents;
payload?: CanvasNodeEventBusEvents[keyof CanvasNodeEventBusEvents];
};
tidyUp: { source: CanvasLayoutSource };
tidyUp: { source: CanvasLayoutSource; nodeIdsFilter?: string[] };
};

export interface CanvasNodeInjectionData {

@@ -1083,13 +1083,18 @@ async function onImportWorkflowDataEvent(data: IDataObject) {
const workflowData = data.data as WorkflowDataUpdate;
await importWorkflowData(workflowData, 'file', {
viewport: viewportBoundaries.value,
regenerateIds: data.regenerateIds === true || data.regenerateIds === undefined,
});

fitView();
selectNodes(workflowData.nodes?.map((node) => node.id) ?? []);
if (data.tidyUp) {
const nodesIdsToTidyUp = data.nodesIdsToTidyUp as string[];
setTimeout(() => {
canvasEventBus.emit('tidyUp', { source: 'import-workflow-data' });
canvasEventBus.emit('tidyUp', {
source: 'import-workflow-data',
nodeIdsFilter: nodesIdsToTidyUp,
});
}, 0);
}
}
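
To make the type change concrete, a small sketch of emitting the event with and without the new optional filter (canvasEventBus imported as in the file above; the node ids are hypothetical placeholders):

// Both forms type-check against the updated CanvasEventBusEvents['tidyUp'] payload.
canvasEventBus.emit('tidyUp', { source: 'import-workflow-data' });
canvasEventBus.emit('tidyUp', {
	source: 'import-workflow-data',
	nodeIdsFilter: ['new-node-a', 'new-node-b'], // only these nodes should be re-laid out
});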