Mirror of https://github.com/Abdulazizzn/n8n-enterprise-unlocked.git, synced 2025-12-16 09:36:44 +00:00
refactor(core): Include AI events in log streaming relay (#10768)
@@ -1,10 +1,5 @@
 import { NodeConnectionType, NodeOperationError, jsonStringify } from 'n8n-workflow';
-import type {
-	EventNamesAiNodesType,
-	IDataObject,
-	IExecuteFunctions,
-	IWebhookFunctions,
-} from 'n8n-workflow';
+import type { AiEvent, IDataObject, IExecuteFunctions, IWebhookFunctions } from 'n8n-workflow';
 import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import type { BaseOutputParser } from '@langchain/core/output_parsers';
 import type { BaseMessage } from '@langchain/core/messages';
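The multi-line type import is collapsed to a single line, swapping EventNamesAiNodesType for the narrower AiEvent union from n8n-workflow. Sketched from just the event names that appear later in this diff (the real union defined in n8n-workflow is likely wider), AiEvent reads roughly as:

// Illustrative sketch only: these members are the event names visible in this diff;
// the actual AiEvent type in n8n-workflow may define more.
type AiEvent =
	| 'ai-messages-retrieved-from-memory'
	| 'ai-message-added-to-memory'
	| 'ai-output-parsed'
	| 'ai-documents-retrieved'
	| 'ai-document-embedded'
	| 'ai-query-embedded'
	| 'ai-document-processed'
	| 'ai-text-split'
	| 'ai-tool-called'
	| 'ai-vector-store-searched';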
@@ -155,7 +150,7 @@ export function getSessionId(

 export async function logAiEvent(
 	executeFunctions: IExecuteFunctions,
-	event: EventNamesAiNodesType,
+	event: AiEvent,
 	data?: IDataObject,
 ) {
 	try {
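The helper keeps its shape; only the event parameter narrows from EventNamesAiNodesType to AiEvent. A minimal sketch of a body that would fit the signature and the try block above, assuming the execution context exposes a logAiEvent method and a logger (both assumptions, not shown in this hunk); jsonStringify is the helper imported at the top of the file:

export async function logAiEvent(
	executeFunctions: IExecuteFunctions,
	event: AiEvent,
	data?: IDataObject,
) {
	try {
		// Assumption: the execution context forwards the event to n8n's log streaming relay.
		executeFunctions.logAiEvent(event, data ? jsonStringify(data) : undefined);
	} catch (error) {
		// A logging failure must never break the node execution itself.
		executeFunctions.logger.debug(`Error logging AI event: ${event}`);
	}
}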
@@ -196,7 +196,7 @@ export function logWrapper(
 					const payload = { action: 'getMessages', response };
 					executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]);

-					void logAiEvent(executeFunctions, 'n8n.ai.memory.get.messages', { response });
+					void logAiEvent(executeFunctions, 'ai-messages-retrieved-from-memory', { response });
 					return response;
 				};
 			} else if (prop === 'addMessage' && 'addMessage' in target) {
@@ -213,7 +213,7 @@ export function logWrapper(
 						arguments: [message],
 					});

-					void logAiEvent(executeFunctions, 'n8n.ai.memory.added.message', { message });
+					void logAiEvent(executeFunctions, 'ai-message-added-to-memory', { message });
 					executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]);
 				};
 			}
@@ -238,13 +238,13 @@ export function logWrapper(
 						arguments: [stringifiedText],
 					})) as object;

-					void logAiEvent(executeFunctions, 'n8n.ai.output.parser.parsed', { text, response });
+					void logAiEvent(executeFunctions, 'ai-output-parsed', { text, response });
 					executeFunctions.addOutputData(connectionType, index, [
 						[{ json: { action: 'parse', response } }],
 					]);
 					return response;
 				} catch (error) {
-					void logAiEvent(executeFunctions, 'n8n.ai.output.parser.parsed', {
+					void logAiEvent(executeFunctions, 'ai-output-parsed', {
 						text,
 						response: error.message ?? error,
 					});
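Every wrapped provider in this file follows the pattern visible in this hunk: intercept the provider method through a Proxy, call the original, emit an AiEvent for log streaming, record the node's output data, and return the result. A self-contained, simplified illustration of that Proxy technique (the names ParserLike, wrapWithLogging, and logEvent are stand-ins, not helpers from this file):

// Simplified illustration of the interception technique used by logWrapper:
// trap one named method, log an event around the call, pass everything else through.
type ParserLike = { parse: (text: string) => Promise<object> };

function wrapWithLogging<T extends ParserLike>(
	target: T,
	logEvent: (event: string, data?: object) => void,
): T {
	return new Proxy(target, {
		get(obj, prop, receiver) {
			if (prop === 'parse') {
				return async (text: string) => {
					const response = await obj.parse(text);
					logEvent('ai-output-parsed', { text, response });
					return response;
				};
			}
			// Any other property or method is served by the original object.
			return Reflect.get(obj, prop, receiver);
		},
	});
}

A parser wrapped this way behaves exactly like the original; the only difference is the event emitted around each intercepted call.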
@@ -277,7 +277,7 @@ export function logWrapper(
 						arguments: [query, config],
 					})) as Array<Document<Record<string, any>>>;

-					void logAiEvent(executeFunctions, 'n8n.ai.retriever.get.relevant.documents', { query });
+					void logAiEvent(executeFunctions, 'ai-documents-retrieved', { query });
 					executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
 					return response;
 				};
@@ -302,7 +302,7 @@ export function logWrapper(
 						arguments: [documents],
 					})) as number[][];

-					void logAiEvent(executeFunctions, 'n8n.ai.embeddings.embedded.document');
+					void logAiEvent(executeFunctions, 'ai-document-embedded');
 					executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
 					return response;
 				};
@@ -322,7 +322,7 @@ export function logWrapper(
 						method: target[prop],
 						arguments: [query],
 					})) as number[];
-					void logAiEvent(executeFunctions, 'n8n.ai.embeddings.embedded.query');
+					void logAiEvent(executeFunctions, 'ai-query-embedded');
 					executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
 					return response;
 				};
@@ -367,7 +367,7 @@ export function logWrapper(
 						arguments: [item, itemIndex],
 					})) as number[];

-					void logAiEvent(executeFunctions, 'n8n.ai.document.processed');
+					void logAiEvent(executeFunctions, 'ai-document-processed');
 					executeFunctions.addOutputData(connectionType, index, [
 						[{ json: { response }, pairedItem: { item: itemIndex } }],
 					]);
@@ -393,7 +393,7 @@ export function logWrapper(
 						arguments: [text],
 					})) as string[];

-					void logAiEvent(executeFunctions, 'n8n.ai.text.splitter.split');
+					void logAiEvent(executeFunctions, 'ai-text-split');
 					executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
 					return response;
 				};
@@ -417,7 +417,7 @@ export function logWrapper(
 						arguments: [query],
 					})) as string;

-					void logAiEvent(executeFunctions, 'n8n.ai.tool.called', { query, response });
+					void logAiEvent(executeFunctions, 'ai-tool-called', { query, response });
 					executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
 					return response;
 				};
@@ -447,7 +447,7 @@ export function logWrapper(
 						arguments: [query, k, filter, _callbacks],
 					})) as Array<Document<Record<string, any>>>;

-					void logAiEvent(executeFunctions, 'n8n.ai.vector.store.searched', { query });
+					void logAiEvent(executeFunctions, 'ai-vector-store-searched', { query });
 					executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);

 					return response;
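Collected from the hunks above, the renames applied in this file pair each old dotted identifier with its kebab-case AiEvent replacement (only the names visible in this diff; a reference record, not code from the repository):

// Old dotted event identifiers mapped to the kebab-case AiEvent names introduced here.
const renamedAiEvents: Record<string, string> = {
	'n8n.ai.memory.get.messages': 'ai-messages-retrieved-from-memory',
	'n8n.ai.memory.added.message': 'ai-message-added-to-memory',
	'n8n.ai.output.parser.parsed': 'ai-output-parsed',
	'n8n.ai.retriever.get.relevant.documents': 'ai-documents-retrieved',
	'n8n.ai.embeddings.embedded.document': 'ai-document-embedded',
	'n8n.ai.embeddings.embedded.query': 'ai-query-embedded',
	'n8n.ai.document.processed': 'ai-document-processed',
	'n8n.ai.text.splitter.split': 'ai-text-split',
	'n8n.ai.tool.called': 'ai-tool-called',
	'n8n.ai.vector.store.searched': 'ai-vector-store-searched',
};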