refactor(core): Shovel around more of AI code (no-changelog) (#12218)

commit 2ce1644d01 (parent a8e7a05856), committed via GitHub
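Every hunk below makes the same two mechanical changes to the LangChain LLM sub-nodes: deep relative imports of the shared utilities (for example '../../../utils/sharedFields') become the '@utils/*' path alias, and the import blocks are regrouped so third-party packages come first, then aliased utilities, then local relative modules. The alias itself is presumably declared in the package's TypeScript path mapping; that configuration is not part of this diff. For orientation, the three helpers these files keep importing are normally wired together inside a node's supplyData method: getConnectionHintNoticeField contributes a notice property to the node description, while N8nLlmTracing and makeN8nLlmFailedAttemptHandler wrap the model instance. A minimal sketch of that wiring, assuming a ChatOpenAI-style constructor and hypothetical credential/parameter names (an illustration, not the actual node code):

```typescript
// Illustrative sketch only — option names and credential keys are assumptions, not code from this commit.
import { ChatOpenAI } from '@langchain/openai';
import type { ISupplyDataFunctions, SupplyData } from 'n8n-workflow';

import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
import { N8nLlmTracing } from '../N8nLlmTracing';

export async function supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
	const credentials = await this.getCredentials('openAiApi'); // hypothetical credential name

	const model = new ChatOpenAI({
		apiKey: credentials.apiKey as string,
		model: this.getNodeParameter('model', itemIndex) as string,
		// Streams token usage and run events back into the n8n execution log.
		callbacks: [new N8nLlmTracing(this)],
		// Translates provider retry failures into n8n-friendly errors.
		onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
	});

	return { response: model };
}
```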
@@ -12,9 +12,10 @@ import {
 	type SupplyData,
 } from 'n8n-workflow';
 
-import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
-import { N8nLlmTracing } from '../N8nLlmTracing';
+import { getConnectionHintNoticeField } from '@utils/sharedFields';
+
 import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
+import { N8nLlmTracing } from '../N8nLlmTracing';
 
 const modelField: INodeProperties = {
 	displayName: 'Model',
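The only substantive change in this first hunk is the sharedFields import path and the import order. For context, getConnectionHintNoticeField returns a 'notice'-type property that tells users which root nodes the sub-node must be connected to; a simplified sketch of the kind of field it produces (an assumed shape, not the implementation in @utils/sharedFields):

```typescript
// Assumed shape for illustration — not the actual helper from @utils/sharedFields.
import type { INodeProperties } from 'n8n-workflow';

const connectionHintNotice: INodeProperties = {
	displayName: 'This node must be connected to an AI chain or agent node',
	name: 'notice',
	type: 'notice',
	default: '',
};
```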
@@ -1,4 +1,7 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
+
+import type { ChatOllamaInput } from '@langchain/ollama';
+import { ChatOllama } from '@langchain/ollama';
 import {
 	NodeConnectionType,
 	type INodeType,
@@ -7,12 +10,11 @@ import {
 	type SupplyData,
 } from 'n8n-workflow';
 
-import type { ChatOllamaInput } from '@langchain/ollama';
-import { ChatOllama } from '@langchain/ollama';
-import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
+import { getConnectionHintNoticeField } from '@utils/sharedFields';
+
 import { ollamaModel, ollamaOptions, ollamaDescription } from '../LMOllama/description';
-import { N8nLlmTracing } from '../N8nLlmTracing';
 import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
+import { N8nLlmTracing } from '../N8nLlmTracing';
 
 export class LmChatOllama implements INodeType {
 	description: INodeTypeDescription = {
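In the LmChatOllama node the @langchain/ollama imports simply move to the top of the file. As a reminder of what they are for, ChatOllamaInput is the options object accepted by the ChatOllama constructor; a small, hedged example with placeholder values:

```typescript
// Hedged example — base URL and model name are placeholders, normally supplied by credentials and node parameters.
import { ChatOllama, type ChatOllamaInput } from '@langchain/ollama';

const options: ChatOllamaInput = {
	baseUrl: 'http://localhost:11434', // local Ollama server
	model: 'llama3.2',
	temperature: 0.7,
};

const model = new ChatOllama(options);
```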
@@ -9,7 +9,8 @@ import {
 	type SupplyData,
 } from 'n8n-workflow';
 
-import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
+import { getConnectionHintNoticeField } from '@utils/sharedFields';
+
 import { openAiFailedAttemptHandler } from '../../vendors/OpenAi/helpers/error-handling';
 import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
 import { N8nLlmTracing } from '../N8nLlmTracing';
@@ -1,4 +1,5 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
+import { Cohere } from '@langchain/cohere';
 import {
 	NodeConnectionType,
 	type INodeType,
@@ -7,10 +8,10 @@ import {
 	type SupplyData,
 } from 'n8n-workflow';
 
-import { Cohere } from '@langchain/cohere';
-import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
-import { N8nLlmTracing } from '../N8nLlmTracing';
+import { getConnectionHintNoticeField } from '@utils/sharedFields';
+
 import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
+import { N8nLlmTracing } from '../N8nLlmTracing';
 
 export class LmCohere implements INodeType {
 	description: INodeTypeDescription = {
@@ -1,4 +1,6 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
+
+import { Ollama } from '@langchain/community/llms/ollama';
 import {
 	NodeConnectionType,
 	type INodeType,
@@ -7,11 +9,11 @@ import {
 	type SupplyData,
 } from 'n8n-workflow';
 
-import { Ollama } from '@langchain/community/llms/ollama';
-import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
-import { N8nLlmTracing } from '../N8nLlmTracing';
+import { getConnectionHintNoticeField } from '@utils/sharedFields';
+
 import { ollamaDescription, ollamaModel, ollamaOptions } from './description';
 import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
+import { N8nLlmTracing } from '../N8nLlmTracing';
 
 export class LmOllama implements INodeType {
 	description: INodeTypeDescription = {
@@ -1,4 +1,5 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
+import { OpenAI, type ClientOptions } from '@langchain/openai';
 import { NodeConnectionType } from 'n8n-workflow';
 import type {
 	INodeType,
@@ -8,9 +9,8 @@ import type {
 	ILoadOptionsFunctions,
 } from 'n8n-workflow';
 
-import { OpenAI, type ClientOptions } from '@langchain/openai';
-import { N8nLlmTracing } from '../N8nLlmTracing';
 import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
+import { N8nLlmTracing } from '../N8nLlmTracing';
 
 type LmOpenAiOptions = {
 	baseURL?: string;
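LmOpenAi keeps a local LmOpenAiOptions type with an optional baseURL, which is what the ClientOptions import is for: the URL is forwarded to the underlying OpenAI SDK client. A hedged sketch of that plumbing (the model name and field wiring are assumptions, not taken from this diff):

```typescript
// Hedged sketch — not the node's actual supplyData; shows how a custom baseURL reaches the OpenAI client.
import { OpenAI, type ClientOptions } from '@langchain/openai';

type LmOpenAiOptions = {
	baseURL?: string;
};

function createCompletionModel(apiKey: string, options: LmOpenAiOptions) {
	const configuration: ClientOptions = {};
	if (options.baseURL) {
		configuration.baseURL = options.baseURL;
	}

	return new OpenAI({
		apiKey,
		model: 'gpt-3.5-turbo-instruct', // placeholder model name
		configuration, // forwarded to the OpenAI SDK client constructor
	});
}
```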
@@ -1,4 +1,5 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
+import { HuggingFaceInference } from '@langchain/community/llms/hf';
 import {
 	NodeConnectionType,
 	type INodeType,
@@ -7,10 +8,10 @@ import {
 	type SupplyData,
 } from 'n8n-workflow';
 
-import { HuggingFaceInference } from '@langchain/community/llms/hf';
-import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
-import { N8nLlmTracing } from '../N8nLlmTracing';
+import { getConnectionHintNoticeField } from '@utils/sharedFields';
+
 import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
+import { N8nLlmTracing } from '../N8nLlmTracing';
 
 export class LmOpenHuggingFaceInference implements INodeType {
 	description: INodeTypeDescription = {
@@ -8,9 +8,10 @@ import {
 	type SupplyData,
 } from 'n8n-workflow';
 
-import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
-import { N8nLlmTracing } from '../N8nLlmTracing';
+import { getConnectionHintNoticeField } from '@utils/sharedFields';
+
 import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
+import { N8nLlmTracing } from '../N8nLlmTracing';
 
 export class LmChatAwsBedrock implements INodeType {
 	description: INodeTypeDescription = {
@@ -1,4 +1,5 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
+import { ChatOpenAI } from '@langchain/openai';
 import {
 	NodeConnectionType,
 	type INodeType,
@@ -7,10 +8,10 @@ import {
 	type SupplyData,
 } from 'n8n-workflow';
 
-import { ChatOpenAI } from '@langchain/openai';
-import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
-import { N8nLlmTracing } from '../N8nLlmTracing';
+import { getConnectionHintNoticeField } from '@utils/sharedFields';
+
 import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
+import { N8nLlmTracing } from '../N8nLlmTracing';
 
 export class LmChatAzureOpenAi implements INodeType {
 	description: INodeTypeDescription = {
@@ -1,4 +1,6 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
+import type { SafetySetting } from '@google/generative-ai';
+import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
 import {
 	NodeConnectionType,
 	type INodeType,
@@ -6,12 +8,12 @@ import {
 	type ISupplyDataFunctions,
 	type SupplyData,
 } from 'n8n-workflow';
-import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
-import type { SafetySetting } from '@google/generative-ai';
-import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
-import { N8nLlmTracing } from '../N8nLlmTracing';
 
+import { getConnectionHintNoticeField } from '@utils/sharedFields';
+
+import { additionalOptions } from '../gemini-common/additional-options';
 import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
+import { N8nLlmTracing } from '../N8nLlmTracing';
 
 export class LmChatGoogleGemini implements INodeType {
 	description: INodeTypeDescription = {
@@ -1,4 +1,8 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
+import type { SafetySetting } from '@google/generative-ai';
+import { ProjectsClient } from '@google-cloud/resource-manager';
+import { ChatVertexAI } from '@langchain/google-vertexai';
+import { formatPrivateKey } from 'n8n-nodes-base/dist/utils/utilities';
 import {
 	NodeConnectionType,
 	type INodeType,
@@ -9,15 +13,13 @@ import {
 	type JsonObject,
 	NodeOperationError,
 } from 'n8n-workflow';
-import { ChatVertexAI } from '@langchain/google-vertexai';
-import type { SafetySetting } from '@google/generative-ai';
-import { ProjectsClient } from '@google-cloud/resource-manager';
-import { formatPrivateKey } from 'n8n-nodes-base/dist/utils/utilities';
-import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
-import { N8nLlmTracing } from '../N8nLlmTracing';
-import { additionalOptions } from '../gemini-common/additional-options';
 
+import { getConnectionHintNoticeField } from '@utils/sharedFields';
+
+import { makeErrorFromStatus } from './error-handling';
+import { additionalOptions } from '../gemini-common/additional-options';
 import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
+import { N8nLlmTracing } from '../N8nLlmTracing';
 
 export class LmChatGoogleVertex implements INodeType {
 	description: INodeTypeDescription = {
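Besides the usual import shuffle, the Google Vertex node pulls in ProjectsClient and formatPrivateKey, which are typically used to resolve the available GCP projects from the service-account credential and to repair the newlines of a pasted PEM key. A rough sketch of that kind of lookup (credential field names are assumptions):

```typescript
// Hedged sketch — illustrates what ProjectsClient and formatPrivateKey are typically used for.
import { ProjectsClient } from '@google-cloud/resource-manager';
import { formatPrivateKey } from 'n8n-nodes-base/dist/utils/utilities';

async function listProjectIds(clientEmail: string, rawPrivateKey: string) {
	const client = new ProjectsClient({
		credentials: {
			client_email: clientEmail,
			// formatPrivateKey restores the newlines lost when a PEM key is pasted as a single line.
			private_key: formatPrivateKey(rawPrivateKey),
		},
	});

	const [projects] = await client.searchProjects();
	return projects.map((project) => project.projectId);
}
```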
@@ -1,4 +1,5 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
+import { ChatGroq } from '@langchain/groq';
 import {
 	NodeConnectionType,
 	type INodeType,
@@ -7,10 +8,10 @@ import {
 	type SupplyData,
 } from 'n8n-workflow';
 
-import { ChatGroq } from '@langchain/groq';
-import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
-import { N8nLlmTracing } from '../N8nLlmTracing';
+import { getConnectionHintNoticeField } from '@utils/sharedFields';
+
 import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
+import { N8nLlmTracing } from '../N8nLlmTracing';
 
 export class LmChatGroq implements INodeType {
 	description: INodeTypeDescription = {
@@ -1,4 +1,7 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
+
+import type { ChatMistralAIInput } from '@langchain/mistralai';
+import { ChatMistralAI } from '@langchain/mistralai';
 import {
 	NodeConnectionType,
 	type INodeType,
@@ -7,11 +10,10 @@ import {
 	type SupplyData,
 } from 'n8n-workflow';
 
-import type { ChatMistralAIInput } from '@langchain/mistralai';
-import { ChatMistralAI } from '@langchain/mistralai';
-import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
-import { N8nLlmTracing } from '../N8nLlmTracing';
+import { getConnectionHintNoticeField } from '@utils/sharedFields';
+
 import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
+import { N8nLlmTracing } from '../N8nLlmTracing';
 
 export class LmChatMistralCloud implements INodeType {
 	description: INodeTypeDescription = {
@@ -9,11 +9,11 @@ import type {
 import type { BaseMessage } from '@langchain/core/messages';
 import type { LLMResult } from '@langchain/core/outputs';
 import { encodingForModel } from '@langchain/core/utils/tiktoken';
-import type { IDataObject, ISupplyDataFunctions, JsonObject } from 'n8n-workflow';
 import { pick } from 'lodash';
+import type { IDataObject, ISupplyDataFunctions, JsonObject } from 'n8n-workflow';
 import { NodeConnectionType, NodeError, NodeOperationError } from 'n8n-workflow';
 
-import { logAiEvent } from '../../utils/helpers';
+import { logAiEvent } from '@utils/helpers';
 
 type TokensUsageParser = (llmOutput: LLMResult['llmOutput']) => {
 	completionTokens: number;
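The only change in N8nLlmTracing.ts is the logAiEvent import moving to the @utils/helpers alias, but the neighbouring imports are worth a note: encodingForModel is the tiktoken helper the tracer can use to estimate token counts when a provider does not report them, and lodash's pick trims objects before they are logged. A self-contained sketch of that estimation step (in the spirit of the tracer, not its actual implementation):

```typescript
// Hedged sketch — token estimation in the spirit of N8nLlmTracing, not its actual implementation.
import { encodingForModel } from '@langchain/core/utils/tiktoken';
import { pick } from 'lodash';

async function estimateTokenUsage(promptText: string, completionText: string) {
	// Resolves a tokenizer for a known OpenAI model family.
	const encoder = await encodingForModel('gpt-4');

	const usage = {
		promptTokens: encoder.encode(promptText).length,
		completionTokens: encoder.encode(completionText).length,
		totalTokens: 0,
	};
	usage.totalTokens = usage.promptTokens + usage.completionTokens;

	// pick keeps only the fields worth reporting, mirroring how the tracer trims llmOutput before logging.
	return pick(usage, ['promptTokens', 'completionTokens', 'totalTokens']);
}
```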
@@ -1,5 +1,6 @@
 import type { HarmBlockThreshold, HarmCategory } from '@google/generative-ai';
 import type { INodeProperties } from 'n8n-workflow';
+
 import { harmCategories, harmThresholds } from './safety-options';
 
 export const additionalOptions: INodeProperties = {
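Finally, the gemini-common additional-options module only gains a blank line between its import groups. Its HarmCategory/HarmBlockThreshold types feed Gemini safety settings; consuming them usually means building a SafetySetting[] for the chat model, for example (the category and threshold values here are purely illustrative):

```typescript
// Illustrative values only — the node builds these from user-selected options.
import { HarmBlockThreshold, HarmCategory, type SafetySetting } from '@google/generative-ai';

const safetySettings: SafetySetting[] = [
	{
		category: HarmCategory.HARM_CATEGORY_HARASSMENT,
		threshold: HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE,
	},
	{
		category: HarmCategory.HARM_CATEGORY_HATE_SPEECH,
		threshold: HarmBlockThreshold.BLOCK_ONLY_HIGH,
	},
];
// These are then passed to the chat model, e.g. new ChatGoogleGenerativeAI({ model, safetySettings }).
```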