refactor: Migrate NodeConnectionType to const object type (no-changelog) (#14078)

Co-authored-by: कारतोफ्फेलस्क्रिप्ट™ <aditya@netroy.in>
Author: Alex Grozav
Date: 2025-03-21 14:01:26 +02:00
Committed by: GitHub
Parent: 7e8179b848
Commit: 8215e0b59f
703 changed files with 3104 additions and 3018 deletions
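Every hunk below applies the same mechanical change: imports of the NodeConnectionType enum become imports of the NodeConnectionTypes const object, and member accesses are renamed to match. As a minimal sketch of the pattern, assuming declarations along these lines in n8n-workflow (the member list and string values here are assumptions for illustration, not copied from the package):

// Before: a TypeScript enum, which creates both a runtime object and a nominal type
export enum NodeConnectionType {
  AiAgent = 'ai_agent',
  AiChain = 'ai_chain',
  AiLanguageModel = 'ai_languageModel',
  Main = 'main',
}

// After: a plain const object, plus a union type derived from its values
export const NodeConnectionTypes = {
  AiAgent: 'ai_agent',
  AiChain: 'ai_chain',
  AiLanguageModel: 'ai_languageModel',
  Main: 'main',
} as const;

export type NodeConnectionType =
  (typeof NodeConnectionTypes)[keyof typeof NodeConnectionTypes];

Call sites keep their shape (NodeConnectionTypes.AiLanguageModel still evaluates to a string at runtime), while the derived union type also accepts plain string literals, which a closed enum would reject.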

View File

@@ -3,7 +3,7 @@
import { ChatAnthropic } from '@langchain/anthropic';
import type { LLMResult } from '@langchain/core/outputs';
import {
-NodeConnectionType,
+NodeConnectionTypes,
type INodePropertyOptions,
type INodeProperties,
type ISupplyDataFunctions,
@@ -109,7 +109,7 @@ export class LmChatAnthropic implements INodeType {
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
inputs: [],
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
-outputs: [NodeConnectionType.AiLanguageModel],
+outputs: [NodeConnectionTypes.AiLanguageModel],
outputNames: ['Model'],
credentials: [
{
@@ -118,7 +118,7 @@ export class LmChatAnthropic implements INodeType {
},
],
properties: [
-getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiChain]),
+getConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiChain]),
{
...modelField,
displayOptions: {

View File

@@ -3,7 +3,7 @@
import type { ChatOllamaInput } from '@langchain/ollama';
import { ChatOllama } from '@langchain/ollama';
import {
-NodeConnectionType,
+NodeConnectionTypes,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
@@ -45,11 +45,11 @@ export class LmChatOllama implements INodeType {
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
inputs: [],
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
-outputs: [NodeConnectionType.AiLanguageModel],
+outputs: [NodeConnectionTypes.AiLanguageModel],
outputNames: ['Model'],
...ollamaDescription,
properties: [
-getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
+getConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiAgent]),
ollamaModel,
ollamaOptions,
],

View File

@@ -2,7 +2,7 @@
import { ChatOpenAI, type ClientOptions } from '@langchain/openai';
import {
-NodeConnectionType,
+NodeConnectionTypes,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
@@ -51,7 +51,7 @@ export class LmChatOpenAi implements INodeType {
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
inputs: [],
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
-outputs: [NodeConnectionType.AiLanguageModel],
+outputs: [NodeConnectionTypes.AiLanguageModel],
outputNames: ['Model'],
credentials: [
{
@@ -65,7 +65,7 @@ export class LmChatOpenAi implements INodeType {
'={{ $parameter.options?.baseURL?.split("/").slice(0,-1).join("/") || $credentials?.url?.split("/").slice(0,-1).join("/") || "https://api.openai.com" }}',
},
properties: [
-getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
+getConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiAgent]),
{
displayName:
'If using JSON response format, you must include word "json" in the prompt in your chain or agent. Also, make sure to select latest models released post November 2023.',

View File

@@ -1,7 +1,7 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import { Cohere } from '@langchain/cohere';
import {
-NodeConnectionType,
+NodeConnectionTypes,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
@@ -42,7 +42,7 @@ export class LmCohere implements INodeType {
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
inputs: [],
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
-outputs: [NodeConnectionType.AiLanguageModel],
+outputs: [NodeConnectionTypes.AiLanguageModel],
outputNames: ['Model'],
credentials: [
{
@@ -51,7 +51,7 @@ export class LmCohere implements INodeType {
},
],
properties: [
-getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
+getConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiAgent]),
{
displayName: 'Options',
name: 'options',

View File

@@ -2,7 +2,7 @@
import { Ollama } from '@langchain/community/llms/ollama';
import {
-NodeConnectionType,
+NodeConnectionTypes,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
@@ -44,11 +44,11 @@ export class LmOllama implements INodeType {
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
inputs: [],
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
-outputs: [NodeConnectionType.AiLanguageModel],
+outputs: [NodeConnectionTypes.AiLanguageModel],
outputNames: ['Model'],
...ollamaDescription,
properties: [
-getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
+getConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiAgent]),
ollamaModel,
ollamaOptions,
],

View File

@@ -1,6 +1,6 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import { OpenAI, type ClientOptions } from '@langchain/openai';
-import { NodeConnectionType } from 'n8n-workflow';
+import { NodeConnectionTypes } from 'n8n-workflow';
import type {
INodeType,
INodeTypeDescription,
@@ -53,7 +53,7 @@ export class LmOpenAi implements INodeType {
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
inputs: [],
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
-outputs: [NodeConnectionType.AiLanguageModel],
+outputs: [NodeConnectionTypes.AiLanguageModel],
outputNames: ['Model'],
credentials: [
{

View File

@@ -1,7 +1,7 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import { HuggingFaceInference } from '@langchain/community/llms/hf';
import {
-NodeConnectionType,
+NodeConnectionTypes,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
@@ -42,7 +42,7 @@ export class LmOpenHuggingFaceInference implements INodeType {
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
inputs: [],
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
-outputs: [NodeConnectionType.AiLanguageModel],
+outputs: [NodeConnectionTypes.AiLanguageModel],
outputNames: ['Model'],
credentials: [
{
@@ -51,7 +51,7 @@ export class LmOpenHuggingFaceInference implements INodeType {
},
],
properties: [
-getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
+getConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiAgent]),
{
displayName: 'Model',
name: 'model',

View File

@@ -1,7 +1,7 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import { ChatBedrockConverse } from '@langchain/aws';
import {
-NodeConnectionType,
+NodeConnectionTypes,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
@@ -42,7 +42,7 @@ export class LmChatAwsBedrock implements INodeType {
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
inputs: [],
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
-outputs: [NodeConnectionType.AiLanguageModel],
+outputs: [NodeConnectionTypes.AiLanguageModel],
outputNames: ['Model'],
credentials: [
{
@@ -56,7 +56,7 @@ export class LmChatAwsBedrock implements INodeType {
baseURL: '=https://bedrock.{{$credentials?.region ?? "eu-central-1"}}.amazonaws.com',
},
properties: [
-getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiChain]),
+getConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiChain]),
{
displayName: 'Model',
name: 'model',

View File

@@ -1,7 +1,7 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import { AzureChatOpenAI } from '@langchain/openai';
import {
-NodeConnectionType,
+NodeConnectionTypes,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
@@ -42,7 +42,7 @@ export class LmChatAzureOpenAi implements INodeType {
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
inputs: [],
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
-outputs: [NodeConnectionType.AiLanguageModel],
+outputs: [NodeConnectionTypes.AiLanguageModel],
outputNames: ['Model'],
credentials: [
{
@@ -51,7 +51,7 @@ export class LmChatAzureOpenAi implements INodeType {
},
],
properties: [
-getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
+getConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiAgent]),
{
displayName:
'If using JSON response format, you must include word "json" in the prompt in your chain or agent. Also, make sure to select latest models released post November 2023.',

View File

@@ -2,7 +2,7 @@
import { ChatOpenAI, type ClientOptions } from '@langchain/openai';
import {
-NodeConnectionType,
+NodeConnectionTypes,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
@@ -44,7 +44,7 @@ export class LmChatDeepSeek implements INodeType {
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
inputs: [],
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
-outputs: [NodeConnectionType.AiLanguageModel],
+outputs: [NodeConnectionTypes.AiLanguageModel],
outputNames: ['Model'],
credentials: [
{
@@ -57,7 +57,7 @@ export class LmChatDeepSeek implements INodeType {
baseURL: '={{ $credentials?.url }}',
},
properties: [
-getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
+getConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiAgent]),
{
displayName:
'If using JSON response format, you must include word "json" in the prompt in your chain or agent. Also, make sure to select latest models released post November 2023.',

View File

@@ -1,7 +1,7 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import type { SafetySetting } from '@google/generative-ai';
import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
-import { NodeConnectionType } from 'n8n-workflow';
+import { NodeConnectionTypes } from 'n8n-workflow';
import type {
NodeError,
INodeType,
@@ -52,7 +52,7 @@ export class LmChatGoogleGemini implements INodeType {
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
inputs: [],
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
-outputs: [NodeConnectionType.AiLanguageModel],
+outputs: [NodeConnectionTypes.AiLanguageModel],
outputNames: ['Model'],
credentials: [
{
@@ -65,7 +65,7 @@ export class LmChatGoogleGemini implements INodeType {
baseURL: '={{ $credentials.host }}',
},
properties: [
-getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
+getConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiAgent]),
{
displayName: 'Model',
name: 'modelName',

View File

@@ -4,7 +4,7 @@ import { ProjectsClient } from '@google-cloud/resource-manager';
import { ChatVertexAI } from '@langchain/google-vertexai';
import { formatPrivateKey } from 'n8n-nodes-base/dist/utils/utilities';
import {
-NodeConnectionType,
+NodeConnectionTypes,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
@@ -50,7 +50,7 @@ export class LmChatGoogleVertex implements INodeType {
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
inputs: [],
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
-outputs: [NodeConnectionType.AiLanguageModel],
+outputs: [NodeConnectionTypes.AiLanguageModel],
outputNames: ['Model'],
credentials: [
{
@@ -59,7 +59,7 @@ export class LmChatGoogleVertex implements INodeType {
},
],
properties: [
-getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
+getConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiAgent]),
{
displayName: 'Project ID',
name: 'projectId',

View File

@@ -1,7 +1,7 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import { ChatGroq } from '@langchain/groq';
import {
-NodeConnectionType,
+NodeConnectionTypes,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
@@ -42,7 +42,7 @@ export class LmChatGroq implements INodeType {
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
inputs: [],
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
-outputs: [NodeConnectionType.AiLanguageModel],
+outputs: [NodeConnectionTypes.AiLanguageModel],
outputNames: ['Model'],
credentials: [
{
@@ -54,7 +54,7 @@ export class LmChatGroq implements INodeType {
baseURL: 'https://api.groq.com/openai/v1',
},
properties: [
-getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiChain]),
+getConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiChain]),
{
displayName: 'Model',
name: 'model',

View File

@@ -3,7 +3,7 @@
import type { ChatMistralAIInput } from '@langchain/mistralai';
import { ChatMistralAI } from '@langchain/mistralai';
import {
-NodeConnectionType,
+NodeConnectionTypes,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
@@ -44,7 +44,7 @@ export class LmChatMistralCloud implements INodeType {
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
inputs: [],
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
-outputs: [NodeConnectionType.AiLanguageModel],
+outputs: [NodeConnectionTypes.AiLanguageModel],
outputNames: ['Model'],
credentials: [
{
@@ -57,7 +57,7 @@ export class LmChatMistralCloud implements INodeType {
baseURL: 'https://api.mistral.ai/v1',
},
properties: [
-getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
+getConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiAgent]),
{
displayName: 'Model',
name: 'model',

View File

@@ -2,7 +2,7 @@
import { ChatOpenAI, type ClientOptions } from '@langchain/openai';
import {
-NodeConnectionType,
+NodeConnectionTypes,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
@@ -43,7 +43,7 @@ export class LmChatOpenRouter implements INodeType {
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
inputs: [],
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
-outputs: [NodeConnectionType.AiLanguageModel],
+outputs: [NodeConnectionTypes.AiLanguageModel],
outputNames: ['Model'],
credentials: [
{
@@ -56,7 +56,7 @@ export class LmChatOpenRouter implements INodeType {
baseURL: '={{ $credentials?.url }}',
},
properties: [
-getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
+getConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiAgent]),
{
displayName:
'If using JSON response format, you must include word "json" in the prompt in your chain or agent. Also, make sure to select latest models released post November 2023.',

View File

@@ -11,7 +11,7 @@ import type { LLMResult } from '@langchain/core/outputs';
import { encodingForModel } from '@langchain/core/utils/tiktoken';
import { pick } from 'lodash';
import type { IDataObject, ISupplyDataFunctions, JsonObject } from 'n8n-workflow';
-import { NodeConnectionType, NodeError, NodeOperationError } from 'n8n-workflow';
+import { NodeConnectionTypes, NodeError, NodeOperationError } from 'n8n-workflow';
import { logAiEvent } from '@utils/helpers';
@@ -35,7 +35,7 @@ export class N8nLlmTracing extends BaseCallbackHandler {
// This is crucial for the handleLLMError handler to work correctly (it should be called before the error is propagated to the root node)
awaitHandlers = true;
-connectionType = NodeConnectionType.AiLanguageModel;
+connectionType = NodeConnectionTypes.AiLanguageModel;
promptTokensEstimate = 0;
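This final hunk is the one place in the diff where the identifier is used as a runtime value rather than in a type position, which the const object supports directly. A short sketch of both sides of the pattern, assuming n8n-workflow exports both the const object and the derived type under these names:

import { NodeConnectionTypes, type NodeConnectionType } from 'n8n-workflow';

// Value position: the const object supplies the runtime string
const connectionType = NodeConnectionTypes.AiLanguageModel;

// Type position: the derived union type still checks assignments,
// and now also admits plain string literals
const alsoValid: NodeConnectionType = 'ai_languageModel';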