From cf0008500cafd94582720a1445d0468898e71184 Mon Sep 17 00:00:00 2001 From: oleg Date: Wed, 30 Apr 2025 08:42:07 +0200 Subject: [PATCH] feat(Azure OpenAI Chat Model Node): Implement Azure Entra ID OAuth2 Authentication (#15003) --- .../@n8n/client-oauth2/src/ClientOAuth2.ts | 1 + packages/@n8n/client-oauth2/src/types.ts | 1 + ...aCognitiveServicesOAuth2Api.credentials.ts | 131 +++++++++++ .../LmChatAzureOpenAi.node.ts | 222 ++++++------------ .../N8nOAuth2TokenCredential.test.ts | 99 ++++++++ .../__tests__/api-key.handler.test.ts | 81 +++++++ .../__tests__/oauth2.handler.test.ts | 98 ++++++++ .../credentials/N8nOAuth2TokenCredential.ts | 44 ++++ .../LmChatAzureOpenAi/credentials/api-key.ts | 45 ++++ .../LmChatAzureOpenAi/credentials/oauth2.ts | 46 ++++ .../llms/LmChatAzureOpenAi/properties.ts | 137 +++++++++++ .../nodes/llms/LmChatAzureOpenAi/types.ts | 94 ++++++++ packages/@n8n/nodes-langchain/package.json | 3 + .../oauth/oauth2-credential.controller.ts | 18 +- pnpm-lock.yaml | 6 + 15 files changed, 879 insertions(+), 147 deletions(-) create mode 100644 packages/@n8n/nodes-langchain/credentials/AzureEntraCognitiveServicesOAuth2Api.credentials.ts create mode 100644 packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/__tests__/N8nOAuth2TokenCredential.test.ts create mode 100644 packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/__tests__/api-key.handler.test.ts create mode 100644 packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/__tests__/oauth2.handler.test.ts create mode 100644 packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/credentials/N8nOAuth2TokenCredential.ts create mode 100644 packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/credentials/api-key.ts create mode 100644 packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/credentials/oauth2.ts create mode 100644 packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/properties.ts create mode 100644 packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/types.ts diff --git a/packages/@n8n/client-oauth2/src/ClientOAuth2.ts b/packages/@n8n/client-oauth2/src/ClientOAuth2.ts index 62e0241e6f..993de48e99 100644 --- a/packages/@n8n/client-oauth2/src/ClientOAuth2.ts +++ b/packages/@n8n/client-oauth2/src/ClientOAuth2.ts @@ -31,6 +31,7 @@ export interface ClientOAuth2Options { scopesSeparator?: ',' | ' '; authorizationGrants?: string[]; state?: string; + additionalBodyProperties?: Record; body?: Record; query?: qs.ParsedUrlQuery; ignoreSSLIssues?: boolean; diff --git a/packages/@n8n/client-oauth2/src/types.ts b/packages/@n8n/client-oauth2/src/types.ts index 26a90bd441..6fed2b2884 100644 --- a/packages/@n8n/client-oauth2/src/types.ts +++ b/packages/@n8n/client-oauth2/src/types.ts @@ -10,6 +10,7 @@ export interface OAuth2CredentialData { authUrl?: string; scope?: string; authQueryParameters?: string; + additionalBodyProperties?: string; grantType: OAuth2GrantType; ignoreSSLIssues?: boolean; oauthTokenData?: { diff --git a/packages/@n8n/nodes-langchain/credentials/AzureEntraCognitiveServicesOAuth2Api.credentials.ts b/packages/@n8n/nodes-langchain/credentials/AzureEntraCognitiveServicesOAuth2Api.credentials.ts new file mode 100644 index 0000000000..7f65175be1 --- /dev/null +++ b/packages/@n8n/nodes-langchain/credentials/AzureEntraCognitiveServicesOAuth2Api.credentials.ts @@ -0,0 +1,131 @@ +import type { ICredentialType, INodeProperties } from 'n8n-workflow'; + +const defaultScopes = ['openid', 'offline_access']; + +export class AzureEntraCognitiveServicesOAuth2Api implements ICredentialType 
{ + name = 'azureEntraCognitiveServicesOAuth2Api'; + + // eslint-disable-next-line n8n-nodes-base/cred-class-field-display-name-missing-oauth2 + displayName = 'Azure Entra ID (Azure Active Directory) API'; + + extends = ['oAuth2Api']; + + documentationUrl = 'azureEntraCognitiveServicesOAuth2Api'; + + properties: INodeProperties[] = [ + { + displayName: 'Grant Type', + name: 'grantType', + type: 'hidden', + default: 'authorizationCode', + }, + { + displayName: 'Resource Name', + name: 'resourceName', + type: 'string', + required: true, + default: '', + }, + { + displayName: 'API Version', + name: 'apiVersion', + type: 'string', + required: true, + default: '2024-12-01-preview', + }, + { + displayName: 'Endpoint', + name: 'endpoint', + type: 'string', + default: undefined, + placeholder: 'https://westeurope.api.cognitive.microsoft.com', + }, + { + displayName: 'Tenant ID', + name: 'tenantId', + type: 'string', + default: 'common', + description: + 'Enter your Azure Tenant ID (Directory ID) or keep "common" for multi-tenant apps. Using a specific Tenant ID is generally recommended and required for certain authentication flows.', + placeholder: 'e.g., xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx or common', + }, + { + displayName: 'Authorization URL', + name: 'authUrl', + type: 'string', + default: 'https://login.microsoftonline.com/$TENANT_ID/oauth2/authorize', + }, + { + displayName: 'Access Token URL', + name: 'accessTokenUrl', + type: 'string', + default: 'https://login.microsoftonline.com/$TENANT_ID/oauth2/token', + }, + { + displayName: 'Client ID', + name: 'clientId', + type: 'string', + required: true, + default: '', + description: 'Client ID obtained from the Azure AD App Registration', + }, + { + displayName: 'Client Secret', + name: 'clientSecret', + type: 'string', + required: true, + typeOptions: { password: true }, + default: '', + description: 'Client Secret obtained from the Azure AD App Registration', + }, + { + displayName: 'Additional Body Properties', + name: 'additionalBodyProperties', + type: 'hidden', + default: + '{"grant_type": "client_credentials", "resource": "https://cognitiveservices.azure.com/"}', + }, + { + displayName: 'Authentication', + name: 'authentication', + type: 'hidden', + default: 'body', + }, + { + displayName: 'Custom Scopes', + name: 'customScopes', + type: 'boolean', + default: false, + description: + 'Define custom scopes. You might need this if the default scopes are not sufficient or if you want to minimize permissions. Ensure you include "openid" and "offline_access".', + }, + { + displayName: 'Auth URI Query Parameters', + name: 'authQueryParameters', + type: 'hidden', + default: '', + description: + 'For some services additional query parameters have to be set which can be defined here', + placeholder: '', + }, + { + displayName: 'Enabled Scopes', + name: 'enabledScopes', + type: 'string', + displayOptions: { + show: { + customScopes: [true], + }, + }, + default: defaultScopes.join(' '), + placeholder: 'openid offline_access', + description: 'Space-separated list of scopes to request.', + }, + { + displayName: 'Scope', + name: 'scope', + type: 'hidden', + default: '={{ $self.customScopes ? 
$self.enabledScopes : "' + defaultScopes.join(' ') + '"}}', + }, + ]; +} diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.ts index 578e7a6f58..b513f31a46 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.ts @@ -1,6 +1,8 @@ +/* eslint-disable n8n-nodes-base/node-execute-block-wrong-error-thrown */ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { AzureChatOpenAI } from '@langchain/openai'; import { + NodeOperationError, NodeConnectionTypes, type INodeType, type INodeTypeDescription, @@ -8,8 +10,15 @@ import { type SupplyData, } from 'n8n-workflow'; -import { getConnectionHintNoticeField } from '@utils/sharedFields'; - +import { setupApiKeyAuthentication } from './credentials/api-key'; +import { setupOAuth2Authentication } from './credentials/oauth2'; +import { properties } from './properties'; +import { AuthenticationType } from './types'; +import type { + AzureOpenAIApiKeyModelConfig, + AzureOpenAIOAuth2ModelConfig, + AzureOpenAIOptions, +} from './types'; import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler'; import { N8nLlmTracing } from '../N8nLlmTracing'; @@ -48,163 +57,86 @@ export class LmChatAzureOpenAi implements INodeType { { name: 'azureOpenAiApi', required: true, - }, - ], - properties: [ - getConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiAgent]), - { - displayName: - 'If using JSON response format, you must include word "json" in the prompt in your chain or agent. Also, make sure to select latest models released post November 2023.', - name: 'notice', - type: 'notice', - default: '', displayOptions: { show: { - '/options.responseFormat': ['json_object'], + authentication: [AuthenticationType.ApiKey], }, }, }, { - displayName: 'Model (Deployment) Name', - name: 'model', - type: 'string', - description: 'The name of the model(deployment) to use', - default: '', - }, - { - displayName: 'Options', - name: 'options', - placeholder: 'Add Option', - description: 'Additional options to add', - type: 'collection', - default: {}, - options: [ - { - displayName: 'Frequency Penalty', - name: 'frequencyPenalty', - default: 0, - typeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 }, - description: - "Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim", - type: 'number', + name: 'azureEntraCognitiveServicesOAuth2Api', + required: true, + displayOptions: { + show: { + authentication: [AuthenticationType.EntraOAuth2], }, - { - displayName: 'Maximum Number of Tokens', - name: 'maxTokens', - default: -1, - description: - 'The maximum number of tokens to generate in the completion. 
Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).', - type: 'number', - typeOptions: { - maxValue: 32768, - }, - }, - { - displayName: 'Response Format', - name: 'responseFormat', - default: 'text', - type: 'options', - options: [ - { - name: 'Text', - value: 'text', - description: 'Regular text response', - }, - { - name: 'JSON', - value: 'json_object', - description: - 'Enables JSON mode, which should guarantee the message the model generates is valid JSON', - }, - ], - }, - { - displayName: 'Presence Penalty', - name: 'presencePenalty', - default: 0, - typeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 }, - description: - "Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics", - type: 'number', - }, - { - displayName: 'Sampling Temperature', - name: 'temperature', - default: 0.7, - typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 }, - description: - 'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.', - type: 'number', - }, - { - displayName: 'Timeout', - name: 'timeout', - default: 60000, - description: 'Maximum amount of time a request is allowed to take in milliseconds', - type: 'number', - }, - { - displayName: 'Max Retries', - name: 'maxRetries', - default: 2, - description: 'Maximum number of retries to attempt', - type: 'number', - }, - { - displayName: 'Top P', - name: 'topP', - default: 1, - typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 }, - description: - 'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.', - type: 'number', - }, - ], + }, }, ], + properties, }; async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { - const credentials = await this.getCredentials<{ - apiKey: string; - resourceName: string; - apiVersion: string; - endpoint?: string; - }>('azureOpenAiApi'); + try { + const authenticationMethod = this.getNodeParameter( + 'authentication', + itemIndex, + ) as AuthenticationType; + const modelName = this.getNodeParameter('model', itemIndex) as string; + const options = this.getNodeParameter('options', itemIndex, {}) as AzureOpenAIOptions; - const modelName = this.getNodeParameter('model', itemIndex) as string; - const options = this.getNodeParameter('options', itemIndex, {}) as { - frequencyPenalty?: number; - maxTokens?: number; - maxRetries: number; - timeout: number; - presencePenalty?: number; - temperature?: number; - topP?: number; - responseFormat?: 'text' | 'json_object'; - }; + // Set up Authentication based on selection and get configuration + let modelConfig: AzureOpenAIApiKeyModelConfig | AzureOpenAIOAuth2ModelConfig; + switch (authenticationMethod) { + case AuthenticationType.ApiKey: + modelConfig = await setupApiKeyAuthentication.call(this, 'azureOpenAiApi'); + break; + case AuthenticationType.EntraOAuth2: + modelConfig = await setupOAuth2Authentication.call( + this, + 'azureEntraCognitiveServicesOAuth2Api', + ); + break; + default: + throw new NodeOperationError(this.getNode(), 'Invalid authentication method'); + } - const model = new AzureChatOpenAI({ - azureOpenAIApiDeploymentName: modelName, - // instance name only needed to set base url - azureOpenAIApiInstanceName: !credentials.endpoint ? 
credentials.resourceName : undefined, - azureOpenAIApiKey: credentials.apiKey, - azureOpenAIApiVersion: credentials.apiVersion, - azureOpenAIEndpoint: credentials.endpoint, - ...options, - timeout: options.timeout ?? 60000, - maxRetries: options.maxRetries ?? 2, - callbacks: [new N8nLlmTracing(this)], - modelKwargs: options.responseFormat - ? { - response_format: { type: options.responseFormat }, - } - : undefined, - onFailedAttempt: makeN8nLlmFailedAttemptHandler(this), - }); + this.logger.info(`Instantiating AzureChatOpenAI model with deployment: ${modelName}`); - return { - response: model, - }; + // Create and return the model + const model = new AzureChatOpenAI({ + azureOpenAIApiDeploymentName: modelName, + ...modelConfig, + ...options, + timeout: options.timeout ?? 60000, + maxRetries: options.maxRetries ?? 2, + callbacks: [new N8nLlmTracing(this)], + modelKwargs: options.responseFormat + ? { + response_format: { type: options.responseFormat }, + } + : undefined, + onFailedAttempt: makeN8nLlmFailedAttemptHandler(this), + }); + + this.logger.info(`Azure OpenAI client initialized for deployment: ${modelName}`); + + return { + response: model, + }; + } catch (error) { + this.logger.error(`Error in LmChatAzureOpenAi.supplyData: ${error.message}`, error); + + // Re-throw NodeOperationError directly, wrap others + if (error instanceof NodeOperationError) { + throw error; + } + + throw new NodeOperationError( + this.getNode(), + `Failed to initialize Azure OpenAI client: ${error.message}`, + error, + ); + } } } diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/__tests__/N8nOAuth2TokenCredential.test.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/__tests__/N8nOAuth2TokenCredential.test.ts new file mode 100644 index 0000000000..c3fbe4c3d4 --- /dev/null +++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/__tests__/N8nOAuth2TokenCredential.test.ts @@ -0,0 +1,99 @@ +import type { INode } from 'n8n-workflow'; +import { NodeOperationError } from 'n8n-workflow'; + +import { N8nOAuth2TokenCredential } from '../credentials/N8nOAuth2TokenCredential'; +import type { AzureEntraCognitiveServicesOAuth2ApiCredential } from '../types'; + +const mockNode: INode = { + id: '1', + name: 'Mock node', + typeVersion: 2, + type: 'n8n-nodes-base.mock', + position: [0, 0], + parameters: {}, +}; + +describe('N8nOAuth2TokenCredential', () => { + let mockCredential: AzureEntraCognitiveServicesOAuth2ApiCredential; + let credential: N8nOAuth2TokenCredential; + + beforeEach(() => { + // Create a mock credential with all required properties + mockCredential = { + authQueryParameters: '', + authentication: 'body', // Set valid authentication type + authUrl: '', + accessTokenUrl: '', // Added missing property + grantType: 'clientCredentials', // Corrected grant type value + clientId: '', + customScopes: false, + apiVersion: '2023-05-15', + endpoint: 'https://test.openai.azure.com', + resourceName: 'test-resource', + oauthTokenData: { + access_token: 'test-token', + expires_on: 1234567890, + ext_expires_on: 0, + }, + scope: '', + tenantId: '', + }; + + credential = new N8nOAuth2TokenCredential(mockNode, mockCredential); + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + describe('getToken', () => { + it('should return a token when credentials are valid', async () => { + // Act + const result = await credential.getToken(); + + // Assert + expect(result).toEqual({ + token: 'test-token', + expiresOnTimestamp: 1234567890, + }); + }); + + it('should throw 
NodeOperationError when credentials do not contain token', async () => { + // Arrange - remove the token + mockCredential.oauthTokenData.access_token = ''; + credential = new N8nOAuth2TokenCredential(mockNode, mockCredential); + + // Act & Assert + await expect(credential.getToken()).rejects.toThrow(NodeOperationError); + }); + + it('should throw NodeOperationError when oauthTokenData is missing', async () => { + // Arrange - remove oauthTokenData + const incompleteCredential = { ...mockCredential }; + // @ts-expect-error: purposely making it invalid for test + delete incompleteCredential.oauthTokenData; + + credential = new N8nOAuth2TokenCredential( + mockNode, + incompleteCredential as AzureEntraCognitiveServicesOAuth2ApiCredential, + ); + + // Act & Assert + await expect(credential.getToken()).rejects.toThrow(NodeOperationError); + }); + }); + + describe('getDeploymentDetails', () => { + it('should return deployment details from credentials', async () => { + // Act + const result = await credential.getDeploymentDetails(); + + // Assert + expect(result).toEqual({ + apiVersion: '2023-05-15', + endpoint: 'https://test.openai.azure.com', + resourceName: 'test-resource', + }); + }); + }); +}); diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/__tests__/api-key.handler.test.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/__tests__/api-key.handler.test.ts new file mode 100644 index 0000000000..5e2e60d8bd --- /dev/null +++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/__tests__/api-key.handler.test.ts @@ -0,0 +1,81 @@ +/* eslint-disable @typescript-eslint/unbound-method */ +import { createMockExecuteFunction } from 'n8n-nodes-base/test/nodes/Helpers'; +import type { INode, ISupplyDataFunctions } from 'n8n-workflow'; +import { NodeOperationError } from 'n8n-workflow'; + +import { setupApiKeyAuthentication } from '../credentials/api-key'; + +describe('setupApiKeyAuthentication', () => { + let ctx: ISupplyDataFunctions; + + beforeEach(() => { + const mockNode: INode = { + id: '1', + name: 'Mock node', + typeVersion: 2, + type: 'n8n-nodes-base.mock', + position: [0, 0], + parameters: {}, + }; + ctx = createMockExecuteFunction({}, mockNode); + ctx.logger = { + debug: jest.fn(), + info: jest.fn(), + warn: jest.fn(), + error: jest.fn(), + }; + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + it('should return valid configuration when API key is provided', async () => { + // Arrange + const mockCredentials = { + apiKey: 'test-api-key', + resourceName: 'test-resource', + apiVersion: '2023-05-15', + endpoint: 'https://test.openai.azure.com', + }; + + ctx.getCredentials = jest.fn().mockResolvedValue(mockCredentials); + // Act + const result = await setupApiKeyAuthentication.call(ctx, 'testCredential'); + // Assert + expect(result).toEqual({ + azureOpenAIApiKey: 'test-api-key', + azureOpenAIApiInstanceName: 'test-resource', + azureOpenAIApiVersion: '2023-05-15', + azureOpenAIEndpoint: 'https://test.openai.azure.com', + }); + expect(ctx.getCredentials).toHaveBeenCalledWith('testCredential'); + }); + + it('should throw NodeOperationError when API key is missing', async () => { + // Arrange + const mockCredentials = { + // No apiKey + resourceName: 'test-resource', + apiVersion: '2023-05-15', + }; + + ctx.getCredentials = jest.fn().mockResolvedValue(mockCredentials); + + // Act & Assert + await expect(setupApiKeyAuthentication.call(ctx, 'testCredential')).rejects.toThrow( + NodeOperationError, + ); + }); + + it('should throw NodeOperationError 
when credential retrieval fails', async () => { + // Arrange + const testError = new Error('Credential fetch failed'); + ctx.getCredentials = jest.fn().mockRejectedValue(testError); + + // Act & Assert + await expect(setupApiKeyAuthentication.call(ctx, 'testCredential')).rejects.toThrow( + NodeOperationError, + ); + }); +}); diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/__tests__/oauth2.handler.test.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/__tests__/oauth2.handler.test.ts new file mode 100644 index 0000000000..4e7090d28c --- /dev/null +++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/__tests__/oauth2.handler.test.ts @@ -0,0 +1,98 @@ +/* eslint-disable @typescript-eslint/unbound-method */ +import { createMockExecuteFunction } from 'n8n-nodes-base/test/nodes/Helpers'; +import type { INode, ISupplyDataFunctions } from 'n8n-workflow'; +import { NodeOperationError } from 'n8n-workflow'; + +import { setupOAuth2Authentication } from '../credentials/oauth2'; +import type { AzureEntraCognitiveServicesOAuth2ApiCredential } from '../types'; + +// Mock the N8nOAuth2TokenCredential +jest.mock('../credentials/N8nOAuth2TokenCredential', () => ({ + N8nOAuth2TokenCredential: jest.fn().mockImplementation(() => ({ + getToken: jest.fn().mockResolvedValue({ + token: 'test-token', + expiresOnTimestamp: 1234567890, + }), + getDeploymentDetails: jest.fn().mockResolvedValue({ + apiVersion: '2023-05-15', + endpoint: 'https://test.openai.azure.com', + resourceName: 'test-resource', + }), + })), +})); + +const mockNode: INode = { + id: '1', + name: 'Mock node', + typeVersion: 2, + type: 'n8n-nodes-base.mock', + position: [0, 0], + parameters: {}, +}; + +describe('setupOAuth2Authentication', () => { + let mockCredential: AzureEntraCognitiveServicesOAuth2ApiCredential; + let ctx: ISupplyDataFunctions; + beforeEach(() => { + // Set up a mock credential + mockCredential = { + authQueryParameters: '', + authentication: 'body', // Set valid authentication type + authUrl: '', + accessTokenUrl: '', // Added missing property + grantType: 'clientCredentials', // Corrected grant type value + clientId: '', + customScopes: false, + apiVersion: '2023-05-15', + endpoint: 'https://test.openai.azure.com', + resourceName: 'test-resource', + oauthTokenData: { + access_token: 'test-token', + expires_on: 1234567890, + ext_expires_on: 0, + }, + scope: '', + tenantId: '', + }; + ctx = createMockExecuteFunction({}, mockNode); + ctx.getCredentials = jest.fn().mockResolvedValue(mockCredential); + ctx.logger = { + debug: jest.fn(), + info: jest.fn(), + warn: jest.fn(), + error: jest.fn(), + }; + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + it('should return token provider and deployment details when successful', async () => { + // Act + const result = await setupOAuth2Authentication.call(ctx, 'testCredential'); + + // Assert + expect(result).toHaveProperty('azureADTokenProvider'); + expect(typeof result.azureADTokenProvider).toBe('function'); + expect(result).toEqual( + expect.objectContaining({ + azureOpenAIApiInstanceName: 'test-resource', + azureOpenAIApiVersion: '2023-05-15', + azureOpenAIEndpoint: 'https://test.openai.azure.com', + }), + ); + expect(ctx.getCredentials).toHaveBeenCalledWith('testCredential'); + }); + + it('should throw NodeOperationError when credential retrieval fails', async () => { + // Arrange + const testError = new Error('Credential fetch failed'); + ctx.getCredentials = jest.fn().mockRejectedValue(testError); + + // Act & Assert + await 
expect(setupOAuth2Authentication.call(ctx, 'testCredential')).rejects.toThrow(
+			NodeOperationError,
+		);
+	});
+});
diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/credentials/N8nOAuth2TokenCredential.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/credentials/N8nOAuth2TokenCredential.ts
new file mode 100644
index 0000000000..63a5e70d3f
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/credentials/N8nOAuth2TokenCredential.ts
@@ -0,0 +1,44 @@
+import type { TokenCredential, AccessToken } from '@azure/identity';
+import type { INode } from 'n8n-workflow';
+import { NodeOperationError } from 'n8n-workflow';
+
+import type { AzureEntraCognitiveServicesOAuth2ApiCredential } from '../types';
+/**
+ * Adapts n8n's credential retrieval into the TokenCredential interface expected by @azure/identity
+ */
+export class N8nOAuth2TokenCredential implements TokenCredential {
+	constructor(
+		private node: INode,
+		private credential: AzureEntraCognitiveServicesOAuth2ApiCredential,
+	) {}
+
+	/**
+	 * Gets an access token from OAuth credential
+	 */
+	async getToken(): Promise<AccessToken> {
+		try {
+			if (!this.credential?.oauthTokenData?.access_token) {
+				throw new NodeOperationError(this.node, 'Failed to retrieve access token');
+			}
+
+			return {
+				token: this.credential.oauthTokenData.access_token,
+				expiresOnTimestamp: this.credential.oauthTokenData.expires_on,
+			};
+		} catch (error) {
+			// Re-throw with better error message
+			throw new NodeOperationError(this.node, 'Failed to retrieve OAuth2 access token', error);
+		}
+	}
+
+	/**
+	 * Gets the deployment details from the credential
+	 */
+	async getDeploymentDetails() {
+		return {
+			apiVersion: this.credential.apiVersion,
+			endpoint: this.credential.endpoint,
+			resourceName: this.credential.resourceName,
+		};
+	}
+}
diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/credentials/api-key.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/credentials/api-key.ts
new file mode 100644
index 0000000000..ee3afad894
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/credentials/api-key.ts
@@ -0,0 +1,45 @@
+import { NodeOperationError, OperationalError, type ISupplyDataFunctions } from 'n8n-workflow';
+
+import type { AzureOpenAIApiKeyModelConfig } from '../types';
+
+/**
+ * Handles API Key authentication setup for Azure OpenAI
+ */
+export async function setupApiKeyAuthentication(
+	this: ISupplyDataFunctions,
+	credentialName: string,
+): Promise<AzureOpenAIApiKeyModelConfig> {
+	try {
+		// Get Azure OpenAI Config (Endpoint, Version, etc.)
+		const configCredentials = await this.getCredentials<{
+			apiKey?: string;
+			resourceName: string;
+			apiVersion: string;
+			endpoint?: string;
+		}>(credentialName);
+
+		if (!configCredentials.apiKey) {
+			throw new NodeOperationError(
+				this.getNode(),
+				'API Key is missing in the selected Azure OpenAI API credential. Please configure the API Key or choose Entra ID authentication.',
+			);
+		}
+
+		this.logger.info('Using API Key authentication for Azure OpenAI.');
+
+		return {
+			azureOpenAIApiKey: configCredentials.apiKey,
+			azureOpenAIApiInstanceName: configCredentials.resourceName,
+			azureOpenAIApiVersion: configCredentials.apiVersion,
+			azureOpenAIEndpoint: configCredentials.endpoint,
+		};
+	} catch (error) {
+		if (error instanceof OperationalError) {
+			throw error;
+		}
+
+		this.logger.error(`Error setting up API Key authentication: ${error.message}`, error);
+
+		throw new NodeOperationError(this.getNode(), 'Failed to retrieve API Key', error);
+	}
+}
diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/credentials/oauth2.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/credentials/oauth2.ts
new file mode 100644
index 0000000000..17f7641af9
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/credentials/oauth2.ts
@@ -0,0 +1,46 @@
+import { getBearerTokenProvider } from '@azure/identity';
+import { NodeOperationError, type ISupplyDataFunctions } from 'n8n-workflow';
+
+import { N8nOAuth2TokenCredential } from './N8nOAuth2TokenCredential';
+import type {
+	AzureEntraCognitiveServicesOAuth2ApiCredential,
+	AzureOpenAIOAuth2ModelConfig,
+} from '../types';
+
+const AZURE_OPENAI_SCOPE = 'https://cognitiveservices.azure.com/.default';
+/**
+ * Creates Entra ID (OAuth2) authentication for Azure OpenAI
+ */
+export async function setupOAuth2Authentication(
+	this: ISupplyDataFunctions,
+	credentialName: string,
+): Promise<AzureOpenAIOAuth2ModelConfig> {
+	try {
+		const credential =
+			await this.getCredentials<AzureEntraCognitiveServicesOAuth2ApiCredential>(credentialName);
+		// Create a TokenCredential
+		const entraTokenCredential = new N8nOAuth2TokenCredential(this.getNode(), credential);
+		const deploymentDetails = await entraTokenCredential.getDeploymentDetails();
+
+		// Use getBearerTokenProvider to create the function LangChain expects
+		// Pass the required scope for Azure Cognitive Services
+		const azureADTokenProvider = getBearerTokenProvider(entraTokenCredential, AZURE_OPENAI_SCOPE);
+
+		this.logger.debug('Successfully created Azure AD Token Provider.');
+
+		return {
+			azureADTokenProvider,
+			azureOpenAIApiInstanceName: deploymentDetails.resourceName,
+			azureOpenAIApiVersion: deploymentDetails.apiVersion,
+			azureOpenAIEndpoint: deploymentDetails.endpoint,
+		};
+	} catch (error) {
+		this.logger.error(`Error setting up Entra ID authentication: ${error.message}`, error);
+
+		throw new NodeOperationError(
+			this.getNode(),
+			`Error setting up Entra ID authentication: ${error.message}`,
+			error,
+		);
+	}
+}
diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/properties.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/properties.ts
new file mode 100644
index 0000000000..b1c6b24ec5
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/properties.ts
@@ -0,0 +1,137 @@
+import type { INodeProperties } from 'n8n-workflow';
+import { NodeConnectionTypes } from 'n8n-workflow';
+
+import { getConnectionHintNoticeField } from '@utils/sharedFields';
+
+import { AuthenticationType } from './types';
+
+export const properties: INodeProperties[] = [
+	// eslint-disable-next-line n8n-nodes-base/node-param-default-missing
+	{
+		displayName: 'Authentication',
+		name: 'authentication',
+		type: 'options',
+		default: AuthenticationType.ApiKey,
+		options: [
+			{
+				name: 'API Key',
+				value: AuthenticationType.ApiKey,
+			},
+			{
+				name: 'Azure Entra ID (OAuth2)',
+				value:
AuthenticationType.EntraOAuth2, + }, + ], + }, + getConnectionHintNoticeField([NodeConnectionTypes.AiChain, NodeConnectionTypes.AiAgent]), + { + displayName: + 'If using JSON response format, you must include word "json" in the prompt in your chain or agent. Also, make sure to select latest models released post November 2023.', + name: 'notice', + type: 'notice', + default: '', + displayOptions: { + show: { + '/options.responseFormat': ['json_object'], + }, + }, + }, + { + displayName: 'Model (Deployment) Name', + name: 'model', + type: 'string', + description: 'The name of the model(deployment) to use (e.g., gpt-4, gpt-35-turbo)', + required: true, + default: '', + }, + { + displayName: 'Options', + name: 'options', + placeholder: 'Add Option', + description: 'Additional options to add', + type: 'collection', + default: {}, + options: [ + { + displayName: 'Frequency Penalty', + name: 'frequencyPenalty', + default: 0, + typeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 }, + description: + "Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim", + type: 'number', + }, + { + displayName: 'Maximum Number of Tokens', + name: 'maxTokens', + default: -1, + description: + 'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768). Use -1 for default.', + type: 'number', + typeOptions: { + maxValue: 128000, + }, + }, + { + displayName: 'Response Format', + name: 'responseFormat', + default: 'text', + type: 'options', + options: [ + { + name: 'Text', + value: 'text', + description: 'Regular text response', + }, + { + name: 'JSON', + value: 'json_object', + description: + 'Enables JSON mode, which should guarantee the message the model generates is valid JSON', + }, + ], + }, + { + displayName: 'Presence Penalty', + name: 'presencePenalty', + default: 0, + typeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 }, + description: + "Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics", + type: 'number', + }, + { + displayName: 'Sampling Temperature', + name: 'temperature', + default: 0.7, + typeOptions: { maxValue: 2, minValue: 0, numberPrecision: 1 }, // Max temp can be 2 + description: + 'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.', + type: 'number', + }, + { + displayName: 'Timeout (Ms)', + name: 'timeout', + default: 60000, + description: 'Maximum amount of time a request is allowed to take in milliseconds', + type: 'number', + }, + { + displayName: 'Max Retries', + name: 'maxRetries', + default: 2, + description: 'Maximum number of retries to attempt on failure', + type: 'number', + }, + { + displayName: 'Top P', + name: 'topP', + default: 1, + typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 }, + description: + 'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. 
We generally recommend altering this or temperature but not both.',
+				type: 'number',
+			},
+		],
+	},
+];
diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/types.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/types.ts
new file mode 100644
index 0000000000..3179e155ea
--- /dev/null
+++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/types.ts
@@ -0,0 +1,94 @@
+import type { OAuth2CredentialData } from '@n8n/client-oauth2';
+/**
+ * Common interfaces for Azure OpenAI configuration
+ */
+
+/**
+ * Basic Azure OpenAI API configuration options
+ */
+export interface AzureOpenAIConfig {
+	apiVersion: string;
+	resourceName: string;
+	endpoint?: string;
+}
+
+/**
+ * Configuration for API Key authentication
+ */
+export interface AzureOpenAIApiKeyConfig extends AzureOpenAIConfig {
+	apiKey: string;
+}
+
+/**
+ * Azure OpenAI node options
+ */
+export interface AzureOpenAIOptions {
+	frequencyPenalty?: number;
+	maxTokens?: number;
+	maxRetries?: number;
+	timeout?: number;
+	presencePenalty?: number;
+	temperature?: number;
+	topP?: number;
+	responseFormat?: 'text' | 'json_object';
+}
+
+/**
+ * Base model configuration that can be passed to AzureChatOpenAI constructor
+ */
+export interface AzureOpenAIBaseModelConfig {
+	azureOpenAIApiInstanceName: string;
+	azureOpenAIApiVersion: string;
+	azureOpenAIEndpoint?: string;
+}
+
+/**
+ * API Key model configuration that can be passed to AzureChatOpenAI constructor
+ */
+export interface AzureOpenAIApiKeyModelConfig extends AzureOpenAIBaseModelConfig {
+	azureOpenAIApiKey: string;
+	azureADTokenProvider?: undefined;
+}
+
+/**
+ * OAuth2 model configuration that can be passed to AzureChatOpenAI constructor
+ */
+export interface AzureOpenAIOAuth2ModelConfig extends AzureOpenAIBaseModelConfig {
+	azureOpenAIApiKey?: undefined;
+	azureADTokenProvider: () => Promise<string>;
+}
+
+/**
+ * Authentication types supported by Azure OpenAI node
+ */
+export const enum AuthenticationType {
+	ApiKey = 'azureOpenAiApi',
+	EntraOAuth2 = 'azureEntraCognitiveServicesOAuth2Api',
+}
+
+/**
+ * Error types for Azure OpenAI node
+ */
+export const enum AzureOpenAIErrorType {
+	AuthenticationError = 'AuthenticationError',
+	ConfigurationError = 'ConfigurationError',
+	APIError = 'APIError',
+	UnknownError = 'UnknownError',
+}
+
+/**
+ * OAuth2 credential type used by Azure OpenAI node
+ */
+type TokenData = OAuth2CredentialData['oauthTokenData'] & {
+	expires_on: number;
+	ext_expires_on: number;
+};
+export type AzureEntraCognitiveServicesOAuth2ApiCredential = OAuth2CredentialData & {
+	customScopes: boolean;
+	authentication: string;
+	apiVersion: string;
+	endpoint: string;
+	resourceName: string;
+	tenantId: string;
+	oauthTokenData: TokenData;
+};
diff --git a/packages/@n8n/nodes-langchain/package.json b/packages/@n8n/nodes-langchain/package.json
index 7773f92e9b..5424f84612 100644
--- a/packages/@n8n/nodes-langchain/package.json
+++ b/packages/@n8n/nodes-langchain/package.json
@@ -25,6 +25,7 @@
     "credentials": [
       "dist/credentials/AnthropicApi.credentials.js",
       "dist/credentials/AzureOpenAiApi.credentials.js",
+      "dist/credentials/AzureEntraCognitiveServicesOAuth2Api.credentials.js",
       "dist/credentials/CohereApi.credentials.js",
       "dist/credentials/DeepSeekApi.credentials.js",
       "dist/credentials/GooglePalmApi.credentials.js",
@@ -149,6 +150,7 @@
   },
   "dependencies": {
     "@aws-sdk/client-sso-oidc": "3.666.0",
+    "@azure/identity": "4.3.0",
     "@getzep/zep-cloud": "1.0.12",
     "@getzep/zep-js": "0.9.0",
     "@google-ai/generativelanguage": "2.6.0",
@@ -173,6 +175,7 @@
     "@langchain/textsplitters": "0.1.0",
     "@modelcontextprotocol/sdk": "1.9.0",
     "@mozilla/readability": "0.6.0",
+    "@n8n/client-oauth2": "workspace:*",
     "@n8n/json-schema-to-zod": "workspace:*",
     "@n8n/typeorm": "0.3.20-12",
     "@n8n/typescript-config": "workspace:*",
diff --git a/packages/cli/src/controllers/oauth/oauth2-credential.controller.ts b/packages/cli/src/controllers/oauth/oauth2-credential.controller.ts
index bc4aecc75e..87bc390296 100644
--- a/packages/cli/src/controllers/oauth/oauth2-credential.controller.ts
+++ b/packages/cli/src/controllers/oauth/oauth2-credential.controller.ts
@@ -5,7 +5,7 @@
 import { Response } from 'express';
 import omit from 'lodash/omit';
 import set from 'lodash/set';
 import split from 'lodash/split';
-import { type ICredentialDataDecryptedObject, jsonStringify } from 'n8n-workflow';
+import { type ICredentialDataDecryptedObject, jsonParse, jsonStringify } from 'n8n-workflow';
 import pkceChallenge from 'pkce-challenge';
 import * as qs from 'querystring';
@@ -111,6 +111,7 @@ export class OAuth2CredentialController extends AbstractOAuthController {
 		} else if (oauthCredentials.authentication === 'body') {
 			options = {
 				body: {
+					...(oAuthOptions.body ?? {}),
 					client_id: oAuthOptions.clientId,
 					client_secret: oAuthOptions.clientSecret,
 				},
@@ -159,7 +160,7 @@ export class OAuth2CredentialController extends AbstractOAuthController {
 	}

 	private convertCredentialToOptions(credential: OAuth2CredentialData): ClientOAuth2Options {
-		return {
+		const options: ClientOAuth2Options = {
 			clientId: credential.clientId,
 			clientSecret: credential.clientSecret ?? '',
 			accessTokenUri: credential.accessTokenUrl ?? '',
@@ -170,5 +171,18 @@ export class OAuth2CredentialController extends AbstractOAuthController {
 			scopesSeparator: credential.scope?.includes(',') ? ',' : ' ',
 			ignoreSSLIssues: credential.ignoreSSLIssues ?? false,
 		};
+
+		if (
+			credential.additionalBodyProperties &&
+			typeof credential.additionalBodyProperties === 'string'
+		) {
+			const parsedBody = jsonParse<Record<string, string>>(credential.additionalBodyProperties);
+
+			if (parsedBody) {
+				options.body = parsedBody;
+			}
+		}
+
+		return options;
 	}
 }
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index b291fc00ca..2711d4dd86 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -666,6 +666,9 @@ importers:
       '@aws-sdk/client-sso-oidc':
         specifier: 3.666.0
         version: 3.666.0(@aws-sdk/client-sts@3.666.0)
+      '@azure/identity':
+        specifier: 4.3.0
+        version: 4.3.0
       '@getzep/zep-cloud':
         specifier: 1.0.12
         version: 1.0.12(@langchain/core@0.3.30(openai@4.78.1(encoding@0.1.13)(zod@3.24.1)))(encoding@0.1.13)(langchain@0.3.11(e320b1d8e94e7308fefdef3743329630))
@@ -738,6 +741,9 @@ importers:
       '@mozilla/readability':
         specifier: 0.6.0
         version: 0.6.0
+      '@n8n/client-oauth2':
+        specifier: workspace:*
+        version: link:../client-oauth2
       '@n8n/json-schema-to-zod':
         specifier: workspace:*
         version: link:../json-schema-to-zod