feat(Ollama Credentials): Add optional API key support to Ollama credentials (Openwebui proxy) (#17857)

This commit is contained in:
Julian van der Horst
2025-08-05 17:25:19 +02:00
committed by GitHub
parent c4c46b8ff9
commit acfb79bd97
4 changed files with 43 additions and 2 deletions

View File

@@ -1,4 +1,9 @@
import type {
	ICredentialTestRequest,
	ICredentialType,
	INodeProperties,
	IAuthenticateGeneric,
} from 'n8n-workflow';
export class OllamaApi implements ICredentialType {
	name = 'ollamaApi';
@@ -15,12 +20,30 @@ export class OllamaApi implements ICredentialType {
			type: 'string',
			default: 'http://localhost:11434',
		},
{
displayName: 'API Key',
hint: 'When using Ollama behind a proxy with authentication (such as Open WebUI), provide the Bearer token/API key here. This is not required for the default Ollama installation',
name: 'apiKey',
type: 'string',
typeOptions: { password: true },
default: '',
required: false,
},
	];
authenticate: IAuthenticateGeneric = {
type: 'generic',
properties: {
headers: {
Authorization: '=Bearer {{$credentials.apiKey}}',
},
},
};
	test: ICredentialTestRequest = {
		request: {
			baseURL: '={{ $credentials.baseUrl }}',
			url: '/api/tags',
			method: 'GET',
		},
	};

View File

@@ -49,10 +49,16 @@ export class EmbeddingsOllama implements INodeType {
		this.logger.debug('Supply data for embeddings Ollama');
		const modelName = this.getNodeParameter('model', itemIndex) as string;
		const credentials = await this.getCredentials('ollamaApi');
const headers = credentials.apiKey
? {
Authorization: `Bearer ${credentials.apiKey as string}`,
}
: undefined;
		const embeddings = new OllamaEmbeddings({
			baseUrl: credentials.baseUrl as string,
			model: modelName,
			headers,
		});
		return {

View File

@@ -58,6 +58,11 @@ export class LmChatOllama implements INodeType {
		const modelName = this.getNodeParameter('model', itemIndex) as string;
		const options = this.getNodeParameter('options', itemIndex, {}) as ChatOllamaInput;
const headers = credentials.apiKey
? {
Authorization: `Bearer ${credentials.apiKey as string}`,
}
: undefined;
		const model = new ChatOllama({
			...options,
@@ -66,6 +71,7 @@ export class LmChatOllama implements INodeType {
			format: options.format === 'default' ? undefined : options.format,
			callbacks: [new N8nLlmTracing(this)],
			onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
			headers,
		});

		return {

View File

@@ -57,6 +57,11 @@ export class LmOllama implements INodeType {
		const modelName = this.getNodeParameter('model', itemIndex) as string;
		const options = this.getNodeParameter('options', itemIndex, {}) as object;
const headers = credentials.apiKey
? {
Authorization: `Bearer ${credentials.apiKey as string}`,
}
: undefined;
		const model = new Ollama({
			baseUrl: credentials.baseUrl as string,
@@ -64,6 +69,7 @@ export class LmOllama implements INodeType {
			...options,
			callbacks: [new N8nLlmTracing(this)],
			onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
			headers,
		});

		return {