Mirror of https://github.com/Abdulazizzn/n8n-enterprise-unlocked.git, synced 2025-12-17 01:56:46 +00:00
feat(Ollama Credentials): Add optional API key support to Ollama credentials (Openwebui proxy) (#17857)
committed by GitHub
parent c4c46b8ff9
commit acfb79bd97
@@ -1,4 +1,9 @@
-import type { ICredentialTestRequest, ICredentialType, INodeProperties } from 'n8n-workflow';
+import type {
+	ICredentialTestRequest,
+	ICredentialType,
+	INodeProperties,
+	IAuthenticateGeneric,
+} from 'n8n-workflow';
 
 export class OllamaApi implements ICredentialType {
 	name = 'ollamaApi';
@@ -15,12 +20,30 @@ export class OllamaApi implements ICredentialType {
 			type: 'string',
 			default: 'http://localhost:11434',
 		},
+		{
+			displayName: 'API Key',
+			hint: 'When using Ollama behind a proxy with authentication (such as Open WebUI), provide the Bearer token/API key here. This is not required for the default Ollama installation',
+			name: 'apiKey',
+			type: 'string',
+			typeOptions: { password: true },
+			default: '',
+			required: false,
+		},
 	];
 
+	authenticate: IAuthenticateGeneric = {
+		type: 'generic',
+		properties: {
+			headers: {
+				Authorization: '=Bearer {{$credentials.apiKey}}',
+			},
+		},
+	};
+
 	test: ICredentialTestRequest = {
		request: {
 			baseURL: '={{ $credentials.baseUrl }}',
-			url: '/',
+			url: '/api/tags',
 			method: 'GET',
 		},
 	};
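For illustration only (not part of the commit): a minimal sketch of what the updated credential test amounts to, a GET against /api/tags on the configured base URL, with the Bearer header attached when an API key is provided. It assumes Node 18+'s global fetch and uses placeholder values.

// Sketch only: mirrors the credential's test request and Bearer authentication.
// `baseUrl` and `apiKey` are placeholders, not values from the commit.
const baseUrl = 'http://localhost:11434'; // or an Open WebUI proxy URL
const apiKey = process.env.OLLAMA_API_KEY ?? ''; // optional, only for authenticating proxies

async function testOllamaCredential(): Promise<boolean> {
	const response = await fetch(`${baseUrl}/api/tags`, {
		method: 'GET',
		// Attach the Authorization header only when a key is configured,
		// matching the conditional pattern the nodes use below.
		headers: apiKey ? { Authorization: `Bearer ${apiKey}` } : undefined,
	});
	return response.ok; // a plain Ollama install answers without the header
}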
@@ -49,10 +49,16 @@ export class EmbeddingsOllama implements INodeType {
 		this.logger.debug('Supply data for embeddings Ollama');
 		const modelName = this.getNodeParameter('model', itemIndex) as string;
 		const credentials = await this.getCredentials('ollamaApi');
+		const headers = credentials.apiKey
+			? {
+					Authorization: `Bearer ${credentials.apiKey as string}`,
+				}
+			: undefined;
 
 		const embeddings = new OllamaEmbeddings({
 			baseUrl: credentials.baseUrl as string,
 			model: modelName,
+			headers,
 		});
 
 		return {
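A minimal usage sketch of the resulting embeddings setup, assuming the @langchain/ollama OllamaEmbeddings class (the node's actual import is not shown in this diff) and placeholder model and proxy values; the conditional header mirrors the block added above.

// Usage sketch, not part of the commit: OllamaEmbeddings pointed at an
// Open WebUI proxy, forwarding the optional Bearer header as the node now does.
import { OllamaEmbeddings } from '@langchain/ollama';

const apiKey = process.env.OLLAMA_API_KEY; // undefined for a plain local install

const embeddings = new OllamaEmbeddings({
	baseUrl: 'http://localhost:3000/ollama', // example Open WebUI endpoint, placeholder
	model: 'nomic-embed-text', // placeholder model name
	headers: apiKey ? { Authorization: `Bearer ${apiKey}` } : undefined,
});

const vector = await embeddings.embedQuery('hello world'); // number[]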
@@ -58,6 +58,11 @@ export class LmChatOllama implements INodeType {
 
 		const modelName = this.getNodeParameter('model', itemIndex) as string;
 		const options = this.getNodeParameter('options', itemIndex, {}) as ChatOllamaInput;
+		const headers = credentials.apiKey
+			? {
+					Authorization: `Bearer ${credentials.apiKey as string}`,
+				}
+			: undefined;
 
 		const model = new ChatOllama({
 			...options,
@@ -66,6 +71,7 @@ export class LmChatOllama implements INodeType {
 			format: options.format === 'default' ? undefined : options.format,
 			callbacks: [new N8nLlmTracing(this)],
 			onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
+			headers,
 		});
 
 		return {
@@ -57,6 +57,11 @@ export class LmOllama implements INodeType {
 
 		const modelName = this.getNodeParameter('model', itemIndex) as string;
 		const options = this.getNodeParameter('options', itemIndex, {}) as object;
+		const headers = credentials.apiKey
+			? {
+					Authorization: `Bearer ${credentials.apiKey as string}`,
+				}
+			: undefined;
 
 		const model = new Ollama({
 			baseUrl: credentials.baseUrl as string,
@@ -64,6 +69,7 @@ export class LmOllama implements INodeType {
 			...options,
 			callbacks: [new N8nLlmTracing(this)],
 			onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
+			headers,
 		});
 
 		return {
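The same conditional header block is now repeated in EmbeddingsOllama, LmChatOllama, and LmOllama. A hypothetical helper, not part of this commit, could factor it out; buildOllamaHeaders below is an invented name for illustration.

// Hypothetical refactor sketch, not part of this commit: shares the
// Bearer-header logic that the three nodes each duplicate.
import type { ICredentialDataDecryptedObject } from 'n8n-workflow';

export function buildOllamaHeaders(
	credentials: ICredentialDataDecryptedObject,
): Record<string, string> | undefined {
	// Attach the Authorization header only when an API key was configured,
	// matching the per-node blocks added in this commit.
	return credentials.apiKey
		? { Authorization: `Bearer ${credentials.apiKey as string}` }
		: undefined;
}

// Per-node code could then become:
//   const headers = buildOllamaHeaders(credentials);
//   const model = new ChatOllama({ ...options, headers });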