feat(Ollama Credentials): Add optional API key support to Ollama credentials (Open WebUI proxy) (#17857)

This commit is contained in:
Julian van der Horst
2025-08-05 17:25:19 +02:00
committed by GitHub
parent c4c46b8ff9
commit acfb79bd97
4 changed files with 43 additions and 2 deletions

View File

@@ -58,6 +58,11 @@ export class LmChatOllama implements INodeType {
const modelName = this.getNodeParameter('model', itemIndex) as string;
const options = this.getNodeParameter('options', itemIndex, {}) as ChatOllamaInput;
const headers = credentials.apiKey
? {
Authorization: `Bearer ${credentials.apiKey as string}`,
}
: undefined;
const model = new ChatOllama({
...options,
@@ -66,6 +71,7 @@ export class LmChatOllama implements INodeType {
format: options.format === 'default' ? undefined : options.format,
callbacks: [new N8nLlmTracing(this)],
onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
headers,
});
return {

View File

@@ -57,6 +57,11 @@ export class LmOllama implements INodeType {
const modelName = this.getNodeParameter('model', itemIndex) as string;
const options = this.getNodeParameter('options', itemIndex, {}) as object;
const headers = credentials.apiKey
? {
Authorization: `Bearer ${credentials.apiKey as string}`,
}
: undefined;
const model = new Ollama({
baseUrl: credentials.baseUrl as string,
@@ -64,6 +69,7 @@ export class LmOllama implements INodeType {
...options,
callbacks: [new N8nLlmTracing(this)],
onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
headers,
});
return {