feat(Ollama Chat Model Node): Add additional Ollama config parameters & fix vision (#9215)

Signed-off-by: Oleg Ivaniv <me@olegivaniv.com>
Co-authored-by: Michael Kret <michael.k@radency.com>
This commit is contained in:
oleg
2024-04-29 13:41:48 +02:00
committed by GitHub
parent 3fbcbce362
commit e17e767e70
3 changed files with 145 additions and 7 deletions

View File

@@ -7,6 +7,7 @@ import {
type SupplyData,
} from 'n8n-workflow';
import type { ChatOllamaInput } from '@langchain/community/chat_models/ollama';
import { ChatOllama } from '@langchain/community/chat_models/ollama';
import { logWrapper } from '../../../utils/logWrapper';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
@@ -54,12 +55,13 @@ export class LmChatOllama implements INodeType {
const credentials = await this.getCredentials('ollamaApi');
const modelName = this.getNodeParameter('model', itemIndex) as string;
const options = this.getNodeParameter('options', itemIndex, {}) as object;
const options = this.getNodeParameter('options', itemIndex, {}) as ChatOllamaInput;
const model = new ChatOllama({
...options,
baseUrl: credentials.baseUrl as string,
model: modelName,
...options,
format: options.format === 'default' ? undefined : options.format,
});
return {