feat: Add AI tool building capabilities (#7336)
Github issue / Community forum post (link here to close automatically): https://community.n8n.io/t/langchain-memory-chat/23733 --------- Signed-off-by: Oleg Ivaniv <me@olegivaniv.com> Co-authored-by: Oleg Ivaniv <me@olegivaniv.com> Co-authored-by: Val <68596159+valya@users.noreply.github.com> Co-authored-by: Alex Grozav <alex@grozav.com> Co-authored-by: कारतोफ्फेलस्क्रिप्ट™ <aditya@netroy.in> Co-authored-by: Deborah <deborah@starfallprojects.co.uk> Co-authored-by: Jesper Bylund <mail@jesperbylund.com> Co-authored-by: Jon <jonathan.bennetts@gmail.com> Co-authored-by: Michael Kret <88898367+michael-radency@users.noreply.github.com> Co-authored-by: Giulio Andreini <andreini@netseven.it> Co-authored-by: Mason Geloso <Mason.geloso@gmail.com> Co-authored-by: Mason Geloso <hone@Masons-Mac-mini.local> Co-authored-by: Mutasem Aldmour <mutasem@n8n.io>
20
packages/@n8n/nodes-langchain/.editorconfig
Normal file
@@ -0,0 +1,20 @@
|
||||
# EditorConfig for @n8n/nodes-langchain — https://editorconfig.org
# Stop EditorConfig from searching parent directories for more config files.
root = true

# Defaults for every file in the package.
[*]
charset = utf-8
indent_style = tab
indent_size = 2
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true

# package.json is rewritten by package managers, which emit two-space indents.
[package.json]
indent_style = space
indent_size = 2

# Trailing whitespace is significant in Markdown (two spaces = hard line break).
[*.md]
trim_trailing_whitespace = false

# YAML forbids tabs for indentation.
[*.yml]
indent_style = space
indent_size = 2
||||
155
packages/@n8n/nodes-langchain/.eslintrc.js
Normal file
@@ -0,0 +1,155 @@
|
||||
const sharedOptions = require('@n8n_io/eslint-config/shared');
|
||||
|
||||
/**
|
||||
* @type {import('@types/eslint').ESLint.ConfigData}
|
||||
*/
|
||||
module.exports = {
|
||||
extends: ['@n8n_io/eslint-config/node'],
|
||||
|
||||
...sharedOptions(__dirname),
|
||||
|
||||
ignorePatterns: ['index.js', '**/package.json'],
|
||||
|
||||
rules: {
|
||||
// TODO: remove all the following rules
|
||||
eqeqeq: 'warn',
|
||||
'id-denylist': 'warn',
|
||||
'import/extensions': 'warn',
|
||||
'import/order': 'warn',
|
||||
'prefer-spread': 'warn',
|
||||
'import/no-extraneous-dependencies': 'warn',
|
||||
|
||||
'@typescript-eslint/naming-convention': ['error', { selector: 'memberLike', format: null }],
|
||||
'@typescript-eslint/no-explicit-any': 'warn', //812 warnings, better to fix in separate PR
|
||||
'@typescript-eslint/no-non-null-assertion': 'warn', //665 errors, better to fix in separate PR
|
||||
'@typescript-eslint/no-unsafe-assignment': 'warn', //7084 problems, better to fix in separate PR
|
||||
'@typescript-eslint/no-unsafe-call': 'warn', //541 errors, better to fix in separate PR
|
||||
'@typescript-eslint/no-unsafe-member-access': 'warn', //4591 errors, better to fix in separate PR
|
||||
'@typescript-eslint/no-unsafe-return': 'warn', //438 errors, better to fix in separate PR
|
||||
'@typescript-eslint/no-unused-expressions': ['error', { allowTernary: true }],
|
||||
'@typescript-eslint/restrict-template-expressions': 'warn', //1152 errors, better to fix in separate PR
|
||||
'@typescript-eslint/unbound-method': 'warn',
|
||||
'@typescript-eslint/ban-ts-comment': ['warn', { 'ts-ignore': true }],
|
||||
'@typescript-eslint/prefer-nullish-coalescing': 'warn',
|
||||
'@typescript-eslint/no-base-to-string': 'warn',
|
||||
'@typescript-eslint/no-redundant-type-constituents': 'warn',
|
||||
'@typescript-eslint/no-unused-vars': 'warn',
|
||||
'@typescript-eslint/no-unsafe-argument': 'warn',
|
||||
'@typescript-eslint/prefer-optional-chain': 'warn',
|
||||
'@typescript-eslint/restrict-plus-operands': 'warn',
|
||||
},
|
||||
|
||||
overrides: [
|
||||
{
|
||||
files: ['./credentials/*.ts'],
|
||||
plugins: ['eslint-plugin-n8n-nodes-base'],
|
||||
rules: {
|
||||
'n8n-nodes-base/cred-class-field-authenticate-type-assertion': 'error',
|
||||
'n8n-nodes-base/cred-class-field-display-name-missing-oauth2': 'error',
|
||||
'n8n-nodes-base/cred-class-field-display-name-miscased': 'error',
|
||||
'n8n-nodes-base/cred-class-field-documentation-url-missing': 'error',
|
||||
'n8n-nodes-base/cred-class-field-name-missing-oauth2': 'error',
|
||||
'n8n-nodes-base/cred-class-field-name-unsuffixed': 'error',
|
||||
'n8n-nodes-base/cred-class-field-name-uppercase-first-char': 'error',
|
||||
'n8n-nodes-base/cred-class-field-properties-assertion': 'error',
|
||||
'n8n-nodes-base/cred-class-field-type-options-password-missing': 'error',
|
||||
'n8n-nodes-base/cred-class-name-missing-oauth2-suffix': 'error',
|
||||
'n8n-nodes-base/cred-class-name-unsuffixed': 'error',
|
||||
'n8n-nodes-base/cred-filename-against-convention': 'error',
|
||||
},
|
||||
},
|
||||
{
|
||||
files: ['./nodes/**/*.ts'],
|
||||
plugins: ['eslint-plugin-n8n-nodes-base'],
|
||||
rules: {
|
||||
'n8n-nodes-base/node-class-description-credentials-name-unsuffixed': 'error',
|
||||
'n8n-nodes-base/node-class-description-display-name-unsuffixed-trigger-node': 'error',
|
||||
'n8n-nodes-base/node-class-description-empty-string': 'error',
|
||||
'n8n-nodes-base/node-class-description-icon-not-svg': 'error',
|
||||
'n8n-nodes-base/node-class-description-inputs-wrong-regular-node': 'off',
|
||||
'n8n-nodes-base/node-class-description-outputs-wrong': 'off',
|
||||
'n8n-nodes-base/node-class-description-inputs-wrong-trigger-node': 'error',
|
||||
'n8n-nodes-base/node-class-description-missing-subtitle': 'error',
|
||||
'n8n-nodes-base/node-class-description-non-core-color-present': 'error',
|
||||
'n8n-nodes-base/node-class-description-name-miscased': 'error',
|
||||
'n8n-nodes-base/node-class-description-name-unsuffixed-trigger-node': 'error',
|
||||
'n8n-nodes-base/node-dirname-against-convention': 'error',
|
||||
'n8n-nodes-base/node-execute-block-double-assertion-for-items': 'error',
|
||||
'n8n-nodes-base/node-execute-block-wrong-error-thrown': 'error',
|
||||
'n8n-nodes-base/node-filename-against-convention': 'error',
|
||||
'n8n-nodes-base/node-param-array-type-assertion': 'error',
|
||||
'n8n-nodes-base/node-param-collection-type-unsorted-items': 'error',
|
||||
'n8n-nodes-base/node-param-color-type-unused': 'error',
|
||||
'n8n-nodes-base/node-param-default-missing': 'error',
|
||||
'n8n-nodes-base/node-param-default-wrong-for-boolean': 'error',
|
||||
'n8n-nodes-base/node-param-default-wrong-for-collection': 'error',
|
||||
'n8n-nodes-base/node-param-default-wrong-for-fixed-collection': 'error',
|
||||
'n8n-nodes-base/node-param-default-wrong-for-fixed-collection': 'error',
|
||||
'n8n-nodes-base/node-param-default-wrong-for-multi-options': 'error',
|
||||
'n8n-nodes-base/node-param-default-wrong-for-number': 'error',
|
||||
'n8n-nodes-base/node-param-default-wrong-for-simplify': 'error',
|
||||
'n8n-nodes-base/node-param-default-wrong-for-string': 'error',
|
||||
'n8n-nodes-base/node-param-description-boolean-without-whether': 'error',
|
||||
'n8n-nodes-base/node-param-description-comma-separated-hyphen': 'error',
|
||||
'n8n-nodes-base/node-param-description-empty-string': 'error',
|
||||
'n8n-nodes-base/node-param-description-excess-final-period': 'error',
|
||||
'n8n-nodes-base/node-param-description-excess-inner-whitespace': 'error',
|
||||
'n8n-nodes-base/node-param-description-identical-to-display-name': 'error',
|
||||
'n8n-nodes-base/node-param-description-line-break-html-tag': 'error',
|
||||
'n8n-nodes-base/node-param-description-lowercase-first-char': 'error',
|
||||
'n8n-nodes-base/node-param-description-miscased-id': 'error',
|
||||
'n8n-nodes-base/node-param-description-miscased-json': 'error',
|
||||
'n8n-nodes-base/node-param-description-miscased-url': 'error',
|
||||
'n8n-nodes-base/node-param-description-missing-final-period': 'error',
|
||||
'n8n-nodes-base/node-param-description-missing-for-ignore-ssl-issues': 'error',
|
||||
'n8n-nodes-base/node-param-description-missing-for-return-all': 'error',
|
||||
'n8n-nodes-base/node-param-description-missing-for-simplify': 'error',
|
||||
'n8n-nodes-base/node-param-description-missing-from-dynamic-multi-options': 'error',
|
||||
'n8n-nodes-base/node-param-description-missing-from-dynamic-options': 'error',
|
||||
'n8n-nodes-base/node-param-description-missing-from-limit': 'error',
|
||||
'n8n-nodes-base/node-param-description-unencoded-angle-brackets': 'error',
|
||||
'n8n-nodes-base/node-param-description-unneeded-backticks': 'error',
|
||||
'n8n-nodes-base/node-param-description-untrimmed': 'error',
|
||||
'n8n-nodes-base/node-param-description-url-missing-protocol': 'error',
|
||||
'n8n-nodes-base/node-param-description-weak': 'error',
|
||||
'n8n-nodes-base/node-param-description-wrong-for-dynamic-multi-options': 'error',
|
||||
'n8n-nodes-base/node-param-description-wrong-for-dynamic-options': 'error',
|
||||
'n8n-nodes-base/node-param-description-wrong-for-ignore-ssl-issues': 'error',
|
||||
'n8n-nodes-base/node-param-description-wrong-for-limit': 'error',
|
||||
'n8n-nodes-base/node-param-description-wrong-for-return-all': 'error',
|
||||
'n8n-nodes-base/node-param-description-wrong-for-simplify': 'error',
|
||||
'n8n-nodes-base/node-param-description-wrong-for-upsert': 'error',
|
||||
'n8n-nodes-base/node-param-display-name-excess-inner-whitespace': 'error',
|
||||
'n8n-nodes-base/node-param-display-name-miscased-id': 'error',
|
||||
'n8n-nodes-base/node-param-display-name-miscased': 'error',
|
||||
'n8n-nodes-base/node-param-display-name-not-first-position': 'error',
|
||||
'n8n-nodes-base/node-param-display-name-untrimmed': 'error',
|
||||
'n8n-nodes-base/node-param-display-name-wrong-for-dynamic-multi-options': 'error',
|
||||
'n8n-nodes-base/node-param-display-name-wrong-for-dynamic-options': 'error',
|
||||
'n8n-nodes-base/node-param-display-name-wrong-for-simplify': 'error',
|
||||
'n8n-nodes-base/node-param-display-name-wrong-for-update-fields': 'error',
|
||||
'n8n-nodes-base/node-param-min-value-wrong-for-limit': 'error',
|
||||
'n8n-nodes-base/node-param-multi-options-type-unsorted-items': 'error',
|
||||
'n8n-nodes-base/node-param-name-untrimmed': 'error',
|
||||
'n8n-nodes-base/node-param-operation-option-action-wrong-for-get-many': 'error',
|
||||
'n8n-nodes-base/node-param-operation-option-description-wrong-for-get-many': 'error',
|
||||
'n8n-nodes-base/node-param-operation-option-without-action': 'error',
|
||||
'n8n-nodes-base/node-param-operation-without-no-data-expression': 'error',
|
||||
'n8n-nodes-base/node-param-option-description-identical-to-name': 'error',
|
||||
'n8n-nodes-base/node-param-option-name-containing-star': 'error',
|
||||
'n8n-nodes-base/node-param-option-name-duplicate': 'error',
|
||||
'n8n-nodes-base/node-param-option-name-wrong-for-get-many': 'error',
|
||||
'n8n-nodes-base/node-param-option-name-wrong-for-upsert': 'error',
|
||||
'n8n-nodes-base/node-param-option-value-duplicate': 'error',
|
||||
'n8n-nodes-base/node-param-options-type-unsorted-items': 'error',
|
||||
'n8n-nodes-base/node-param-placeholder-miscased-id': 'error',
|
||||
'n8n-nodes-base/node-param-placeholder-missing-email': 'error',
|
||||
'n8n-nodes-base/node-param-required-false': 'error',
|
||||
'n8n-nodes-base/node-param-resource-with-plural-option': 'error',
|
||||
'n8n-nodes-base/node-param-resource-without-no-data-expression': 'error',
|
||||
'n8n-nodes-base/node-param-type-options-missing-from-limit': 'error',
|
||||
'n8n-nodes-base/node-param-type-options-password-missing': 'error',
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
8
packages/@n8n/nodes-langchain/.gitignore
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
# Installed dependencies
node_modules
# macOS Finder metadata
.DS_Store
# Temporary working directories
.tmp
tmp
# Compiled build output
dist
# npm crash/debug logs
npm-debug.log*
# This monorepo uses pnpm; ignore stray yarn lockfiles
yarn.lock
# Personal VS Code debug configuration
.vscode/launch.json
|
||||
2
packages/@n8n/nodes-langchain/.npmignore
Normal file
@@ -0,0 +1,2 @@
|
||||
# Keep macOS metadata and TypeScript incremental-build caches
# out of the published npm package.
.DS_Store
*.tsbuildinfo
|
||||
51
packages/@n8n/nodes-langchain/.prettierrc.js
Normal file
@@ -0,0 +1,51 @@
|
||||
module.exports = {
|
||||
/**
|
||||
* https://prettier.io/docs/en/options.html#semicolons
|
||||
*/
|
||||
semi: true,
|
||||
|
||||
/**
|
||||
* https://prettier.io/docs/en/options.html#trailing-commas
|
||||
*/
|
||||
trailingComma: 'all',
|
||||
|
||||
/**
|
||||
* https://prettier.io/docs/en/options.html#bracket-spacing
|
||||
*/
|
||||
bracketSpacing: true,
|
||||
|
||||
/**
|
||||
* https://prettier.io/docs/en/options.html#tabs
|
||||
*/
|
||||
useTabs: true,
|
||||
|
||||
/**
|
||||
* https://prettier.io/docs/en/options.html#tab-width
|
||||
*/
|
||||
tabWidth: 2,
|
||||
|
||||
/**
|
||||
* https://prettier.io/docs/en/options.html#arrow-function-parentheses
|
||||
*/
|
||||
arrowParens: 'always',
|
||||
|
||||
/**
|
||||
* https://prettier.io/docs/en/options.html#quotes
|
||||
*/
|
||||
singleQuote: true,
|
||||
|
||||
/**
|
||||
* https://prettier.io/docs/en/options.html#quote-props
|
||||
*/
|
||||
quoteProps: 'as-needed',
|
||||
|
||||
/**
|
||||
* https://prettier.io/docs/en/options.html#end-of-line
|
||||
*/
|
||||
endOfLine: 'lf',
|
||||
|
||||
/**
|
||||
* https://prettier.io/docs/en/options.html#print-width
|
||||
*/
|
||||
printWidth: 100,
|
||||
};
|
||||
7
packages/@n8n/nodes-langchain/.vscode/extensions.json
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"recommendations": [
|
||||
"dbaeumer.vscode-eslint",
|
||||
"EditorConfig.EditorConfig",
|
||||
		"esbenp.prettier-vscode"
	]
|
||||
}
|
||||
85
packages/@n8n/nodes-langchain/LICENSE.md
Normal file
@@ -0,0 +1,85 @@
|
||||
# License
|
||||
|
||||
Portions of this software are licensed as follows:
|
||||
|
||||
- Content of branches other than the main branch (i.e. "master") are not licensed.
|
||||
- All source code files that contain ".ee." in their filename are licensed under the
|
||||
"n8n Enterprise License" defined in "LICENSE_EE.md".
|
||||
- All third party components incorporated into the n8n Software are licensed under the original license
|
||||
provided by the owner of the applicable component.
|
||||
- Content outside of the above mentioned files or restrictions is available under the "Sustainable Use
|
||||
License" as defined below.
|
||||
|
||||
## Sustainable Use License
|
||||
|
||||
Version 1.0
|
||||
|
||||
### Acceptance
|
||||
|
||||
By using the software, you agree to all of the terms and conditions below.
|
||||
|
||||
### Copyright License
|
||||
|
||||
The licensor grants you a non-exclusive, royalty-free, worldwide, non-sublicensable, non-transferable license
|
||||
to use, copy, distribute, make available, and prepare derivative works of the software, in each case subject
|
||||
to the limitations below.
|
||||
|
||||
### Limitations
|
||||
|
||||
You may use or modify the software only for your own internal business purposes or for non-commercial or
|
||||
personal use. You may distribute the software or provide it to others only if you do so free of charge for
|
||||
non-commercial purposes. You may not alter, remove, or obscure any licensing, copyright, or other notices of
|
||||
the licensor in the software. Any use of the licensor’s trademarks is subject to applicable law.
|
||||
|
||||
### Patents
|
||||
|
||||
The licensor grants you a license, under any patent claims the licensor can license, or becomes able to
|
||||
license, to make, have made, use, sell, offer for sale, import and have imported the software, in each case
|
||||
subject to the limitations and conditions in this license. This license does not cover any patent claims that
|
||||
you cause to be infringed by modifications or additions to the software. If you or your company make any
|
||||
written claim that the software infringes or contributes to infringement of any patent, your patent license
|
||||
for the software granted under these terms ends immediately. If your company makes such a claim, your patent
|
||||
license ends immediately for work on behalf of your company.
|
||||
|
||||
### Notices
|
||||
|
||||
You must ensure that anyone who gets a copy of any part of the software from you also gets a copy of these
|
||||
terms. If you modify the software, you must include in any modified copies of the software a prominent notice
|
||||
stating that you have modified the software.
|
||||
|
||||
### No Other Rights
|
||||
|
||||
These terms do not imply any licenses other than those expressly granted in these terms.
|
||||
|
||||
### Termination
|
||||
|
||||
If you use the software in violation of these terms, such use is not licensed, and your license will
|
||||
automatically terminate. If the licensor provides you with a notice of your violation, and you cease all
|
||||
violation of this license no later than 30 days after you receive that notice, your license will be reinstated
|
||||
retroactively. However, if you violate these terms after such reinstatement, any additional violation of these
|
||||
terms will cause your license to terminate automatically and permanently.
|
||||
|
||||
### No Liability
|
||||
|
||||
As far as the law allows, the software comes as is, without any warranty or condition, and the licensor will
|
||||
not be liable to you for any damages arising out of these terms or the use or nature of the software, under
|
||||
any kind of legal claim.
|
||||
|
||||
### Definitions
|
||||
|
||||
The “licensor” is the entity offering these terms.
|
||||
|
||||
The “software” is the software the licensor makes available under these terms, including any portion of it.
|
||||
|
||||
“You” refers to the individual or entity agreeing to these terms.
|
||||
|
||||
“Your company” is any legal entity, sole proprietorship, or other kind of organization that you work for, plus
|
||||
all organizations that have control over, are under the control of, or are under common control with that
|
||||
organization. Control means ownership of substantially all the assets of an entity, or the power to direct its
|
||||
management and policies by vote, contract, or otherwise. Control can be direct or indirect.
|
||||
|
||||
“Your license” is the license granted to you for the software under these terms.
|
||||
|
||||
“Use” means anything you do with the software requiring your license.
|
||||
|
||||
“Trademark” means trademarks, service marks, and similar rights.
|
||||
13
packages/@n8n/nodes-langchain/README.md
Normal file
@@ -0,0 +1,13 @@
|
||||

|
||||
|
||||
# n8n-nodes-langchain
|
||||
|
||||
This repo contains nodes to use n8n in combination with [LangChain](https://langchain.com/).
|
||||
|
||||
These nodes are still in Beta state and are only compatible with the Docker image `docker.n8n.io/n8nio/n8n:ai-beta`.
|
||||
|
||||
## License
|
||||
|
||||
n8n is [fair-code](http://faircode.io) distributed under the [**Sustainable Use License**](https://github.com/n8n-io/n8n/blob/master/packages/cli/LICENSE.md).
|
||||
|
||||
Additional information about the license can be found in the [docs](https://docs.n8n.io/reference/license/).
|
||||
@@ -0,0 +1,50 @@
|
||||
import type {
|
||||
IAuthenticateGeneric,
|
||||
ICredentialTestRequest,
|
||||
ICredentialType,
|
||||
INodeProperties,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
export class AnthropicApi implements ICredentialType {
|
||||
name = 'anthropicApi';
|
||||
|
||||
displayName = 'Anthropic';
|
||||
|
||||
documentationUrl = 'anthropic';
|
||||
|
||||
properties: INodeProperties[] = [
|
||||
{
|
||||
displayName: 'API Key',
|
||||
name: 'apiKey',
|
||||
type: 'string',
|
||||
typeOptions: { password: true },
|
||||
required: true,
|
||||
default: '',
|
||||
},
|
||||
];
|
||||
|
||||
authenticate: IAuthenticateGeneric = {
|
||||
type: 'generic',
|
||||
properties: {
|
||||
headers: {
|
||||
'x-api-key': '={{$credentials.apiKey}}',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
test: ICredentialTestRequest = {
|
||||
request: {
|
||||
baseURL: 'https://api.anthropic.com',
|
||||
url: '/v1/complete',
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'anthropic-version': '2023-06-01',
|
||||
},
|
||||
body: {
|
||||
model: 'claude-2',
|
||||
prompt: '\n\nHuman: Hello, world!\n\nAssistant:',
|
||||
max_tokens_to_sample: 256,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,45 @@
|
||||
import type {
|
||||
IAuthenticateGeneric,
|
||||
ICredentialTestRequest,
|
||||
ICredentialType,
|
||||
INodeProperties,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
export class CohereApi implements ICredentialType {
|
||||
name = 'cohereApi';
|
||||
|
||||
displayName = 'CohereApi';
|
||||
|
||||
documentationUrl = 'cohere';
|
||||
|
||||
properties: INodeProperties[] = [
|
||||
{
|
||||
displayName: 'API Key',
|
||||
name: 'apiKey',
|
||||
type: 'string',
|
||||
typeOptions: { password: true },
|
||||
required: true,
|
||||
default: '',
|
||||
},
|
||||
];
|
||||
|
||||
authenticate: IAuthenticateGeneric = {
|
||||
type: 'generic',
|
||||
properties: {
|
||||
headers: {
|
||||
Authorization: '=Bearer {{$credentials.apiKey}}',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
test: ICredentialTestRequest = {
|
||||
request: {
|
||||
baseURL: 'https://api.cohere.ai',
|
||||
url: '/v1/detect-language',
|
||||
method: 'POST',
|
||||
body: {
|
||||
texts: ['hello'],
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,47 @@
|
||||
import type {
|
||||
IAuthenticateGeneric,
|
||||
ICredentialTestRequest,
|
||||
ICredentialType,
|
||||
INodeProperties,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
export class GooglePalmApi implements ICredentialType {
|
||||
name = 'googlePalmApi';
|
||||
|
||||
displayName = 'GooglePaLMApi';
|
||||
|
||||
documentationUrl = 'googlePalm';
|
||||
|
||||
properties: INodeProperties[] = [
|
||||
{
|
||||
displayName: 'Host',
|
||||
name: 'host',
|
||||
required: true,
|
||||
type: 'string',
|
||||
default: 'https://generativelanguage.googleapis.com',
|
||||
},
|
||||
{
|
||||
displayName: 'API Key',
|
||||
name: 'apiKey',
|
||||
type: 'string',
|
||||
typeOptions: { password: true },
|
||||
required: true,
|
||||
default: '',
|
||||
},
|
||||
];
|
||||
|
||||
authenticate: IAuthenticateGeneric = {
|
||||
type: 'generic',
|
||||
properties: {
|
||||
qs: {
|
||||
key: '={{$credentials.apiKey}}',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
test: ICredentialTestRequest = {
|
||||
request: {
|
||||
baseURL: '={{$credentials.host}}/v1beta3/models',
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,41 @@
|
||||
import type {
|
||||
IAuthenticateGeneric,
|
||||
ICredentialTestRequest,
|
||||
ICredentialType,
|
||||
INodeProperties,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
export class HuggingFaceApi implements ICredentialType {
|
||||
name = 'huggingFaceApi';
|
||||
|
||||
displayName = 'HuggingFaceApi';
|
||||
|
||||
documentationUrl = 'huggingface';
|
||||
|
||||
properties: INodeProperties[] = [
|
||||
{
|
||||
displayName: 'API Key',
|
||||
name: 'apiKey',
|
||||
type: 'string',
|
||||
typeOptions: { password: true },
|
||||
required: true,
|
||||
default: '',
|
||||
},
|
||||
];
|
||||
|
||||
authenticate: IAuthenticateGeneric = {
|
||||
type: 'generic',
|
||||
properties: {
|
||||
headers: {
|
||||
Authorization: '=Bearer {{$credentials.apiKey}}',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
test: ICredentialTestRequest = {
|
||||
request: {
|
||||
baseURL: 'https://api-inference.huggingface.co',
|
||||
url: '/models/gpt2',
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,54 @@
|
||||
import type {
|
||||
IAuthenticateGeneric,
|
||||
ICredentialTestRequest,
|
||||
ICredentialType,
|
||||
INodeProperties,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
export class MotorheadApi implements ICredentialType {
|
||||
name = 'motorheadApi';
|
||||
|
||||
displayName = 'MotorheadApi';
|
||||
|
||||
documentationUrl = 'motorhead';
|
||||
|
||||
properties: INodeProperties[] = [
|
||||
{
|
||||
displayName: 'Host',
|
||||
name: 'host',
|
||||
required: true,
|
||||
type: 'string',
|
||||
default: 'https://api.getmetal.io/v1',
|
||||
},
|
||||
{
|
||||
displayName: 'API Key',
|
||||
name: 'apiKey',
|
||||
type: 'string',
|
||||
typeOptions: { password: true },
|
||||
required: true,
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Client ID',
|
||||
name: 'clientId',
|
||||
type: 'string',
|
||||
default: '',
|
||||
},
|
||||
];
|
||||
|
||||
authenticate: IAuthenticateGeneric = {
|
||||
type: 'generic',
|
||||
properties: {
|
||||
headers: {
|
||||
'x-metal-client-id': '={{$credentials.clientId}}',
|
||||
'x-metal-api-key': '={{$credentials.apiKey}}',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
test: ICredentialTestRequest = {
|
||||
request: {
|
||||
baseURL: '={{$credentials.host}}/keys/current',
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,34 @@
|
||||
import type { ICredentialTestRequest, ICredentialType, INodeProperties } from 'n8n-workflow';
|
||||
|
||||
export class OllamaApi implements ICredentialType {
|
||||
name = 'ollamaApi';
|
||||
|
||||
displayName = 'Ollama';
|
||||
|
||||
documentationUrl = 'ollama';
|
||||
|
||||
properties: INodeProperties[] = [
|
||||
{
|
||||
displayName: 'Base URL',
|
||||
name: 'baseUrl',
|
||||
required: true,
|
||||
type: 'string',
|
||||
default: 'http://localhost:11434',
|
||||
},
|
||||
];
|
||||
|
||||
test: ICredentialTestRequest = {
|
||||
request: {
|
||||
baseURL: '={{ $credentials.baseUrl }}',
|
||||
url: '/api/generate',
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'anthropic-version': '2023-06-01',
|
||||
},
|
||||
body: {
|
||||
model: 'llama2',
|
||||
prompt: 'Hello',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,50 @@
|
||||
import type {
|
||||
IAuthenticateGeneric,
|
||||
ICredentialTestRequest,
|
||||
ICredentialType,
|
||||
INodeProperties,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
export class PineconeApi implements ICredentialType {
|
||||
name = 'pineconeApi';
|
||||
|
||||
displayName = 'PineconeApi';
|
||||
|
||||
documentationUrl = 'pinecone';
|
||||
|
||||
properties: INodeProperties[] = [
|
||||
{
|
||||
displayName: 'API Key',
|
||||
name: 'apiKey',
|
||||
type: 'string',
|
||||
typeOptions: { password: true },
|
||||
required: true,
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Environment',
|
||||
name: 'environment',
|
||||
type: 'string',
|
||||
required: true,
|
||||
default: 'us-central1-gcp',
|
||||
},
|
||||
];
|
||||
|
||||
authenticate: IAuthenticateGeneric = {
|
||||
type: 'generic',
|
||||
properties: {
|
||||
headers: {
|
||||
'Api-Key': '={{$credentials.apiKey}}',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
test: ICredentialTestRequest = {
|
||||
request: {
|
||||
baseURL: '=https://controller.{{$credentials.environment}}.pinecone.io/databases',
|
||||
headers: {
|
||||
accept: 'application/json; charset=utf-8',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,41 @@
|
||||
import type {
|
||||
IAuthenticateGeneric,
|
||||
ICredentialTestRequest,
|
||||
ICredentialType,
|
||||
INodeProperties,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
export class SerpApi implements ICredentialType {
|
||||
name = 'serpApi';
|
||||
|
||||
displayName = 'SerpAPI';
|
||||
|
||||
documentationUrl = 'serpapi';
|
||||
|
||||
properties: INodeProperties[] = [
|
||||
{
|
||||
displayName: 'API Key',
|
||||
name: 'apiKey',
|
||||
type: 'string',
|
||||
typeOptions: { password: true },
|
||||
required: true,
|
||||
default: '',
|
||||
},
|
||||
];
|
||||
|
||||
authenticate: IAuthenticateGeneric = {
|
||||
type: 'generic',
|
||||
properties: {
|
||||
qs: {
|
||||
api_key: '={{$credentials.apiKey}}',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
test: ICredentialTestRequest = {
|
||||
request: {
|
||||
baseURL: 'https://serpapi.com',
|
||||
url: '/account.json ',
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,45 @@
|
||||
import type {
|
||||
IAuthenticateGeneric,
|
||||
ICredentialTestRequest,
|
||||
ICredentialType,
|
||||
INodeProperties,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
export class WolframAlphaApi implements ICredentialType {
|
||||
name = 'wolframAlphaApi';
|
||||
|
||||
displayName = 'WolframAlphaApi';
|
||||
|
||||
documentationUrl = 'wolframalpha';
|
||||
|
||||
properties: INodeProperties[] = [
|
||||
{
|
||||
displayName: 'App ID',
|
||||
name: 'appId',
|
||||
type: 'string',
|
||||
typeOptions: { password: true },
|
||||
required: true,
|
||||
default: '',
|
||||
},
|
||||
];
|
||||
|
||||
authenticate: IAuthenticateGeneric = {
|
||||
type: 'generic',
|
||||
properties: {
|
||||
qs: {
|
||||
api_key: '={{$credentials.appId}}',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
test: ICredentialTestRequest = {
|
||||
request: {
|
||||
baseURL: 'https://api.wolframalpha.com/v1',
|
||||
url: '=/simple',
|
||||
qs: {
|
||||
i: 'How much is 1 1',
|
||||
appid: '={{$credentials.appId}}',
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,55 @@
|
||||
import type {
|
||||
IAuthenticateGeneric,
|
||||
ICredentialTestRequest,
|
||||
ICredentialType,
|
||||
INodeProperties,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
export class XataApi implements ICredentialType {
|
||||
name = 'xataApi';
|
||||
|
||||
displayName = 'Xata Api';
|
||||
|
||||
documentationUrl = 'xata';
|
||||
|
||||
properties: INodeProperties[] = [
|
||||
{
|
||||
displayName: 'Database Endpoint',
|
||||
name: 'databaseEndpoint',
|
||||
required: true,
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'https://{workspace}.{region}.xata.sh/db/{database}',
|
||||
},
|
||||
{
|
||||
displayName: 'Branch',
|
||||
name: 'branch',
|
||||
required: true,
|
||||
type: 'string',
|
||||
default: 'main',
|
||||
},
|
||||
{
|
||||
displayName: 'API Key',
|
||||
name: 'apiKey',
|
||||
type: 'string',
|
||||
typeOptions: { password: true },
|
||||
required: true,
|
||||
default: '',
|
||||
},
|
||||
];
|
||||
|
||||
authenticate: IAuthenticateGeneric = {
|
||||
type: 'generic',
|
||||
properties: {
|
||||
headers: {
|
||||
Authorization: '=Bearer {{$credentials.apiKey}}',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
test: ICredentialTestRequest = {
|
||||
request: {
|
||||
baseURL: '={{$credentials.databaseEndpoint}}:{{$credentials.branch}}',
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,48 @@
|
||||
import type {
|
||||
IAuthenticateGeneric,
|
||||
ICredentialTestRequest,
|
||||
ICredentialType,
|
||||
INodeProperties,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
export class ZepApi implements ICredentialType {
|
||||
name = 'zepApi';
|
||||
|
||||
displayName = 'Zep Api';
|
||||
|
||||
documentationUrl = 'zep';
|
||||
|
||||
properties: INodeProperties[] = [
|
||||
{
|
||||
displayName: 'API URL',
|
||||
name: 'apiUrl',
|
||||
required: true,
|
||||
type: 'string',
|
||||
default: 'http://localhost:8000',
|
||||
},
|
||||
{
|
||||
displayName: 'API Key',
|
||||
name: 'apiKey',
|
||||
type: 'string',
|
||||
typeOptions: { password: true },
|
||||
required: false,
|
||||
default: '',
|
||||
},
|
||||
];
|
||||
|
||||
authenticate: IAuthenticateGeneric = {
|
||||
type: 'generic',
|
||||
properties: {
|
||||
headers: {
|
||||
Authorization: '={{$credentials.apiKey ? "Bearer " + $credentials.apiKey : undefined }}',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
test: ICredentialTestRequest = {
|
||||
request: {
|
||||
baseURL: '={{$credentials.apiUrl}}',
|
||||
url: '/api/v1/collection',
|
||||
},
|
||||
};
|
||||
}
|
||||
16
packages/@n8n/nodes-langchain/gulpfile.js
Normal file
@@ -0,0 +1,16 @@
|
||||
const path = require('path');
const { task, src, dest } = require('gulp');

task('build:icons', copyIcons);

/**
 * Copies node and credential icons (PNG/SVG) into the build output.
 *
 * Fix: the original returned only the credentials stream and neither
 * returned nor awaited the node-icons stream, so gulp could signal the
 * task complete before the node icons finished copying. Both streams
 * are now awaited via a promise that resolves when each stream ends.
 *
 * @returns {Promise<void[]>} resolves when both copies have finished
 */
function copyIcons() {
	const nodeSource = path.resolve('nodes', '**', '*.{png,svg}');
	const nodeDestination = path.resolve('dist', 'nodes');
	const credSource = path.resolve('credentials', '**', '*.{png,svg}');
	const credDestination = path.resolve('dist', 'credentials');

	return Promise.all([
		streamToPromise(src(nodeSource).pipe(dest(nodeDestination))),
		streamToPromise(src(credSource).pipe(dest(credDestination))),
	]);
}

// Resolve when a stream finishes flushing, reject on stream error.
function streamToPromise(stream) {
	return new Promise((resolve, reject) => {
		stream.on('end', resolve).on('error', reject);
	});
}
|
||||
0
packages/@n8n/nodes-langchain/index.js
Normal file
267
packages/@n8n/nodes-langchain/nodes/agents/Agent/Agent.node.ts
Normal file
@@ -0,0 +1,267 @@
|
||||
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
|
||||
import type {
|
||||
ConnectionTypes,
|
||||
INodeInputConfiguration,
|
||||
INodeInputFilter,
|
||||
IExecuteFunctions,
|
||||
INodeExecutionData,
|
||||
INodeType,
|
||||
INodeTypeDescription,
|
||||
} from 'n8n-workflow';
|
||||
import { getTemplateNoticeField } from '../../../utils/sharedFields';
|
||||
import { conversationalAgentProperties } from './agents/ConversationalAgent/description';
|
||||
import { conversationalAgentExecute } from './agents/ConversationalAgent/execute';
|
||||
|
||||
import { openAiFunctionsAgentProperties } from './agents/OpenAiFunctionsAgent/description';
|
||||
import { openAiFunctionsAgentExecute } from './agents/OpenAiFunctionsAgent/execute';
|
||||
import { planAndExecuteAgentProperties } from './agents/PlanAndExecuteAgent/description';
|
||||
import { planAndExecuteAgentExecute } from './agents/PlanAndExecuteAgent/execute';
|
||||
import { reActAgentAgentProperties } from './agents/ReActAgent/description';
|
||||
import { reActAgentAgentExecute } from './agents/ReActAgent/execute';
|
||||
import { sqlAgentAgentProperties } from './agents/SqlAgent/description';
|
||||
import { sqlAgentAgentExecute } from './agents/SqlAgent/execute';
|
||||
// Function used in the inputs expression to figure out which inputs to
|
||||
// display based on the agent type
|
||||
function getInputs(
|
||||
agent: 'conversationalAgent' | 'openAiFunctionsAgent' | 'reActAgent' | 'sqlAgent',
|
||||
): Array<ConnectionTypes | INodeInputConfiguration> {
|
||||
interface SpecialInput {
|
||||
type: ConnectionTypes;
|
||||
filter?: INodeInputFilter;
|
||||
}
|
||||
|
||||
const getInputData = (
|
||||
inputs: SpecialInput[],
|
||||
): Array<ConnectionTypes | INodeInputConfiguration> => {
|
||||
const displayNames: { [key: string]: string } = {
|
||||
[NodeConnectionType.AiLanguageModel]: 'Model',
|
||||
[NodeConnectionType.AiMemory]: 'Memory',
|
||||
[NodeConnectionType.AiTool]: 'Tool',
|
||||
[NodeConnectionType.AiOutputParser]: 'Output Parser',
|
||||
};
|
||||
|
||||
return inputs.map(({ type, filter }) => {
|
||||
const input: INodeInputConfiguration = {
|
||||
type,
|
||||
displayName: type in displayNames ? displayNames[type] : undefined,
|
||||
required: type === NodeConnectionType.AiLanguageModel,
|
||||
maxConnections: [NodeConnectionType.AiLanguageModel, NodeConnectionType.AiMemory].includes(
|
||||
type as NodeConnectionType,
|
||||
)
|
||||
? 1
|
||||
: undefined,
|
||||
};
|
||||
|
||||
if (filter) {
|
||||
input.filter = filter;
|
||||
}
|
||||
|
||||
return input;
|
||||
});
|
||||
};
|
||||
|
||||
let specialInputs: SpecialInput[] = [];
|
||||
|
||||
if (agent === 'conversationalAgent') {
|
||||
specialInputs = [
|
||||
{
|
||||
type: NodeConnectionType.AiLanguageModel,
|
||||
filter: {
|
||||
nodes: [
|
||||
'@n8n/n8n-nodes-langchain.lmChatAnthropic',
|
||||
'@n8n/n8n-nodes-langchain.lmChatOllama',
|
||||
'@n8n/n8n-nodes-langchain.lmChatOpenAi',
|
||||
'@n8n/n8n-nodes-langchain.lmChatGooglePalm',
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
type: NodeConnectionType.AiMemory,
|
||||
},
|
||||
{
|
||||
type: NodeConnectionType.AiTool,
|
||||
},
|
||||
{
|
||||
type: NodeConnectionType.AiOutputParser,
|
||||
},
|
||||
];
|
||||
} else if (agent === 'openAiFunctionsAgent') {
|
||||
specialInputs = [
|
||||
{
|
||||
type: NodeConnectionType.AiLanguageModel,
|
||||
filter: {
|
||||
nodes: ['@n8n/n8n-nodes-langchain.lmChatOpenAi'],
|
||||
},
|
||||
},
|
||||
{
|
||||
type: NodeConnectionType.AiMemory,
|
||||
},
|
||||
{
|
||||
type: NodeConnectionType.AiTool,
|
||||
},
|
||||
{
|
||||
type: NodeConnectionType.AiOutputParser,
|
||||
},
|
||||
];
|
||||
} else if (agent === 'reActAgent') {
|
||||
specialInputs = [
|
||||
{
|
||||
type: NodeConnectionType.AiLanguageModel,
|
||||
},
|
||||
{
|
||||
type: NodeConnectionType.AiTool,
|
||||
},
|
||||
{
|
||||
type: NodeConnectionType.AiOutputParser,
|
||||
},
|
||||
];
|
||||
} else if (agent === 'sqlAgent') {
|
||||
specialInputs = [
|
||||
{
|
||||
type: NodeConnectionType.AiLanguageModel,
|
||||
},
|
||||
];
|
||||
} else if (agent === 'planAndExecuteAgent') {
|
||||
specialInputs = [
|
||||
{
|
||||
type: NodeConnectionType.AiLanguageModel,
|
||||
},
|
||||
{
|
||||
type: NodeConnectionType.AiTool,
|
||||
},
|
||||
{
|
||||
type: NodeConnectionType.AiOutputParser,
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
return [NodeConnectionType.Main, ...getInputData(specialInputs)];
|
||||
}
|
||||
|
||||
export class Agent implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'AI Agent',
|
||||
name: 'agent',
|
||||
icon: 'fa:robot',
|
||||
group: ['transform'],
|
||||
version: [1, 1.1],
|
||||
description: 'Generates an action plan and executes it. Can use external tools.',
|
||||
subtitle:
|
||||
"={{ { conversationalAgent: 'Conversational Agent', openAiFunctionsAgent: 'OpenAI Functions Agent', reactAgent: 'ReAct Agent', sqlAgent: 'SQL Agent' }[$parameter.agent] }}",
|
||||
defaults: {
|
||||
name: 'AI Agent',
|
||||
color: '#404040',
|
||||
},
|
||||
codex: {
|
||||
alias: ['LangChain'],
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Agents'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.agent/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
inputs: `={{ ((agent) => { ${getInputs.toString()}; return getInputs(agent) })($parameter.agent) }}`,
|
||||
outputs: [NodeConnectionType.Main],
|
||||
credentials: [
|
||||
{
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-credentials-name-unsuffixed
|
||||
name: 'mySql',
|
||||
required: true,
|
||||
testedBy: 'mysqlConnectionTest',
|
||||
displayOptions: {
|
||||
show: {
|
||||
agent: ['sqlAgent'],
|
||||
'/dataSource': ['mysql'],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'postgres',
|
||||
required: true,
|
||||
displayOptions: {
|
||||
show: {
|
||||
agent: ['sqlAgent'],
|
||||
'/dataSource': ['postgres'],
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
properties: [
|
||||
{
|
||||
...getTemplateNoticeField(1954),
|
||||
displayOptions: {
|
||||
show: {
|
||||
agent: ['conversationalAgent'],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Agent',
|
||||
name: 'agent',
|
||||
type: 'options',
|
||||
noDataExpression: true,
|
||||
options: [
|
||||
{
|
||||
name: 'Conversational Agent',
|
||||
value: 'conversationalAgent',
|
||||
description:
|
||||
'Selects tools to accomplish its task and uses memory to recall previous conversations',
|
||||
},
|
||||
{
|
||||
name: 'OpenAI Functions Agent',
|
||||
value: 'openAiFunctionsAgent',
|
||||
description:
|
||||
"Utilizes OpenAI's Function Calling feature to select the appropriate tool and arguments for execution",
|
||||
},
|
||||
{
|
||||
name: 'Plan and Execute Agent',
|
||||
value: 'planAndExecuteAgent',
|
||||
description:
|
||||
'Plan and execute agents accomplish an objective by first planning what to do, then executing the sub tasks',
|
||||
},
|
||||
{
|
||||
name: 'ReAct Agent',
|
||||
value: 'reActAgent',
|
||||
description: 'Strategically select tools to accomplish a given task',
|
||||
},
|
||||
{
|
||||
name: 'SQL Agent',
|
||||
value: 'sqlAgent',
|
||||
description: 'Answers questions about data in an SQL database',
|
||||
},
|
||||
],
|
||||
default: 'conversationalAgent',
|
||||
},
|
||||
|
||||
...conversationalAgentProperties,
|
||||
...openAiFunctionsAgentProperties,
|
||||
...reActAgentAgentProperties,
|
||||
...sqlAgentAgentProperties,
|
||||
...planAndExecuteAgentProperties,
|
||||
],
|
||||
};
|
||||
|
||||
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
|
||||
const agentType = this.getNodeParameter('agent', 0, '') as string;
|
||||
|
||||
if (agentType === 'conversationalAgent') {
|
||||
return conversationalAgentExecute.call(this);
|
||||
} else if (agentType === 'openAiFunctionsAgent') {
|
||||
return openAiFunctionsAgentExecute.call(this);
|
||||
} else if (agentType === 'reActAgent') {
|
||||
return reActAgentAgentExecute.call(this);
|
||||
} else if (agentType === 'sqlAgent') {
|
||||
return sqlAgentAgentExecute.call(this);
|
||||
} else if (agentType === 'planAndExecuteAgent') {
|
||||
return planAndExecuteAgentExecute.call(this);
|
||||
}
|
||||
|
||||
throw new NodeOperationError(this.getNode(), `The agent type "${agentType}" is not supported`);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,72 @@
|
||||
import type { INodeProperties } from 'n8n-workflow';
|
||||
import { SYSTEM_MESSAGE, HUMAN_MESSAGE } from './prompt';
|
||||
|
||||
// Parameters shown when the Conversational Agent is selected.
// Two 'text' variants exist because the default expression changed between
// node version 1 ($json.input) and 1.1 ($json.chat_input).
export const conversationalAgentProperties: INodeProperties[] = [
	{
		displayName: 'Text',
		name: 'text',
		type: 'string',
		required: true,
		displayOptions: {
			show: {
				agent: ['conversationalAgent'],
				'@version': [1],
			},
		},
		default: '={{ $json.input }}',
	},
	{
		displayName: 'Text',
		name: 'text',
		type: 'string',
		required: true,
		displayOptions: {
			show: {
				agent: ['conversationalAgent'],
				'@version': [1.1],
			},
		},
		default: '={{ $json.chat_input }}',
	},
	{
		displayName: 'Options',
		name: 'options',
		type: 'collection',
		displayOptions: {
			show: {
				agent: ['conversationalAgent'],
			},
		},
		default: {},
		placeholder: 'Add Option',
		options: [
			{
				displayName: 'Human Message',
				name: 'humanMessage',
				type: 'string',
				// Default prompt template defined in ./prompt.ts
				default: HUMAN_MESSAGE,
				description: 'The message that will provide the agent with a list of tools to use',
				typeOptions: {
					rows: 6,
				},
			},
			{
				displayName: 'System Message',
				name: 'systemMessage',
				type: 'string',
				default: SYSTEM_MESSAGE,
				description: 'The message that will be sent to the agent before the conversation starts',
				typeOptions: {
					rows: 6,
				},
			},
			{
				displayName: 'Max Iterations',
				name: 'maxIterations',
				type: 'number',
				default: 10,
				description: 'The maximum number of iterations the agent will run before stopping',
			},
		],
	},
];
|
||||
@@ -0,0 +1,104 @@
|
||||
import {
|
||||
type IExecuteFunctions,
|
||||
type INodeExecutionData,
|
||||
NodeConnectionType,
|
||||
NodeOperationError,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import { initializeAgentExecutorWithOptions } from 'langchain/agents';
|
||||
import { BaseChatModel } from 'langchain/chat_models/base';
|
||||
import type { Tool } from 'langchain/tools';
|
||||
import type { BaseChatMemory } from 'langchain/memory';
|
||||
import type { BaseOutputParser } from 'langchain/schema/output_parser';
|
||||
import { PromptTemplate } from 'langchain/prompts';
|
||||
import { CombiningOutputParser } from 'langchain/output_parsers';
|
||||
|
||||
export async function conversationalAgentExecute(
|
||||
this: IExecuteFunctions,
|
||||
): Promise<INodeExecutionData[][]> {
|
||||
this.logger.verbose('Executing Conversational Agent');
|
||||
|
||||
const model = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiLanguageModel,
|
||||
0,
|
||||
)) as BaseChatModel;
|
||||
|
||||
if (!(model instanceof BaseChatModel)) {
|
||||
throw new NodeOperationError(this.getNode(), 'Conversational Agent requires Chat Model');
|
||||
}
|
||||
|
||||
const memory = (await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as
|
||||
| BaseChatMemory
|
||||
| undefined;
|
||||
const tools = (await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[];
|
||||
const outputParsers = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiOutputParser,
|
||||
0,
|
||||
)) as BaseOutputParser[];
|
||||
|
||||
// TODO: Make it possible in the future to use values for other items than just 0
|
||||
const options = this.getNodeParameter('options', 0, {}) as {
|
||||
systemMessage?: string;
|
||||
humanMessage?: string;
|
||||
maxIterations?: number;
|
||||
};
|
||||
|
||||
const agentExecutor = await initializeAgentExecutorWithOptions(tools, model, {
|
||||
// Passing "chat-conversational-react-description" as the agent type
|
||||
// automatically creates and uses BufferMemory with the executor.
|
||||
// If you would like to override this, you can pass in a custom
|
||||
// memory option, but the memoryKey set on it must be "chat_history".
|
||||
agentType: 'chat-conversational-react-description',
|
||||
memory,
|
||||
maxIterations: options.maxIterations ?? 10,
|
||||
agentArgs: {
|
||||
systemMessage: options.systemMessage,
|
||||
humanMessage: options.humanMessage,
|
||||
},
|
||||
});
|
||||
|
||||
const returnData: INodeExecutionData[] = [];
|
||||
|
||||
let outputParser: BaseOutputParser | undefined;
|
||||
let prompt: PromptTemplate | undefined;
|
||||
if (outputParsers.length) {
|
||||
if (outputParsers.length === 1) {
|
||||
outputParser = outputParsers[0];
|
||||
} else {
|
||||
outputParser = new CombiningOutputParser(...outputParsers);
|
||||
}
|
||||
|
||||
if (outputParser) {
|
||||
const formatInstructions = outputParser.getFormatInstructions();
|
||||
|
||||
prompt = new PromptTemplate({
|
||||
template: '{input}\n{formatInstructions}',
|
||||
inputVariables: ['input'],
|
||||
partialVariables: { formatInstructions },
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const items = this.getInputData();
|
||||
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
|
||||
let input = this.getNodeParameter('text', itemIndex) as string;
|
||||
|
||||
if (input === undefined) {
|
||||
throw new NodeOperationError(this.getNode(), 'The ‘text parameter is empty.');
|
||||
}
|
||||
|
||||
if (prompt) {
|
||||
input = (await prompt.invoke({ input })).value;
|
||||
}
|
||||
|
||||
let response = await agentExecutor.call({ input, outputParsers });
|
||||
|
||||
if (outputParser) {
|
||||
response = { output: await outputParser.parse(response.output as string) };
|
||||
}
|
||||
|
||||
returnData.push({ json: response });
|
||||
}
|
||||
|
||||
return this.prepareOutputData(returnData);
|
||||
}
|
||||
@@ -0,0 +1,21 @@
|
||||
// Default system prompt for the Conversational Agent (editable by the user
// via the "System Message" option).
export const SYSTEM_MESSAGE = `Assistant is a large language model trained by OpenAI.

Assistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.

Assistant is constantly learning and improving, and its capabilities are constantly evolving. It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.

Overall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist.`;

// Default human message template; {tools} and {format_instructions} are
// filled by LangChain, and {{input}} is escaped so it survives the outer
// template pass and is substituted with the user input later.
export const HUMAN_MESSAGE = `TOOLS
------
Assistant can ask the user to use tools to look up information that may be helpful in answering the users original question. The tools the human can use are:

{tools}

{format_instructions}

USER'S INPUT
--------------------
Here is the user's input (remember to respond with a markdown code snippet of a json blob with a single action, and NOTHING else):

{{input}}`;
|
||||
@@ -0,0 +1,62 @@
|
||||
import type { INodeProperties } from 'n8n-workflow';
|
||||
import { SYSTEM_MESSAGE } from './prompt';
|
||||
|
||||
// Parameters shown when the OpenAI Functions Agent is selected.
// Two 'text' variants exist because the default expression changed between
// node version 1 ($json.input) and 1.1 ($json.chat_input).
export const openAiFunctionsAgentProperties: INodeProperties[] = [
	{
		displayName: 'Text',
		name: 'text',
		type: 'string',
		required: true,
		displayOptions: {
			show: {
				agent: ['openAiFunctionsAgent'],
				'@version': [1],
			},
		},
		default: '={{ $json.input }}',
	},
	{
		displayName: 'Text',
		name: 'text',
		type: 'string',
		required: true,
		displayOptions: {
			show: {
				agent: ['openAiFunctionsAgent'],
				'@version': [1.1],
			},
		},
		default: '={{ $json.chat_input }}',
	},
	{
		displayName: 'Options',
		name: 'options',
		type: 'collection',
		displayOptions: {
			show: {
				agent: ['openAiFunctionsAgent'],
			},
		},
		default: {},
		placeholder: 'Add Option',
		options: [
			{
				displayName: 'System Message',
				name: 'systemMessage',
				type: 'string',
				// Default prompt defined in ./prompt.ts
				default: SYSTEM_MESSAGE,
				description: 'The message that will be sent to the agent before the conversation starts',
				typeOptions: {
					rows: 6,
				},
			},
			{
				displayName: 'Max Iterations',
				name: 'maxIterations',
				type: 'number',
				default: 10,
				description: 'The maximum number of iterations the agent will run before stopping',
			},
		],
	},
];
|
||||
@@ -0,0 +1,103 @@
|
||||
import {
|
||||
type IExecuteFunctions,
|
||||
type INodeExecutionData,
|
||||
NodeConnectionType,
|
||||
NodeOperationError,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import type { AgentExecutorInput } from 'langchain/agents';
|
||||
import { AgentExecutor, OpenAIAgent } from 'langchain/agents';
|
||||
import type { Tool } from 'langchain/tools';
|
||||
import type { BaseOutputParser } from 'langchain/schema/output_parser';
|
||||
import { PromptTemplate } from 'langchain/prompts';
|
||||
import { CombiningOutputParser } from 'langchain/output_parsers';
|
||||
import { BufferMemory, type BaseChatMemory } from 'langchain/memory';
|
||||
import { ChatOpenAI } from 'langchain/chat_models/openai';
|
||||
|
||||
export async function openAiFunctionsAgentExecute(
|
||||
this: IExecuteFunctions,
|
||||
): Promise<INodeExecutionData[][]> {
|
||||
this.logger.verbose('Executing OpenAi Functions Agent');
|
||||
const model = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiLanguageModel,
|
||||
0,
|
||||
)) as ChatOpenAI;
|
||||
|
||||
if (!(model instanceof ChatOpenAI)) {
|
||||
throw new NodeOperationError(
|
||||
this.getNode(),
|
||||
'OpenAI Functions Agent requires OpenAI Chat Model',
|
||||
);
|
||||
}
|
||||
const memory = (await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as
|
||||
| BaseChatMemory
|
||||
| undefined;
|
||||
const tools = (await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[];
|
||||
const outputParsers = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiOutputParser,
|
||||
0,
|
||||
)) as BaseOutputParser[];
|
||||
const options = this.getNodeParameter('options', 0, {}) as {
|
||||
systemMessage?: string;
|
||||
maxIterations?: number;
|
||||
};
|
||||
|
||||
const agentConfig: AgentExecutorInput = {
|
||||
tags: ['openai-functions'],
|
||||
agent: OpenAIAgent.fromLLMAndTools(model, tools, {
|
||||
prefix: options.systemMessage,
|
||||
}),
|
||||
tools,
|
||||
maxIterations: options.maxIterations ?? 10,
|
||||
memory:
|
||||
memory ??
|
||||
new BufferMemory({
|
||||
returnMessages: true,
|
||||
memoryKey: 'chat_history',
|
||||
inputKey: 'input',
|
||||
outputKey: 'output',
|
||||
}),
|
||||
};
|
||||
|
||||
const agentExecutor = AgentExecutor.fromAgentAndTools(agentConfig);
|
||||
|
||||
const returnData: INodeExecutionData[] = [];
|
||||
|
||||
let outputParser: BaseOutputParser | undefined;
|
||||
let prompt: PromptTemplate | undefined;
|
||||
if (outputParsers.length) {
|
||||
outputParser =
|
||||
outputParsers.length === 1 ? outputParsers[0] : new CombiningOutputParser(...outputParsers);
|
||||
|
||||
const formatInstructions = outputParser.getFormatInstructions();
|
||||
|
||||
prompt = new PromptTemplate({
|
||||
template: '{input}\n{formatInstructions}',
|
||||
inputVariables: ['input'],
|
||||
partialVariables: { formatInstructions },
|
||||
});
|
||||
}
|
||||
|
||||
const items = this.getInputData();
|
||||
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
|
||||
let input = this.getNodeParameter('text', itemIndex) as string;
|
||||
|
||||
if (input === undefined) {
|
||||
throw new NodeOperationError(this.getNode(), 'The ‘text‘ parameter is empty.');
|
||||
}
|
||||
|
||||
if (prompt) {
|
||||
input = (await prompt.invoke({ input })).value;
|
||||
}
|
||||
|
||||
let response = await agentExecutor.call({ input, outputParsers });
|
||||
|
||||
if (outputParser) {
|
||||
response = { output: await outputParser.parse(response.output as string) };
|
||||
}
|
||||
|
||||
returnData.push({ json: response });
|
||||
}
|
||||
|
||||
return this.prepareOutputData(returnData);
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
// Default system prompt for the OpenAI Functions Agent (user-editable via the
// "System Message" option).
export const SYSTEM_MESSAGE = 'You are a helpful AI assistant.';
|
||||
@@ -0,0 +1,55 @@
|
||||
import type { INodeProperties } from 'n8n-workflow';
|
||||
import { DEFAULT_STEP_EXECUTOR_HUMAN_CHAT_MESSAGE_TEMPLATE } from './prompt';
|
||||
|
||||
// Parameters shown when the Plan and Execute Agent is selected.
// Two 'text' variants exist because the default expression changed between
// node version 1 ($json.input) and 1.1 ($json.chat_input).
export const planAndExecuteAgentProperties: INodeProperties[] = [
	{
		displayName: 'Text',
		name: 'text',
		type: 'string',
		required: true,
		displayOptions: {
			show: {
				agent: ['planAndExecuteAgent'],
				'@version': [1],
			},
		},
		default: '={{ $json.input }}',
	},
	{
		displayName: 'Text',
		name: 'text',
		type: 'string',
		required: true,
		displayOptions: {
			show: {
				agent: ['planAndExecuteAgent'],
				'@version': [1.1],
			},
		},
		default: '={{ $json.chat_input }}',
	},
	{
		displayName: 'Options',
		name: 'options',
		type: 'collection',
		displayOptions: {
			show: {
				agent: ['planAndExecuteAgent'],
			},
		},
		default: {},
		placeholder: 'Add Option',
		options: [
			{
				displayName: 'Human Message Template',
				name: 'humanMessageTemplate',
				type: 'string',
				// Default prompt defined in ./prompt.ts
				default: DEFAULT_STEP_EXECUTOR_HUMAN_CHAT_MESSAGE_TEMPLATE,
				description: 'The message that will be sent to the agent during each step execution',
				typeOptions: {
					rows: 6,
				},
			},
		],
	},
];
|
||||
@@ -0,0 +1,80 @@
|
||||
import {
|
||||
type IExecuteFunctions,
|
||||
type INodeExecutionData,
|
||||
NodeConnectionType,
|
||||
NodeOperationError,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import type { Tool } from 'langchain/tools';
|
||||
import type { BaseOutputParser } from 'langchain/schema/output_parser';
|
||||
import { PromptTemplate } from 'langchain/prompts';
|
||||
import { CombiningOutputParser } from 'langchain/output_parsers';
|
||||
import type { BaseChatModel } from 'langchain/chat_models/base';
|
||||
import { PlanAndExecuteAgentExecutor } from 'langchain/experimental/plan_and_execute';
|
||||
|
||||
export async function planAndExecuteAgentExecute(
|
||||
this: IExecuteFunctions,
|
||||
): Promise<INodeExecutionData[][]> {
|
||||
this.logger.verbose('Executing PlanAndExecute Agent');
|
||||
const model = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiLanguageModel,
|
||||
0,
|
||||
)) as BaseChatModel;
|
||||
|
||||
const tools = (await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[];
|
||||
|
||||
const outputParsers = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiOutputParser,
|
||||
0,
|
||||
)) as BaseOutputParser[];
|
||||
|
||||
const options = this.getNodeParameter('options', 0, {}) as {
|
||||
humanMessageTemplate?: string;
|
||||
};
|
||||
|
||||
const agentExecutor = await PlanAndExecuteAgentExecutor.fromLLMAndTools({
|
||||
llm: model,
|
||||
tools,
|
||||
humanMessageTemplate: options.humanMessageTemplate,
|
||||
});
|
||||
|
||||
const returnData: INodeExecutionData[] = [];
|
||||
|
||||
let outputParser: BaseOutputParser | undefined;
|
||||
let prompt: PromptTemplate | undefined;
|
||||
if (outputParsers.length) {
|
||||
outputParser =
|
||||
outputParsers.length === 1 ? outputParsers[0] : new CombiningOutputParser(...outputParsers);
|
||||
|
||||
const formatInstructions = outputParser.getFormatInstructions();
|
||||
|
||||
prompt = new PromptTemplate({
|
||||
template: '{input}\n{formatInstructions}',
|
||||
inputVariables: ['input'],
|
||||
partialVariables: { formatInstructions },
|
||||
});
|
||||
}
|
||||
|
||||
const items = this.getInputData();
|
||||
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
|
||||
let input = this.getNodeParameter('text', itemIndex) as string;
|
||||
|
||||
if (input === undefined) {
|
||||
throw new NodeOperationError(this.getNode(), 'The ‘text‘ parameter is empty.');
|
||||
}
|
||||
|
||||
if (prompt) {
|
||||
input = (await prompt.invoke({ input })).value;
|
||||
}
|
||||
|
||||
let response = await agentExecutor.call({ input, outputParsers });
|
||||
|
||||
if (outputParser) {
|
||||
response = { output: await outputParser.parse(response.output as string) };
|
||||
}
|
||||
|
||||
returnData.push({ json: response });
|
||||
}
|
||||
|
||||
return this.prepareOutputData(returnData);
|
||||
}
|
||||
@@ -0,0 +1,7 @@
|
||||
// Default per-step human message for the Plan and Execute agent's step
// executor; {previous_steps}, {current_step} and {agent_scratchpad} are
// filled in by LangChain at runtime.
export const DEFAULT_STEP_EXECUTOR_HUMAN_CHAT_MESSAGE_TEMPLATE = `Previous steps: {previous_steps}

Current objective: {current_step}

{agent_scratchpad}

You may extract and combine relevant data from your previous steps when responding to me.`;
|
||||
@@ -0,0 +1,87 @@
|
||||
import type { INodeProperties } from 'n8n-workflow';
|
||||
import { HUMAN_MESSAGE_TEMPLATE, PREFIX, SUFFIX, SUFFIX_CHAT } from './prompt';
|
||||
|
||||
// Parameters shown when the ReAct Agent is selected.
// Two 'text' variants exist because the default expression changed between
// node version 1 ($json.input) and 1.1 ($json.chat_input).
export const reActAgentAgentProperties: INodeProperties[] = [
	{
		displayName: 'Text',
		name: 'text',
		type: 'string',
		required: true,
		displayOptions: {
			show: {
				agent: ['reActAgent'],
				'@version': [1],
			},
		},
		default: '={{ $json.input }}',
	},
	{
		displayName: 'Text',
		name: 'text',
		type: 'string',
		required: true,
		displayOptions: {
			show: {
				agent: ['reActAgent'],
				'@version': [1.1],
			},
		},
		default: '={{ $json.chat_input }}',
	},
	{
		displayName: 'Options',
		name: 'options',
		type: 'collection',
		displayOptions: {
			show: {
				agent: ['reActAgent'],
			},
		},
		default: {},
		placeholder: 'Add Option',
		options: [
			{
				displayName: 'Human Message Template',
				name: 'humanMessageTemplate',
				type: 'string',
				// Defaults defined in ./prompt.ts
				default: HUMAN_MESSAGE_TEMPLATE,
				description: 'String to use directly as the human message template',
				typeOptions: {
					rows: 6,
				},
			},
			{
				displayName: 'Prefix Message',
				name: 'prefix',
				type: 'string',
				default: PREFIX,
				description: 'String to put before the list of tools',
				typeOptions: {
					rows: 6,
				},
			},
			{
				// Used when a chat model is connected (see execute.ts).
				displayName: 'Suffix Message for Chat Model',
				name: 'suffixChat',
				type: 'string',
				default: SUFFIX_CHAT,
				description:
					'String to put after the list of tools that will be used if chat model is used',
				typeOptions: {
					rows: 6,
				},
			},
			{
				// Used when a plain (non-chat) LLM is connected.
				displayName: 'Suffix Message for Regular Model',
				name: 'suffix',
				type: 'string',
				default: SUFFIX,
				description:
					'String to put after the list of tools that will be used if regular model is used',
				typeOptions: {
					rows: 6,
				},
			},
		],
	},
];
|
||||
@@ -0,0 +1,93 @@
|
||||
import {
|
||||
type IExecuteFunctions,
|
||||
type INodeExecutionData,
|
||||
NodeConnectionType,
|
||||
NodeOperationError,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import { AgentExecutor, ChatAgent, ZeroShotAgent } from 'langchain/agents';
|
||||
import type { BaseLanguageModel } from 'langchain/base_language';
|
||||
import type { Tool } from 'langchain/tools';
|
||||
import type { BaseOutputParser } from 'langchain/schema/output_parser';
|
||||
import { PromptTemplate } from 'langchain/prompts';
|
||||
import { CombiningOutputParser } from 'langchain/output_parsers';
|
||||
import { BaseChatModel } from 'langchain/chat_models/base';
|
||||
|
||||
export async function reActAgentAgentExecute(
|
||||
this: IExecuteFunctions,
|
||||
): Promise<INodeExecutionData[][]> {
|
||||
this.logger.verbose('Executing ReAct Agent');
|
||||
|
||||
const model = (await this.getInputConnectionData(NodeConnectionType.AiLanguageModel, 0)) as
|
||||
| BaseLanguageModel
|
||||
| BaseChatModel;
|
||||
|
||||
const tools = (await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[];
|
||||
|
||||
const outputParsers = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiOutputParser,
|
||||
0,
|
||||
)) as BaseOutputParser[];
|
||||
|
||||
const options = this.getNodeParameter('options', 0, {}) as {
|
||||
prefix?: string;
|
||||
suffix?: string;
|
||||
suffixChat?: string;
|
||||
humanMessageTemplate?: string;
|
||||
};
|
||||
let agent: ChatAgent | ZeroShotAgent;
|
||||
|
||||
if (model instanceof BaseChatModel) {
|
||||
agent = ChatAgent.fromLLMAndTools(model, tools, {
|
||||
prefix: options.prefix,
|
||||
suffix: options.suffixChat,
|
||||
humanMessageTemplate: options.humanMessageTemplate,
|
||||
});
|
||||
} else {
|
||||
agent = ZeroShotAgent.fromLLMAndTools(model, tools, {
|
||||
prefix: options.prefix,
|
||||
suffix: options.suffix,
|
||||
});
|
||||
}
|
||||
|
||||
const agentExecutor = AgentExecutor.fromAgentAndTools({ agent, tools });
|
||||
|
||||
const returnData: INodeExecutionData[] = [];
|
||||
|
||||
let outputParser: BaseOutputParser | undefined;
|
||||
let prompt: PromptTemplate | undefined;
|
||||
if (outputParsers.length) {
|
||||
outputParser =
|
||||
outputParsers.length === 1 ? outputParsers[0] : new CombiningOutputParser(...outputParsers);
|
||||
|
||||
const formatInstructions = outputParser.getFormatInstructions();
|
||||
|
||||
prompt = new PromptTemplate({
|
||||
template: '{input}\n{formatInstructions}',
|
||||
inputVariables: ['input'],
|
||||
partialVariables: { formatInstructions },
|
||||
});
|
||||
}
|
||||
|
||||
const items = this.getInputData();
|
||||
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
|
||||
let input = this.getNodeParameter('text', itemIndex) as string;
|
||||
|
||||
if (input === undefined) {
|
||||
throw new NodeOperationError(this.getNode(), 'The ‘text‘ parameter is empty.');
|
||||
}
|
||||
|
||||
if (prompt) {
|
||||
input = (await prompt.invoke({ input })).value;
|
||||
}
|
||||
|
||||
let response = await agentExecutor.call({ input, outputParsers });
|
||||
if (outputParser) {
|
||||
response = { output: await outputParser.parse(response.output as string) };
|
||||
}
|
||||
|
||||
returnData.push({ json: response });
|
||||
}
|
||||
|
||||
return this.prepareOutputData(returnData);
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
// Default ReAct prompt fragments; user-overridable via the node's Options.

// Placed before the tool list.
export const PREFIX =
	'Answer the following questions as best you can. You have access to the following tools:';

// Suffix used when a chat model is connected.
export const SUFFIX_CHAT =
	'Begin! Reminder to always use the exact characters `Final Answer` when responding.';

// Suffix used when a plain (non-chat) LLM is connected; {input} and
// {agent_scratchpad} are filled in by LangChain.
export const SUFFIX = `Begin!

Question: {input}
Thought:{agent_scratchpad}`;

// Human message template used by the chat-model variant.
export const HUMAN_MESSAGE_TEMPLATE = '{input}\n\n{agent_scratchpad}';
|
||||
@@ -0,0 +1,114 @@
|
||||
import type { INodeProperties } from 'n8n-workflow';
|
||||
import { SQL_PREFIX, SQL_SUFFIX } from './other/prompts';
|
||||
|
||||
/**
 * Node properties for the SQL Agent: the data-source selector, the prompt
 * input, and an options collection (table filters, prompt overrides, limit).
 * Every field is only displayed when the node's `agent` parameter is
 * 'sqlAgent'.
 */
export const sqlAgentAgentProperties: INodeProperties[] = [
	{
		displayName: 'Data Source',
		name: 'dataSource',
		type: 'options',
		displayOptions: {
			show: {
				agent: ['sqlAgent'],
			},
		},
		default: 'sqlite',
		description: 'SQL database to connect to',
		options: [
			{
				name: 'MySQL',
				value: 'mysql',
				description: 'Connect to a MySQL database',
			},
			{
				name: 'Postgres',
				value: 'postgres',
				description: 'Connect to a Postgres database',
			},
			{
				// SQLite is read from the item's binary data, not a server.
				name: 'SQLite',
				value: 'sqlite',
				description: 'Use SQLite by connecting a database file as binary input',
			},
		],
	},
	{
		// Natural-language question passed to the agent as `input`.
		displayName: 'Prompt',
		name: 'input',
		type: 'string',
		displayOptions: {
			show: {
				agent: ['sqlAgent'],
			},
		},
		default: '',
		required: true,
		typeOptions: {
			rows: 5,
		},
	},
	{
		displayName: 'Options',
		name: 'options',
		type: 'collection',
		displayOptions: {
			show: {
				agent: ['sqlAgent'],
			},
		},
		default: {},
		placeholder: 'Add Option',
		options: [
			{
				displayName: 'Ignored Tables',
				name: 'ignoredTables',
				type: 'string',
				default: '',
				description:
					'Comma-separated list of tables to ignore from the database. If empty, no tables are ignored.',
			},
			{
				displayName: 'Include Sample Rows',
				name: 'includedSampleRows',
				type: 'number',
				description:
					'Number of sample rows to include in the prompt to the agent. It helps the agent to understand the schema of the database but it also increases the amount of tokens used.',
				default: 3,
			},
			{
				displayName: 'Included Tables',
				name: 'includedTables',
				type: 'string',
				default: '',
				description:
					'Comma-separated list of tables to include in the database. If empty, all tables are included.',
			},
			{
				// Defaults to the shared SQL agent prefix prompt.
				displayName: 'Prefix Prompt',
				name: 'prefixPrompt',
				type: 'string',
				default: SQL_PREFIX,
				description: 'Prefix prompt to use for the agent',
				typeOptions: {
					rows: 10,
				},
			},
			{
				// Defaults to the shared SQL agent suffix prompt.
				displayName: 'Suffix Prompt',
				name: 'suffixPrompt',
				type: 'string',
				default: SQL_SUFFIX,
				description: 'Suffix prompt to use for the agent',
				typeOptions: {
					rows: 4,
				},
			},
			{
				// Maps to the agent's `topK` query-limit option.
				displayName: 'Limit',
				name: 'topK',
				type: 'number',
				default: 10,
				description: 'The maximum number of results to return',
			},
		],
	},
];
|
||||
@@ -0,0 +1,105 @@
|
||||
import {
|
||||
type IExecuteFunctions,
|
||||
type INodeExecutionData,
|
||||
NodeConnectionType,
|
||||
NodeOperationError,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import { SqlDatabase } from 'langchain/sql_db';
|
||||
import type { SqlCreatePromptArgs } from 'langchain/agents/toolkits/sql';
|
||||
import { SqlToolkit, createSqlAgent } from 'langchain/agents/toolkits/sql';
|
||||
import type { BaseLanguageModel } from 'langchain/dist/base_language';
|
||||
import type { DataSource } from 'typeorm';
|
||||
|
||||
import { getSqliteDataSource } from './other/handlers/sqlite';
|
||||
import { getPostgresDataSource } from './other/handlers/postgres';
|
||||
import { SQL_PREFIX, SQL_SUFFIX } from './other/prompts';
|
||||
import { getMysqlDataSource } from './other/handlers/mysql';
|
||||
|
||||
const parseTablesString = (tablesString: string) =>
|
||||
tablesString
|
||||
.split(',')
|
||||
.map((table) => table.trim())
|
||||
.filter((table) => table.length > 0);
|
||||
|
||||
/**
 * Executes the SQL Agent for every incoming item: builds a TypeORM data
 * source for the selected database (SQLite binary / Postgres / MySQL),
 * wraps it in a LangChain SqlToolkit, and runs the SQL agent with the
 * user's prompt.
 *
 * @returns One output item per input item, each holding the agent response.
 * @throws NodeOperationError when the prompt is missing, when SQLite is
 *   selected without binary input, or when no data source can be built.
 */
export async function sqlAgentAgentExecute(
	this: IExecuteFunctions,
): Promise<INodeExecutionData[][]> {
	this.logger.verbose('Executing SQL Agent');

	// Connected language model drives both query generation and tool use.
	const model = (await this.getInputConnectionData(
		NodeConnectionType.AiLanguageModel,
		0,
	)) as BaseLanguageModel;
	const items = this.getInputData();

	const returnData: INodeExecutionData[] = [];

	for (let i = 0; i < items.length; i++) {
		const item = items[i];
		const input = this.getNodeParameter('input', i) as string;

		if (input === undefined) {
			throw new NodeOperationError(this.getNode(), 'The ‘prompt’ parameter is empty.');
		}

		const options = this.getNodeParameter('options', i, {});
		const selectedDataSource = this.getNodeParameter('dataSource', i, 'sqlite') as
			| 'mysql'
			| 'postgres'
			| 'sqlite';

		const includedSampleRows = options.includedSampleRows as number;
		const includedTablesArray = parseTablesString((options.includedTables as string) ?? '');
		const ignoredTablesArray = parseTablesString((options.ignoredTables as string) ?? '');

		// Resolve the data source; exactly one branch applies per item.
		let dataSource: DataSource | null = null;
		if (selectedDataSource === 'sqlite') {
			// SQLite databases arrive as binary data on the item itself.
			if (!item.binary) {
				throw new NodeOperationError(
					this.getNode(),
					'No binary data found, please connect a binary to the input if you want to use SQLite as data source',
				);
			}

			dataSource = getSqliteDataSource.call(this, item.binary);
		}

		if (selectedDataSource === 'postgres') {
			dataSource = await getPostgresDataSource.call(this);
		}

		if (selectedDataSource === 'mysql') {
			dataSource = await getMysqlDataSource.call(this);
		}

		if (!dataSource) {
			throw new NodeOperationError(
				this.getNode(),
				'No data source found, please configure data source',
			);
		}

		// Agent prompt configuration; user-supplied overrides win over the
		// shared SQL_PREFIX / SQL_SUFFIX defaults.
		const agentOptions: SqlCreatePromptArgs = {
			topK: (options.topK as number) ?? 10,
			prefix: (options.prefixPrompt as string) ?? SQL_PREFIX,
			suffix: (options.suffixPrompt as string) ?? SQL_SUFFIX,
		};

		// Empty filter lists are passed as undefined so LangChain applies no
		// table filtering at all.
		const dbInstance = await SqlDatabase.fromDataSourceParams({
			appDataSource: dataSource,
			includesTables: includedTablesArray.length > 0 ? includedTablesArray : undefined,
			ignoreTables: ignoredTablesArray.length > 0 ? ignoredTablesArray : undefined,
			sampleRowsInTableInfo: includedSampleRows ?? 3,
		});

		const toolkit = new SqlToolkit(dbInstance, model);
		const agentExecutor = createSqlAgent(model, toolkit, agentOptions);

		// The cancel signal lets n8n abort long-running agent executions.
		const response = await agentExecutor.call({ input, signal: this.getExecutionCancelSignal() });

		returnData.push({ json: response });
	}

	return this.prepareOutputData(returnData);
}
|
||||
@@ -0,0 +1,20 @@
|
||||
import { type IExecuteFunctions } from 'n8n-workflow';
|
||||
import { DataSource } from 'typeorm';
|
||||
|
||||
export async function getMysqlDataSource(this: IExecuteFunctions): Promise<DataSource> {
|
||||
const credentials = await this.getCredentials('mySql');
|
||||
|
||||
const dataSource = new DataSource({
|
||||
type: 'mysql',
|
||||
host: credentials.host as string,
|
||||
port: credentials.port as number,
|
||||
username: credentials.user as string,
|
||||
password: credentials.password as string,
|
||||
database: credentials.database as string,
|
||||
ssl: {
|
||||
rejectUnauthorized: credentials.ssl as boolean,
|
||||
},
|
||||
});
|
||||
|
||||
return dataSource;
|
||||
}
|
||||
@@ -0,0 +1,29 @@
|
||||
import { type IExecuteFunctions } from 'n8n-workflow';
|
||||
import { DataSource } from 'typeorm';
|
||||
|
||||
export async function getPostgresDataSource(this: IExecuteFunctions): Promise<DataSource> {
|
||||
const credentials = await this.getCredentials('postgres');
|
||||
|
||||
const dataSource = new DataSource({
|
||||
type: 'postgres',
|
||||
host: credentials.host as string,
|
||||
port: credentials.port as number,
|
||||
username: credentials.user as string,
|
||||
password: credentials.password as string,
|
||||
database: credentials.database as string,
|
||||
});
|
||||
|
||||
if (credentials.allowUnauthorizedCerts === true) {
|
||||
dataSource.setOptions({
|
||||
ssl: {
|
||||
rejectUnauthorized: true,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
dataSource.setOptions({
|
||||
ssl: !['disable', undefined].includes(credentials.ssl as string | undefined),
|
||||
});
|
||||
}
|
||||
|
||||
return dataSource;
|
||||
}
|
||||
@@ -0,0 +1,38 @@
|
||||
import * as fs from 'fs';
|
||||
import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
|
||||
import { BINARY_ENCODING, NodeOperationError } from 'n8n-workflow';
|
||||
import * as temp from 'temp';
|
||||
import * as sqlite3 from 'sqlite3';
|
||||
import { DataSource } from 'typeorm';
|
||||
|
||||
/**
 * Materializes a SQLite database received as binary input into a temp file
 * and returns a TypeORM DataSource pointing at that file.
 *
 * @param binary - The item's binary data; only the conventional 'data'
 *   property is considered.
 * @throws NodeOperationError when no binary data is present.
 */
export function getSqliteDataSource(
	this: IExecuteFunctions,
	binary: INodeExecutionData['binary'],
): DataSource {
	const binaryData = binary?.data;

	if (!binaryData) {
		throw new NodeOperationError(this.getNode(), 'No binary data received.');
	}

	// Decode the payload into a Buffer. NOTE(review): this assumes the binary
	// payload is stored inline (base64 in `data`), not in filesystem
	// binary-data mode — confirm against how callers store binaries.
	const bufferString = Buffer.from(binaryData.data, BINARY_ENCODING);

	// Track and cleanup temp files at exit
	temp.track();

	const tempDbPath = temp.path({ suffix: '.sqlite' });
	fs.writeFileSync(tempDbPath, bufferString);

	// Initialize a new SQLite database from the temp file
	// NOTE(review): the sqlite3 open callback runs asynchronously, so a throw
	// inside it will not propagate to this function's caller — verify the
	// intended error-reporting path for corrupt database files.
	const tempDb = new sqlite3.Database(tempDbPath, (error: Error | null) => {
		if (error) {
			throw new NodeOperationError(this.getNode(), 'Could not connect to database');
		}
	});
	tempDb.close();

	// TypeORM re-opens the file itself from the temp path.
	return new DataSource({
		type: 'sqlite',
		database: tempDbPath,
	});
}
|
||||
@@ -0,0 +1,18 @@
|
||||
export const SQL_PREFIX = `You are an agent designed to interact with an SQL database.
|
||||
Given an input question, create a syntactically correct {dialect} query to run, then look at the results of the query and return the answer.
|
||||
Unless the user specifies a specific number of examples they wish to obtain, always limit your query to at most {top_k} results using the LIMIT clause.
|
||||
You can order the results by a relevant column to return the most interesting examples in the database.
|
||||
Never query for all the columns from a specific table, only ask for a the few relevant columns given the question.
|
||||
You have access to tools for interacting with the database.
|
||||
Only use the below tools. Only use the information returned by the below tools to construct your final answer.
|
||||
You MUST double check your query before executing it. If you get an error while executing a query, rewrite the query and try again.
|
||||
|
||||
DO NOT make any DML statements (INSERT, UPDATE, DELETE, DROP etc.) to the database.
|
||||
|
||||
If the question does not seem related to the database, just return "I don't know" as the answer.`;
|
||||
|
||||
export const SQL_SUFFIX = `Begin!
|
||||
|
||||
Question: {input}
|
||||
Thought: I should look at the tables in the database to see what I can query.
|
||||
{agent_scratchpad}`;
|
||||
@@ -0,0 +1,352 @@
|
||||
import { AgentExecutor } from 'langchain/agents';
|
||||
import { OpenAI as OpenAIClient } from 'openai';
|
||||
import { OpenAIAssistantRunnable } from 'langchain/experimental/openai_assistant';
|
||||
import { type Tool } from 'langchain/tools';
|
||||
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
|
||||
import type {
|
||||
IExecuteFunctions,
|
||||
INodeExecutionData,
|
||||
INodeType,
|
||||
INodeTypeDescription,
|
||||
} from 'n8n-workflow';
|
||||
import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema';
|
||||
import { formatToOpenAIAssistantTool } from './utils';
|
||||
|
||||
/**
 * n8n node wrapping the OpenAI Assistants API via LangChain's
 * OpenAIAssistantRunnable. Supports either creating a new assistant or
 * reusing an existing one, and attaches both OpenAI-native tools
 * (code interpreter / retrieval) and n8n-connected AiTool tools.
 */
export class OpenAiAssistant implements INodeType {
	description: INodeTypeDescription = {
		displayName: 'OpenAI Assistant',
		name: 'openAiAssistant',
		icon: 'fa:robot',
		group: ['transform'],
		version: 1,
		description: 'Utilizes Assistant API from Open AI.',
		subtitle: 'Open AI Assistant',
		defaults: {
			name: 'OpenAI Assistant',
			color: '#404040',
		},
		codex: {
			alias: ['LangChain'],
			categories: ['AI'],
			subcategories: {
				AI: ['Agents'],
			},
			resources: {
				primaryDocumentation: [
					{
						url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.openaiassistant/',
					},
				],
			},
		},
		// Main data input plus any number of connected AiTool sub-nodes.
		inputs: [
			{ type: NodeConnectionType.Main },
			{ type: NodeConnectionType.AiTool, displayName: 'Tools' },
		],
		outputs: [NodeConnectionType.Main],
		credentials: [
			{
				name: 'openAiApi',
				required: true,
			},
		],
		requestDefaults: {
			ignoreHttpStatusErrors: true,
			// Strip the trailing path segment (e.g. "/v1") from a custom base
			// URL; load-option requests re-append it below.
			baseURL:
				'={{ $parameter.options?.baseURL?.split("/").slice(0,-1).join("/") || "https://api.openai.com" }}',
		},
		properties: [
			{
				displayName: 'Operation',
				name: 'mode',
				type: 'options',
				noDataExpression: true,
				default: 'existing',
				options: [
					{
						name: 'Create New Assistant',
						value: 'new',
					},
					{
						name: 'Use Existing Assistant',
						value: 'existing',
					},
				],
			},
			// --- Fields shown only when creating a new assistant ---
			{
				displayName: 'Name',
				name: 'name',
				type: 'string',
				default: '',
				required: true,
				displayOptions: {
					show: {
						'/mode': ['new'],
					},
				},
			},
			{
				displayName: 'Instructions',
				name: 'instructions',
				type: 'string',
				description: 'How the Assistant and model should behave or respond',
				default: '',
				typeOptions: {
					rows: 5,
				},
				required: true,
				displayOptions: {
					show: {
						'/mode': ['new'],
					},
				},
			},
			{
				displayName: 'Model',
				name: 'model',
				type: 'options',
				description:
					'The model which will be used to power the assistant. <a href="https://beta.openai.com/docs/models/overview">Learn more</a>. The Retrieval tool requires gpt-3.5-turbo-1106 and gpt-4-1106-preview models.',
				required: true,
				displayOptions: {
					show: {
						'/mode': ['new'],
					},
				},
				typeOptions: {
					// Model list is fetched live from the OpenAI /models endpoint,
					// filtered to chat-capable gpt-* models.
					loadOptions: {
						routing: {
							request: {
								method: 'GET',
								url: '={{ $parameter.options?.baseURL?.split("/").slice(-1).pop() || "v1" }}/models',
							},
							output: {
								postReceive: [
									{
										type: 'rootProperty',
										properties: {
											property: 'data',
										},
									},
									{
										type: 'filter',
										properties: {
											pass: "={{ $responseItem.id.startsWith('gpt-') && !$responseItem.id.includes('instruct') }}",
										},
									},
									{
										type: 'setKeyValue',
										properties: {
											name: '={{$responseItem.id}}',
											value: '={{$responseItem.id}}',
										},
									},
									{
										type: 'sort',
										properties: {
											key: 'name',
										},
									},
								],
							},
						},
					},
				},
				routing: {
					send: {
						type: 'body',
						property: 'model',
					},
				},
				default: 'gpt-3.5-turbo-1106',
			},
			// --- Field shown only when reusing an existing assistant ---
			{
				displayName: 'Assistant',
				name: 'assistantId',
				type: 'options',
				noDataExpression: true,
				displayOptions: {
					show: {
						'/mode': ['existing'],
					},
				},
				description:
					'The assistant to use. <a href="https://beta.openai.com/docs/assistants/overview">Learn more</a>.',
				typeOptions: {
					// Assistants are listed via the beta Assistants API; the
					// 'OpenAI-Beta' header is required while the API is in beta.
					loadOptions: {
						routing: {
							request: {
								method: 'GET',
								headers: {
									'OpenAI-Beta': 'assistants=v1',
								},
								url: '={{ $parameter.options?.baseURL?.split("/").slice(-1).pop() || "v1" }}/assistants',
							},
							output: {
								postReceive: [
									{
										type: 'rootProperty',
										properties: {
											property: 'data',
										},
									},
									{
										type: 'setKeyValue',
										properties: {
											name: '={{$responseItem.name}}',
											value: '={{$responseItem.id}}',
											// eslint-disable-next-line n8n-local-rules/no-interpolation-in-regular-string
											description: '={{$responseItem.model}}',
										},
									},
									{
										type: 'sort',
										properties: {
											key: 'name',
										},
									},
								],
							},
						},
					},
				},
				routing: {
					send: {
						type: 'body',
						property: 'assistant',
					},
				},
				required: true,
				default: '',
			},
			{
				// User message forwarded to the assistant run.
				displayName: 'Text',
				name: 'text',
				type: 'string',
				required: true,
				default: '={{ $json.chat_input }}',
			},
			{
				// OpenAI-hosted tools, enabled in addition to connected AiTools.
				displayName: 'OpenAI Tools',
				name: 'nativeTools',
				type: 'multiOptions',
				default: [],
				options: [
					{
						name: 'Code Interpreter',
						value: 'code_interpreter',
					},
					{
						name: 'Retrieval',
						value: 'retrieval',
					},
				],
			},
			{
				displayName: 'Options',
				name: 'options',
				placeholder: 'Add Option',
				description: 'Additional options to add',
				type: 'collection',
				default: {},
				options: [
					{
						displayName: 'Base URL',
						name: 'baseURL',
						default: 'https://api.openai.com/v1',
						description: 'Override the default base URL for the API',
						type: 'string',
					},
					{
						displayName: 'Max Retries',
						name: 'maxRetries',
						default: 2,
						description: 'Maximum number of retries to attempt',
						type: 'number',
					},
					{
						displayName: 'Timeout',
						name: 'timeout',
						default: 10000,
						description: 'Maximum amount of time a request is allowed to take in milliseconds',
						type: 'number',
					},
				],
			},
		],
	};

	/**
	 * Runs the assistant once per input item. For existing assistants the
	 * tool set is synced (overwritten) on OpenAI's side before the run; for
	 * new assistants one is created per item with the configured
	 * name/instructions/model.
	 */
	async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
		const tools = (await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[];
		const credentials = await this.getCredentials('openAiApi');

		const items = this.getInputData();
		const returnData: INodeExecutionData[] = [];

		for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
			const input = this.getNodeParameter('text', itemIndex) as string;
			const assistantId = this.getNodeParameter('assistantId', itemIndex, '') as string;
			const nativeTools = this.getNodeParameter('nativeTools', itemIndex, []) as Array<
				'code_interpreter' | 'retrieval'
			>;

			const options = this.getNodeParameter('options', itemIndex, {}) as {
				baseURL?: string;
				maxRetries: number;
				timeout: number;
			};

			if (input === undefined) {
				throw new NodeOperationError(this.getNode(), 'The ‘text‘ parameter is empty.');
			}

			const client = new OpenAIClient({
				apiKey: credentials.apiKey as string,
				maxRetries: options.maxRetries ?? 2,
				timeout: options.timeout ?? 10000,
				baseURL: options.baseURL,
			});
			let agent;
			// Native OpenAI tools plus connected n8n tools converted to the
			// Assistants function-tool format.
			const nativeToolsParsed: OpenAIToolType = nativeTools.map((tool) => ({ type: tool }));
			const transformedConnectedTools = tools?.map(formatToOpenAIAssistantTool) ?? [];
			const newTools = [...transformedConnectedTools, ...nativeToolsParsed];

			// Existing agent, update tools with currently assigned
			if (assistantId) {
				agent = new OpenAIAssistantRunnable({ assistantId, client, asAgent: true });

				await client.beta.assistants.update(assistantId, {
					tools: newTools,
				});
			} else {
				const name = this.getNodeParameter('name', itemIndex, '') as string;
				const instructions = this.getNodeParameter('instructions', itemIndex, '') as string;
				const model = this.getNodeParameter('model', itemIndex, 'gpt-3.5-turbo-1106') as string;

				agent = await OpenAIAssistantRunnable.createAssistant({
					model,
					client,
					instructions,
					name,
					tools: newTools,
					asAgent: true,
				});
			}

			const agentExecutor = AgentExecutor.fromAgentAndTools({
				agent,
				tools,
			});

			const response = await agentExecutor.call({
				content: input,
				signal: this.getExecutionCancelSignal(),
				timeout: options.timeout ?? 10000,
			});

			returnData.push({ json: response });
		}

		return this.prepareOutputData(returnData);
	}
}
|
||||
@@ -0,0 +1,47 @@
|
||||
import { zodToJsonSchema } from 'zod-to-json-schema';
|
||||
import type { OpenAI as OpenAIClient } from 'openai';
|
||||
import type { StructuredTool } from 'langchain/tools';
|
||||
|
||||
// Copied from langchain(`langchain/src/tools/convert_to_openai.ts`)
|
||||
// since these functions are not exported
|
||||
|
||||
/**
|
||||
* Formats a `StructuredTool` instance into a format that is compatible
|
||||
* with OpenAI's ChatCompletionFunctions. It uses the `zodToJsonSchema`
|
||||
* function to convert the schema of the `StructuredTool` into a JSON
|
||||
* schema, which is then used as the parameters for the OpenAI function.
|
||||
*/
|
||||
export function formatToOpenAIFunction(
|
||||
tool: StructuredTool,
|
||||
): OpenAIClient.Chat.ChatCompletionCreateParams.Function {
|
||||
return {
|
||||
name: tool.name,
|
||||
description: tool.description,
|
||||
parameters: zodToJsonSchema(tool.schema),
|
||||
};
|
||||
}
|
||||
|
||||
export function formatToOpenAITool(tool: StructuredTool): OpenAIClient.Chat.ChatCompletionTool {
|
||||
const schema = zodToJsonSchema(tool.schema);
|
||||
return {
|
||||
type: 'function',
|
||||
function: {
|
||||
name: tool.name,
|
||||
description: tool.description,
|
||||
parameters: schema,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function formatToOpenAIAssistantTool(
|
||||
tool: StructuredTool,
|
||||
): OpenAIClient.Beta.AssistantCreateParams.AssistantToolsFunction {
|
||||
return {
|
||||
type: 'function',
|
||||
function: {
|
||||
name: tool.name,
|
||||
description: tool.description,
|
||||
parameters: zodToJsonSchema(tool.schema),
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,463 @@
|
||||
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
|
||||
import type {
|
||||
IBinaryData,
|
||||
IDataObject,
|
||||
IExecuteFunctions,
|
||||
INodeExecutionData,
|
||||
INodeType,
|
||||
INodeTypeDescription,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import type { BaseLanguageModel } from 'langchain/base_language';
|
||||
import {
|
||||
AIMessagePromptTemplate,
|
||||
PromptTemplate,
|
||||
SystemMessagePromptTemplate,
|
||||
HumanMessagePromptTemplate,
|
||||
ChatPromptTemplate,
|
||||
} from 'langchain/prompts';
|
||||
import type { BaseOutputParser } from 'langchain/schema/output_parser';
|
||||
import { CombiningOutputParser } from 'langchain/output_parsers';
|
||||
import { LLMChain } from 'langchain/chains';
|
||||
import { BaseChatModel } from 'langchain/chat_models/base';
|
||||
import { HumanMessage } from 'langchain/schema';
|
||||
import { getTemplateNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
/** One entry of the node's "Chat Messages" fixed collection. */
interface MessagesTemplate {
	// LangChain message-class name (lc_name of the System/AI/Human template).
	type: string;
	// Raw message text; used as a prompt template for text messages.
	message: string;
	// How the message content is provided.
	messageType: 'text' | 'imageBinary' | 'imageUrl';
	// Binary property holding the image ('data' is assumed downstream if unset).
	binaryImageDataKey?: string;
	// Image URL, used when messageType is 'imageUrl'.
	imageUrl?: string;
	// OpenAI vision detail level; 'auto' is treated as "unspecified" downstream.
	imageDetail?: 'auto' | 'low' | 'high';
}
|
||||
|
||||
/**
 * Builds a HumanMessage carrying image content, either from a URL or from
 * the item's binary data (embedded as a base64 data URI).
 *
 * @throws NodeOperationError for non-image message types or missing binary.
 */
async function getImageMessage(
	context: IExecuteFunctions,
	itemIndex: number,
	message: MessagesTemplate,
) {
	if (message.messageType !== 'imageBinary' && message.messageType !== 'imageUrl') {
		// eslint-disable-next-line n8n-nodes-base/node-execute-block-wrong-error-thrown
		throw new NodeOperationError(
			context.getNode(),
			'Invalid message type. Only imageBinary and imageUrl are supported',
		);
	}
	// 'auto' maps to undefined so the API applies its own default.
	const detail = message.imageDetail === 'auto' ? undefined : message.imageDetail;
	if (message.messageType === 'imageUrl' && message.imageUrl) {
		return new HumanMessage({
			content: [
				{
					type: 'image_url',
					image_url: {
						url: message.imageUrl,
						detail,
					},
				},
			],
		});
	}

	// Binary path: read the configured binary property (default 'data').
	const binaryDataKey = message.binaryImageDataKey ?? 'data';
	const inputData = context.getInputData()[itemIndex];
	const binaryData = inputData.binary?.[binaryDataKey] as IBinaryData;

	if (!binaryData) {
		throw new NodeOperationError(context.getNode(), 'No binary data set.');
	}

	const bufferData = await context.helpers.getBinaryDataBuffer(itemIndex, binaryDataKey);
	// NOTE(review): the data URI hard-codes image/jpeg regardless of the
	// binary's actual MIME type — confirm whether binaryData.mimeType should
	// be used instead.
	return new HumanMessage({
		content: [
			{
				type: 'image_url',
				image_url: {
					url: `data:image/jpeg;base64,${bufferData.toString('base64')}`,
					detail,
				},
			},
		],
	});
}
|
||||
|
||||
/**
 * Builds the prompt template for the chain. For chat models the configured
 * messages are converted into message prompt templates (with image support
 * for human messages) and the user query is appended as the final human
 * message; for completion models a plain PromptTemplate is returned.
 *
 * @param formatInstructions - Optional output-parser instructions appended
 *   after the query.
 */
async function getChainPromptTemplate(
	context: IExecuteFunctions,
	itemIndex: number,
	llm: BaseLanguageModel | BaseChatModel,
	messages?: MessagesTemplate[],
	formatInstructions?: string,
) {
	const queryTemplate = new PromptTemplate({
		template: `{query}${formatInstructions ? '\n{formatInstructions}' : ''}`,
		inputVariables: ['query'],
		partialVariables: formatInstructions ? { formatInstructions } : undefined,
	});

	if (llm instanceof BaseChatModel) {
		const parsedMessages = await Promise.all(
			(messages ?? []).map(async (message) => {
				// Resolve the LangChain message class by its lc_name tag.
				const messageClass = [
					SystemMessagePromptTemplate,
					AIMessagePromptTemplate,
					HumanMessagePromptTemplate,
				].find((m) => m.lc_name() === message.type);

				if (!messageClass) {
					// eslint-disable-next-line n8n-nodes-base/node-execute-block-wrong-error-thrown
					throw new Error(`Invalid message type "${message.type}"`);
				}

				// Human messages may carry image content instead of text.
				if (messageClass === HumanMessagePromptTemplate && message.messageType !== 'text') {
					const test = await getImageMessage(context, itemIndex, message);
					return test;
				}

				const res = messageClass.fromTemplate(
					// Since we're using the message as template, we need to escape any curly braces
					// so LangChain doesn't try to parse them as variables
					(message.message || '').replace(/[{}]/g, (match) => match + match),
				);
				return res;
			}),
		);

		// The user's query always goes last.
		parsedMessages.push(new HumanMessagePromptTemplate(queryTemplate));
		return ChatPromptTemplate.fromMessages(parsedMessages);
	}

	return queryTemplate;
}
|
||||
|
||||
/**
 * Runs a plain LLMChain (no output parsers) with the given prompt and query,
 * honoring n8n's execution-cancel signal.
 *
 * @returns The chain response normalized to an array.
 */
async function createSimpleLLMChain(
	context: IExecuteFunctions,
	llm: BaseLanguageModel,
	query: string,
	prompt: ChatPromptTemplate | PromptTemplate,
): Promise<string[]> {
	const chain = new LLMChain({
		llm,
		prompt,
	});
	// NOTE(review): LLMChain.call resolves to a ChainValues record, not a
	// string[]; the cast may be masking the actual response shape — verify
	// how callers consume this value.
	const response = (await chain.call({
		query,
		signal: context.getExecutionCancelSignal(),
	})) as string[];

	return Array.isArray(response) ? response : [response];
}
|
||||
|
||||
/**
 * Executes the LLM chain for one item: builds the prompt template, then
 * either runs a simple LLMChain (no output parsers) or pipes the prompt
 * through the model into a (possibly combined) output parser.
 *
 * @returns The parsed response(s), always as an array.
 */
async function getChain(
	context: IExecuteFunctions,
	itemIndex: number,
	query: string,
	llm: BaseLanguageModel,
	outputParsers: BaseOutputParser[],
	messages?: MessagesTemplate[],
): Promise<unknown[]> {
	const chatTemplate: ChatPromptTemplate | PromptTemplate = await getChainPromptTemplate(
		context,
		itemIndex,
		llm,
		messages,
	);

	// If there are no output parsers, create a simple LLM chain and execute the query
	if (!outputParsers.length) {
		return createSimpleLLMChain(context, llm, query, chatTemplate);
	}

	// If there's only one output parser, use it; otherwise, create a combined output parser
	const combinedOutputParser =
		outputParsers.length === 1 ? outputParsers[0] : new CombiningOutputParser(...outputParsers);

	const formatInstructions = combinedOutputParser.getFormatInstructions();

	// Create a prompt template incorporating the format instructions and query
	const prompt = await getChainPromptTemplate(
		context,
		itemIndex,
		llm,
		messages,
		formatInstructions,
	);

	// LCEL pipeline: prompt -> model -> parser.
	const chain = prompt.pipe(llm).pipe(combinedOutputParser);

	const response = (await chain.invoke({ query })) as string | string[];

	return Array.isArray(response) ? response : [response];
}
|
||||
|
||||
/**
 * Basic LLM Chain node.
 *
 * Prompts a connected language model with a user-configured prompt (and,
 * for chat models, an optional list of chat messages) and returns the model
 * output. When output parsers are connected, the response is run through
 * them (combined via CombiningOutputParser when there is more than one) by
 * the sibling `getChain` helper.
 */
export class ChainLlm implements INodeType {
  description: INodeTypeDescription = {
    displayName: 'Basic LLM Chain',
    name: 'chainLlm',
    icon: 'fa:link',
    group: ['transform'],
    version: [1, 1.1],
    description: 'A simple chain to prompt a large language model',
    defaults: {
      name: 'Basic LLM Chain',
      color: '#909298',
    },
    codex: {
      alias: ['LangChain'],
      categories: ['AI'],
      subcategories: {
        AI: ['Chains'],
      },
      resources: {
        primaryDocumentation: [
          {
            url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.chainllm/',
          },
        ],
      },
    },
    // eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
    inputs: [
      NodeConnectionType.Main,
      {
        displayName: 'Model',
        maxConnections: 1,
        type: NodeConnectionType.AiLanguageModel,
        required: true,
      },
      {
        displayName: 'Output Parser',
        type: NodeConnectionType.AiOutputParser,
        required: false,
      },
    ],
    outputs: [NodeConnectionType.Main],
    credentials: [],
    properties: [
      getTemplateNoticeField(1951),
      // The 'prompt' parameter is declared twice with different defaults;
      // the '@version' display option selects the one matching the node version.
      {
        displayName: 'Prompt',
        name: 'prompt',
        type: 'string',
        required: true,
        default: '={{ $json.input }}',
        displayOptions: {
          show: {
            '@version': [1],
          },
        },
      },
      {
        displayName: 'Prompt',
        name: 'prompt',
        type: 'string',
        required: true,
        default: '={{ $json.chat_input }}',
        displayOptions: {
          show: {
            '@version': [1.1],
          },
        },
      },
      // Optional chat-style messages; only meaningful when the connected
      // model is a chat model.
      {
        displayName: 'Chat Messages (if Using a Chat Model)',
        name: 'messages',
        type: 'fixedCollection',
        typeOptions: {
          multipleValues: true,
        },
        default: {},
        placeholder: 'Add prompt',
        options: [
          {
            name: 'messageValues',
            displayName: 'Prompt',
            values: [
              // Values map to the LangChain message prompt-template class
              // names (via lc_name()) consumed downstream by getChain.
              {
                displayName: 'Type Name or ID',
                name: 'type',
                type: 'options',
                options: [
                  {
                    name: 'AI',
                    value: AIMessagePromptTemplate.lc_name(),
                  },
                  {
                    name: 'System',
                    value: SystemMessagePromptTemplate.lc_name(),
                  },
                  {
                    name: 'User',
                    value: HumanMessagePromptTemplate.lc_name(),
                  },
                ],
                default: SystemMessagePromptTemplate.lc_name(),
              },
              // Only user ("Human") messages may carry image content.
              {
                displayName: 'Message Type',
                name: 'messageType',
                type: 'options',
                displayOptions: {
                  show: {
                    type: [HumanMessagePromptTemplate.lc_name()],
                  },
                },
                options: [
                  {
                    name: 'Text',
                    value: 'text',
                    description: 'Simple text message',
                  },
                  {
                    name: 'Image (Binary)',
                    value: 'imageBinary',
                    description: 'Process the binary input from the previous node',
                  },
                  {
                    name: 'Image (URL)',
                    value: 'imageUrl',
                    description: 'Process the image from the specified URL',
                  },
                ],
                default: 'text',
              },
              {
                displayName: 'Image Data Field Name',
                name: 'binaryImageDataKey',
                type: 'string',
                default: 'data',
                required: true,
                description:
                  'The name of the field in the chain’s input that contains the binary image file to be processed',
                displayOptions: {
                  show: {
                    messageType: ['imageBinary'],
                  },
                },
              },
              {
                displayName: 'Image URL',
                name: 'imageUrl',
                type: 'string',
                default: '',
                required: true,
                description: 'URL to the image to be processed',
                displayOptions: {
                  show: {
                    messageType: ['imageUrl'],
                  },
                },
              },
              // Maps to the OpenAI vision "detail" setting — presumably;
              // TODO confirm against the model wrapper that consumes it.
              {
                displayName: 'Image Details',
                description:
                  'Control how the model processes the image and generates its textual understanding',
                name: 'imageDetail',
                type: 'options',
                displayOptions: {
                  show: {
                    type: [HumanMessagePromptTemplate.lc_name()],
                    messageType: ['imageBinary', 'imageUrl'],
                  },
                },
                options: [
                  {
                    name: 'Auto',
                    value: 'auto',
                    description:
                      'Model will use the auto setting which will look at the image input size and decide if it should use the low or high setting',
                  },
                  {
                    name: 'Low',
                    value: 'low',
                    description:
                      'The model will receive a low-res 512px x 512px version of the image, and represent the image with a budget of 65 tokens. This allows the API to return faster responses and consume fewer input tokens for use cases that do not require high detail.',
                  },
                  {
                    name: 'High',
                    value: 'high',
                    description:
                      'Allows the model to see the low res image and then creates detailed crops of input images as 512px squares based on the input image size. Each of the detailed crops uses twice the token budget (65 tokens) for a total of 129 tokens.',
                  },
                ],
                default: 'auto',
              },

              // Plain text body — hidden when an image message type is selected.
              {
                displayName: 'Message',
                name: 'message',
                type: 'string',
                required: true,
                displayOptions: {
                  hide: {
                    messageType: ['imageBinary', 'imageUrl'],
                  },
                },
                default: '',
              },
            ],
          },
        ],
      },
    ],
  };

  /**
   * Runs the chain once per input item and flattens every model response
   * into the output items.
   */
  async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
    this.logger.verbose('Executing LLM Chain');
    const items = this.getInputData();

    const returnData: INodeExecutionData[] = [];
    const llm = (await this.getInputConnectionData(
      NodeConnectionType.AiLanguageModel,
      0,
    )) as BaseLanguageModel;

    const outputParsers = (await this.getInputConnectionData(
      NodeConnectionType.AiOutputParser,
      0,
    )) as BaseOutputParser[];

    for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
      const prompt = this.getNodeParameter('prompt', itemIndex) as string;
      const messages = this.getNodeParameter(
        'messages.messageValues',
        itemIndex,
        [],
      ) as MessagesTemplate[];

      if (prompt === undefined) {
        throw new NodeOperationError(this.getNode(), 'The ‘prompt’ parameter is empty.');
      }

      const responses = await getChain(this, itemIndex, prompt, llm, outputParsers, messages);

      // Normalize each response into an IDataObject, depending on what the
      // chain/parsers produced (string, array, object, or other primitive).
      responses.forEach((response) => {
        let data: IDataObject;
        if (typeof response === 'string') {
          data = {
            response: {
              text: response.trim(),
            },
          };
        } else if (Array.isArray(response)) {
          data = {
            data: response,
          };
        } else if (response instanceof Object) {
          data = response as IDataObject;
        } else {
          data = {
            response: {
              text: response,
            },
          };
        }

        returnData.push({
          json: data,
        });
      });
    }

    return [returnData];
  }
}
|
||||
@@ -0,0 +1,119 @@
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeExecutionData,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
NodeOperationError,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import { RetrievalQAChain } from 'langchain/chains';
|
||||
import type { BaseLanguageModel } from 'langchain/dist/base_language';
|
||||
import type { BaseRetriever } from 'langchain/schema/retriever';
|
||||
import { getTemplateNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
export class ChainRetrievalQa implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Question and Answer Chain',
|
||||
name: 'chainRetrievalQa',
|
||||
icon: 'fa:link',
|
||||
group: ['transform'],
|
||||
version: [1, 1.1],
|
||||
description: 'Answer questions about retrieved documents',
|
||||
defaults: {
|
||||
name: 'Question and Answer Chain',
|
||||
color: '#909298',
|
||||
},
|
||||
codex: {
|
||||
alias: ['LangChain'],
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Chains'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.chainretrievalqa/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [
|
||||
NodeConnectionType.Main,
|
||||
{
|
||||
displayName: 'Model',
|
||||
maxConnections: 1,
|
||||
type: NodeConnectionType.AiLanguageModel,
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
displayName: 'Retriever',
|
||||
maxConnections: 1,
|
||||
type: NodeConnectionType.AiRetriever,
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
outputs: [NodeConnectionType.Main],
|
||||
credentials: [],
|
||||
properties: [
|
||||
getTemplateNoticeField(1960),
|
||||
{
|
||||
displayName: 'Query',
|
||||
name: 'query',
|
||||
type: 'string',
|
||||
required: true,
|
||||
default: '={{ $json.input }}',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'@version': [1],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Query',
|
||||
name: 'query',
|
||||
type: 'string',
|
||||
required: true,
|
||||
default: '={{ $json.chat_input }}',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'@version': [1.1],
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
|
||||
this.logger.verbose('Executing Retrieval QA Chain');
|
||||
|
||||
const model = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiLanguageModel,
|
||||
0,
|
||||
)) as BaseLanguageModel;
|
||||
|
||||
const retriever = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiRetriever,
|
||||
0,
|
||||
)) as BaseRetriever;
|
||||
|
||||
const items = this.getInputData();
|
||||
const chain = RetrievalQAChain.fromLLM(model, retriever);
|
||||
|
||||
const returnData: INodeExecutionData[] = [];
|
||||
|
||||
// Run for each item
|
||||
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
|
||||
const query = this.getNodeParameter('query', itemIndex) as string;
|
||||
|
||||
if (query === undefined) {
|
||||
throw new NodeOperationError(this.getNode(), 'The ‘query‘ parameter is empty.');
|
||||
}
|
||||
|
||||
const response = await chain.call({ query });
|
||||
returnData.push({ json: { response } });
|
||||
}
|
||||
return this.prepareOutputData(returnData);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,277 @@
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeExecutionData,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import type { SummarizationChainParams } from 'langchain/chains';
|
||||
import { loadSummarizationChain } from 'langchain/chains';
|
||||
import type { BaseLanguageModel } from 'langchain/dist/base_language';
|
||||
import type { Document } from 'langchain/document';
|
||||
import { PromptTemplate } from 'langchain/prompts';
|
||||
import { N8nJsonLoader } from '../../../utils/N8nJsonLoader';
|
||||
import { N8nBinaryLoader } from '../../../utils/N8nBinaryLoader';
|
||||
import { getTemplateNoticeField } from '../../../utils/sharedFields';
|
||||
import { REFINE_PROMPT_TEMPLATE, DEFAULT_PROMPT_TEMPLATE } from './prompt';
|
||||
|
||||
export class ChainSummarization implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Summarization Chain',
|
||||
name: 'chainSummarization',
|
||||
icon: 'fa:link',
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'Transforms text into a concise summary',
|
||||
|
||||
defaults: {
|
||||
name: 'Summarization Chain',
|
||||
color: '#909298',
|
||||
},
|
||||
codex: {
|
||||
alias: ['LangChain'],
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Chains'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.chainsummarization/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [
|
||||
NodeConnectionType.Main,
|
||||
{
|
||||
displayName: 'Model',
|
||||
maxConnections: 1,
|
||||
type: NodeConnectionType.AiLanguageModel,
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
displayName: 'Document',
|
||||
maxConnections: 1,
|
||||
type: NodeConnectionType.AiDocument,
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
outputs: [NodeConnectionType.Main],
|
||||
credentials: [],
|
||||
properties: [
|
||||
getTemplateNoticeField(1951),
|
||||
{
|
||||
displayName: 'Type',
|
||||
name: 'type',
|
||||
type: 'options',
|
||||
description: 'The type of summarization to run',
|
||||
default: 'map_reduce',
|
||||
options: [
|
||||
{
|
||||
name: 'Map Reduce (Recommended)',
|
||||
value: 'map_reduce',
|
||||
description:
|
||||
'Summarize each document (or chunk) individually, then summarize those summaries',
|
||||
},
|
||||
{
|
||||
name: 'Refine',
|
||||
value: 'refine',
|
||||
description:
|
||||
'Summarize the first document (or chunk). Then update that summary based on the next document (or chunk), and repeat.',
|
||||
},
|
||||
{
|
||||
name: 'Stuff',
|
||||
value: 'stuff',
|
||||
description: 'Pass all documents (or chunks) at once. Ideal for small datasets.',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
type: 'collection',
|
||||
default: {},
|
||||
placeholder: 'Add Option',
|
||||
options: [
|
||||
{
|
||||
displayName: 'Final Prompt to Combine',
|
||||
name: 'combineMapPrompt',
|
||||
type: 'string',
|
||||
hint: 'The prompt to combine individual summaries',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/type': ['map_reduce'],
|
||||
},
|
||||
},
|
||||
default: DEFAULT_PROMPT_TEMPLATE,
|
||||
typeOptions: {
|
||||
rows: 6,
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Individual Summary Prompt',
|
||||
name: 'prompt',
|
||||
type: 'string',
|
||||
default: DEFAULT_PROMPT_TEMPLATE,
|
||||
hint: 'The prompt to summarize an individual document (or chunk)',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/type': ['map_reduce'],
|
||||
},
|
||||
},
|
||||
typeOptions: {
|
||||
rows: 6,
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Prompt',
|
||||
name: 'prompt',
|
||||
type: 'string',
|
||||
default: DEFAULT_PROMPT_TEMPLATE,
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/type': ['stuff'],
|
||||
},
|
||||
},
|
||||
typeOptions: {
|
||||
rows: 6,
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Subsequent (Refine) Prompt',
|
||||
name: 'refinePrompt',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/type': ['refine'],
|
||||
},
|
||||
},
|
||||
default: REFINE_PROMPT_TEMPLATE,
|
||||
hint: 'The prompt to refine the summary based on the next document (or chunk)',
|
||||
typeOptions: {
|
||||
rows: 6,
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Initial Prompt',
|
||||
name: 'refineQuestionPrompt',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/type': ['refine'],
|
||||
},
|
||||
},
|
||||
default: DEFAULT_PROMPT_TEMPLATE,
|
||||
hint: 'The prompt for the first document (or chunk)',
|
||||
typeOptions: {
|
||||
rows: 6,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
|
||||
this.logger.verbose('Executing Vector Store QA Chain');
|
||||
const type = this.getNodeParameter('type', 0) as 'map_reduce' | 'stuff' | 'refine';
|
||||
|
||||
const model = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiLanguageModel,
|
||||
0,
|
||||
)) as BaseLanguageModel;
|
||||
|
||||
const documentInput = (await this.getInputConnectionData(NodeConnectionType.AiDocument, 0)) as
|
||||
| N8nJsonLoader
|
||||
| Array<Document<Record<string, unknown>>>;
|
||||
|
||||
const options = this.getNodeParameter('options', 0, {}) as {
|
||||
prompt?: string;
|
||||
refineQuestionPrompt?: string;
|
||||
refinePrompt?: string;
|
||||
combineMapPrompt?: string;
|
||||
};
|
||||
|
||||
const chainArgs: SummarizationChainParams = {
|
||||
type,
|
||||
};
|
||||
|
||||
// Map reduce prompt override
|
||||
if (type === 'map_reduce') {
|
||||
const mapReduceArgs = chainArgs as SummarizationChainParams & {
|
||||
type: 'map_reduce';
|
||||
};
|
||||
if (options.combineMapPrompt) {
|
||||
mapReduceArgs.combineMapPrompt = new PromptTemplate({
|
||||
template: options.combineMapPrompt,
|
||||
inputVariables: ['text'],
|
||||
});
|
||||
}
|
||||
if (options.prompt) {
|
||||
mapReduceArgs.combinePrompt = new PromptTemplate({
|
||||
template: options.prompt,
|
||||
inputVariables: ['text'],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Stuff prompt override
|
||||
if (type === 'stuff') {
|
||||
const stuffArgs = chainArgs as SummarizationChainParams & {
|
||||
type: 'stuff';
|
||||
};
|
||||
if (options.prompt) {
|
||||
stuffArgs.prompt = new PromptTemplate({
|
||||
template: options.prompt,
|
||||
inputVariables: ['text'],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Refine prompt override
|
||||
if (type === 'refine') {
|
||||
const refineArgs = chainArgs as SummarizationChainParams & {
|
||||
type: 'refine';
|
||||
};
|
||||
|
||||
if (options.refinePrompt) {
|
||||
refineArgs.refinePrompt = new PromptTemplate({
|
||||
template: options.refinePrompt,
|
||||
inputVariables: ['existing_answer', 'text'],
|
||||
});
|
||||
}
|
||||
|
||||
if (options.refineQuestionPrompt) {
|
||||
refineArgs.questionPrompt = new PromptTemplate({
|
||||
template: options.refineQuestionPrompt,
|
||||
inputVariables: ['text'],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const chain = loadSummarizationChain(model, chainArgs);
|
||||
|
||||
const items = this.getInputData();
|
||||
const returnData: INodeExecutionData[] = [];
|
||||
|
||||
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
|
||||
let processedDocuments: Document[];
|
||||
if (documentInput instanceof N8nJsonLoader || documentInput instanceof N8nBinaryLoader) {
|
||||
processedDocuments = await documentInput.processItem(items[itemIndex], itemIndex);
|
||||
} else {
|
||||
processedDocuments = documentInput;
|
||||
}
|
||||
|
||||
const response = await chain.call({
|
||||
input_documents: processedDocuments,
|
||||
});
|
||||
|
||||
returnData.push({ json: { response } });
|
||||
}
|
||||
|
||||
return this.prepareOutputData(returnData);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,20 @@
|
||||
// Prompt used by the 'refine' summarization strategy: takes the summary so
// far ({existing_answer}) plus the next chunk ({text}) and asks the model to
// refine the summary. The exact wording is part of the node's behavior —
// do not reformat the template body.
export const REFINE_PROMPT_TEMPLATE = `Your job is to produce a final summary
We have provided an existing summary up to a certain point: "{existing_answer}"
We have the opportunity to refine the existing summary
(only if needed) with some more context below.
------------
"{text}"
------------

Given the new context, refine the original summary
If the context isn't useful, return the original summary.

REFINED SUMMARY:`;

// Default prompt for the 'stuff' and 'map_reduce' strategies and for the
// first chunk of 'refine'; expects a single {text} input variable.
export const DEFAULT_PROMPT_TEMPLATE = `Write a concise summary of the following:


"{text}"


CONCISE SUMMARY:`;
|
||||
359
packages/@n8n/nodes-langchain/nodes/code/Code.node.ts
Normal file
@@ -0,0 +1,359 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeOperationError,
|
||||
type IExecuteFunctions,
|
||||
type INodeExecutionData,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type INodeOutputConfiguration,
|
||||
type SupplyData,
|
||||
NodeConnectionType,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
// TODO: Add support for execute function. Got already started but got commented out
|
||||
|
||||
import { getSandboxContext } from 'n8n-nodes-base/dist/nodes/Code/Sandbox';
|
||||
import { JavaScriptSandbox } from 'n8n-nodes-base/dist/nodes/Code/JavaScriptSandbox';
|
||||
import { standardizeOutput } from 'n8n-nodes-base/dist/nodes/Code/utils';
|
||||
import type { Tool } from 'langchain/tools';
|
||||
import { makeResolverFromLegacyOptions } from '@n8n/vm2';
|
||||
import { logWrapper } from '../../utils/logWrapper';
|
||||
|
||||
// Sandbox allow-lists come from the same environment variables the regular
// Code node honors (comma-separated module names).
const { NODE_FUNCTION_ALLOW_BUILTIN: builtIn, NODE_FUNCTION_ALLOW_EXTERNAL: external } =
  process.env;

// TODO: Replace
// Maps each connection type to the human-readable label shown on the node's
// dynamic input/output connectors (also serialized into the inputs/outputs
// expressions below).
const connectorTypes = {
  [NodeConnectionType.AiChain]: 'Chain',
  [NodeConnectionType.AiDocument]: 'Document',
  [NodeConnectionType.AiEmbedding]: 'Embedding',
  [NodeConnectionType.AiLanguageModel]: 'Language Model',
  [NodeConnectionType.AiMemory]: 'Memory',
  [NodeConnectionType.AiOutputParser]: 'Output Parser',
  [NodeConnectionType.AiTextSplitter]: 'Text Splitter',
  [NodeConnectionType.AiTool]: 'Tool',
  [NodeConnectionType.AiVectorStore]: 'Vector Store',
  [NodeConnectionType.Main]: 'Main',
};

// Default snippet shown in the "Execute" code editor.
const defaultCodeExecute = `const { PromptTemplate } = require('langchain/prompts');

const query = 'Tell me a joke';
const prompt = PromptTemplate.fromTemplate(query);
const llm = await this.getInputConnectionData('ai_languageModel', 0);
let chain = prompt.pipe(llm);
const output = await chain.invoke();
return [ {json: { output } } ];`;

// Default snippet shown in the "Supply Data" code editor.
const defaultCodeSupplyData = `const { WikipediaQueryRun } = require('langchain/tools');
return new WikipediaQueryRun();`;

// Module resolver for the vm2 sandbox: always allows 'langchain', plus
// whatever NODE_FUNCTION_ALLOW_EXTERNAL / NODE_FUNCTION_ALLOW_BUILTIN permit.
export const vmResolver = makeResolverFromLegacyOptions({
  external: {
    modules: external ? ['langchain', ...external.split(',')] : ['langchain'],
    transitive: false,
  },
  builtin: builtIn?.split(',') ?? [],
});
|
||||
|
||||
function getSandbox(
|
||||
this: IExecuteFunctions,
|
||||
code: string,
|
||||
options?: { addItems?: boolean; itemIndex?: number },
|
||||
) {
|
||||
const itemIndex = options?.itemIndex ?? 0;
|
||||
const node = this.getNode();
|
||||
const workflowMode = this.getMode();
|
||||
|
||||
const context = getSandboxContext.call(this, itemIndex);
|
||||
// eslint-disable-next-line @typescript-eslint/unbound-method
|
||||
context.addInputData = this.addInputData;
|
||||
// eslint-disable-next-line @typescript-eslint/unbound-method
|
||||
context.addOutputData = this.addOutputData;
|
||||
// eslint-disable-next-line @typescript-eslint/unbound-method
|
||||
context.getInputConnectionData = this.getInputConnectionData;
|
||||
// eslint-disable-next-line @typescript-eslint/unbound-method
|
||||
context.getInputData = this.getInputData;
|
||||
// eslint-disable-next-line @typescript-eslint/unbound-method
|
||||
context.getNode = this.getNode;
|
||||
// eslint-disable-next-line @typescript-eslint/unbound-method
|
||||
context.getExecutionCancelSignal = this.getExecutionCancelSignal;
|
||||
// eslint-disable-next-line @typescript-eslint/unbound-method
|
||||
context.getNodeOutputs = this.getNodeOutputs;
|
||||
// eslint-disable-next-line @typescript-eslint/unbound-method
|
||||
context.logger = this.logger;
|
||||
|
||||
if (options?.addItems) {
|
||||
context.items = context.$input.all();
|
||||
}
|
||||
// eslint-disable-next-line @typescript-eslint/unbound-method
|
||||
|
||||
const sandbox = new JavaScriptSandbox(context, code, itemIndex, this.helpers, {
|
||||
resolver: vmResolver,
|
||||
});
|
||||
|
||||
sandbox.on(
|
||||
'output',
|
||||
workflowMode === 'manual'
|
||||
? this.sendMessageToUI.bind(this)
|
||||
: (...args: unknown[]) =>
|
||||
console.log(`[Workflow "${this.getWorkflow().id}"][Node "${node.name}"]`, ...args),
|
||||
);
|
||||
return sandbox;
|
||||
}
|
||||
|
||||
export class Code implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'LangChain Code',
|
||||
name: 'code',
|
||||
icon: 'fa:code',
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'LangChain Code Node',
|
||||
defaults: {
|
||||
name: 'LangChain Code',
|
||||
},
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Miscellaneous'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.code/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
inputs: `={{ ((values) => { const connectorTypes = ${JSON.stringify(
|
||||
connectorTypes,
|
||||
)}; return values.map(value => { return { type: value.type, required: value.required, maxConnections: value.maxConnections === -1 ? undefined : value.maxConnections, displayName: connectorTypes[value.type] !== 'Main' ? connectorTypes[value.type] : undefined } } ) })($parameter.inputs.input) }}`,
|
||||
outputs: `={{ ((values) => { const connectorTypes = ${JSON.stringify(
|
||||
connectorTypes,
|
||||
)}; return values.map(value => { return { type: value.type, displayName: connectorTypes[value.type] !== 'Main' ? connectorTypes[value.type] : undefined } } ) })($parameter.outputs.output) }}`,
|
||||
properties: [
|
||||
{
|
||||
displayName: 'Code',
|
||||
name: 'code',
|
||||
placeholder: 'Add Code',
|
||||
type: 'fixedCollection',
|
||||
noDataExpression: true,
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
name: 'execute',
|
||||
displayName: 'Execute',
|
||||
values: [
|
||||
{
|
||||
displayName: 'JavaScript - Execute',
|
||||
name: 'code',
|
||||
type: 'string',
|
||||
typeOptions: {
|
||||
editor: 'codeNodeEditor',
|
||||
editorLanguage: 'javaScript',
|
||||
},
|
||||
default: defaultCodeExecute,
|
||||
hint: 'This code will only run and return data if a "Main" input & output got created.',
|
||||
noDataExpression: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'supplyData',
|
||||
displayName: 'Supply Data',
|
||||
values: [
|
||||
{
|
||||
displayName: 'JavaScript - Supply Data',
|
||||
name: 'code',
|
||||
type: 'string',
|
||||
typeOptions: {
|
||||
editor: 'codeNodeEditor',
|
||||
editorLanguage: 'javaScript',
|
||||
},
|
||||
default: defaultCodeSupplyData,
|
||||
hint: 'This code will only run and return data if an output got created which is not "Main".',
|
||||
noDataExpression: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
|
||||
// TODO: Add links to docs which provide additional information regarding functionality
|
||||
{
|
||||
displayName:
|
||||
'You can import LangChain and use all available functionality. Debug by using <code>console.log()</code> statements and viewing their output in the browser console.',
|
||||
name: 'notice',
|
||||
type: 'notice',
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Inputs',
|
||||
name: 'inputs',
|
||||
placeholder: 'Add Input',
|
||||
type: 'fixedCollection',
|
||||
noDataExpression: true,
|
||||
typeOptions: {
|
||||
multipleValues: true,
|
||||
sortable: true,
|
||||
},
|
||||
description: 'The input to add',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
name: 'input',
|
||||
displayName: 'Input',
|
||||
values: [
|
||||
{
|
||||
displayName: 'Type',
|
||||
name: 'type',
|
||||
type: 'options',
|
||||
options: Object.keys(connectorTypes).map((key) => ({
|
||||
name: connectorTypes[key as keyof typeof connectorTypes],
|
||||
value: key,
|
||||
})),
|
||||
noDataExpression: true,
|
||||
default: '',
|
||||
required: true,
|
||||
description: 'The type of the input',
|
||||
},
|
||||
{
|
||||
displayName: 'Max Connections',
|
||||
name: 'maxConnections',
|
||||
type: 'number',
|
||||
noDataExpression: true,
|
||||
default: -1,
|
||||
required: true,
|
||||
description:
|
||||
'How many nodes of this type are allowed to be connected. Set it to -1 for unlimited.',
|
||||
},
|
||||
{
|
||||
displayName: 'Required',
|
||||
name: 'required',
|
||||
type: 'boolean',
|
||||
noDataExpression: true,
|
||||
default: false,
|
||||
required: true,
|
||||
description: 'Whether the input needs a connection',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
displayName: 'Outputs',
|
||||
name: 'outputs',
|
||||
placeholder: 'Add Output',
|
||||
type: 'fixedCollection',
|
||||
noDataExpression: true,
|
||||
typeOptions: {
|
||||
multipleValues: true,
|
||||
sortable: true,
|
||||
},
|
||||
description: 'The output to add',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
name: 'output',
|
||||
displayName: 'Output',
|
||||
values: [
|
||||
{
|
||||
displayName: 'Type',
|
||||
name: 'type',
|
||||
type: 'options',
|
||||
options: Object.keys(connectorTypes).map((key) => ({
|
||||
name: connectorTypes[key as keyof typeof connectorTypes],
|
||||
value: key,
|
||||
})),
|
||||
noDataExpression: true,
|
||||
default: '',
|
||||
required: true,
|
||||
description: 'The type of the input',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
|
||||
const itemIndex = 0;
|
||||
|
||||
const code = this.getNodeParameter('code', itemIndex) as { execute?: { code: string } };
|
||||
|
||||
if (!code.execute?.code) {
|
||||
throw new NodeOperationError(
|
||||
this.getNode(),
|
||||
`No code for "Execute" set on node "${this.getNode().name}`,
|
||||
{
|
||||
itemIndex,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
const sandbox = getSandbox.call(this, code.execute.code, { addItems: true, itemIndex });
|
||||
|
||||
const outputs = this.getNodeOutputs();
|
||||
const mainOutputs: INodeOutputConfiguration[] = outputs.filter(
|
||||
(output) => output.type === NodeConnectionType.Main,
|
||||
);
|
||||
|
||||
const options = { multiOutput: mainOutputs.length !== 1 };
|
||||
|
||||
let items: INodeExecutionData[] | INodeExecutionData[][];
|
||||
try {
|
||||
items = await sandbox.runCodeAllItems(options);
|
||||
} catch (error) {
|
||||
if (!this.continueOnFail()) throw error;
|
||||
items = [{ json: { error: (error as Error).message } }];
|
||||
if (options.multiOutput) {
|
||||
items = [items];
|
||||
}
|
||||
}
|
||||
|
||||
if (mainOutputs.length === 0) {
|
||||
throw new NodeOperationError(
|
||||
this.getNode(),
|
||||
'The node does not have a "Main" output set. Please add one.',
|
||||
{
|
||||
itemIndex,
|
||||
},
|
||||
);
|
||||
} else if (!options.multiOutput) {
|
||||
for (const item of items as INodeExecutionData[]) {
|
||||
standardizeOutput(item.json);
|
||||
}
|
||||
return [items as INodeExecutionData[]];
|
||||
} else {
|
||||
items.forEach((data) => {
|
||||
for (const item of data as INodeExecutionData[]) {
|
||||
standardizeOutput(item.json);
|
||||
}
|
||||
});
|
||||
return items as INodeExecutionData[][];
|
||||
}
|
||||
}
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const code = this.getNodeParameter('code', itemIndex) as { supplyData?: { code: string } };
|
||||
|
||||
if (!code.supplyData?.code) {
|
||||
throw new NodeOperationError(
|
||||
this.getNode(),
|
||||
`No code for "Supply Data" set on node "${this.getNode().name}`,
|
||||
{
|
||||
itemIndex,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
const sandbox = getSandbox.call(this, code.supplyData.code, { itemIndex });
|
||||
const response = (await sandbox.runCode()) as Tool;
|
||||
|
||||
return {
|
||||
response: logWrapper(response, this),
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,186 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { N8nBinaryLoader } from '../../../utils/N8nBinaryLoader';
|
||||
import { getConnectionHintNoticeField, metadataFilterField } from '../../../utils/sharedFields';
|
||||
|
||||
// Dependencies needed underneath the hood for the loaders. We add them
|
||||
// here only to track where what dependency is sued
|
||||
// import 'd3-dsv'; // for csv
|
||||
import 'mammoth'; // for docx
|
||||
import '@gxl/epub-parser'; // for epub
|
||||
import 'pdf-parse'; // for pdf
|
||||
|
||||
/**
 * Deprecated document-loader node that reads a file from a binary property of
 * the incoming item and emits LangChain documents. Hidden from the node panel;
 * superseded by `DocumentDefaultDataLoader`.
 */
export class DocumentBinaryInputLoader implements INodeType {
	description: INodeTypeDescription = {
		// This node is deprecated and will be removed in the future.
		// The functionality was merged with the `DocumentJSONInputLoader` to `DocumentDefaultDataLoader`
		hidden: true,
		displayName: 'Binary Input Loader',
		name: 'documentBinaryInputLoader',
		icon: 'file:binary.svg',
		group: ['transform'],
		version: 1,
		description: 'Use binary data from a previous step in the workflow',
		defaults: {
			name: 'Binary Input Loader',
		},
		codex: {
			categories: ['AI'],
			subcategories: {
				AI: ['Document Loaders'],
			},
			resources: {
				primaryDocumentation: [
					{
						url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.documentbinaryinputloader/',
					},
				],
			},
		},
		// A text splitter is a required sub-node input for this loader.
		// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
		inputs: [
			{
				displayName: 'Text Splitter',
				maxConnections: 1,
				type: NodeConnectionType.AiTextSplitter,
				required: true,
			},
		],
		// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
		outputs: [NodeConnectionType.AiDocument],
		outputNames: ['Document'],
		properties: [
			getConnectionHintNoticeField([NodeConnectionType.AiVectorStore]),
			{
				// Which format-specific loader parses the binary buffer.
				displayName: 'Loader Type',
				name: 'loader',
				type: 'options',
				default: 'jsonLoader',
				required: true,
				options: [
					{
						name: 'CSV Loader',
						value: 'csvLoader',
						description: 'Load CSV files',
					},
					{
						name: 'Docx Loader',
						value: 'docxLoader',
						description: 'Load Docx documents',
					},
					{
						name: 'EPub Loader',
						value: 'epubLoader',
						description: 'Load EPub files',
					},
					{
						name: 'JSON Loader',
						value: 'jsonLoader',
						description: 'Load JSON files',
					},
					{
						name: 'PDF Loader',
						value: 'pdfLoader',
						description: 'Load PDF documents',
					},
					{
						name: 'Text Loader',
						value: 'textLoader',
						description: 'Load plain text files',
					},
				],
			},
			{
				displayName: 'Binary Data Key',
				name: 'binaryDataKey',
				type: 'string',
				default: 'data',
				required: true,
				description: 'Name of the binary property from which to read the file buffer',
			},
			// PDF Only Fields
			{
				displayName: 'Split Pages',
				name: 'splitPages',
				type: 'boolean',
				default: true,
				displayOptions: {
					show: {
						loader: ['pdfLoader'],
					},
				},
			},
			// CSV Only Fields
			{
				displayName: 'Column',
				name: 'column',
				type: 'string',
				default: '',
				description: 'Column to extract from CSV',
				displayOptions: {
					show: {
						loader: ['csvLoader'],
					},
				},
			},
			{
				displayName: 'Separator',
				name: 'separator',
				type: 'string',
				description: 'Separator to use for CSV',
				default: ',',
				displayOptions: {
					show: {
						loader: ['csvLoader'],
					},
				},
			},
			// JSON Only Fields
			{
				displayName: 'Pointers',
				name: 'pointers',
				type: 'string',
				default: '',
				description: 'Pointers to extract from JSON, e.g. "/text" or "/text, /meta/title"',
				displayOptions: {
					show: {
						loader: ['jsonLoader'],
					},
				},
			},
			{
				displayName: 'Options',
				name: 'options',
				type: 'collection',
				placeholder: 'Add Option',
				default: {},
				options: [
					{
						...metadataFilterField,
						displayName: 'Metadata',
						description:
							'Metadata to add to each document. Could be used for filtering during retrieval',
						placeholder: 'Add property',
					},
				],
			},
		],
	};

	/**
	 * Hands a fresh N8nBinaryLoader to the connected node; the loader itself
	 * reads the node parameters above when it processes items.
	 */
	async supplyData(this: IExecuteFunctions): Promise<SupplyData> {
		this.logger.verbose('Supply Data for Binary Input Loader');
		const processor = new N8nBinaryLoader(this);

		return {
			response: logWrapper(processor, this),
		};
	}
}
|
||||
@@ -0,0 +1,3 @@
|
||||
<svg height="1024" width="768" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M0 960V64h576l192 192v704H0zM704 320L512 128H64v768h640V320zM320 512H128V256h192V512zM256 320h-64v128h64V320zM256 768h64v64H128v-64h64V640h-64v-64h128V768zM512 448h64v64H384v-64h64V320h-64v-64h128V448zM576 832H384V576h192V832zM512 640h-64v128h64V640z" fill="#7D7D87" />
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 355 B |
@@ -0,0 +1,270 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { N8nBinaryLoader } from '../../../utils/N8nBinaryLoader';
|
||||
import { metadataFilterField } from '../../../utils/sharedFields';
|
||||
|
||||
// Dependencies needed underneath the hood for the loaders. We add them
|
||||
// here only to track where each dependency is used
|
||||
// import 'd3-dsv'; // for csv
|
||||
import 'mammoth'; // for docx
|
||||
import '@gxl/epub-parser'; // for epub
|
||||
import 'pdf-parse'; // for pdf
|
||||
import { N8nJsonLoader } from '../../../utils/N8nJsonLoader';
|
||||
|
||||
/**
 * Document-loader node that turns either JSON or binary data from a previous
 * workflow step into LangChain documents. Replaces the deprecated
 * `DocumentBinaryInputLoader` and `DocumentJsonInputLoader` nodes.
 */
export class DocumentDefaultDataLoader implements INodeType {
	description: INodeTypeDescription = {
		displayName: 'Default Data Loader',
		name: 'documentDefaultDataLoader',
		icon: 'file:binary.svg',
		group: ['transform'],
		version: 1,
		description: 'Load data from previous step in the workflow',
		defaults: {
			name: 'Default Data Loader',
		},
		codex: {
			categories: ['AI'],
			subcategories: {
				AI: ['Document Loaders'],
			},
			resources: {
				primaryDocumentation: [
					{
						url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.documentdefaultdataloader/',
					},
				],
			},
		},
		// A text splitter is a required sub-node input for this loader.
		// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
		inputs: [
			{
				displayName: 'Text Splitter',
				maxConnections: 1,
				type: NodeConnectionType.AiTextSplitter,
				required: true,
			},
		],
		// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
		outputs: [NodeConnectionType.AiDocument],
		outputNames: ['Document'],
		properties: [
			{
				displayName:
					'This will load data from a previous step in the workflow. <a href="/templates/1962" target="_blank">Example</a>',
				name: 'notice',
				type: 'notice',
				default: '',
			},
			{
				// Top-level switch: decides whether the JSON or the binary
				// branch of the parameters below is shown and used.
				displayName: 'Type of Data',
				name: 'dataType',
				type: 'options',
				default: 'json',
				required: true,
				noDataExpression: true,
				options: [
					{
						name: 'JSON',
						value: 'json',
						description: 'Process JSON data from previous step in the workflow',
					},
					{
						name: 'Binary',
						value: 'binary',
						description: 'Process binary data from previous step in the workflow',
					},
				],
			},
			{
				displayName: 'Mode',
				name: 'jsonMode',
				type: 'options',
				default: 'allInputData',
				required: true,
				displayOptions: {
					show: {
						dataType: ['json'],
					},
				},
				options: [
					{
						name: 'Load All Input Data',
						value: 'allInputData',
						description: 'Use all JSON data that flows into the parent agent or chain',
					},
					{
						name: 'Load Specific Data',
						value: 'expressionData',
						description:
							'Load a subset of data, and/or data from any previous step in the workflow',
					},
				],
			},
			{
				// Binary branch only: which format-specific loader parses the buffer.
				displayName: 'Data Format',
				name: 'loader',
				type: 'options',
				default: 'auto',
				required: true,
				displayOptions: {
					show: {
						dataType: ['binary'],
					},
				},
				options: [
					{
						name: 'Automatically Detect by Mime Type',
						value: 'auto',
						description: 'Uses the mime type to detect the format',
					},
					{
						name: 'CSV',
						value: 'csvLoader',
						description: 'Load CSV files',
					},
					{
						name: 'Docx',
						value: 'docxLoader',
						description: 'Load Docx documents',
					},
					{
						name: 'EPub',
						value: 'epubLoader',
						description: 'Load EPub files',
					},
					{
						name: 'JSON',
						value: 'jsonLoader',
						description: 'Load JSON files',
					},
					{
						name: 'PDF',
						value: 'pdfLoader',
						description: 'Load PDF documents',
					},
					{
						name: 'Text',
						value: 'textLoader',
						description: 'Load plain text files',
					},
				],
			},
			{
				displayName: 'Data',
				name: 'jsonData',
				type: 'string',
				typeOptions: {
					rows: 6,
				},
				default: '',
				required: true,
				description: 'Drag and drop fields from the input pane, or use an expression',
				displayOptions: {
					show: {
						dataType: ['json'],
						jsonMode: ['expressionData'],
					},
				},
			},
			{
				displayName: 'Input Data Field Name',
				name: 'binaryDataKey',
				type: 'string',
				default: 'data',
				required: true,
				description:
					'The name of the field in the agent or chain’s input that contains the binary file to be processed',
				displayOptions: {
					show: {
						dataType: ['binary'],
					},
				},
			},
			{
				// Options live in a collection, hence the '/loader' root-path
				// references in the displayOptions below.
				displayName: 'Options',
				name: 'options',
				type: 'collection',
				placeholder: 'Add Option',
				default: {},
				options: [
					{
						displayName: 'JSON Pointers',
						name: 'pointers',
						type: 'string',
						default: '',
						description: 'Pointers to extract from JSON, e.g. "/text" or "/text, /meta/title"',
						displayOptions: {
							show: {
								'/loader': ['jsonLoader', 'auto'],
							},
						},
					},
					{
						displayName: 'CSV Separator',
						name: 'separator',
						type: 'string',
						description: 'Separator to use for CSV',
						default: ',',
						displayOptions: {
							show: {
								'/loader': ['csvLoader', 'auto'],
							},
						},
					},
					{
						displayName: 'CSV Column',
						name: 'column',
						type: 'string',
						default: '',
						description: 'Column to extract from CSV',
						displayOptions: {
							show: {
								'/loader': ['csvLoader', 'auto'],
							},
						},
					},
					{
						displayName: 'Split Pages in PDF',
						description: 'Whether to split PDF pages into separate documents',
						name: 'splitPages',
						type: 'boolean',
						default: true,
						displayOptions: {
							show: {
								'/loader': ['pdfLoader', 'auto'],
							},
						},
					},
					{
						...metadataFilterField,
						displayName: 'Metadata',
						description:
							'Metadata to add to each document. Could be used for filtering during retrieval',
						placeholder: 'Add property',
					},
				],
			},
		],
	};

	/**
	 * Hands the connected node a loader matching the selected data type.
	 * The 'options.' prefix tells the loader where to find its sub-options
	 * among this node's parameters.
	 *
	 * @param itemIndex - Index of the input item the dataType parameter is resolved for.
	 */
	async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
		const dataType = this.getNodeParameter('dataType', itemIndex, 'json') as 'json' | 'binary';

		const processor =
			dataType === 'binary'
				? new N8nBinaryLoader(this, 'options.')
				: new N8nJsonLoader(this, 'options.');

		return {
			response: logWrapper(processor, this),
		};
	}
}
|
||||
@@ -0,0 +1,3 @@
|
||||
<svg height="1024" width="768" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M0 960V64h576l192 192v704H0zM704 320L512 128H64v768h640V320zM320 512H128V256h192V512zM256 320h-64v128h64V320zM256 768h64v64H128v-64h64V640h-64v-64h128V768zM512 448h64v64H384v-64h64V320h-64v-64h128V448zM576 832H384V576h192V832zM512 640h-64v128h64V640z" fill="#7D7D87" />
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 355 B |
@@ -0,0 +1,125 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import { GithubRepoLoader } from 'langchain/document_loaders/web/github';
|
||||
import type { CharacterTextSplitter } from 'langchain/text_splitter';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
export class DocumentGithubLoader implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'GitHub Document Loader',
|
||||
name: 'documentGithubLoader',
|
||||
icon: 'file:github.svg',
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'Use GitHub data as input to this chain',
|
||||
defaults: {
|
||||
name: 'GitHub Document Loader',
|
||||
},
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Document Loaders'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.documentgithubloader/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
credentials: [
|
||||
{
|
||||
name: 'githubApi',
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [
|
||||
{
|
||||
displayName: 'Text Splitter',
|
||||
maxConnections: 1,
|
||||
type: NodeConnectionType.AiTextSplitter,
|
||||
},
|
||||
],
|
||||
inputNames: ['Text Splitter'],
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
|
||||
outputs: [NodeConnectionType.AiDocument],
|
||||
outputNames: ['Document'],
|
||||
properties: [
|
||||
getConnectionHintNoticeField([NodeConnectionType.AiVectorStore]),
|
||||
{
|
||||
displayName: 'Repository Link',
|
||||
name: 'repository',
|
||||
type: 'string',
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Branch',
|
||||
name: 'branch',
|
||||
type: 'string',
|
||||
default: 'main',
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'additionalOptions',
|
||||
type: 'collection',
|
||||
placeholder: 'Add Option',
|
||||
default: {},
|
||||
|
||||
options: [
|
||||
{
|
||||
displayName: 'Recursive',
|
||||
name: 'recursive',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
},
|
||||
{
|
||||
displayName: 'Ignore Paths',
|
||||
name: 'recursive',
|
||||
type: 'string',
|
||||
description: 'Comma-separated list of paths to ignore, e.g. "docs, src/tests',
|
||||
default: '',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
console.log('Supplying data for Github Document Loader');
|
||||
|
||||
const repository = this.getNodeParameter('repository', itemIndex) as string;
|
||||
const branch = this.getNodeParameter('branch', itemIndex) as string;
|
||||
const credentials = await this.getCredentials('githubApi');
|
||||
const { ignorePaths, recursive } = this.getNodeParameter('additionalOptions', 0) as {
|
||||
recursive: boolean;
|
||||
ignorePaths: string;
|
||||
};
|
||||
|
||||
const textSplitter = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiTextSplitter,
|
||||
0,
|
||||
)) as CharacterTextSplitter | undefined;
|
||||
|
||||
const docs = new GithubRepoLoader(repository, {
|
||||
branch,
|
||||
ignorePaths: (ignorePaths ?? '').split(',').map((p) => p.trim()),
|
||||
recursive,
|
||||
accessToken: (credentials.accessToken as string) || '',
|
||||
});
|
||||
|
||||
const loadedDocs = textSplitter ? await docs.loadAndSplit(textSplitter) : await docs.load();
|
||||
|
||||
return {
|
||||
response: logWrapper(loadedDocs, this),
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 148.744 150.744" fill="#fff" fill-rule="evenodd" stroke="#000" stroke-linecap="round" stroke-linejoin="round"><use xlink:href="#a" x=".872" y=".872"/><symbol id="a" overflow="visible"><path d="M73.256 0C32.801 0 0 34.029 0 76.001c0 33.586 20.988 62.069 50.1 72.115 3.663.698 4.999-1.652 4.999-3.656l-.105-14.149c-20.372 4.593-24.677-8.961-24.677-8.961-3.335-8.777-8.133-11.114-8.133-11.114-6.658-4.713.523-4.622.523-4.622 7.355.529 11.227 7.831 11.227 7.831 6.537 11.616 17.151 8.257 21.319 6.309.666-4.901 2.564-8.257 4.65-10.151-16.261-1.919-33.366-8.442-33.366-37.565 0-8.302 2.857-15.075 7.535-20.396-.747-1.929-3.269-9.663.724-20.123 0 0 6.143-2.041 20.145 7.793 5.84-1.692 12.105-2.529 18.314-2.555 6.223.028 12.492.872 18.34 2.564 13.978-9.844 20.128-7.793 20.128-7.793 4.006 10.47 1.483 18.192.733 20.114 4.695 5.32 7.53 12.093 7.53 20.396 0 29.198-17.133 35.627-33.453 37.509 2.639 2.355 4.971 6.977 4.971 14.065l-.098 20.855c0 2.023 1.333 4.388 5.044 3.663 29.091-10.078 50.062-38.561 50.062-72.129C146.512 34.029 113.71 0 73.256 0z" fill="#7D7D87" stroke="none"/></symbol></svg>
|
||||
|
After Width: | Height: | Size: 1.2 KiB |
@@ -0,0 +1,89 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { N8nJsonLoader } from '../../../utils/N8nJsonLoader';
|
||||
import { getConnectionHintNoticeField, metadataFilterField } from '../../../utils/sharedFields';
|
||||
|
||||
/**
 * Deprecated document-loader node that turns JSON data from a previous
 * workflow step into LangChain documents. Hidden from the node panel;
 * superseded by `DocumentDefaultDataLoader`.
 */
export class DocumentJsonInputLoader implements INodeType {
	description: INodeTypeDescription = {
		// This node is deprecated and will be removed in the future.
		// The functionality was merged with the `DocumentBinaryInputLoader` to `DocumentDefaultDataLoader`
		hidden: true,
		displayName: 'JSON Input Loader',
		name: 'documentJsonInputLoader',
		icon: 'file:json.svg',
		group: ['transform'],
		version: 1,
		description: 'Use JSON data from a previous step in the workflow',
		defaults: {
			name: 'JSON Input Loader',
		},
		codex: {
			categories: ['AI'],
			subcategories: {
				AI: ['Document Loaders'],
			},
			resources: {
				primaryDocumentation: [
					{
						url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.documentjsoninputloader/',
					},
				],
			},
		},
		// Note: unlike the binary loader, the text splitter input here is optional.
		// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
		inputs: [
			{
				displayName: 'Text Splitter',
				maxConnections: 1,
				type: NodeConnectionType.AiTextSplitter,
			},
		],
		inputNames: ['Text Splitter'],
		// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
		outputs: [NodeConnectionType.AiDocument],
		outputNames: ['Document'],
		properties: [
			getConnectionHintNoticeField([NodeConnectionType.AiVectorStore]),
			{
				displayName: 'Pointers',
				name: 'pointers',
				type: 'string',
				default: '',
				description: 'Pointers to extract from JSON, e.g. "/text" or "/text, /meta/title"',
			},
			{
				displayName: 'Options',
				name: 'options',
				type: 'collection',
				placeholder: 'Add Option',
				default: {},
				options: [
					{
						...metadataFilterField,
						displayName: 'Metadata',
						description:
							'Metadata to add to each document. Could be used for filtering during retrieval',
						placeholder: 'Add property',
					},
				],
			},
		],
	};

	/**
	 * Hands a fresh N8nJsonLoader to the connected node; the loader itself
	 * reads the node parameters above when it processes items.
	 */
	async supplyData(this: IExecuteFunctions): Promise<SupplyData> {
		this.logger.verbose('Supply Data for JSON Input Loader');
		const processor = new N8nJsonLoader(this);

		return {
			response: logWrapper(processor, this),
		};
	}
}
|
||||
@@ -0,0 +1,48 @@
|
||||
<?xml version="1.0" encoding="iso-8859-1"?>
|
||||
<!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg fill="#7D7D87" height="800px" width="800px" version="1.1" id="Capa_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
viewBox="0 0 58 58" xml:space="preserve">
|
||||
<g>
|
||||
<path d="M50.949,12.187l-1.361-1.361l-9.504-9.505c-0.001-0.001-0.001-0.001-0.002-0.001l-0.77-0.771
|
||||
C38.957,0.195,38.486,0,37.985,0H8.963C7.776,0,6.5,0.916,6.5,2.926V39v16.537V56c0,0.837,0.841,1.652,1.836,1.909
|
||||
c0.051,0.014,0.1,0.033,0.152,0.043C8.644,57.983,8.803,58,8.963,58h40.074c0.16,0,0.319-0.017,0.475-0.048
|
||||
c0.052-0.01,0.101-0.029,0.152-0.043C50.659,57.652,51.5,56.837,51.5,56v-0.463V39V13.978C51.5,13.211,51.407,12.644,50.949,12.187
|
||||
z M39.5,3.565L47.935,12H39.5V3.565z M8.963,56c-0.071,0-0.135-0.025-0.198-0.049C8.61,55.877,8.5,55.721,8.5,55.537V41h41v14.537
|
||||
c0,0.184-0.11,0.34-0.265,0.414C49.172,55.975,49.108,56,49.037,56H8.963z M8.5,39V2.926C8.5,2.709,8.533,2,8.963,2h28.595
|
||||
C37.525,2.126,37.5,2.256,37.5,2.391V13.78c-0.532-0.48-1.229-0.78-2-0.78c-0.553,0-1,0.448-1,1s0.447,1,1,1c0.552,0,1,0.449,1,1v4
|
||||
c0,1.2,0.542,2.266,1.382,3c-0.84,0.734-1.382,1.8-1.382,3v4c0,0.551-0.448,1-1,1c-0.553,0-1,0.448-1,1s0.447,1,1,1
|
||||
c1.654,0,3-1.346,3-3v-4c0-1.103,0.897-2,2-2c0.553,0,1-0.448,1-1s-0.447-1-1-1c-1.103,0-2-0.897-2-2v-4
|
||||
c0-0.771-0.301-1.468-0.78-2h11.389c0.135,0,0.265-0.025,0.391-0.058c0,0.015,0.001,0.021,0.001,0.036V39H8.5z"/>
|
||||
<path d="M16.354,51.43c-0.019,0.446-0.171,0.764-0.458,0.95s-0.672,0.28-1.155,0.28c-0.191,0-0.396-0.022-0.615-0.068
|
||||
s-0.429-0.098-0.629-0.157s-0.385-0.123-0.554-0.191s-0.299-0.135-0.39-0.198l-0.697,1.107c0.183,0.137,0.405,0.26,0.67,0.369
|
||||
s0.54,0.207,0.827,0.294s0.565,0.15,0.834,0.191s0.504,0.062,0.704,0.062c0.401,0,0.791-0.039,1.169-0.116
|
||||
c0.378-0.077,0.713-0.214,1.005-0.41s0.524-0.456,0.697-0.779s0.26-0.723,0.26-1.196v-7.848h-1.668V51.43z"/>
|
||||
<path d="M25.083,49.064c-0.314-0.228-0.654-0.422-1.019-0.581s-0.702-0.323-1.012-0.492s-0.569-0.364-0.779-0.588
|
||||
s-0.314-0.518-0.314-0.882c0-0.146,0.036-0.299,0.109-0.458s0.173-0.303,0.301-0.431s0.273-0.234,0.438-0.321
|
||||
s0.337-0.139,0.52-0.157c0.328-0.027,0.597-0.032,0.807-0.014s0.378,0.05,0.506,0.096s0.226,0.091,0.294,0.137
|
||||
s0.13,0.082,0.185,0.109c0.009-0.009,0.036-0.055,0.082-0.137s0.101-0.185,0.164-0.308s0.132-0.255,0.205-0.396
|
||||
s0.137-0.271,0.191-0.39c-0.265-0.173-0.61-0.299-1.039-0.376s-0.853-0.116-1.271-0.116c-0.41,0-0.8,0.063-1.169,0.191
|
||||
s-0.692,0.313-0.971,0.554s-0.499,0.535-0.663,0.882S20.4,46.13,20.4,46.576c0,0.492,0.104,0.902,0.314,1.23
|
||||
s0.474,0.613,0.793,0.854s0.661,0.451,1.025,0.629s0.704,0.355,1.019,0.533s0.576,0.376,0.786,0.595s0.314,0.483,0.314,0.793
|
||||
c0,0.511-0.148,0.896-0.444,1.155s-0.723,0.39-1.278,0.39c-0.183,0-0.378-0.019-0.588-0.055s-0.419-0.084-0.629-0.144
|
||||
s-0.412-0.123-0.608-0.191s-0.357-0.139-0.485-0.212l-0.287,1.176c0.155,0.137,0.34,0.253,0.554,0.349s0.439,0.171,0.677,0.226
|
||||
c0.237,0.055,0.472,0.094,0.704,0.116s0.458,0.034,0.677,0.034c0.511,0,0.966-0.077,1.367-0.232s0.738-0.362,1.012-0.622
|
||||
s0.485-0.561,0.636-0.902s0.226-0.695,0.226-1.06c0-0.538-0.104-0.978-0.314-1.319S25.397,49.292,25.083,49.064z"/>
|
||||
<path d="M34.872,45.072c-0.378-0.429-0.82-0.754-1.326-0.978s-1.06-0.335-1.661-0.335s-1.155,0.111-1.661,0.335
|
||||
s-0.948,0.549-1.326,0.978s-0.675,0.964-0.889,1.606s-0.321,1.388-0.321,2.235s0.107,1.595,0.321,2.242s0.511,1.185,0.889,1.613
|
||||
s0.82,0.752,1.326,0.971s1.06,0.328,1.661,0.328s1.155-0.109,1.661-0.328s0.948-0.542,1.326-0.971s0.675-0.966,0.889-1.613
|
||||
s0.321-1.395,0.321-2.242s-0.107-1.593-0.321-2.235S35.25,45.501,34.872,45.072z M34.195,50.698
|
||||
c-0.137,0.487-0.326,0.882-0.567,1.183s-0.515,0.518-0.82,0.649s-0.627,0.198-0.964,0.198c-0.328,0-0.641-0.07-0.937-0.212
|
||||
s-0.561-0.364-0.793-0.67s-0.415-0.699-0.547-1.183s-0.203-1.066-0.212-1.75c0.009-0.702,0.082-1.294,0.219-1.777
|
||||
c0.137-0.483,0.326-0.877,0.567-1.183s0.515-0.521,0.82-0.649s0.627-0.191,0.964-0.191c0.328,0,0.641,0.068,0.937,0.205
|
||||
s0.561,0.36,0.793,0.67s0.415,0.704,0.547,1.183s0.203,1.06,0.212,1.743C34.405,49.616,34.332,50.211,34.195,50.698z"/>
|
||||
<polygon points="44.012,50.869 40.061,43.924 38.393,43.924 38.393,54 40.061,54 40.061,47.055 44.012,54 45.68,54 45.68,43.924
|
||||
44.012,43.924 "/>
|
||||
<path d="M20.5,20v-4c0-0.551,0.448-1,1-1c0.553,0,1-0.448,1-1s-0.447-1-1-1c-1.654,0-3,1.346-3,3v4c0,1.103-0.897,2-2,2
|
||||
c-0.553,0-1,0.448-1,1s0.447,1,1,1c1.103,0,2,0.897,2,2v4c0,1.654,1.346,3,3,3c0.553,0,1-0.448,1-1s-0.447-1-1-1
|
||||
c-0.552,0-1-0.449-1-1v-4c0-1.2-0.542-2.266-1.382-3C19.958,22.266,20.5,21.2,20.5,20z"/>
|
||||
<circle cx="28.5" cy="19.5" r="1.5"/>
|
||||
<path d="M28.5,25c-0.553,0-1,0.448-1,1v3c0,0.552,0.447,1,1,1s1-0.448,1-1v-3C29.5,25.448,29.053,25,28.5,25z"/>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 4.7 KiB |
@@ -0,0 +1,133 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import { BedrockEmbeddings } from 'langchain/embeddings/bedrock';
|
||||
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
/**
 * Embeddings sub-node backed by AWS Bedrock. The model list is fetched live
 * from the Bedrock ListFoundationModels endpoint via declarative routing.
 */
export class EmbeddingsAwsBedrock implements INodeType {
	description: INodeTypeDescription = {
		displayName: 'Embeddings AWS Bedrock',
		name: 'embeddingsAwsBedrock',
		icon: 'file:bedrock.svg',
		credentials: [
			{
				name: 'aws',
				required: true,
			},
		],
		group: ['transform'],
		version: 1,
		description: 'Use Embeddings AWS Bedrock',
		defaults: {
			name: 'Embeddings AWS Bedrock',
		},

		codex: {
			categories: ['AI'],
			subcategories: {
				AI: ['Embeddings'],
			},
			resources: {
				primaryDocumentation: [
					{
						url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingsawsbedrock/',
					},
				],
			},
		},
		// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
		inputs: [],
		// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
		outputs: [NodeConnectionType.AiEmbedding],
		outputNames: ['Embeddings'],
		requestDefaults: {
			ignoreHttpStatusErrors: true,
			// Region comes from the credentials; falls back to eu-central-1
			baseURL: '=https://bedrock.{{$credentials?.region ?? "eu-central-1"}}.amazonaws.com',
		},
		properties: [
			getConnectionHintNoticeField([NodeConnectionType.AiVectorStore]),
			{
				displayName: 'Model',
				name: 'model',
				type: 'options',
				description:
					'The model which will generate the completion. <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/foundation-models.html">Learn more</a>.',
				typeOptions: {
					loadOptions: {
						routing: {
							request: {
								method: 'GET',
								url: '/foundation-models',
							},
							output: {
								// Pipeline: unwrap the list, drop known chat models,
								// map to option entries, then sort alphabetically.
								postReceive: [
									{
										type: 'rootProperty',
										properties: {
											property: 'modelSummaries',
										},
									},
									{
										type: 'filter',
										properties: {
											// There isn't a good way to filter embedding models, so we at least filter out the default non-embedding ones
											pass: "={{ !'anthropic.claude-instant-v1-100k,anthropic.claude-v2,amazon.titan-text-express-v1'.match($responseItem.modelId) }}",
										},
									},
									{
										type: 'setKeyValue',
										properties: {
											name: '={{$responseItem.modelName}}',
											description: '={{$responseItem.modelArn}}',
											value: '={{$responseItem.modelId}}',
										},
									},
									{
										type: 'sort',
										properties: {
											key: 'name',
										},
									},
								],
							},
						},
					},
				},
				routing: {
					send: {
						type: 'body',
						property: 'model',
					},
				},
				default: '',
			},
		],
	};

	/**
	 * Builds a BedrockEmbeddings client from the AWS credentials and the
	 * selected model, and hands it to the connected node.
	 *
	 * @param itemIndex - Index of the input item the model parameter is resolved for.
	 */
	async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
		const credentials = await this.getCredentials('aws');
		const modelName = this.getNodeParameter('model', itemIndex) as string;

		const embeddings = new BedrockEmbeddings({
			region: credentials.region as string,
			model: modelName,
			maxRetries: 3,
			credentials: {
				secretAccessKey: credentials.secretAccessKey as string,
				accessKeyId: credentials.accessKeyId as string,
				// sessionToken may be undefined for long-lived credentials — TODO confirm
				sessionToken: credentials.sessionToken as string,
			},
		});

		return {
			response: logWrapper(embeddings, this),
		};
	}
}
|
||||
@@ -0,0 +1,18 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg width="24px" height="24px" viewBox="0 0 24 24" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<title>Icon-Architecture/16/Arch_Amazon-Bedrock_16</title>
|
||||
<defs>
|
||||
<linearGradient x1="0%" y1="100%" x2="100%" y2="0%" id="linearGradient-1">
|
||||
<stop stop-color="#055F4E" offset="0%"></stop>
|
||||
<stop stop-color="#56C0A7" offset="100%"></stop>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<g id="Icon-Architecture/16/Arch_Amazon-Bedrock_16" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
|
||||
<g id="Icon-Architecture-BG/16/Machine-Learning" fill="url(#linearGradient-1)">
|
||||
<rect id="Rectangle" x="0" y="0" width="24" height="24"></rect>
|
||||
</g>
|
||||
<g id="Icon-Service/16/Amazon-Bedrock_16" transform="translate(4.000000, 4.000000)" fill="#FFFFFF">
|
||||
<path d="M8,14.1397014 L5.574,14.9487014 L4.628,14.3177014 L5.658,13.9737014 L5.342,13.0257014 L3.574,13.6147014 L3,13.2327014 L3,10.4997014 C3,10.3107014 2.893,10.1377014 2.724,10.0527014 L1,9.19070136 L1,6.80870136 L2.5,6.05870136 L4,6.80870136 L4,8.49970136 C4,8.68970136 4.107,8.86270136 4.276,8.94770136 L6.276,9.94770136 L6.724,9.05270136 L5,8.19070136 L5,6.80870136 L6.724,5.94770136 C6.893,5.86270136 7,5.68970136 7,5.49970136 L7,3.99970136 L6,3.99970136 L6,5.19070136 L4.5,5.94070136 L3,5.19070136 L3,2.76770136 L4,2.10070136 L4,3.99970136 L5,3.99970136 L5,1.43470136 L5.574,1.05170136 L8,1.86070136 L8,14.1397014 Z M13.5,12.9997014 C13.775,12.9997014 14,13.2237014 14,13.4997014 C14,13.7757014 13.775,13.9997014 13.5,13.9997014 C13.225,13.9997014 13,13.7757014 13,13.4997014 C13,13.2237014 13.225,12.9997014 13.5,12.9997014 L13.5,12.9997014 Z M12.5,1.99970136 C12.775,1.99970136 13,2.22370136 13,2.49970136 C13,2.77570136 12.775,2.99970136 12.5,2.99970136 C12.225,2.99970136 12,2.77570136 12,2.49970136 C12,2.22370136 12.225,1.99970136 12.5,1.99970136 L12.5,1.99970136 Z M14.5,7.99970136 C14.775,7.99970136 15,8.22370136 15,8.49970136 C15,8.77570136 14.775,8.99970136 14.5,8.99970136 C14.225,8.99970136 14,8.77570136 14,8.49970136 C14,8.22370136 14.225,7.99970136 14.5,7.99970136 L14.5,7.99970136 Z M13.092,8.99970136 C13.299,9.58070136 13.849,9.99970136 14.5,9.99970136 C15.327,9.99970136 16,9.32770136 16,8.49970136 C16,7.67270136 15.327,6.99970136 14.5,6.99970136 C13.849,6.99970136 13.299,7.41970136 13.092,7.99970136 L9,7.99970136 L9,5.99970136 L12.5,5.99970136 C12.776,5.99970136 13,5.77670136 13,5.49970136 L13,3.90770136 C13.581,3.70070136 14,3.15070136 14,2.49970136 C14,1.67270136 13.327,0.999701362 12.5,0.999701362 C11.673,0.999701362 11,1.67270136 11,2.49970136 C11,3.15070136 11.419,3.70070136 12,3.90770136 L12,4.99970136 L9,4.99970136 L9,1.49970136 C9,1.28470136 8.862,1.09370136 8.658,1.02570136 L5.658,0.0257013622 C5.511,-0.0232986378 5.351,-0.00129863776 
5.223,0.0837013622 L2.223,2.08370136 C2.084,2.17670136 2,2.33270136 2,2.49970136 L2,5.19070136 L0.276,6.05270136 C0.107,6.13770136 0,6.31070136 0,6.49970136 L0,9.49970136 C0,9.68970136 0.107,9.86270136 0.276,9.94770136 L2,10.8087014 L2,13.4997014 C2,13.6667014 2.084,13.8237014 2.223,13.9157014 L5.223,15.9157014 C5.306,15.9717014 5.402,15.9997014 5.5,15.9997014 C5.553,15.9997014 5.606,15.9917014 5.658,15.9737014 L8.658,14.9737014 C8.862,14.9067014 9,14.7157014 9,14.4997014 L9,11.9997014 L11.293,11.9997014 L12.146,12.8537014 L12.159,12.8407014 C12.061,13.0407014 12,13.2627014 12,13.4997014 C12,14.3267014 12.673,14.9997014 13.5,14.9997014 C14.327,14.9997014 15,14.3267014 15,13.4997014 C15,12.6727014 14.327,11.9997014 13.5,11.9997014 C13.262,11.9997014 13.04,12.0607014 12.841,12.1597014 L12.854,12.1467014 L11.854,11.1467014 C11.76,11.0527014 11.633,10.9997014 11.5,10.9997014 L9,10.9997014 L9,8.99970136 L13.092,8.99970136 Z" id="Fill-7"></path>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 3.8 KiB |
@@ -0,0 +1,99 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import { CohereEmbeddings } from 'langchain/embeddings/cohere';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
export class EmbeddingsCohere implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Embeddings Cohere',
|
||||
name: 'embeddingsCohere',
|
||||
icon: 'file:cohere.svg',
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'Use Cohere Embeddings',
|
||||
defaults: {
|
||||
name: 'Embeddings Cohere',
|
||||
},
|
||||
requestDefaults: {
|
||||
ignoreHttpStatusErrors: true,
|
||||
baseURL: '={{ $credentials.host }}',
|
||||
},
|
||||
credentials: [
|
||||
{
|
||||
name: 'cohereApi',
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Embeddings'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingscohere/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [],
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
|
||||
outputs: [NodeConnectionType.AiEmbedding],
|
||||
outputNames: ['Embeddings'],
|
||||
properties: [
|
||||
getConnectionHintNoticeField([NodeConnectionType.AiVectorStore]),
|
||||
{
|
||||
displayName:
|
||||
'Each model is using different dimensional density for embeddings. Please make sure to use the same dimensionality for your vector store. The default model is using 768-dimensional embeddings.',
|
||||
name: 'notice',
|
||||
type: 'notice',
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Model',
|
||||
name: 'modelName',
|
||||
type: 'options',
|
||||
description:
|
||||
'The model which will generate the embeddings. <a href="https://docs.cohere.com/docs/models">Learn more</a>.',
|
||||
default: 'embed-english-v2.0',
|
||||
options: [
|
||||
{
|
||||
name: 'Embed-English-v2.0(4096 Dimensions)',
|
||||
value: 'embed-english-v2.0',
|
||||
},
|
||||
{
|
||||
name: 'Embed-English-Light-v2.0(1024 Dimensions)',
|
||||
value: 'embed-english-light-v2.0',
|
||||
},
|
||||
{
|
||||
name: 'Embed-Multilingual-v2.0(768 Dimensions)',
|
||||
value: 'embed-multilingual-v2.0',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
this.logger.verbose('Supply data for embeddings Cohere');
|
||||
const modelName = this.getNodeParameter('modelName', itemIndex, 'embed-english-v2.0') as string;
|
||||
const credentials = await this.getCredentials('cohereApi');
|
||||
const embeddings = new CohereEmbeddings({
|
||||
apiKey: credentials.apiKey as string,
|
||||
modelName,
|
||||
});
|
||||
|
||||
return {
|
||||
response: logWrapper(embeddings, this),
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,93 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
xmlns:dc="http://purl.org/dc/elements/1.1/"
|
||||
xmlns:cc="http://creativecommons.org/ns#"
|
||||
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:svg="http://www.w3.org/2000/svg"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
viewBox="0 0 1000 166.84006"
|
||||
height="166.84006"
|
||||
width="1000"
|
||||
version="1.1"
|
||||
id="svg17"
|
||||
sodipodi:docname="f5e234e86f931398d5e05f4cdeb8d6dfcd182c7a-102x18.svg"
|
||||
style="fill:none"
|
||||
inkscape:version="0.92.4 (5da689c313, 2019-01-14)">
|
||||
<metadata
|
||||
id="metadata21">
|
||||
<rdf:RDF>
|
||||
<cc:Work
|
||||
rdf:about="">
|
||||
<dc:format>image/svg+xml</dc:format>
|
||||
<dc:type
|
||||
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
|
||||
<dc:title></dc:title>
|
||||
</cc:Work>
|
||||
</rdf:RDF>
|
||||
</metadata>
|
||||
<sodipodi:namedview
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#111111"
|
||||
borderopacity="1"
|
||||
objecttolerance="10"
|
||||
gridtolerance="10"
|
||||
guidetolerance="10"
|
||||
inkscape:pageopacity="0"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:window-width="1920"
|
||||
inkscape:window-height="1017"
|
||||
id="namedview19"
|
||||
showgrid="false"
|
||||
fit-margin-top="0"
|
||||
fit-margin-left="0"
|
||||
fit-margin-right="0"
|
||||
fit-margin-bottom="0"
|
||||
inkscape:zoom="0.57365771"
|
||||
inkscape:cx="572.84853"
|
||||
inkscape:cy="17.606572"
|
||||
inkscape:window-x="1912"
|
||||
inkscape:window-y="-8"
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:current-layer="svg17" />
|
||||
<path
|
||||
d="m 281.72414,166.80668 c 24.78262,0 46.47281,-12.39131 55.04741,-37.41044 1.66447,-5.01011 -0.7233,-8.33906 -5.48119,-8.33906 h -9.2979 c -4.28779,0 -7.14566,1.90001 -9.06236,5.95128 -7.3812,14.54453 -17.63695,19.77349 -30.49934,19.77349 -22.88358,0 -36.93937,-15.97346 -36.93937,-42.65609 0,-26.683027 14.54355,-42.655802 36.45161,-42.655802 13.34917,0 24.31252,5.716529 31.22264,19.301823 2.15224,4.052054 4.75789,5.95197 9.06236,5.95197 h 9.2979 c 4.75789,0 7.14566,-3.093705 5.48119,-7.633325 C 327.00257,51.920035 304.59006,41.44523 281.72414,41.44523 c -34.55159,0 -60.29304,25.506014 -60.29304,62.68063 0,37.1749 24.54805,62.68082 60.29304,62.68082 z M 905.35262,93.163386 c 3.09341,-20.260172 16.91464,-32.887118 35.74499,-32.887118 18.83134,0 32.88712,12.862288 34.55159,32.887118 z m 36.70383,73.643294 c 21.92475,0 43.85047,-10.24005 54.57731,-33.35819 2.62037,-5.48119 0.23554,-9.2979 -4.52431,-9.2979 h -8.81308 c -4.28387,0 -6.90718,1.90001 -9.05942,5.71672 -7.14566,12.62685 -19.53697,17.8725 -32.16382,17.8725 -21.6892,0 -35.74499,-14.77909 -37.41044,-38.83938 h 87.46639 c 4.75984,0 7.87092,-2.62234 7.87092,-7.63245 -0.96178,-37.410441 -25.2586,-59.80597 -58.86804,-59.80597 -33.61042,0 -60.29305,24.312325 -60.29305,62.68053 0,38.36831 25.97602,62.68083 61.25091,62.68083 z M 803.36232,107.23595 h 7.86798 c 4.75887,0 7.3812,-2.62233 8.1045,-7.633421 4.54001,-32.197284 23.38803,-36.467996 43.42945,-35.509549 4.2868,0.20168 7.80124,-3.093706 7.80124,-7.397983 V 49.3139 c 0,-4.75818 -2.38778,-7.63333 -7.14566,-7.86867 -17.73804,-0.67256 -33.55939,5.41386 -42.70615,22.64775 -0.50444,0.941468 -1.91669,0.689343 -2.03446,-0.369895 l -1.47997,-12.99684 c -0.4701,-4.758175 -3.09341,-7.145755 -7.86798,-7.145755 h -35.98151 c -4.20339,0 -7.63343,3.41315 -7.63343,7.633321 v 4.052054 c 0,4.203388 3.41335,7.633325 7.63343,7.633325 h 14.77908 c 4.20339,0 7.63342,3.413155 7.63342,7.633325 v 29.070014 c 0,4.203391 3.41335,7.633421 7.63343,7.633421 z m -32.1648,57.19964 h 74.58437 c 
4.75887,0 7.63342,-2.85787 7.63342,-7.63342 v -4.05225 c 0,-4.75789 -2.85787,-7.63244 -7.63342,-7.63244 H 826.716 c -4.75789,0 -7.63342,-2.85886 -7.63342,-7.63343 v -13.11461 c 0,-4.75886 -2.85788,-7.63342 -7.63343,-7.63342 h -8.10352 c -4.75886,0 -7.63342,2.85787 -7.63342,7.63342 v 13.11461 c 0,4.75789 -2.85885,7.63343 -7.63342,7.63343 h -16.91464 c -4.75788,0 -7.63342,2.85787 -7.63342,7.63244 v 4.05225 c 0,4.75886 2.85885,7.63342 7.63342,7.63342 z M 656.09261,93.180266 c 3.09439,-20.26027 16.91463,-32.887118 35.74597,-32.887118 18.83035,0 32.88712,12.862289 34.55159,32.887118 z m 36.70383,73.643104 c 21.92474,0 43.84949,-10.23908 54.57632,-33.3582 2.62331,-5.48119 0.23554,-9.2979 -4.52235,-9.2979 h -8.81013 c -4.28779,0 -6.91013,1.90002 -9.06236,5.71673 -7.14566,12.62685 -19.53795,17.87249 -32.1648,17.87249 -21.68921,0 -35.74499,-14.77908 -37.40947,-38.8384 h 87.46345 c 4.75788,0 7.86896,-2.62331 7.86896,-7.63342 -0.95884,-37.410251 -25.2537,-59.80588 -58.86411,-59.80588 -33.61042,0 -60.29305,24.312327 -60.29305,62.68044 0,38.3683 25.97699,62.68082 61.25188,62.68082 z m -275.70711,0 c 35.74597,0 61.25189,-26.44808 61.25189,-62.68083 0,-36.232947 -25.50592,-62.68053 -61.25189,-62.68053 -35.74499,0 -61.25091,26.935148 -61.25091,62.68053 0,8.34004 1.42894,17.63696 5.71574,27.87701 2.15224,5.01011 6.20449,5.71672 10.49228,2.62233 l 6.91012,-5.01011 c 3.58118,-2.62331 4.52235,-5.71672 3.32895,-10.23907 -1.90001,-5.95226 -2.38777,-11.19791 -2.38777,-15.72124 0,-25.018055 15.01462,-42.18462 37.1749,-42.18462 22.16029,0 37.17491,16.914342 37.17491,42.6557 0,25.74146 -14.77908,42.65609 -36.70383,42.65609 -7.63342,0 -14.77908,-1.42893 -23.35466,-7.86896 -3.58117,-2.85787 -6.91012,-3.32895 -10.72683,-0.47108 l -5.24565,3.81672 c -4.28779,3.09439 -4.75789,7.3812 -0.7233,10.72683 12.39229,10.00451 26.68361,13.82123 39.56268,13.82123 z m 91.49901,-2.38778 h 7.86896 c 4.20339,0 7.63342,-3.41335 7.63342,-7.63342 V 99.854752 c 0,-24.059611 12.86141,-38.367912 32.88712,-38.367912 
18.10803,0 28.59933,11.920721 28.59933,33.845565 v 61.486445 c 0,4.20339 3.41335,7.63342 7.63342,7.63342 h 8.10352 c 4.20339,0 7.63342,-3.41335 7.63342,-7.63342 V 91.515693 c 0,-32.164111 -16.44356,-50.036903 -44.31958,-50.036903 -18.98248,0 -30.19707,7.76788 -38.57048,18.578918 -0.6389,0.823895 -1.93338,0.369894 -1.93338,-0.655682 V 7.63333 C 524.07309,3.41314 520.66069,0 516.4573,0 h -7.86896 c -4.20339,0 -7.63343,3.41314 -7.63343,7.63333 v 149.16884 c 0,4.20338 3.41336,7.63342 7.63343,7.63342 z"
|
||||
id="path2"
|
||||
inkscape:connector-curvature="0"
|
||||
style="fill:#39594d;stroke-width:9.81412029" />
|
||||
<g
|
||||
clip-path="url(#clip0_2207_90691)"
|
||||
id="g10"
|
||||
transform="matrix(9.8141206,0,0,9.8141206,0,-4.90706)">
|
||||
<path
|
||||
d="m 5.50773,10.6219 c 0.45757,0 1.36777,-0.0251 2.62589,-0.5431 C 9.59973,9.47518 12.5166,8.37942 14.6208,7.2539 16.0924,6.46668 16.7375,5.42553 16.7375,4.02344 16.7375,2.07751 15.16,0.5 13.2141,0.5 H 5.06095 C 2.26586,0.5 0,2.76586 0,5.56095 0,8.35604 2.12151,10.6219 5.50773,10.6219 Z"
|
||||
id="path4"
|
||||
inkscape:connector-curvature="0"
|
||||
style="clip-rule:evenodd;fill:#39594d;fill-rule:evenodd" />
|
||||
<path
|
||||
d="m 6.88672,14.107 c 0,-1.3701 0.82483,-2.6054 2.09027,-3.1306 L 11.5446,9.9108 c 2.5971,-1.07786 5.4557,0.8307 5.4557,3.6427 0,2.1785 -1.7664,3.9444 -3.945,3.9438 l -2.7799,-7e-4 C 8.40372,17.4961 6.88672,15.9787 6.88672,14.107 Z"
|
||||
id="path6"
|
||||
inkscape:connector-curvature="0"
|
||||
style="clip-rule:evenodd;fill:#d18ee2;fill-rule:evenodd" />
|
||||
<path
|
||||
d="m 2.91749,11.2891 h -5e-5 C 1.30618,11.2891 0,12.5952 0,14.2065 v 0.3779 c 0,1.6112 1.30618,2.9174 2.91744,2.9174 h 5e-5 c 1.61126,0 2.91744,-1.3062 2.91744,-2.9174 v -0.3779 c 0,-1.6113 -1.30618,-2.9174 -2.91744,-2.9174 z"
|
||||
id="path8"
|
||||
inkscape:connector-curvature="0"
|
||||
style="fill:#ff7759" />
|
||||
</g>
|
||||
<defs
|
||||
id="defs15">
|
||||
<clipPath
|
||||
id="clip0_2207_90691">
|
||||
<rect
|
||||
transform="translate(0,0.5)"
|
||||
height="17"
|
||||
width="17"
|
||||
id="rect12"
|
||||
x="0"
|
||||
y="0"
|
||||
style="fill:#ffffff" />
|
||||
</clipPath>
|
||||
</defs>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 7.6 KiB |
@@ -0,0 +1,135 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import { GooglePaLMEmbeddings } from 'langchain/embeddings/googlepalm';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
export class EmbeddingsGooglePalm implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Embeddings Google PaLM',
|
||||
name: 'embeddingsGooglePalm',
|
||||
icon: 'file:google.svg',
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'Use Google PaLM Embeddings',
|
||||
defaults: {
|
||||
name: 'Embeddings Google PaLM',
|
||||
},
|
||||
requestDefaults: {
|
||||
ignoreHttpStatusErrors: true,
|
||||
baseURL: '={{ $credentials.host }}',
|
||||
},
|
||||
credentials: [
|
||||
{
|
||||
name: 'googlePalmApi',
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Embeddings'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingsgooglepalm/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [],
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
|
||||
outputs: [NodeConnectionType.AiEmbedding],
|
||||
outputNames: ['Embeddings'],
|
||||
properties: [
|
||||
getConnectionHintNoticeField([NodeConnectionType.AiVectorStore]),
|
||||
{
|
||||
displayName:
|
||||
'Each model is using different dimensional density for embeddings. Please make sure to use the same dimensionality for your vector store. The default model is using 768-dimensional embeddings.',
|
||||
name: 'notice',
|
||||
type: 'notice',
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Model',
|
||||
name: 'modelName',
|
||||
type: 'options',
|
||||
description:
|
||||
'The model which will generate the embeddings. <a href="https://developers.generativeai.google/api/rest/generativelanguage/models/list">Learn more</a>.',
|
||||
typeOptions: {
|
||||
loadOptions: {
|
||||
routing: {
|
||||
request: {
|
||||
method: 'GET',
|
||||
url: '/v1beta3/models',
|
||||
},
|
||||
output: {
|
||||
postReceive: [
|
||||
{
|
||||
type: 'rootProperty',
|
||||
properties: {
|
||||
property: 'models',
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'filter',
|
||||
properties: {
|
||||
pass: "={{ $responseItem.name.startsWith('models/embedding') }}",
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'setKeyValue',
|
||||
properties: {
|
||||
name: '={{$responseItem.name}}',
|
||||
value: '={{$responseItem.name}}',
|
||||
description: '={{$responseItem.description}}',
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'sort',
|
||||
properties: {
|
||||
key: 'name',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
routing: {
|
||||
send: {
|
||||
type: 'body',
|
||||
property: 'model',
|
||||
},
|
||||
},
|
||||
default: 'models/embedding-gecko-001',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
this.logger.verbose('Supply data for embeddings Google PaLM');
|
||||
const modelName = this.getNodeParameter(
|
||||
'modelName',
|
||||
itemIndex,
|
||||
'models/embedding-gecko-001',
|
||||
) as string;
|
||||
const credentials = await this.getCredentials('googlePalmApi');
|
||||
const embeddings = new GooglePaLMEmbeddings({
|
||||
apiKey: credentials.apiKey as string,
|
||||
modelName,
|
||||
});
|
||||
|
||||
return {
|
||||
response: logWrapper(embeddings, this),
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 48 48"><defs><path id="a" d="M44.5 20H24v8.5h11.8C34.7 33.9 30.1 37 24 37c-7.2 0-13-5.8-13-13s5.8-13 13-13c3.1 0 5.9 1.1 8.1 2.9l6.4-6.4C34.6 4.1 29.6 2 24 2 11.8 2 2 11.8 2 24s9.8 22 22 22c11 0 21-8 21-22 0-1.3-.2-2.7-.5-4z"/></defs><clipPath id="b"><use xlink:href="#a" overflow="visible"/></clipPath><path clip-path="url(#b)" fill="#FBBC05" d="M0 37V11l17 13z"/><path clip-path="url(#b)" fill="#EA4335" d="M0 11l17 13 7-6.1L48 14V0H0z"/><path clip-path="url(#b)" fill="#34A853" d="M0 37l30-23 7.9 1L48 0v48H0z"/><path clip-path="url(#b)" fill="#4285F4" d="M48 48L17 24l-4-3 35-10z"/></svg>
|
||||
|
After Width: | Height: | Size: 688 B |
@@ -0,0 +1,104 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import { HuggingFaceInferenceEmbeddings } from 'langchain/embeddings/hf';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
export class EmbeddingsHuggingFaceInference implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Embeddings Hugging Face Inference',
|
||||
name: 'embeddingsHuggingFaceInference',
|
||||
icon: 'file:huggingface.svg',
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'Use HuggingFace Inference Embeddings',
|
||||
defaults: {
|
||||
name: 'Embeddings HuggingFace Inference',
|
||||
},
|
||||
credentials: [
|
||||
{
|
||||
name: 'huggingFaceApi',
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Embeddings'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingshuggingfaceinference/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [],
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
|
||||
outputs: [NodeConnectionType.AiEmbedding],
|
||||
outputNames: ['Embeddings'],
|
||||
properties: [
|
||||
getConnectionHintNoticeField([NodeConnectionType.AiVectorStore]),
|
||||
{
|
||||
displayName:
|
||||
'Each model is using different dimensional density for embeddings. Please make sure to use the same dimensionality for your vector store. The default model is using 768-dimensional embeddings.',
|
||||
name: 'notice',
|
||||
type: 'notice',
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Model Name',
|
||||
name: 'modelName',
|
||||
type: 'string',
|
||||
default: 'sentence-transformers/distilbert-base-nli-mean-tokens',
|
||||
description: 'The model name to use from HuggingFace library',
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
placeholder: 'Add Option',
|
||||
description: 'Additional options to add',
|
||||
type: 'collection',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Custom Inference Endpoint',
|
||||
name: 'endpointUrl',
|
||||
default: '',
|
||||
description: 'Custom endpoint URL',
|
||||
type: 'string',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
this.logger.verbose('Supply data for embeddings HF Inference');
|
||||
const model = this.getNodeParameter(
|
||||
'modelName',
|
||||
itemIndex,
|
||||
'sentence-transformers/distilbert-base-nli-mean-tokens',
|
||||
) as string;
|
||||
const credentials = await this.getCredentials('huggingFaceApi');
|
||||
const options = this.getNodeParameter('options', itemIndex, {}) as object;
|
||||
|
||||
const embeddings = new HuggingFaceInferenceEmbeddings({
|
||||
apiKey: credentials.apiKey as string,
|
||||
model,
|
||||
...options,
|
||||
});
|
||||
|
||||
return {
|
||||
response: logWrapper(embeddings, this),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
After Width: | Height: | Size: 34 KiB |
@@ -0,0 +1,128 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import type { ClientOptions } from 'openai';
|
||||
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
export class EmbeddingsOpenAi implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Embeddings OpenAI',
|
||||
name: 'embeddingsOpenAi',
|
||||
icon: 'file:openAi.svg',
|
||||
credentials: [
|
||||
{
|
||||
name: 'openAiApi',
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'Use Embeddings OpenAI',
|
||||
defaults: {
|
||||
name: 'Embeddings OpenAI',
|
||||
},
|
||||
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Embeddings'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingsopenai/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [],
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
|
||||
outputs: [NodeConnectionType.AiEmbedding],
|
||||
outputNames: ['Embeddings'],
|
||||
properties: [
|
||||
getConnectionHintNoticeField([NodeConnectionType.AiVectorStore]),
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
placeholder: 'Add Option',
|
||||
description: 'Additional options to add',
|
||||
type: 'collection',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Base URL',
|
||||
name: 'baseURL',
|
||||
default: 'https://api.openai.com/v1',
|
||||
description: 'Override the default base URL for the API',
|
||||
type: 'string',
|
||||
},
|
||||
{
|
||||
displayName: 'Batch Size',
|
||||
name: 'batchSize',
|
||||
default: 512,
|
||||
typeOptions: { maxValue: 2048 },
|
||||
description: 'Maximum number of documents to send in each request',
|
||||
type: 'number',
|
||||
},
|
||||
{
|
||||
displayName: 'Strip New Lines',
|
||||
name: 'stripNewLines',
|
||||
default: true,
|
||||
description: 'Whether to strip new lines from the input text',
|
||||
type: 'boolean',
|
||||
},
|
||||
{
|
||||
displayName: 'Timeout',
|
||||
name: 'timeout',
|
||||
default: -1,
|
||||
description:
|
||||
'Maximum amount of time a request is allowed to take in seconds. Set to -1 for no timeout.',
|
||||
type: 'number',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
this.logger.verbose('Supply data for embeddings');
|
||||
const credentials = await this.getCredentials('openAiApi');
|
||||
|
||||
const options = this.getNodeParameter('options', itemIndex, {}) as {
|
||||
baseURL?: string;
|
||||
batchSize?: number;
|
||||
stripNewLines?: boolean;
|
||||
timeout?: number;
|
||||
};
|
||||
|
||||
if (options.timeout === -1) {
|
||||
options.timeout = undefined;
|
||||
}
|
||||
|
||||
const configuration: ClientOptions = {};
|
||||
if (options.baseURL) {
|
||||
configuration.baseURL = options.baseURL;
|
||||
}
|
||||
|
||||
const embeddings = new OpenAIEmbeddings(
|
||||
{
|
||||
openAIApiKey: credentials.apiKey as string,
|
||||
...options,
|
||||
},
|
||||
configuration,
|
||||
);
|
||||
|
||||
return {
|
||||
response: logWrapper(embeddings, this),
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,7 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg width="256px" height="260px" viewBox="0 0 256 260" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid">
|
||||
<title>OpenAI</title>
|
||||
<g>
|
||||
<path d="M239.183914,106.202783 C245.054304,88.5242096 243.02228,69.1733805 233.607599,53.0998864 C219.451678,28.4588021 190.999703,15.7836129 163.213007,21.739505 C147.554077,4.32145883 123.794909,-3.42398554 100.87901,1.41873898 C77.9631105,6.26146349 59.3690093,22.9572536 52.0959621,45.2214219 C33.8436494,48.9644867 18.0901721,60.392749 8.86672513,76.5818033 C-5.443491,101.182962 -2.19544431,132.215255 16.8986662,153.320094 C11.0060865,170.990656 13.0197283,190.343991 22.4238231,206.422991 C36.5975553,231.072344 65.0680342,243.746566 92.8695738,237.783372 C105.235639,251.708249 123.001113,259.630942 141.623968,259.52692 C170.105359,259.552169 195.337611,241.165718 204.037777,214.045661 C222.28734,210.296356 238.038489,198.869783 247.267014,182.68528 C261.404453,158.127515 258.142494,127.262775 239.183914,106.202783 L239.183914,106.202783 Z M141.623968,242.541207 C130.255682,242.559177 119.243876,238.574642 110.519381,231.286197 L112.054146,230.416496 L163.724595,200.590881 C166.340648,199.056444 167.954321,196.256818 167.970781,193.224005 L167.970781,120.373788 L189.815614,133.010026 C190.034132,133.121423 190.186235,133.330564 190.224885,133.572774 L190.224885,193.940229 C190.168603,220.758427 168.442166,242.484864 141.623968,242.541207 Z M37.1575749,197.93062 C31.456498,188.086359 29.4094818,176.546984 31.3766237,165.342426 L32.9113895,166.263285 L84.6329973,196.088901 C87.2389349,197.618207 90.4682717,197.618207 93.0742093,196.088901 L156.255402,159.663793 L156.255402,184.885111 C156.243557,185.149771 156.111725,185.394602 155.89729,185.550176 L103.561776,215.733903 C80.3054953,229.131632 50.5924954,221.165435 37.1575749,197.93062 Z M23.5493181,85.3811273 C29.2899861,75.4733097 38.3511911,67.9162648 49.1287482,64.0478825 L49.1287482,125.438515 C49.0891492,128.459425 50.6965386,131.262556 53.3237748,132.754232 L116.198014,169.025864 L94.3531808,181.662102 C94.1132325,181.789434 93.8257461,181.789434 93.5857979,181.662102 L41.3526015,151.529534 
C18.1419426,138.076098 10.1817681,108.385562 23.5493181,85.125333 L23.5493181,85.3811273 Z M203.0146,127.075598 L139.935725,90.4458545 L161.7294,77.8607748 C161.969348,77.7334434 162.256834,77.7334434 162.496783,77.8607748 L214.729979,108.044502 C231.032329,117.451747 240.437294,135.426109 238.871504,154.182739 C237.305714,172.939368 225.050719,189.105572 207.414262,195.67963 L207.414262,134.288998 C207.322521,131.276867 205.650697,128.535853 203.0146,127.075598 Z M224.757116,94.3850867 L223.22235,93.4642272 L171.60306,63.3828173 C168.981293,61.8443751 165.732456,61.8443751 163.110689,63.3828173 L99.9806554,99.8079259 L99.9806554,74.5866077 C99.9533004,74.3254088 100.071095,74.0701869 100.287609,73.9215426 L152.520805,43.7889738 C168.863098,34.3743518 189.174256,35.2529043 204.642579,46.0434841 C220.110903,56.8340638 227.949269,75.5923959 224.757116,94.1804513 L224.757116,94.3850867 Z M88.0606409,139.097931 L66.2158076,126.512851 C65.9950399,126.379091 65.8450965,126.154176 65.8065367,125.898945 L65.8065367,65.684966 C65.8314495,46.8285367 76.7500605,29.6846032 93.8270852,21.6883055 C110.90411,13.6920079 131.063833,16.2835462 145.5632,28.338998 L144.028434,29.2086986 L92.3579852,59.0343142 C89.7419327,60.5687513 88.1282597,63.3683767 88.1117998,66.4011901 L88.0606409,139.097931 Z M99.9294965,113.5185 L128.06687,97.3011417 L156.255402,113.5185 L156.255402,145.953218 L128.169187,162.170577 L99.9806554,145.953218 L99.9294965,113.5185 Z" fill="#7D7D87"></path>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 3.7 KiB |
@@ -0,0 +1,64 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import '@tensorflow/tfjs-backend-cpu';
|
||||
import { TensorFlowEmbeddings } from 'langchain/embeddings/tensorflow';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
export class EmbeddingsTensorFlow implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Embeddings TensorFlow',
|
||||
name: 'embeddingsTensorFlow',
|
||||
icon: 'file:tensorflow.svg',
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'Use Embeddings TensorFlow',
|
||||
defaults: {
|
||||
name: 'Embeddings TensorFlow',
|
||||
},
|
||||
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Embeddings'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.embeddingstensorflow/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [],
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
|
||||
outputs: [NodeConnectionType.AiEmbedding],
|
||||
outputNames: ['Embeddings'],
|
||||
properties: [
|
||||
getConnectionHintNoticeField([NodeConnectionType.AiVectorStore]),
|
||||
{
|
||||
displayName:
|
||||
'The TensorFlow model we use for generating embeddings is using 512-dimensional embeddings. Please make sure to use the same dimensionality for your vector store. Be aware that running this model with high-dimensional embeddings may result in high CPU usage on the machine.',
|
||||
name: 'notice',
|
||||
type: 'notice',
|
||||
default: '',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions): Promise<SupplyData> {
|
||||
this.logger.verbose('Supply data for embeddings tensorflow');
|
||||
const embeddings = new TensorFlowEmbeddings({ maxConcurrency: Infinity });
|
||||
|
||||
return {
|
||||
response: logWrapper(embeddings, this),
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,49 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
xmlns:dc="http://purl.org/dc/elements/1.1/"
|
||||
xmlns:cc="http://creativecommons.org/ns#"
|
||||
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:svg="http://www.w3.org/2000/svg"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
id="svg8"
|
||||
version="1.1"
|
||||
viewBox="0 0 30.31081 32.499828"
|
||||
height="122.83399"
|
||||
width="114.56054">
|
||||
<defs
|
||||
id="defs2" />
|
||||
<metadata
|
||||
id="metadata5">
|
||||
<rdf:RDF>
|
||||
<cc:Work
|
||||
rdf:about="">
|
||||
<dc:format>image/svg+xml</dc:format>
|
||||
<dc:type
|
||||
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
|
||||
<dc:title></dc:title>
|
||||
</cc:Work>
|
||||
</rdf:RDF>
|
||||
</metadata>
|
||||
<g
|
||||
transform="translate(-77.942529,-177.00005)"
|
||||
id="layer1">
|
||||
<g
|
||||
id="g4550">
|
||||
<path
|
||||
style="fill:#e55b2d;fill-opacity:1"
|
||||
d="m 360.04883,687.87305 v 18.89843 l 32.73047,18.89844 v -18.89844 z m -65.46289,18.89843 v 18.89844 l 16.36523,9.44727 V 716.2207 Z m 49.0957,9.44922 -16.36523,9.44922 v 56.69141 l 16.36523,9.44922 v -37.79493 l 16.36719,9.44922 v -18.89843 l -16.36719,-9.44922 z"
|
||||
transform="scale(0.26458333)"
|
||||
id="path4508" />
|
||||
<path
|
||||
style="fill:#ed8e24;fill-opacity:1"
|
||||
d="m 360.04883,687.87305 -49.09766,28.34765 v 18.89649 l 32.73047,-18.89649 v 18.89649 l 16.36719,-9.44727 z m 49.09765,9.44922 -16.36718,9.44921 v 18.89844 l 16.36718,-9.44922 z m -32.73242,37.79492 -16.36523,9.44922 v 18.89843 l 16.36523,-9.44922 z m -16.36523,28.34765 -16.36719,-9.44922 v 37.79493 l 16.36719,-9.44922 z"
|
||||
transform="scale(0.26458333)"
|
||||
id="path4491" />
|
||||
<path
|
||||
style="fill:#f8bf3c;fill-opacity:1"
|
||||
d="m 360.04883,668.97656 -65.46289,37.79492 16.36523,9.44922 49.09766,-28.34765 32.73047,18.89843 16.36718,-9.44921 z m 0,56.69336 -16.36719,9.44727 16.36719,9.44922 16.36523,-9.44922 z"
|
||||
transform="scale(0.26458333)"
|
||||
id="path4506" />
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 2.0 KiB |
@@ -0,0 +1,141 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import { ChatAnthropic } from 'langchain/chat_models/anthropic';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
export class LmChatAnthropic implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Anthropic Chat Model',
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-name-miscased
|
||||
name: 'lmChatAnthropic',
|
||||
icon: 'file:anthropic.svg',
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'Language Model Anthropic',
|
||||
defaults: {
|
||||
name: 'Anthropic Chat Model',
|
||||
},
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Language Models'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatanthropic/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [],
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
|
||||
outputs: [NodeConnectionType.AiLanguageModel],
|
||||
outputNames: ['Model'],
|
||||
credentials: [
|
||||
{
|
||||
name: 'anthropicApi',
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
properties: [
|
||||
getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiChain]),
|
||||
{
|
||||
displayName: 'Model',
|
||||
name: 'model',
|
||||
type: 'options',
|
||||
options: [
|
||||
{
|
||||
name: 'Claude 2',
|
||||
value: 'claude-2',
|
||||
},
|
||||
{
|
||||
name: 'Claude 2.1',
|
||||
value: 'claude-2.1',
|
||||
},
|
||||
{
|
||||
name: 'Claude Instant 1.2',
|
||||
value: 'claude-instant-1.2',
|
||||
},
|
||||
{
|
||||
name: 'Claude Instant 1',
|
||||
value: 'claude-instant-1',
|
||||
},
|
||||
],
|
||||
description:
|
||||
'The model which will generate the completion. <a href="https://docs.anthropic.com/claude/reference/selecting-a-model">Learn more</a>.',
|
||||
default: 'claude-2',
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
placeholder: 'Add Option',
|
||||
description: 'Additional options to add',
|
||||
type: 'collection',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Maximum Number of Tokens',
|
||||
name: 'maxTokensToSample',
|
||||
default: 32768,
|
||||
description: 'The maximum number of tokens to generate in the completion',
|
||||
type: 'number',
|
||||
},
|
||||
{
|
||||
displayName: 'Sampling Temperature',
|
||||
name: 'temperature',
|
||||
default: 0.7,
|
||||
typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
|
||||
description:
|
||||
'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.',
|
||||
type: 'number',
|
||||
},
|
||||
{
|
||||
displayName: 'Top K',
|
||||
name: 'topK',
|
||||
default: -1,
|
||||
typeOptions: { maxValue: 1, minValue: -1, numberPrecision: 1 },
|
||||
description:
|
||||
'Used to remove "long tail" low probability responses. Defaults to -1, which disables it.',
|
||||
type: 'number',
|
||||
},
|
||||
{
|
||||
displayName: 'Top P',
|
||||
name: 'topP',
|
||||
default: 1,
|
||||
typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
|
||||
description:
|
||||
'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.',
|
||||
type: 'number',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('anthropicApi');
|
||||
|
||||
const modelName = this.getNodeParameter('model', itemIndex) as string;
|
||||
const options = this.getNodeParameter('options', itemIndex, {}) as object;
|
||||
|
||||
const model = new ChatAnthropic({
|
||||
anthropicApiKey: credentials.apiKey as string,
|
||||
modelName,
|
||||
...options,
|
||||
});
|
||||
|
||||
return {
|
||||
response: logWrapper(model, this),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
After Width: | Height: | Size: 7.3 KiB |
@@ -0,0 +1 @@
|
||||
<svg width="46" height="32" viewBox="0 0 46 32" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M32.73 0h-6.945L38.45 32h6.945L32.73 0ZM12.665 0 0 32h7.082l2.59-6.72h13.25l2.59 6.72h7.082L19.929 0h-7.264Zm-.702 19.337 4.334-11.246 4.334 11.246h-8.668Z" fill="#7D7D87"></path></svg>
|
||||
|
After Width: | Height: | Size: 290 B |
@@ -0,0 +1,161 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import { ChatOllama } from 'langchain/chat_models/ollama';
|
||||
// import { ChatAnthropic } from 'langchain/chat_models/anthropic';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
/**
 * n8n sub-node that supplies an Ollama chat model to downstream AI
 * chains/agents. It has no regular inputs; it emits a single
 * `AiLanguageModel` connection carrying a log-wrapped LangChain
 * `ChatOllama` instance configured from the node's credentials and options.
 */
export class LmChatOllama implements INodeType {
	description: INodeTypeDescription = {
		displayName: 'Ollama Chat Model',
		// eslint-disable-next-line n8n-nodes-base/node-class-description-name-miscased
		name: 'lmChatOllama',
		icon: 'file:ollama.svg',
		group: ['transform'],
		version: 1,
		description: 'Language Model Ollama',
		defaults: {
			name: 'Ollama Chat Model',
		},
		codex: {
			categories: ['AI'],
			subcategories: {
				AI: ['Language Models'],
			},
			resources: {
				primaryDocumentation: [
					{
						url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatollama/',
					},
				],
			},
		},
		// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
		inputs: [],
		// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
		outputs: [NodeConnectionType.AiLanguageModel],
		outputNames: ['Model'],
		credentials: [
			{
				name: 'ollamaApi',
				required: true,
			},
		],
		requestDefaults: {
			ignoreHttpStatusErrors: true,
			// Strip a single trailing slash from the credential's base URL so
			// request paths below can always start with '/'.
			baseURL: '={{ $credentials.baseUrl.replace(new RegExp("/$"), "") }}',
		},
		properties: [
			getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
			{
				displayName: 'Model',
				name: 'model',
				type: 'options',
				default: 'llama2',
				description:
					'The model which will generate the completion. To download models, visit <a href="https://ollama.ai/library">Ollama Models Library</a>.',
				typeOptions: {
					// Populate the dropdown live from the Ollama server: GET /api/tags,
					// take the 'models' array, map each entry's name to an option, sort.
					loadOptions: {
						routing: {
							request: {
								method: 'GET',
								url: '/api/tags',
							},
							output: {
								postReceive: [
									{
										type: 'rootProperty',
										properties: {
											property: 'models',
										},
									},
									{
										type: 'setKeyValue',
										properties: {
											name: '={{$responseItem.name}}',
											value: '={{$responseItem.name}}',
										},
									},
									{
										type: 'sort',
										properties: {
											key: 'name',
										},
									},
								],
							},
						},
					},
				},
				routing: {
					send: {
						type: 'body',
						property: 'model',
					},
				},
				required: true,
			},
			{
				displayName: 'Options',
				name: 'options',
				placeholder: 'Add Option',
				description: 'Additional options to add',
				type: 'collection',
				default: {},
				options: [
					{
						displayName: 'Sampling Temperature',
						name: 'temperature',
						default: 0.7,
						typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
						description:
							'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.',
						type: 'number',
					},
					{
						displayName: 'Top K',
						name: 'topK',
						default: -1,
						typeOptions: { maxValue: 1, minValue: -1, numberPrecision: 1 },
						description:
							'Used to remove "long tail" low probability responses. Defaults to -1, which disables it.',
						type: 'number',
					},
					{
						displayName: 'Top P',
						name: 'topP',
						default: 1,
						typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
						description:
							'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.',
						type: 'number',
					},
				],
			},
		],
	};

	/**
	 * Builds the ChatOllama client for the given item and returns it wrapped in
	 * `logWrapper` so downstream nodes get execution logging.
	 *
	 * @param itemIndex - Index of the input item whose parameters are read.
	 * @returns SupplyData whose `response` is the wrapped model instance.
	 */
	async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
		const credentials = await this.getCredentials('ollamaApi');

		const modelName = this.getNodeParameter('model', itemIndex) as string;
		// Options keys (temperature, topK, topP) are spread directly into the
		// ChatOllama config.
		const options = this.getNodeParameter('options', itemIndex, {}) as object;

		const model = new ChatOllama({
			baseUrl: credentials.baseUrl as string,
			model: modelName,
			...options,
		});

		return {
			response: logWrapper(model, this),
		};
	}
}
|
||||
@@ -0,0 +1,43 @@
|
||||
<?xml version="1.0" standalone="no"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 20010904//EN"
|
||||
"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
|
||||
<svg version="1.0" xmlns="http://www.w3.org/2000/svg"
|
||||
width="181.000000pt" height="256.000000pt" viewBox="0 0 181.000000 256.000000"
|
||||
preserveAspectRatio="xMidYMid meet">
|
||||
|
||||
<g transform="translate(0.000000,256.000000) scale(0.100000,-0.100000)"
|
||||
fill="#7D7D87" stroke="none">
|
||||
<path d="M377 2365 c-52 -18 -83 -49 -117 -116 -45 -89 -62 -192 -58 -355 l3
|
||||
-142 -58 -61 c-148 -155 -185 -387 -92 -574 l34 -69 -20 -44 c-34 -82 -50
|
||||
-164 -50 -263 0 -108 18 -190 58 -262 l26 -48 -21 -49 c-12 -27 -26 -71 -32
|
||||
-98 -14 -62 -15 -221 -1 -257 10 -26 14 -27 76 -27 73 0 70 -4 53 86 -15 82 2
|
||||
188 42 266 37 70 38 104 5 148 -47 64 -68 136 -69 240 -1 103 14 160 66 261
|
||||
31 61 29 87 -10 122 -11 10 -31 42 -43 70 -19 42 -24 69 -23 142 0 114 25 183
|
||||
95 260 70 76 142 110 239 112 41 0 78 2 82 2 4 1 17 22 29 47 30 59 96 119
|
||||
167 152 49 23 70 27 147 27 79 0 97 -4 149 -29 68 -33 133 -94 159 -148 10
|
||||
-20 23 -41 30 -45 6 -4 46 -8 87 -8 67 -1 83 -5 140 -36 123 -68 193 -187 193
|
||||
-334 1 -67 -4 -90 -27 -142 -16 -35 -35 -68 -43 -75 -34 -28 -35 -58 -5 -117
|
||||
52 -101 67 -158 66 -261 -1 -104 -22 -176 -69 -240 -33 -44 -32 -78 5 -148 40
|
||||
-78 57 -184 42 -266 -17 -90 -20 -86 53 -86 62 0 66 1 76 27 14 36 13 195 -1
|
||||
257 -6 27 -20 71 -32 98 l-21 49 26 48 c76 139 79 359 6 528 l-20 47 25 46
|
||||
c99 183 64 439 -81 591 l-58 61 3 142 c4 164 -13 266 -58 357 -64 126 -172
|
||||
159 -263 79 -54 -47 -92 -138 -123 -298 -3 -14 -10 -22 -17 -18 -182 80 -297
|
||||
85 -443 21 l-54 -24 -4 22 c-36 185 -85 285 -156 322 -43 21 -74 24 -113 10z
|
||||
m77 -168 c42 -71 81 -301 57 -336 -5 -8 -31 -16 -58 -18 -26 -2 -62 -8 -80
|
||||
-13 l-31 -8 -7 49 c-8 59 2 172 22 248 14 57 48 121 63 121 5 0 20 -19 34 -43z
|
||||
m965 10 c40 -65 69 -239 56 -336 l-7 -49 -31 8 c-18 5 -54 11 -80 13 -27 2
|
||||
-53 10 -58 18 -12 17 -3 141 17 229 15 64 57 150 74 150 4 0 18 -15 29 -33z"/>
|
||||
<path d="M778 1361 c-73 -24 -116 -51 -165 -104 -55 -60 -76 -120 -71 -201 5
|
||||
-76 35 -129 106 -183 62 -47 127 -63 257 -63 172 0 258 36 329 138 42 59 48
|
||||
155 16 230 -29 68 -111 143 -188 173 -80 31 -207 36 -284 10z m257 -100 c161
|
||||
-71 194 -232 66 -318 -49 -33 -94 -43 -196 -43 -102 0 -147 10 -196 43 -178
|
||||
120 -32 356 211 343 39 -2 86 -12 115 -25z"/>
|
||||
<path d="M838 1159 c-25 -14 -22 -44 7 -67 20 -16 24 -26 19 -49 -7 -36 15
|
||||
-58 51 -49 21 5 25 12 25 46 0 29 5 42 20 50 27 15 27 66 0 75 -10 3 -28 1
|
||||
-40 -5 -14 -7 -26 -8 -39 0 -23 12 -22 12 -43 -1z"/>
|
||||
<path d="M397 1348 c-9 -7 -23 -30 -32 -50 -21 -53 -1 -103 47 -116 43 -11 60
|
||||
-6 92 27 40 41 43 81 11 119 -21 25 -34 32 -64 32 -20 0 -45 -6 -54 -12z"/>
|
||||
<path d="M1295 1328 c-32 -38 -29 -78 11 -119 32 -33 49 -38 92 -27 49 13 68
|
||||
62 46 118 -19 47 -38 60 -87 60 -27 0 -41 -7 -62 -32z"/>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 2.7 KiB |
@@ -0,0 +1,262 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import type { ClientOptions } from 'openai';
|
||||
import { ChatOpenAI } from 'langchain/chat_models/openai';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
/**
 * n8n sub-node that supplies an OpenAI chat model to downstream AI
 * chains/agents. It has no regular inputs; it emits a single
 * `AiLanguageModel` connection carrying a log-wrapped LangChain
 * `ChatOpenAI` instance configured from credentials and node options,
 * including an optional custom base URL and JSON response mode.
 */
export class LmChatOpenAi implements INodeType {
	description: INodeTypeDescription = {
		displayName: 'OpenAI Chat Model',
		// eslint-disable-next-line n8n-nodes-base/node-class-description-name-miscased
		name: 'lmChatOpenAi',
		icon: 'file:openAi.svg',
		group: ['transform'],
		version: 1,
		description: 'For advanced usage with an AI chain',
		defaults: {
			name: 'OpenAI Chat Model',
		},
		codex: {
			categories: ['AI'],
			subcategories: {
				AI: ['Language Models'],
			},
			resources: {
				primaryDocumentation: [
					{
						url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatopenai/',
					},
				],
			},
		},
		// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
		inputs: [],
		// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
		outputs: [NodeConnectionType.AiLanguageModel],
		outputNames: ['Model'],
		credentials: [
			{
				name: 'openAiApi',
				required: true,
			},
		],
		requestDefaults: {
			ignoreHttpStatusErrors: true,
			// Derive the request origin from the user's baseURL option by dropping
			// its last path segment (the API version); fall back to the public API.
			baseURL:
				'={{ $parameter.options?.baseURL?.split("/").slice(0,-1).join("/") || "https://api.openai.com" }}',
		},
		properties: [
			getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
			{
				// Warning shown only while JSON response format is selected
				// (see displayOptions below).
				displayName:
					'If using JSON response format, you must include word "json" in the prompt in your chain or agent. Also, make sure to select latest models released post November 2023.',
				name: 'notice',
				type: 'notice',
				default: '',
				displayOptions: {
					show: {
						'/options.responseFormat': ['json_object'],
					},
				},
			},
			{
				displayName: 'Model',
				name: 'model',
				type: 'options',
				description:
					'The model which will generate the completion. <a href="https://beta.openai.com/docs/models/overview">Learn more</a>.',
				typeOptions: {
					// Populate the dropdown from GET <version>/models, keeping only
					// chat-capable 'gpt-*' models (excluding '*instruct*'), then sort.
					loadOptions: {
						routing: {
							request: {
								method: 'GET',
								// The last path segment of the baseURL option is the API
								// version prefix; defaults to 'v1'.
								url: '={{ $parameter.options?.baseURL?.split("/").slice(-1).pop() || "v1" }}/models',
							},
							output: {
								postReceive: [
									{
										type: 'rootProperty',
										properties: {
											property: 'data',
										},
									},
									{
										type: 'filter',
										properties: {
											pass: "={{ $responseItem.id.startsWith('gpt-') && !$responseItem.id.includes('instruct') }}",
										},
									},
									{
										type: 'setKeyValue',
										properties: {
											name: '={{$responseItem.id}}',
											value: '={{$responseItem.id}}',
										},
									},
									{
										type: 'sort',
										properties: {
											key: 'name',
										},
									},
								],
							},
						},
					},
				},
				routing: {
					send: {
						type: 'body',
						property: 'model',
					},
				},
				default: 'gpt-3.5-turbo',
			},
			{
				displayName: 'Options',
				name: 'options',
				placeholder: 'Add Option',
				description: 'Additional options to add',
				type: 'collection',
				default: {},
				options: [
					{
						displayName: 'Base URL',
						name: 'baseURL',
						default: 'https://api.openai.com/v1',
						description: 'Override the default base URL for the API',
						type: 'string',
					},
					{
						displayName: 'Frequency Penalty',
						name: 'frequencyPenalty',
						default: 0,
						typeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 },
						description:
							"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim",
						type: 'number',
					},
					{
						displayName: 'Maximum Number of Tokens',
						name: 'maxTokens',
						default: -1,
						description:
							'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).',
						type: 'number',
						typeOptions: {
							maxValue: 32768,
						},
					},
					{
						displayName: 'Response Format',
						name: 'responseFormat',
						default: 'text',
						type: 'options',
						options: [
							{
								name: 'Text',
								value: 'text',
								description: 'Regular text response',
							},
							{
								name: 'JSON',
								value: 'json_object',
								description:
									'Enables JSON mode, which should guarantee the message the model generates is valid JSON',
							},
						],
					},
					{
						displayName: 'Presence Penalty',
						name: 'presencePenalty',
						default: 0,
						typeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 },
						description:
							"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics",
						type: 'number',
					},
					{
						displayName: 'Sampling Temperature',
						name: 'temperature',
						default: 0.7,
						typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
						description:
							'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.',
						type: 'number',
					},
					{
						displayName: 'Timeout',
						name: 'timeout',
						default: 60000,
						description: 'Maximum amount of time a request is allowed to take in milliseconds',
						type: 'number',
					},
					{
						displayName: 'Max Retries',
						name: 'maxRetries',
						default: 2,
						description: 'Maximum number of retries to attempt',
						type: 'number',
					},
					{
						displayName: 'Top P',
						name: 'topP',
						default: 1,
						typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
						description:
							'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.',
						type: 'number',
					},
				],
			},
		],
	};

	/**
	 * Builds the ChatOpenAI client for the given item and returns it wrapped in
	 * `logWrapper` so downstream nodes get execution logging.
	 *
	 * @param itemIndex - Index of the input item whose parameters are read.
	 * @returns SupplyData whose `response` is the wrapped model instance.
	 */
	async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
		const credentials = await this.getCredentials('openAiApi');

		const modelName = this.getNodeParameter('model', itemIndex) as string;
		const options = this.getNodeParameter('options', itemIndex, {}) as {
			baseURL?: string;
			frequencyPenalty?: number;
			maxTokens?: number;
			maxRetries: number;
			timeout: number;
			presencePenalty?: number;
			temperature?: number;
			topP?: number;
			responseFormat?: 'text' | 'json_object';
		};

		// A custom base URL goes into the OpenAI client configuration, not the
		// LangChain model options.
		const configuration: ClientOptions = {};
		if (options.baseURL) {
			configuration.baseURL = options.baseURL;
		}

		const model = new ChatOpenAI({
			openAIApiKey: credentials.apiKey as string,
			modelName,
			...options,
			// Defaults applied after the spread so unset options fall back here.
			timeout: options.timeout ?? 60000,
			maxRetries: options.maxRetries ?? 2,
			configuration,
			// Pass responseFormat through as the API's response_format model kwarg.
			modelKwargs: options.responseFormat
				? {
						response_format: { type: options.responseFormat },
				  }
				: undefined,
		});

		return {
			response: logWrapper(model, this),
		};
	}
}
|
||||
@@ -0,0 +1,7 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg width="256px" height="260px" viewBox="0 0 256 260" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid">
|
||||
<title>OpenAI</title>
|
||||
<g>
|
||||
<path d="M239.183914,106.202783 C245.054304,88.5242096 243.02228,69.1733805 233.607599,53.0998864 C219.451678,28.4588021 190.999703,15.7836129 163.213007,21.739505 C147.554077,4.32145883 123.794909,-3.42398554 100.87901,1.41873898 C77.9631105,6.26146349 59.3690093,22.9572536 52.0959621,45.2214219 C33.8436494,48.9644867 18.0901721,60.392749 8.86672513,76.5818033 C-5.443491,101.182962 -2.19544431,132.215255 16.8986662,153.320094 C11.0060865,170.990656 13.0197283,190.343991 22.4238231,206.422991 C36.5975553,231.072344 65.0680342,243.746566 92.8695738,237.783372 C105.235639,251.708249 123.001113,259.630942 141.623968,259.52692 C170.105359,259.552169 195.337611,241.165718 204.037777,214.045661 C222.28734,210.296356 238.038489,198.869783 247.267014,182.68528 C261.404453,158.127515 258.142494,127.262775 239.183914,106.202783 L239.183914,106.202783 Z M141.623968,242.541207 C130.255682,242.559177 119.243876,238.574642 110.519381,231.286197 L112.054146,230.416496 L163.724595,200.590881 C166.340648,199.056444 167.954321,196.256818 167.970781,193.224005 L167.970781,120.373788 L189.815614,133.010026 C190.034132,133.121423 190.186235,133.330564 190.224885,133.572774 L190.224885,193.940229 C190.168603,220.758427 168.442166,242.484864 141.623968,242.541207 Z M37.1575749,197.93062 C31.456498,188.086359 29.4094818,176.546984 31.3766237,165.342426 L32.9113895,166.263285 L84.6329973,196.088901 C87.2389349,197.618207 90.4682717,197.618207 93.0742093,196.088901 L156.255402,159.663793 L156.255402,184.885111 C156.243557,185.149771 156.111725,185.394602 155.89729,185.550176 L103.561776,215.733903 C80.3054953,229.131632 50.5924954,221.165435 37.1575749,197.93062 Z M23.5493181,85.3811273 C29.2899861,75.4733097 38.3511911,67.9162648 49.1287482,64.0478825 L49.1287482,125.438515 C49.0891492,128.459425 50.6965386,131.262556 53.3237748,132.754232 L116.198014,169.025864 L94.3531808,181.662102 C94.1132325,181.789434 93.8257461,181.789434 93.5857979,181.662102 L41.3526015,151.529534 
C18.1419426,138.076098 10.1817681,108.385562 23.5493181,85.125333 L23.5493181,85.3811273 Z M203.0146,127.075598 L139.935725,90.4458545 L161.7294,77.8607748 C161.969348,77.7334434 162.256834,77.7334434 162.496783,77.8607748 L214.729979,108.044502 C231.032329,117.451747 240.437294,135.426109 238.871504,154.182739 C237.305714,172.939368 225.050719,189.105572 207.414262,195.67963 L207.414262,134.288998 C207.322521,131.276867 205.650697,128.535853 203.0146,127.075598 Z M224.757116,94.3850867 L223.22235,93.4642272 L171.60306,63.3828173 C168.981293,61.8443751 165.732456,61.8443751 163.110689,63.3828173 L99.9806554,99.8079259 L99.9806554,74.5866077 C99.9533004,74.3254088 100.071095,74.0701869 100.287609,73.9215426 L152.520805,43.7889738 C168.863098,34.3743518 189.174256,35.2529043 204.642579,46.0434841 C220.110903,56.8340638 227.949269,75.5923959 224.757116,94.1804513 L224.757116,94.3850867 Z M88.0606409,139.097931 L66.2158076,126.512851 C65.9950399,126.379091 65.8450965,126.154176 65.8065367,125.898945 L65.8065367,65.684966 C65.8314495,46.8285367 76.7500605,29.6846032 93.8270852,21.6883055 C110.90411,13.6920079 131.063833,16.2835462 145.5632,28.338998 L144.028434,29.2086986 L92.3579852,59.0343142 C89.7419327,60.5687513 88.1282597,63.3683767 88.1117998,66.4011901 L88.0606409,139.097931 Z M99.9294965,113.5185 L128.06687,97.3011417 L156.255402,113.5185 L156.255402,145.953218 L128.169187,162.170577 L99.9806554,145.953218 L99.9294965,113.5185 Z" fill="#7D7D87"></path>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 3.7 KiB |
@@ -0,0 +1,106 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import { Cohere } from 'langchain/llms/cohere';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
/**
 * n8n sub-node that supplies a Cohere completion model to downstream AI
 * chains/agents. It has no regular inputs; it emits a single
 * `AiLanguageModel` connection carrying a log-wrapped LangChain `Cohere`
 * instance configured from the node's credentials and options.
 */
export class LmCohere implements INodeType {
	description: INodeTypeDescription = {
		displayName: 'Cohere Model',
		// eslint-disable-next-line n8n-nodes-base/node-class-description-name-miscased
		name: 'lmCohere',
		icon: 'file:cohere.svg',
		group: ['transform'],
		version: 1,
		description: 'Language Model Cohere',
		defaults: {
			name: 'Cohere Model',
		},
		codex: {
			categories: ['AI'],
			subcategories: {
				AI: ['Language Models'],
			},
			resources: {
				primaryDocumentation: [
					{
						url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmcohere/',
					},
				],
			},
		},
		// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
		inputs: [],
		// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
		outputs: [NodeConnectionType.AiLanguageModel],
		outputNames: ['Model'],
		credentials: [
			{
				name: 'cohereApi',
				required: true,
			},
		],
		properties: [
			getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
			{
				displayName: 'Options',
				name: 'options',
				placeholder: 'Add Option',
				description: 'Additional options to add',
				type: 'collection',
				default: {},
				options: [
					{
						displayName: 'Maximum Number of Tokens',
						name: 'maxTokens',
						default: 250,
						description:
							'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).',
						type: 'number',
						typeOptions: {
							maxValue: 32768,
						},
					},
					{
						// Empty default lets the Cohere client pick its own model
						// unless the user names one explicitly.
						displayName: 'Model',
						name: 'model',
						type: 'string',
						description: 'The name of the model to use',
						default: '',
					},
					{
						displayName: 'Sampling Temperature',
						name: 'temperature',
						default: 0,
						typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
						description:
							'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.',
						type: 'number',
					},
				],
			},
		],
	};

	/**
	 * Builds the Cohere client for the given item and returns it wrapped in
	 * `logWrapper` so downstream nodes get execution logging.
	 *
	 * @param itemIndex - Index of the input item whose parameters are read.
	 * @returns SupplyData whose `response` is the wrapped model instance.
	 */
	async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
		const credentials = await this.getCredentials('cohereApi');

		// Options keys (maxTokens, model, temperature) are spread directly into
		// the Cohere config.
		const options = this.getNodeParameter('options', itemIndex, {}) as object;

		const model = new Cohere({
			apiKey: credentials.apiKey as string,
			...options,
		});

		return {
			response: logWrapper(model, this),
		};
	}
}
|
||||
93
packages/@n8n/nodes-langchain/nodes/llms/LMCohere/cohere.svg
Normal file
@@ -0,0 +1,93 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
xmlns:dc="http://purl.org/dc/elements/1.1/"
|
||||
xmlns:cc="http://creativecommons.org/ns#"
|
||||
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:svg="http://www.w3.org/2000/svg"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
viewBox="0 0 1000 166.84006"
|
||||
height="166.84006"
|
||||
width="1000"
|
||||
version="1.1"
|
||||
id="svg17"
|
||||
sodipodi:docname="f5e234e86f931398d5e05f4cdeb8d6dfcd182c7a-102x18.svg"
|
||||
style="fill:none"
|
||||
inkscape:version="0.92.4 (5da689c313, 2019-01-14)">
|
||||
<metadata
|
||||
id="metadata21">
|
||||
<rdf:RDF>
|
||||
<cc:Work
|
||||
rdf:about="">
|
||||
<dc:format>image/svg+xml</dc:format>
|
||||
<dc:type
|
||||
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
|
||||
<dc:title></dc:title>
|
||||
</cc:Work>
|
||||
</rdf:RDF>
|
||||
</metadata>
|
||||
<sodipodi:namedview
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#111111"
|
||||
borderopacity="1"
|
||||
objecttolerance="10"
|
||||
gridtolerance="10"
|
||||
guidetolerance="10"
|
||||
inkscape:pageopacity="0"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:window-width="1920"
|
||||
inkscape:window-height="1017"
|
||||
id="namedview19"
|
||||
showgrid="false"
|
||||
fit-margin-top="0"
|
||||
fit-margin-left="0"
|
||||
fit-margin-right="0"
|
||||
fit-margin-bottom="0"
|
||||
inkscape:zoom="0.57365771"
|
||||
inkscape:cx="572.84853"
|
||||
inkscape:cy="17.606572"
|
||||
inkscape:window-x="1912"
|
||||
inkscape:window-y="-8"
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:current-layer="svg17" />
|
||||
<path
|
||||
d="m 281.72414,166.80668 c 24.78262,0 46.47281,-12.39131 55.04741,-37.41044 1.66447,-5.01011 -0.7233,-8.33906 -5.48119,-8.33906 h -9.2979 c -4.28779,0 -7.14566,1.90001 -9.06236,5.95128 -7.3812,14.54453 -17.63695,19.77349 -30.49934,19.77349 -22.88358,0 -36.93937,-15.97346 -36.93937,-42.65609 0,-26.683027 14.54355,-42.655802 36.45161,-42.655802 13.34917,0 24.31252,5.716529 31.22264,19.301823 2.15224,4.052054 4.75789,5.95197 9.06236,5.95197 h 9.2979 c 4.75789,0 7.14566,-3.093705 5.48119,-7.633325 C 327.00257,51.920035 304.59006,41.44523 281.72414,41.44523 c -34.55159,0 -60.29304,25.506014 -60.29304,62.68063 0,37.1749 24.54805,62.68082 60.29304,62.68082 z M 905.35262,93.163386 c 3.09341,-20.260172 16.91464,-32.887118 35.74499,-32.887118 18.83134,0 32.88712,12.862288 34.55159,32.887118 z m 36.70383,73.643294 c 21.92475,0 43.85047,-10.24005 54.57731,-33.35819 2.62037,-5.48119 0.23554,-9.2979 -4.52431,-9.2979 h -8.81308 c -4.28387,0 -6.90718,1.90001 -9.05942,5.71672 -7.14566,12.62685 -19.53697,17.8725 -32.16382,17.8725 -21.6892,0 -35.74499,-14.77909 -37.41044,-38.83938 h 87.46639 c 4.75984,0 7.87092,-2.62234 7.87092,-7.63245 -0.96178,-37.410441 -25.2586,-59.80597 -58.86804,-59.80597 -33.61042,0 -60.29305,24.312325 -60.29305,62.68053 0,38.36831 25.97602,62.68083 61.25091,62.68083 z M 803.36232,107.23595 h 7.86798 c 4.75887,0 7.3812,-2.62233 8.1045,-7.633421 4.54001,-32.197284 23.38803,-36.467996 43.42945,-35.509549 4.2868,0.20168 7.80124,-3.093706 7.80124,-7.397983 V 49.3139 c 0,-4.75818 -2.38778,-7.63333 -7.14566,-7.86867 -17.73804,-0.67256 -33.55939,5.41386 -42.70615,22.64775 -0.50444,0.941468 -1.91669,0.689343 -2.03446,-0.369895 l -1.47997,-12.99684 c -0.4701,-4.758175 -3.09341,-7.145755 -7.86798,-7.145755 h -35.98151 c -4.20339,0 -7.63343,3.41315 -7.63343,7.633321 v 4.052054 c 0,4.203388 3.41335,7.633325 7.63343,7.633325 h 14.77908 c 4.20339,0 7.63342,3.413155 7.63342,7.633325 v 29.070014 c 0,4.203391 3.41335,7.633421 7.63343,7.633421 z m -32.1648,57.19964 h 74.58437 c 
4.75887,0 7.63342,-2.85787 7.63342,-7.63342 v -4.05225 c 0,-4.75789 -2.85787,-7.63244 -7.63342,-7.63244 H 826.716 c -4.75789,0 -7.63342,-2.85886 -7.63342,-7.63343 v -13.11461 c 0,-4.75886 -2.85788,-7.63342 -7.63343,-7.63342 h -8.10352 c -4.75886,0 -7.63342,2.85787 -7.63342,7.63342 v 13.11461 c 0,4.75789 -2.85885,7.63343 -7.63342,7.63343 h -16.91464 c -4.75788,0 -7.63342,2.85787 -7.63342,7.63244 v 4.05225 c 0,4.75886 2.85885,7.63342 7.63342,7.63342 z M 656.09261,93.180266 c 3.09439,-20.26027 16.91463,-32.887118 35.74597,-32.887118 18.83035,0 32.88712,12.862289 34.55159,32.887118 z m 36.70383,73.643104 c 21.92474,0 43.84949,-10.23908 54.57632,-33.3582 2.62331,-5.48119 0.23554,-9.2979 -4.52235,-9.2979 h -8.81013 c -4.28779,0 -6.91013,1.90002 -9.06236,5.71673 -7.14566,12.62685 -19.53795,17.87249 -32.1648,17.87249 -21.68921,0 -35.74499,-14.77908 -37.40947,-38.8384 h 87.46345 c 4.75788,0 7.86896,-2.62331 7.86896,-7.63342 -0.95884,-37.410251 -25.2537,-59.80588 -58.86411,-59.80588 -33.61042,0 -60.29305,24.312327 -60.29305,62.68044 0,38.3683 25.97699,62.68082 61.25188,62.68082 z m -275.70711,0 c 35.74597,0 61.25189,-26.44808 61.25189,-62.68083 0,-36.232947 -25.50592,-62.68053 -61.25189,-62.68053 -35.74499,0 -61.25091,26.935148 -61.25091,62.68053 0,8.34004 1.42894,17.63696 5.71574,27.87701 2.15224,5.01011 6.20449,5.71672 10.49228,2.62233 l 6.91012,-5.01011 c 3.58118,-2.62331 4.52235,-5.71672 3.32895,-10.23907 -1.90001,-5.95226 -2.38777,-11.19791 -2.38777,-15.72124 0,-25.018055 15.01462,-42.18462 37.1749,-42.18462 22.16029,0 37.17491,16.914342 37.17491,42.6557 0,25.74146 -14.77908,42.65609 -36.70383,42.65609 -7.63342,0 -14.77908,-1.42893 -23.35466,-7.86896 -3.58117,-2.85787 -6.91012,-3.32895 -10.72683,-0.47108 l -5.24565,3.81672 c -4.28779,3.09439 -4.75789,7.3812 -0.7233,10.72683 12.39229,10.00451 26.68361,13.82123 39.56268,13.82123 z m 91.49901,-2.38778 h 7.86896 c 4.20339,0 7.63342,-3.41335 7.63342,-7.63342 V 99.854752 c 0,-24.059611 12.86141,-38.367912 32.88712,-38.367912 
18.10803,0 28.59933,11.920721 28.59933,33.845565 v 61.486445 c 0,4.20339 3.41335,7.63342 7.63342,7.63342 h 8.10352 c 4.20339,0 7.63342,-3.41335 7.63342,-7.63342 V 91.515693 c 0,-32.164111 -16.44356,-50.036903 -44.31958,-50.036903 -18.98248,0 -30.19707,7.76788 -38.57048,18.578918 -0.6389,0.823895 -1.93338,0.369894 -1.93338,-0.655682 V 7.63333 C 524.07309,3.41314 520.66069,0 516.4573,0 h -7.86896 c -4.20339,0 -7.63343,3.41314 -7.63343,7.63333 v 149.16884 c 0,4.20338 3.41336,7.63342 7.63343,7.63342 z"
|
||||
id="path2"
|
||||
inkscape:connector-curvature="0"
|
||||
style="fill:#39594d;stroke-width:9.81412029" />
|
||||
<g
|
||||
clip-path="url(#clip0_2207_90691)"
|
||||
id="g10"
|
||||
transform="matrix(9.8141206,0,0,9.8141206,0,-4.90706)">
|
||||
<path
|
||||
d="m 5.50773,10.6219 c 0.45757,0 1.36777,-0.0251 2.62589,-0.5431 C 9.59973,9.47518 12.5166,8.37942 14.6208,7.2539 16.0924,6.46668 16.7375,5.42553 16.7375,4.02344 16.7375,2.07751 15.16,0.5 13.2141,0.5 H 5.06095 C 2.26586,0.5 0,2.76586 0,5.56095 0,8.35604 2.12151,10.6219 5.50773,10.6219 Z"
|
||||
id="path4"
|
||||
inkscape:connector-curvature="0"
|
||||
style="clip-rule:evenodd;fill:#39594d;fill-rule:evenodd" />
|
||||
<path
|
||||
d="m 6.88672,14.107 c 0,-1.3701 0.82483,-2.6054 2.09027,-3.1306 L 11.5446,9.9108 c 2.5971,-1.07786 5.4557,0.8307 5.4557,3.6427 0,2.1785 -1.7664,3.9444 -3.945,3.9438 l -2.7799,-7e-4 C 8.40372,17.4961 6.88672,15.9787 6.88672,14.107 Z"
|
||||
id="path6"
|
||||
inkscape:connector-curvature="0"
|
||||
style="clip-rule:evenodd;fill:#d18ee2;fill-rule:evenodd" />
|
||||
<path
|
||||
d="m 2.91749,11.2891 h -5e-5 C 1.30618,11.2891 0,12.5952 0,14.2065 v 0.3779 c 0,1.6112 1.30618,2.9174 2.91744,2.9174 h 5e-5 c 1.61126,0 2.91744,-1.3062 2.91744,-2.9174 v -0.3779 c 0,-1.6113 -1.30618,-2.9174 -2.91744,-2.9174 z"
|
||||
id="path8"
|
||||
inkscape:connector-curvature="0"
|
||||
style="fill:#ff7759" />
|
||||
</g>
|
||||
<defs
|
||||
id="defs15">
|
||||
<clipPath
|
||||
id="clip0_2207_90691">
|
||||
<rect
|
||||
transform="translate(0,0.5)"
|
||||
height="17"
|
||||
width="17"
|
||||
id="rect12"
|
||||
x="0"
|
||||
y="0"
|
||||
style="fill:#ffffff" />
|
||||
</clipPath>
|
||||
</defs>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 7.6 KiB |
@@ -0,0 +1,160 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import { Ollama } from 'langchain/llms/ollama';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
export class LmOllama implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Ollama Model',
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-name-miscased
|
||||
name: 'lmOllama',
|
||||
icon: 'file:ollama.svg',
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'Language Model Ollama',
|
||||
defaults: {
|
||||
name: 'Ollama Model',
|
||||
},
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Language Models'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmollama/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [],
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
|
||||
outputs: [NodeConnectionType.AiLanguageModel],
|
||||
outputNames: ['Model'],
|
||||
credentials: [
|
||||
{
|
||||
name: 'ollamaApi',
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
requestDefaults: {
|
||||
ignoreHttpStatusErrors: true,
|
||||
baseURL: '={{ $credentials.baseUrl.replace(new RegExp("/$"), "") }}',
|
||||
},
|
||||
properties: [
|
||||
getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
|
||||
{
|
||||
displayName: 'Model',
|
||||
name: 'model',
|
||||
type: 'options',
|
||||
default: 'llama2',
|
||||
description:
|
||||
'The model which will generate the completion. To download models, visit <a href="https://ollama.ai/library">Ollama Models Library</a>.',
|
||||
typeOptions: {
|
||||
loadOptions: {
|
||||
routing: {
|
||||
request: {
|
||||
method: 'GET',
|
||||
url: '/api/tags',
|
||||
},
|
||||
output: {
|
||||
postReceive: [
|
||||
{
|
||||
type: 'rootProperty',
|
||||
properties: {
|
||||
property: 'models',
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'setKeyValue',
|
||||
properties: {
|
||||
name: '={{$responseItem.name}}',
|
||||
value: '={{$responseItem.name}}',
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'sort',
|
||||
properties: {
|
||||
key: 'name',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
routing: {
|
||||
send: {
|
||||
type: 'body',
|
||||
property: 'model',
|
||||
},
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
placeholder: 'Add Option',
|
||||
description: 'Additional options to add',
|
||||
type: 'collection',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Sampling Temperature',
|
||||
name: 'temperature',
|
||||
default: 0.7,
|
||||
typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
|
||||
description:
|
||||
'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.',
|
||||
type: 'number',
|
||||
},
|
||||
{
|
||||
displayName: 'Top K',
|
||||
name: 'topK',
|
||||
default: -1,
|
||||
typeOptions: { maxValue: 1, minValue: -1, numberPrecision: 1 },
|
||||
description:
|
||||
'Used to remove "long tail" low probability responses. Defaults to -1, which disables it.',
|
||||
type: 'number',
|
||||
},
|
||||
{
|
||||
displayName: 'Top P',
|
||||
name: 'topP',
|
||||
default: 1,
|
||||
typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
|
||||
description:
|
||||
'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.',
|
||||
type: 'number',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('ollamaApi');
|
||||
|
||||
const modelName = this.getNodeParameter('model', itemIndex) as string;
|
||||
const options = this.getNodeParameter('options', itemIndex, {}) as object;
|
||||
|
||||
const model = new Ollama({
|
||||
baseUrl: credentials.baseUrl as string,
|
||||
model: modelName,
|
||||
...options,
|
||||
});
|
||||
|
||||
return {
|
||||
response: logWrapper(model, this),
|
||||
};
|
||||
}
|
||||
}
|
||||
43
packages/@n8n/nodes-langchain/nodes/llms/LMOllama/ollama.svg
Normal file
@@ -0,0 +1,43 @@
|
||||
<?xml version="1.0" standalone="no"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 20010904//EN"
|
||||
"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
|
||||
<svg version="1.0" xmlns="http://www.w3.org/2000/svg"
|
||||
width="181.000000pt" height="256.000000pt" viewBox="0 0 181.000000 256.000000"
|
||||
preserveAspectRatio="xMidYMid meet">
|
||||
|
||||
<g transform="translate(0.000000,256.000000) scale(0.100000,-0.100000)"
|
||||
fill="#7D7D87" stroke="none">
|
||||
<path d="M377 2365 c-52 -18 -83 -49 -117 -116 -45 -89 -62 -192 -58 -355 l3
|
||||
-142 -58 -61 c-148 -155 -185 -387 -92 -574 l34 -69 -20 -44 c-34 -82 -50
|
||||
-164 -50 -263 0 -108 18 -190 58 -262 l26 -48 -21 -49 c-12 -27 -26 -71 -32
|
||||
-98 -14 -62 -15 -221 -1 -257 10 -26 14 -27 76 -27 73 0 70 -4 53 86 -15 82 2
|
||||
188 42 266 37 70 38 104 5 148 -47 64 -68 136 -69 240 -1 103 14 160 66 261
|
||||
31 61 29 87 -10 122 -11 10 -31 42 -43 70 -19 42 -24 69 -23 142 0 114 25 183
|
||||
95 260 70 76 142 110 239 112 41 0 78 2 82 2 4 1 17 22 29 47 30 59 96 119
|
||||
167 152 49 23 70 27 147 27 79 0 97 -4 149 -29 68 -33 133 -94 159 -148 10
|
||||
-20 23 -41 30 -45 6 -4 46 -8 87 -8 67 -1 83 -5 140 -36 123 -68 193 -187 193
|
||||
-334 1 -67 -4 -90 -27 -142 -16 -35 -35 -68 -43 -75 -34 -28 -35 -58 -5 -117
|
||||
52 -101 67 -158 66 -261 -1 -104 -22 -176 -69 -240 -33 -44 -32 -78 5 -148 40
|
||||
-78 57 -184 42 -266 -17 -90 -20 -86 53 -86 62 0 66 1 76 27 14 36 13 195 -1
|
||||
257 -6 27 -20 71 -32 98 l-21 49 26 48 c76 139 79 359 6 528 l-20 47 25 46
|
||||
c99 183 64 439 -81 591 l-58 61 3 142 c4 164 -13 266 -58 357 -64 126 -172
|
||||
159 -263 79 -54 -47 -92 -138 -123 -298 -3 -14 -10 -22 -17 -18 -182 80 -297
|
||||
85 -443 21 l-54 -24 -4 22 c-36 185 -85 285 -156 322 -43 21 -74 24 -113 10z
|
||||
m77 -168 c42 -71 81 -301 57 -336 -5 -8 -31 -16 -58 -18 -26 -2 -62 -8 -80
|
||||
-13 l-31 -8 -7 49 c-8 59 2 172 22 248 14 57 48 121 63 121 5 0 20 -19 34 -43z
|
||||
m965 10 c40 -65 69 -239 56 -336 l-7 -49 -31 8 c-18 5 -54 11 -80 13 -27 2
|
||||
-53 10 -58 18 -12 17 -3 141 17 229 15 64 57 150 74 150 4 0 18 -15 29 -33z"/>
|
||||
<path d="M778 1361 c-73 -24 -116 -51 -165 -104 -55 -60 -76 -120 -71 -201 5
|
||||
-76 35 -129 106 -183 62 -47 127 -63 257 -63 172 0 258 36 329 138 42 59 48
|
||||
155 16 230 -29 68 -111 143 -188 173 -80 31 -207 36 -284 10z m257 -100 c161
|
||||
-71 194 -232 66 -318 -49 -33 -94 -43 -196 -43 -102 0 -147 10 -196 43 -178
|
||||
120 -32 356 211 343 39 -2 86 -12 115 -25z"/>
|
||||
<path d="M838 1159 c-25 -14 -22 -44 7 -67 20 -16 24 -26 19 -49 -7 -36 15
|
||||
-58 51 -49 21 5 25 12 25 46 0 29 5 42 20 50 27 15 27 66 0 75 -10 3 -28 1
|
||||
-40 -5 -14 -7 -26 -8 -39 0 -23 12 -22 12 -43 -1z"/>
|
||||
<path d="M397 1348 c-9 -7 -23 -30 -32 -50 -21 -53 -1 -103 47 -116 43 -11 60
|
||||
-6 92 27 40 41 43 81 11 119 -21 25 -34 32 -64 32 -20 0 -45 -6 -54 -12z"/>
|
||||
<path d="M1295 1328 c-32 -38 -29 -78 11 -119 32 -33 49 -38 92 -27 49 13 68
|
||||
62 46 118 -19 47 -38 60 -87 60 -27 0 -41 -7 -62 -32z"/>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 2.7 KiB |
@@ -0,0 +1,252 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import { NodeConnectionType } from 'n8n-workflow';
|
||||
import type {
|
||||
IExecuteFunctions,
|
||||
INodeType,
|
||||
INodeTypeDescription,
|
||||
SupplyData,
|
||||
ILoadOptionsFunctions,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import type { ClientOptions } from 'openai';
|
||||
import { OpenAI } from 'langchain/llms/openai';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
type LmOpenAiOptions = {
|
||||
baseURL?: string;
|
||||
frequencyPenalty?: number;
|
||||
maxTokens?: number;
|
||||
presencePenalty?: number;
|
||||
temperature?: number;
|
||||
timeout?: number;
|
||||
maxRetries?: number;
|
||||
topP?: number;
|
||||
};
|
||||
|
||||
export class LmOpenAi implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'OpenAI Model',
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-name-miscased
|
||||
name: 'lmOpenAi',
|
||||
icon: 'file:openAi.svg',
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'For advanced usage with an AI chain',
|
||||
defaults: {
|
||||
name: 'OpenAI Model',
|
||||
},
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Language Models'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmopenai/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [],
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
|
||||
outputs: [NodeConnectionType.AiLanguageModel],
|
||||
outputNames: ['Model'],
|
||||
credentials: [
|
||||
{
|
||||
name: 'openAiApi',
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
requestDefaults: {
|
||||
ignoreHttpStatusErrors: true,
|
||||
baseURL:
|
||||
'={{ $parameter.options?.baseURL?.split("/").slice(0,-1).join("/") || "https://api.openai.com" }}',
|
||||
},
|
||||
properties: [
|
||||
getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
|
||||
{
|
||||
displayName: 'Model',
|
||||
name: 'model',
|
||||
type: 'resourceLocator',
|
||||
default: { mode: 'list', value: 'gpt-3.5-turbo-instruct' },
|
||||
required: true,
|
||||
description:
|
||||
'The model which will generate the completion. <a href="https://beta.openai.com/docs/models/overview">Learn more</a>.',
|
||||
modes: [
|
||||
{
|
||||
displayName: 'From List',
|
||||
name: 'list',
|
||||
type: 'list',
|
||||
typeOptions: {
|
||||
searchListMethod: 'openAiModelSearch',
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'ID',
|
||||
name: 'id',
|
||||
type: 'string',
|
||||
},
|
||||
],
|
||||
routing: {
|
||||
send: {
|
||||
type: 'body',
|
||||
property: 'model',
|
||||
value: '={{$parameter.model.value}}',
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
placeholder: 'Add Option',
|
||||
description: 'Additional options to add',
|
||||
type: 'collection',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Base URL',
|
||||
name: 'baseURL',
|
||||
default: 'https://api.openai.com/v1',
|
||||
description: 'Override the default base URL for the API',
|
||||
type: 'string',
|
||||
},
|
||||
{
|
||||
displayName: 'Frequency Penalty',
|
||||
name: 'frequencyPenalty',
|
||||
default: 0,
|
||||
typeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 },
|
||||
description:
|
||||
"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim",
|
||||
type: 'number',
|
||||
},
|
||||
{
|
||||
displayName: 'Maximum Number of Tokens',
|
||||
name: 'maxTokens',
|
||||
default: -1,
|
||||
description:
|
||||
'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).',
|
||||
type: 'number',
|
||||
typeOptions: {
|
||||
maxValue: 32768,
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Presence Penalty',
|
||||
name: 'presencePenalty',
|
||||
default: 0,
|
||||
typeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 },
|
||||
description:
|
||||
"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics",
|
||||
type: 'number',
|
||||
},
|
||||
{
|
||||
displayName: 'Sampling Temperature',
|
||||
name: 'temperature',
|
||||
default: 0.7,
|
||||
typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
|
||||
description:
|
||||
'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.',
|
||||
type: 'number',
|
||||
},
|
||||
{
|
||||
displayName: 'Timeout',
|
||||
name: 'timeout',
|
||||
default: 60000,
|
||||
description: 'Maximum amount of time a request is allowed to take in milliseconds',
|
||||
type: 'number',
|
||||
},
|
||||
{
|
||||
displayName: 'Max Retries',
|
||||
name: 'maxRetries',
|
||||
default: 2,
|
||||
description: 'Maximum number of retries to attempt',
|
||||
type: 'number',
|
||||
},
|
||||
{
|
||||
displayName: 'Top P',
|
||||
name: 'topP',
|
||||
default: 1,
|
||||
typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
|
||||
description:
|
||||
'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.',
|
||||
type: 'number',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
methods = {
|
||||
listSearch: {
|
||||
async openAiModelSearch(this: ILoadOptionsFunctions) {
|
||||
const results = [];
|
||||
|
||||
const options = this.getNodeParameter('options', {}) as LmOpenAiOptions;
|
||||
|
||||
let uri = 'https://api.openai.com/v1/models';
|
||||
|
||||
if (options.baseURL) {
|
||||
uri = `${options.baseURL}/models`;
|
||||
}
|
||||
|
||||
const data = (
|
||||
await this.helpers.requestWithAuthentication.call(this, 'openAiApi', {
|
||||
method: 'GET',
|
||||
uri,
|
||||
json: true,
|
||||
})
|
||||
).data as Array<{ owned_by: string; id: string }>;
|
||||
|
||||
for (const model of data) {
|
||||
if (!model.owned_by?.startsWith('system')) continue;
|
||||
results.push({
|
||||
name: model.id,
|
||||
value: model.id,
|
||||
});
|
||||
}
|
||||
|
||||
return { results };
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('openAiApi');
|
||||
|
||||
const modelName = this.getNodeParameter('model', itemIndex, '', {
|
||||
extractValue: true,
|
||||
}) as string;
|
||||
|
||||
const options = this.getNodeParameter('options', itemIndex, {}) as {
|
||||
baseURL?: string;
|
||||
frequencyPenalty?: number;
|
||||
maxTokens?: number;
|
||||
presencePenalty?: number;
|
||||
temperature?: number;
|
||||
timeout?: number;
|
||||
maxRetries?: number;
|
||||
topP?: number;
|
||||
};
|
||||
|
||||
const configuration: ClientOptions = {};
|
||||
if (options.baseURL) {
|
||||
configuration.baseURL = options.baseURL;
|
||||
}
|
||||
|
||||
const model = new OpenAI({
|
||||
openAIApiKey: credentials.apiKey as string,
|
||||
modelName,
|
||||
...options,
|
||||
configuration,
|
||||
timeout: options.timeout ?? 60000,
|
||||
maxRetries: options.maxRetries ?? 2,
|
||||
});
|
||||
|
||||
return {
|
||||
response: logWrapper(model, this),
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,7 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg width="256px" height="260px" viewBox="0 0 256 260" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid">
|
||||
<title>OpenAI</title>
|
||||
<g>
|
||||
<path d="M239.183914,106.202783 C245.054304,88.5242096 243.02228,69.1733805 233.607599,53.0998864 C219.451678,28.4588021 190.999703,15.7836129 163.213007,21.739505 C147.554077,4.32145883 123.794909,-3.42398554 100.87901,1.41873898 C77.9631105,6.26146349 59.3690093,22.9572536 52.0959621,45.2214219 C33.8436494,48.9644867 18.0901721,60.392749 8.86672513,76.5818033 C-5.443491,101.182962 -2.19544431,132.215255 16.8986662,153.320094 C11.0060865,170.990656 13.0197283,190.343991 22.4238231,206.422991 C36.5975553,231.072344 65.0680342,243.746566 92.8695738,237.783372 C105.235639,251.708249 123.001113,259.630942 141.623968,259.52692 C170.105359,259.552169 195.337611,241.165718 204.037777,214.045661 C222.28734,210.296356 238.038489,198.869783 247.267014,182.68528 C261.404453,158.127515 258.142494,127.262775 239.183914,106.202783 L239.183914,106.202783 Z M141.623968,242.541207 C130.255682,242.559177 119.243876,238.574642 110.519381,231.286197 L112.054146,230.416496 L163.724595,200.590881 C166.340648,199.056444 167.954321,196.256818 167.970781,193.224005 L167.970781,120.373788 L189.815614,133.010026 C190.034132,133.121423 190.186235,133.330564 190.224885,133.572774 L190.224885,193.940229 C190.168603,220.758427 168.442166,242.484864 141.623968,242.541207 Z M37.1575749,197.93062 C31.456498,188.086359 29.4094818,176.546984 31.3766237,165.342426 L32.9113895,166.263285 L84.6329973,196.088901 C87.2389349,197.618207 90.4682717,197.618207 93.0742093,196.088901 L156.255402,159.663793 L156.255402,184.885111 C156.243557,185.149771 156.111725,185.394602 155.89729,185.550176 L103.561776,215.733903 C80.3054953,229.131632 50.5924954,221.165435 37.1575749,197.93062 Z M23.5493181,85.3811273 C29.2899861,75.4733097 38.3511911,67.9162648 49.1287482,64.0478825 L49.1287482,125.438515 C49.0891492,128.459425 50.6965386,131.262556 53.3237748,132.754232 L116.198014,169.025864 L94.3531808,181.662102 C94.1132325,181.789434 93.8257461,181.789434 93.5857979,181.662102 L41.3526015,151.529534 
C18.1419426,138.076098 10.1817681,108.385562 23.5493181,85.125333 L23.5493181,85.3811273 Z M203.0146,127.075598 L139.935725,90.4458545 L161.7294,77.8607748 C161.969348,77.7334434 162.256834,77.7334434 162.496783,77.8607748 L214.729979,108.044502 C231.032329,117.451747 240.437294,135.426109 238.871504,154.182739 C237.305714,172.939368 225.050719,189.105572 207.414262,195.67963 L207.414262,134.288998 C207.322521,131.276867 205.650697,128.535853 203.0146,127.075598 Z M224.757116,94.3850867 L223.22235,93.4642272 L171.60306,63.3828173 C168.981293,61.8443751 165.732456,61.8443751 163.110689,63.3828173 L99.9806554,99.8079259 L99.9806554,74.5866077 C99.9533004,74.3254088 100.071095,74.0701869 100.287609,73.9215426 L152.520805,43.7889738 C168.863098,34.3743518 189.174256,35.2529043 204.642579,46.0434841 C220.110903,56.8340638 227.949269,75.5923959 224.757116,94.1804513 L224.757116,94.3850867 Z M88.0606409,139.097931 L66.2158076,126.512851 C65.9950399,126.379091 65.8450965,126.154176 65.8065367,125.898945 L65.8065367,65.684966 C65.8314495,46.8285367 76.7500605,29.6846032 93.8270852,21.6883055 C110.90411,13.6920079 131.063833,16.2835462 145.5632,28.338998 L144.028434,29.2086986 L92.3579852,59.0343142 C89.7419327,60.5687513 88.1282597,63.3683767 88.1117998,66.4011901 L88.0606409,139.097931 Z M99.9294965,113.5185 L128.06687,97.3011417 L156.255402,113.5185 L156.255402,145.953218 L128.169187,162.170577 L99.9806554,145.953218 L99.9294965,113.5185 Z" fill="#7D7D87"></path>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 3.7 KiB |
@@ -0,0 +1,150 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import { HuggingFaceInference } from 'langchain/llms/hf';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
export class LmOpenHuggingFaceInference implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Hugging Face Inference Model',
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-name-miscased
|
||||
name: 'lmOpenHuggingFaceInference',
|
||||
icon: 'file:huggingface.svg',
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'Language Model HuggingFaceInference',
|
||||
defaults: {
|
||||
name: 'Hugging Face Inference Model',
|
||||
},
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Language Models'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmopenhuggingfaceinference/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [],
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
|
||||
outputs: [NodeConnectionType.AiLanguageModel],
|
||||
outputNames: ['Model'],
|
||||
credentials: [
|
||||
{
|
||||
name: 'huggingFaceApi',
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
properties: [
|
||||
getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
|
||||
{
|
||||
displayName: 'Model',
|
||||
name: 'model',
|
||||
type: 'string',
|
||||
default: 'gpt2',
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
placeholder: 'Add Option',
|
||||
description: 'Additional options to add',
|
||||
type: 'collection',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Custom Inference Endpoint',
|
||||
name: 'endpointUrl',
|
||||
default: '',
|
||||
description: 'Custom endpoint URL',
|
||||
type: 'string',
|
||||
},
|
||||
{
|
||||
displayName: 'Frequency Penalty',
|
||||
name: 'frequencyPenalty',
|
||||
default: 0,
|
||||
typeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 },
|
||||
description:
|
||||
"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim",
|
||||
type: 'number',
|
||||
},
|
||||
{
|
||||
displayName: 'Maximum Number of Tokens',
|
||||
name: 'maxTokens',
|
||||
default: 128,
|
||||
description:
|
||||
'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).',
|
||||
type: 'number',
|
||||
typeOptions: {
|
||||
maxValue: 32768,
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Presence Penalty',
|
||||
name: 'presencePenalty',
|
||||
default: 0,
|
||||
typeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 },
|
||||
description:
|
||||
"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics",
|
||||
type: 'number',
|
||||
},
|
||||
{
|
||||
displayName: 'Sampling Temperature',
|
||||
name: 'temperature',
|
||||
default: 1,
|
||||
typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
|
||||
description:
|
||||
'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.',
|
||||
type: 'number',
|
||||
},
|
||||
{
|
||||
displayName: 'Top K',
|
||||
name: 'topK',
|
||||
default: 1,
|
||||
typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
|
||||
description:
|
||||
'Controls the top tokens to consider within the sample operation to create new text',
|
||||
type: 'number',
|
||||
},
|
||||
{
|
||||
displayName: 'Top P',
|
||||
name: 'topP',
|
||||
default: 1,
|
||||
typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
|
||||
description:
|
||||
'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.',
|
||||
type: 'number',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('huggingFaceApi');
|
||||
|
||||
const modelName = this.getNodeParameter('model', itemIndex) as string;
|
||||
const options = this.getNodeParameter('options', itemIndex, {}) as object;
|
||||
|
||||
const model = new HuggingFaceInference({
|
||||
model: modelName,
|
||||
apiKey: credentials.apiKey as string,
|
||||
...options,
|
||||
});
|
||||
|
||||
return {
|
||||
response: logWrapper(model, this),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
After Width: | Height: | Size: 34 KiB |
@@ -0,0 +1,164 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import { ChatBedrock } from 'langchain/chat_models/bedrock';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
export class LmChatAwsBedrock implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'AWS Bedrock Chat Model',
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-name-miscased
|
||||
name: 'lmChatAwsBedrock',
|
||||
icon: 'file:bedrock.svg',
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'Language Model AWS Bedrock',
|
||||
defaults: {
|
||||
name: 'AWS Bedrock Chat Model',
|
||||
},
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Language Models'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatawsbedrock/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [],
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
|
||||
outputs: [NodeConnectionType.AiLanguageModel],
|
||||
outputNames: ['Model'],
|
||||
credentials: [
|
||||
{
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-credentials-name-unsuffixed
|
||||
name: 'aws',
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
requestDefaults: {
|
||||
ignoreHttpStatusErrors: true,
|
||||
baseURL: '=https://bedrock.{{$credentials?.region ?? "eu-central-1"}}.amazonaws.com',
|
||||
},
|
||||
properties: [
|
||||
getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiChain]),
|
||||
{
|
||||
displayName: 'Model',
|
||||
name: 'model',
|
||||
type: 'options',
|
||||
description:
|
||||
'The model which will generate the completion. <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/foundation-models.html">Learn more</a>.',
|
||||
typeOptions: {
|
||||
loadOptions: {
|
||||
routing: {
|
||||
request: {
|
||||
method: 'GET',
|
||||
url: '/foundation-models?&byOutputModality=TEXT',
|
||||
},
|
||||
output: {
|
||||
postReceive: [
|
||||
{
|
||||
type: 'rootProperty',
|
||||
properties: {
|
||||
property: 'modelSummaries',
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'filter',
|
||||
properties: {
|
||||
// Not a foundational model
|
||||
pass: "={{ !['anthropic.claude-instant-v1-100k'].includes($responseItem.modelId) }}",
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'setKeyValue',
|
||||
properties: {
|
||||
name: '={{$responseItem.modelName}}',
|
||||
description: '={{$responseItem.modelArn}}',
|
||||
value: '={{$responseItem.modelId}}',
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'sort',
|
||||
properties: {
|
||||
key: 'name',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
routing: {
|
||||
send: {
|
||||
type: 'body',
|
||||
property: 'model',
|
||||
},
|
||||
},
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
placeholder: 'Add Option',
|
||||
description: 'Additional options to add',
|
||||
type: 'collection',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Maximum Number of Tokens',
|
||||
name: 'maxTokensToSample',
|
||||
default: 2000,
|
||||
description: 'The maximum number of tokens to generate in the completion',
|
||||
type: 'number',
|
||||
},
|
||||
{
|
||||
displayName: 'Sampling Temperature',
|
||||
name: 'temperature',
|
||||
default: 0.7,
|
||||
typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
|
||||
description:
|
||||
'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.',
|
||||
type: 'number',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('aws');
|
||||
const modelName = this.getNodeParameter('model', itemIndex) as string;
|
||||
const options = this.getNodeParameter('options', itemIndex, {}) as {
|
||||
temperature: number;
|
||||
maxTokensToSample: number;
|
||||
};
|
||||
|
||||
const model = new ChatBedrock({
|
||||
region: credentials.region as string,
|
||||
model: modelName,
|
||||
temperature: options.temperature,
|
||||
maxTokens: options.maxTokensToSample,
|
||||
credentials: {
|
||||
secretAccessKey: credentials.secretAccessKey as string,
|
||||
accessKeyId: credentials.accessKeyId as string,
|
||||
sessionToken: credentials.sessionToken as string,
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
response: logWrapper(model, this),
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,18 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg width="24px" height="24px" viewBox="0 0 24 24" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<title>Icon-Architecture/16/Arch_Amazon-Bedrock_16</title>
|
||||
<defs>
|
||||
<linearGradient x1="0%" y1="100%" x2="100%" y2="0%" id="linearGradient-1">
|
||||
<stop stop-color="#055F4E" offset="0%"></stop>
|
||||
<stop stop-color="#56C0A7" offset="100%"></stop>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<g id="Icon-Architecture/16/Arch_Amazon-Bedrock_16" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
|
||||
<g id="Icon-Architecture-BG/16/Machine-Learning" fill="url(#linearGradient-1)">
|
||||
<rect id="Rectangle" x="0" y="0" width="24" height="24"></rect>
|
||||
</g>
|
||||
<g id="Icon-Service/16/Amazon-Bedrock_16" transform="translate(4.000000, 4.000000)" fill="#FFFFFF">
|
||||
<path d="M8,14.1397014 L5.574,14.9487014 L4.628,14.3177014 L5.658,13.9737014 L5.342,13.0257014 L3.574,13.6147014 L3,13.2327014 L3,10.4997014 C3,10.3107014 2.893,10.1377014 2.724,10.0527014 L1,9.19070136 L1,6.80870136 L2.5,6.05870136 L4,6.80870136 L4,8.49970136 C4,8.68970136 4.107,8.86270136 4.276,8.94770136 L6.276,9.94770136 L6.724,9.05270136 L5,8.19070136 L5,6.80870136 L6.724,5.94770136 C6.893,5.86270136 7,5.68970136 7,5.49970136 L7,3.99970136 L6,3.99970136 L6,5.19070136 L4.5,5.94070136 L3,5.19070136 L3,2.76770136 L4,2.10070136 L4,3.99970136 L5,3.99970136 L5,1.43470136 L5.574,1.05170136 L8,1.86070136 L8,14.1397014 Z M13.5,12.9997014 C13.775,12.9997014 14,13.2237014 14,13.4997014 C14,13.7757014 13.775,13.9997014 13.5,13.9997014 C13.225,13.9997014 13,13.7757014 13,13.4997014 C13,13.2237014 13.225,12.9997014 13.5,12.9997014 L13.5,12.9997014 Z M12.5,1.99970136 C12.775,1.99970136 13,2.22370136 13,2.49970136 C13,2.77570136 12.775,2.99970136 12.5,2.99970136 C12.225,2.99970136 12,2.77570136 12,2.49970136 C12,2.22370136 12.225,1.99970136 12.5,1.99970136 L12.5,1.99970136 Z M14.5,7.99970136 C14.775,7.99970136 15,8.22370136 15,8.49970136 C15,8.77570136 14.775,8.99970136 14.5,8.99970136 C14.225,8.99970136 14,8.77570136 14,8.49970136 C14,8.22370136 14.225,7.99970136 14.5,7.99970136 L14.5,7.99970136 Z M13.092,8.99970136 C13.299,9.58070136 13.849,9.99970136 14.5,9.99970136 C15.327,9.99970136 16,9.32770136 16,8.49970136 C16,7.67270136 15.327,6.99970136 14.5,6.99970136 C13.849,6.99970136 13.299,7.41970136 13.092,7.99970136 L9,7.99970136 L9,5.99970136 L12.5,5.99970136 C12.776,5.99970136 13,5.77670136 13,5.49970136 L13,3.90770136 C13.581,3.70070136 14,3.15070136 14,2.49970136 C14,1.67270136 13.327,0.999701362 12.5,0.999701362 C11.673,0.999701362 11,1.67270136 11,2.49970136 C11,3.15070136 11.419,3.70070136 12,3.90770136 L12,4.99970136 L9,4.99970136 L9,1.49970136 C9,1.28470136 8.862,1.09370136 8.658,1.02570136 L5.658,0.0257013622 C5.511,-0.0232986378 5.351,-0.00129863776 
5.223,0.0837013622 L2.223,2.08370136 C2.084,2.17670136 2,2.33270136 2,2.49970136 L2,5.19070136 L0.276,6.05270136 C0.107,6.13770136 0,6.31070136 0,6.49970136 L0,9.49970136 C0,9.68970136 0.107,9.86270136 0.276,9.94770136 L2,10.8087014 L2,13.4997014 C2,13.6667014 2.084,13.8237014 2.223,13.9157014 L5.223,15.9157014 C5.306,15.9717014 5.402,15.9997014 5.5,15.9997014 C5.553,15.9997014 5.606,15.9917014 5.658,15.9737014 L8.658,14.9737014 C8.862,14.9067014 9,14.7157014 9,14.4997014 L9,11.9997014 L11.293,11.9997014 L12.146,12.8537014 L12.159,12.8407014 C12.061,13.0407014 12,13.2627014 12,13.4997014 C12,14.3267014 12.673,14.9997014 13.5,14.9997014 C14.327,14.9997014 15,14.3267014 15,13.4997014 C15,12.6727014 14.327,11.9997014 13.5,11.9997014 C13.262,11.9997014 13.04,12.0607014 12.841,12.1597014 L12.854,12.1467014 L11.854,11.1467014 C11.76,11.0527014 11.633,10.9997014 11.5,10.9997014 L9,10.9997014 L9,8.99970136 L13.092,8.99970136 Z" id="Fill-7"></path>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 3.8 KiB |
@@ -0,0 +1,165 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import { ChatGooglePaLM } from 'langchain/chat_models/googlepalm';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
export class LmChatGooglePalm implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Google PaLM Chat Model',
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-name-miscased
|
||||
name: 'lmChatGooglePalm',
|
||||
icon: 'file:google.svg',
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'Chat Model Google PaLM',
|
||||
defaults: {
|
||||
name: 'Google PaLM Chat Model',
|
||||
},
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Language Models'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatgooglepalm/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [],
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
|
||||
outputs: [NodeConnectionType.AiLanguageModel],
|
||||
outputNames: ['Model'],
|
||||
credentials: [
|
||||
{
|
||||
name: 'googlePalmApi',
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
requestDefaults: {
|
||||
ignoreHttpStatusErrors: true,
|
||||
baseURL: '={{ $credentials.host }}',
|
||||
},
|
||||
properties: [
|
||||
getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
|
||||
{
|
||||
displayName: 'Model',
|
||||
name: 'modelName',
|
||||
type: 'options',
|
||||
description:
|
||||
'The model which will generate the completion. <a href="https://developers.generativeai.google/api/rest/generativelanguage/models/list">Learn more</a>.',
|
||||
typeOptions: {
|
||||
loadOptions: {
|
||||
routing: {
|
||||
request: {
|
||||
method: 'GET',
|
||||
url: '/v1beta3/models',
|
||||
},
|
||||
output: {
|
||||
postReceive: [
|
||||
{
|
||||
type: 'rootProperty',
|
||||
properties: {
|
||||
property: 'models',
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'filter',
|
||||
properties: {
|
||||
pass: "={{ $responseItem.name.startsWith('models/chat') }}",
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'setKeyValue',
|
||||
properties: {
|
||||
name: '={{$responseItem.name}}',
|
||||
value: '={{$responseItem.name}}',
|
||||
description: '={{$responseItem.description}}',
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'sort',
|
||||
properties: {
|
||||
key: 'name',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
routing: {
|
||||
send: {
|
||||
type: 'body',
|
||||
property: 'model',
|
||||
},
|
||||
},
|
||||
default: 'models/chat-bison-001',
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
placeholder: 'Add Option',
|
||||
description: 'Additional options to add',
|
||||
type: 'collection',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Sampling Temperature',
|
||||
name: 'temperature',
|
||||
default: 0.7,
|
||||
typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
|
||||
description:
|
||||
'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.',
|
||||
type: 'number',
|
||||
},
|
||||
{
|
||||
displayName: 'Top K',
|
||||
name: 'topK',
|
||||
default: 40,
|
||||
typeOptions: { maxValue: 1, minValue: -1, numberPrecision: 1 },
|
||||
description:
|
||||
'Used to remove "long tail" low probability responses. Defaults to -1, which disables it.',
|
||||
type: 'number',
|
||||
},
|
||||
{
|
||||
displayName: 'Top P',
|
||||
name: 'topP',
|
||||
default: 0.9,
|
||||
typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
|
||||
description:
|
||||
'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.',
|
||||
type: 'number',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('googlePalmApi');
|
||||
|
||||
const modelName = this.getNodeParameter('modelName', itemIndex) as string;
|
||||
const options = this.getNodeParameter('options', itemIndex, {}) as object;
|
||||
|
||||
const model = new ChatGooglePaLM({
|
||||
apiKey: credentials.apiKey as string,
|
||||
modelName,
|
||||
...options,
|
||||
});
|
||||
|
||||
return {
|
||||
response: logWrapper(model, this),
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 48 48"><defs><path id="a" d="M44.5 20H24v8.5h11.8C34.7 33.9 30.1 37 24 37c-7.2 0-13-5.8-13-13s5.8-13 13-13c3.1 0 5.9 1.1 8.1 2.9l6.4-6.4C34.6 4.1 29.6 2 24 2 11.8 2 2 11.8 2 24s9.8 22 22 22c11 0 21-8 21-22 0-1.3-.2-2.7-.5-4z"/></defs><clipPath id="b"><use xlink:href="#a" overflow="visible"/></clipPath><path clip-path="url(#b)" fill="#FBBC05" d="M0 37V11l17 13z"/><path clip-path="url(#b)" fill="#EA4335" d="M0 11l17 13 7-6.1L48 14V0H0z"/><path clip-path="url(#b)" fill="#34A853" d="M0 37l30-23 7.9 1L48 0v48H0z"/><path clip-path="url(#b)" fill="#4285F4" d="M48 48L17 24l-4-3 35-10z"/></svg>
|
||||
|
After Width: | Height: | Size: 688 B |
@@ -0,0 +1,172 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import { GooglePaLM } from 'langchain/llms/googlepalm';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
export class LmGooglePalm implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Google PaLM Language Model',
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-name-miscased
|
||||
name: 'lmGooglePalm',
|
||||
icon: 'file:google.svg',
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'Language Model Google PaLM',
|
||||
defaults: {
|
||||
name: 'Google PaLM Language Model',
|
||||
},
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Language Models'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmgooglepalm/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [],
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
|
||||
outputs: [NodeConnectionType.AiLanguageModel],
|
||||
outputNames: ['Model'],
|
||||
credentials: [
|
||||
{
|
||||
name: 'googlePalmApi',
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
requestDefaults: {
|
||||
ignoreHttpStatusErrors: true,
|
||||
baseURL: '={{ $credentials.host }}',
|
||||
},
|
||||
properties: [
|
||||
getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
|
||||
{
|
||||
displayName: 'Model',
|
||||
name: 'modelName',
|
||||
type: 'options',
|
||||
description:
|
||||
'The model which will generate the completion. <a href="https://developers.generativeai.google/api/rest/generativelanguage/models/list">Learn more</a>.',
|
||||
typeOptions: {
|
||||
loadOptions: {
|
||||
routing: {
|
||||
request: {
|
||||
method: 'GET',
|
||||
url: '/v1beta3/models',
|
||||
},
|
||||
output: {
|
||||
postReceive: [
|
||||
{
|
||||
type: 'rootProperty',
|
||||
properties: {
|
||||
property: 'models',
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'filter',
|
||||
properties: {
|
||||
pass: "={{ $responseItem.name.startsWith('models/text') }}",
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'setKeyValue',
|
||||
properties: {
|
||||
name: '={{$responseItem.name}}',
|
||||
value: '={{$responseItem.name}}',
|
||||
description: '={{$responseItem.description}}',
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'sort',
|
||||
properties: {
|
||||
key: 'name',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
routing: {
|
||||
send: {
|
||||
type: 'body',
|
||||
property: 'model',
|
||||
},
|
||||
},
|
||||
default: 'models/text-bison-001',
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
placeholder: 'Add Option',
|
||||
description: 'Additional options to add',
|
||||
type: 'collection',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Maximum Number of Tokens',
|
||||
name: 'maxOutputTokens',
|
||||
default: 1024,
|
||||
description: 'The maximum number of tokens to generate in the completion',
|
||||
type: 'number',
|
||||
},
|
||||
{
|
||||
displayName: 'Sampling Temperature',
|
||||
name: 'temperature',
|
||||
default: 0.7,
|
||||
typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
|
||||
description:
|
||||
'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.',
|
||||
type: 'number',
|
||||
},
|
||||
{
|
||||
displayName: 'Top K',
|
||||
name: 'topK',
|
||||
default: 40,
|
||||
typeOptions: { maxValue: 1, minValue: -1, numberPrecision: 1 },
|
||||
description:
|
||||
'Used to remove "long tail" low probability responses. Defaults to -1, which disables it.',
|
||||
type: 'number',
|
||||
},
|
||||
{
|
||||
displayName: 'Top P',
|
||||
name: 'topP',
|
||||
default: 0.9,
|
||||
typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
|
||||
description:
|
||||
'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.',
|
||||
type: 'number',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('googlePalmApi');
|
||||
|
||||
const modelName = this.getNodeParameter('modelName', itemIndex) as string;
|
||||
const options = this.getNodeParameter('options', itemIndex, {}) as object;
|
||||
|
||||
const model = new GooglePaLM({
|
||||
apiKey: credentials.apiKey as string,
|
||||
modelName,
|
||||
...options,
|
||||
});
|
||||
|
||||
return {
|
||||
response: logWrapper(model, this),
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 48 48"><defs><path id="a" d="M44.5 20H24v8.5h11.8C34.7 33.9 30.1 37 24 37c-7.2 0-13-5.8-13-13s5.8-13 13-13c3.1 0 5.9 1.1 8.1 2.9l6.4-6.4C34.6 4.1 29.6 2 24 2 11.8 2 2 11.8 2 24s9.8 22 22 22c11 0 21-8 21-22 0-1.3-.2-2.7-.5-4z"/></defs><clipPath id="b"><use xlink:href="#a" overflow="visible"/></clipPath><path clip-path="url(#b)" fill="#FBBC05" d="M0 37V11l17 13z"/><path clip-path="url(#b)" fill="#EA4335" d="M0 11l17 13 7-6.1L48 14V0H0z"/><path clip-path="url(#b)" fill="#34A853" d="M0 37l30-23 7.9 1L48 0v48H0z"/><path clip-path="url(#b)" fill="#4285F4" d="M48 48L17 24l-4-3 35-10z"/></svg>
|
||||
|
After Width: | Height: | Size: 688 B |
@@ -0,0 +1,133 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import type { BufferWindowMemoryInput } from 'langchain/memory';
|
||||
import { BufferWindowMemory } from 'langchain/memory';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
class MemoryChatBufferSingleton {
|
||||
private static instance: MemoryChatBufferSingleton;
|
||||
|
||||
private memoryBuffer: Map<
|
||||
string,
|
||||
{ buffer: BufferWindowMemory; created: Date; last_accessed: Date }
|
||||
>;
|
||||
|
||||
private constructor() {
|
||||
this.memoryBuffer = new Map();
|
||||
}
|
||||
|
||||
public static getInstance(): MemoryChatBufferSingleton {
|
||||
if (!MemoryChatBufferSingleton.instance) {
|
||||
MemoryChatBufferSingleton.instance = new MemoryChatBufferSingleton();
|
||||
}
|
||||
return MemoryChatBufferSingleton.instance;
|
||||
}
|
||||
|
||||
public async getMemory(
|
||||
sessionKey: string,
|
||||
memoryParams: BufferWindowMemoryInput,
|
||||
): Promise<BufferWindowMemory> {
|
||||
await this.cleanupStaleBuffers();
|
||||
|
||||
let memoryInstance = this.memoryBuffer.get(sessionKey);
|
||||
if (memoryInstance) {
|
||||
memoryInstance.last_accessed = new Date();
|
||||
} else {
|
||||
const newMemory = new BufferWindowMemory(memoryParams);
|
||||
|
||||
memoryInstance = {
|
||||
buffer: newMemory,
|
||||
created: new Date(),
|
||||
last_accessed: new Date(),
|
||||
};
|
||||
this.memoryBuffer.set(sessionKey, memoryInstance);
|
||||
}
|
||||
return memoryInstance.buffer;
|
||||
}
|
||||
|
||||
private async cleanupStaleBuffers(): Promise<void> {
|
||||
const oneHourAgo = new Date(Date.now() - 60 * 60 * 1000);
|
||||
|
||||
for (const [key, memoryInstance] of this.memoryBuffer.entries()) {
|
||||
if (memoryInstance.last_accessed < oneHourAgo) {
|
||||
await this.memoryBuffer.get(key)?.buffer.clear();
|
||||
this.memoryBuffer.delete(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export class MemoryBufferWindow implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Window Buffer Memory (easiest)',
|
||||
name: 'memoryBufferWindow',
|
||||
icon: 'fa:database',
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'Stores in n8n memory, so no credentials required',
|
||||
defaults: {
|
||||
name: 'Window Buffer Memory',
|
||||
},
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Memory'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.memorybufferwindow/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [],
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
|
||||
outputs: [NodeConnectionType.AiMemory],
|
||||
outputNames: ['Memory'],
|
||||
properties: [
|
||||
getConnectionHintNoticeField([NodeConnectionType.AiAgent]),
|
||||
{
|
||||
displayName: 'Session Key',
|
||||
name: 'sessionKey',
|
||||
type: 'string',
|
||||
default: 'chat_history',
|
||||
description: 'The key to use to store the memory in the workflow data',
|
||||
},
|
||||
{
|
||||
displayName: 'Context Window Length',
|
||||
name: 'contextWindowLength',
|
||||
type: 'number',
|
||||
default: 5,
|
||||
description: 'The number of previous messages to consider for context',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const sessionKey = this.getNodeParameter('sessionKey', itemIndex) as string;
|
||||
const contextWindowLength = this.getNodeParameter('contextWindowLength', itemIndex) as number;
|
||||
const workflowId = this.getWorkflow().id;
|
||||
const memoryInstance = MemoryChatBufferSingleton.getInstance();
|
||||
|
||||
const memory = await memoryInstance.getMemory(`${workflowId}__${sessionKey}`, {
|
||||
k: contextWindowLength,
|
||||
inputKey: 'input',
|
||||
memoryKey: 'chat_history',
|
||||
outputKey: 'output',
|
||||
returnMessages: true,
|
||||
});
|
||||
|
||||
return {
|
||||
response: logWrapper(memory, this),
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,104 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IDataObject,
|
||||
type IExecuteFunctions,
|
||||
type INodeExecutionData,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
} from 'n8n-workflow';
|
||||
import type { BaseChatMemory } from 'langchain/memory';
|
||||
import type { BaseMessage } from 'langchain/schema';
|
||||
|
||||
function simplifyMessages(messages: BaseMessage[]) {
|
||||
const chunkedMessages = [];
|
||||
for (let i = 0; i < messages.length; i += 2) {
|
||||
chunkedMessages.push([messages[i], messages[i + 1]]);
|
||||
}
|
||||
|
||||
const transformedMessages = chunkedMessages.map((exchange) => {
|
||||
const simplified = {
|
||||
[exchange[0]._getType()]: exchange[0].content,
|
||||
};
|
||||
|
||||
if (exchange[1]) {
|
||||
simplified[exchange[1]._getType()] = exchange[1].content;
|
||||
}
|
||||
|
||||
return {
|
||||
json: simplified,
|
||||
};
|
||||
});
|
||||
return transformedMessages;
|
||||
}
|
||||
|
||||
export class MemoryChatRetriever implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Chat Messages Retriever',
|
||||
name: 'memoryChatRetriever',
|
||||
icon: 'fa:database',
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'Retrieve chat messages from memory and use them in the workflow',
|
||||
defaults: {
|
||||
name: 'Chat Messages Retriever',
|
||||
},
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Miscellaneous'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.memorychatretriever/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [
|
||||
NodeConnectionType.Main,
|
||||
{
|
||||
displayName: 'Memory',
|
||||
maxConnections: 1,
|
||||
type: NodeConnectionType.AiMemory,
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
|
||||
outputs: [NodeConnectionType.Main],
|
||||
properties: [
|
||||
{
|
||||
displayName: 'Simplify Output',
|
||||
name: 'simplifyOutput',
|
||||
type: 'boolean',
|
||||
description: 'Whether to simplify the output to only include the sender and the text',
|
||||
default: true,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
|
||||
this.logger.verbose('Executing Chat Memory Retriever');
|
||||
|
||||
const memory = (await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as
|
||||
| BaseChatMemory
|
||||
| undefined;
|
||||
const simplifyOutput = this.getNodeParameter('simplifyOutput', 0) as boolean;
|
||||
|
||||
const messages = await memory?.chatHistory.getMessages();
|
||||
|
||||
if (simplifyOutput && messages) {
|
||||
return this.prepareOutputData(simplifyMessages(messages));
|
||||
}
|
||||
|
||||
const serializedMessages =
|
||||
messages?.map((message) => {
|
||||
const serializedMessage = message.toJSON();
|
||||
return { json: serializedMessage as unknown as IDataObject };
|
||||
}) ?? [];
|
||||
|
||||
return this.prepareOutputData(serializedMessages);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,81 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import { MotorheadMemory } from 'langchain/memory';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
export class MemoryMotorhead implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Motorhead',
|
||||
name: 'memoryMotorhead',
|
||||
icon: 'fa:file-export',
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'Use Motorhead Memory',
|
||||
defaults: {
|
||||
name: 'Motorhead',
|
||||
},
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Memory'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.memorymotorhead/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [],
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
|
||||
outputs: [NodeConnectionType.AiMemory],
|
||||
outputNames: ['Memory'],
|
||||
credentials: [
|
||||
{
|
||||
name: 'motorheadApi',
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
properties: [
|
||||
getConnectionHintNoticeField([NodeConnectionType.AiAgent]),
|
||||
{
|
||||
displayName: 'Session ID',
|
||||
name: 'sessionId',
|
||||
type: 'string',
|
||||
required: true,
|
||||
default: '',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('motorheadApi');
|
||||
|
||||
const sessionId = this.getNodeParameter('sessionId', itemIndex) as string;
|
||||
|
||||
const memory = new MotorheadMemory({
|
||||
sessionId,
|
||||
url: `${credentials.host as string}/motorhead`,
|
||||
clientId: credentials.clientId as string,
|
||||
apiKey: credentials.apiKey as string,
|
||||
memoryKey: 'chat_history',
|
||||
returnMessages: true,
|
||||
});
|
||||
|
||||
await memory.init();
|
||||
|
||||
return {
|
||||
response: logWrapper(memory, this),
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,121 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeOperationError,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
NodeConnectionType,
|
||||
} from 'n8n-workflow';
|
||||
import { BufferMemory } from 'langchain/memory';
|
||||
import type { RedisChatMessageHistoryInput } from 'langchain/stores/message/redis';
|
||||
import { RedisChatMessageHistory } from 'langchain/stores/message/redis';
|
||||
import type { RedisClientOptions } from 'redis';
|
||||
import { createClient } from 'redis';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
export class MemoryRedisChat implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Redis Chat Memory',
|
||||
name: 'memoryRedisChat',
|
||||
icon: 'file:redis.svg',
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'Stores the chat history in Redis.',
|
||||
defaults: {
|
||||
name: 'Redis Chat Memory',
|
||||
},
|
||||
credentials: [
|
||||
{
|
||||
name: 'redis',
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Memory'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.memoryredischat/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [],
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
|
||||
outputs: [NodeConnectionType.AiMemory],
|
||||
outputNames: ['Memory'],
|
||||
properties: [
|
||||
getConnectionHintNoticeField([NodeConnectionType.AiAgent]),
|
||||
{
|
||||
displayName: 'Session Key',
|
||||
name: 'sessionKey',
|
||||
type: 'string',
|
||||
default: 'chat_history',
|
||||
description: 'The key to use to store the memory in the workflow data',
|
||||
},
|
||||
{
|
||||
displayName: 'Session Time To Live',
|
||||
name: 'sessionTTL',
|
||||
type: 'number',
|
||||
default: 0,
|
||||
description:
|
||||
'For how long the session should be stored in seconds. If set to 0 it will not expire.',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('redis');
|
||||
const sessionKey = this.getNodeParameter('sessionKey', itemIndex) as string;
|
||||
const sessionTTL = this.getNodeParameter('sessionTTL', itemIndex, 0) as number;
|
||||
|
||||
const redisOptions: RedisClientOptions = {
|
||||
socket: {
|
||||
host: credentials.host as string,
|
||||
port: credentials.port as number,
|
||||
},
|
||||
database: credentials.database as number,
|
||||
};
|
||||
|
||||
if (credentials.password) {
|
||||
redisOptions.password = credentials.password as string;
|
||||
}
|
||||
|
||||
const client = createClient({
|
||||
...redisOptions,
|
||||
});
|
||||
|
||||
client.on('error', async (error: Error) => {
|
||||
await client.quit();
|
||||
throw new NodeOperationError(this.getNode(), 'Redis Error: ' + error.message);
|
||||
});
|
||||
|
||||
const redisChatConfig: RedisChatMessageHistoryInput = {
|
||||
client,
|
||||
sessionId: sessionKey,
|
||||
};
|
||||
|
||||
if (sessionTTL > 0) {
|
||||
redisChatConfig.sessionTTL = sessionTTL;
|
||||
}
|
||||
const redisChatHistory = new RedisChatMessageHistory(redisChatConfig);
|
||||
|
||||
const memory = new BufferMemory({
|
||||
memoryKey: 'chat_history',
|
||||
chatHistory: redisChatHistory,
|
||||
returnMessages: true,
|
||||
inputKey: 'input',
|
||||
outputKey: 'output',
|
||||
});
|
||||
|
||||
return {
|
||||
response: logWrapper(memory, this),
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
<svg width="60" height="60" xmlns="http://www.w3.org/2000/svg"><g fill="none" fill-rule="evenodd" stroke-linecap="round" stroke-linejoin="round"><path d="M57.656 43.99c-3.201 1.683-19.787 8.561-23.318 10.417-3.532 1.856-5.494 1.838-8.283.494-2.79-1.343-20.449-8.535-23.629-10.067C.834 44.066.002 43.422.002 42.811v-6.117s22.98-5.045 26.69-6.388c3.711-1.342 4.995-1.39 8.154-.225 3.16 1.165 22.035 4.603 25.154 5.756v6.032c0 .605-.72 1.283-2.35 2.124l.006-.003z" fill="#A41E11"/><path d="M57.656 37.872c-3.201 1.685-19.787 8.56-23.318 10.417-3.532 1.856-5.494 1.838-8.283.494-2.79-1.343-20.449-8.534-23.63-10.068-3.18-1.533-3.243-2.588-.122-3.82l24.388-9.52c3.71-1.34 4.994-1.39 8.153-.225 3.16 1.165 19.643 7.78 22.747 8.951 3.103 1.17 3.24 2.086.037 3.786l.028-.015z" fill="#D82C20"/><path d="M57.656 34.015c-3.201 1.683-19.787 8.561-23.318 10.417-3.532 1.856-5.494 1.838-8.283.495-2.79-1.344-20.449-8.536-23.629-10.067C.834 34.092.002 33.447.002 32.836V26.72s22.98-5.045 26.69-6.387c3.711-1.343 4.995-1.39 8.154-.225 3.16 1.165 22.035 4.602 25.154 5.756v6.032c0 .605-.72 1.283-2.35 2.123l.006-.003z" fill="#A41E11"/><path d="M57.656 27.898c-3.201 1.685-19.787 8.561-23.318 10.417-3.532 1.856-5.494 1.838-8.283.495-2.79-1.344-20.449-8.534-23.63-10.067-3.18-1.534-3.243-2.588-.122-3.82l24.388-9.52c3.71-1.343 4.994-1.39 8.153-.225 3.16 1.166 19.644 7.785 22.765 8.935 3.121 1.15 3.24 2.085.038 3.785h.01z" fill="#D82C20"/><path d="M57.656 23.671c-3.201 1.683-19.787 8.561-23.318 10.419-3.532 1.858-5.494 1.838-8.283.495-2.79-1.344-20.449-8.535-23.629-10.069-1.592-.765-2.424-1.411-2.424-2.02v-6.11s22.98-5.045 26.69-6.388c3.711-1.343 4.995-1.39 8.154-.225 3.16 1.165 22.035 4.591 25.154 5.745v6.032c0 .605-.72 1.283-2.35 2.123l.006-.002z" fill="#A41E11"/><path d="M57.656 17.553c-3.201 1.685-19.787 8.561-23.318 10.417-3.532 1.856-5.494 1.838-8.283.495-2.79-1.344-20.449-8.534-23.63-10.068-3.18-1.533-3.243-2.587-.122-3.82l24.388-9.52c3.71-1.343 4.994-1.39 8.153-.226 3.16 1.165 19.643 7.785 22.765 
8.936 3.122 1.15 3.24 2.085.038 3.785l.01.001z" fill="#D82C20"/><path d="M31.497 15.032l-1.88-3.153-6.002-.545 4.48-1.63L26.75 7.2l4.192 1.653 3.955-1.305-1.07 2.586 4.032 1.524-5.198.546-1.164 2.827zm-10.014 6.275l13.903-2.153-4.2 6.211-9.703-4.058zm-11.17-5.167c0-1.61 3.314-2.906 7.431-2.906 4.118 0 7.432 1.296 7.432 2.906s-3.314 2.905-7.432 2.905c-4.117 0-7.431-1.295-7.431-2.905z" fill="#FFF"/><path fill="#7A0C00" d="M52.233 15.714l-8.224 3.276-.007-6.556z"/><path fill="#AD2115" d="M44.01 18.991l-.89.353-8.217-3.276 9.094-3.63z"/></g></svg>
|
||||
|
After Width: | Height: | Size: 2.5 KiB |
@@ -0,0 +1,94 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
|
||||
import type { IExecuteFunctions, INodeType, INodeTypeDescription, SupplyData } from 'n8n-workflow';
|
||||
import { XataChatMessageHistory } from 'langchain/stores/message/xata';
|
||||
import { BufferMemory } from 'langchain/memory';
|
||||
import { BaseClient } from '@xata.io/client';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
export class MemoryXata implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Xata',
|
||||
name: 'memoryXata',
|
||||
icon: 'file:xata.svg',
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'Use Xata Memory',
|
||||
defaults: {
|
||||
name: 'Xata',
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-non-core-color-present
|
||||
color: '#1321A7',
|
||||
},
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Memory'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.memoryxata/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [],
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
|
||||
outputs: [NodeConnectionType.AiMemory],
|
||||
outputNames: ['Memory'],
|
||||
credentials: [
|
||||
{
|
||||
name: 'xataApi',
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
properties: [
|
||||
getConnectionHintNoticeField([NodeConnectionType.AiAgent]),
|
||||
{
|
||||
displayName: 'Session ID',
|
||||
name: 'sessionId',
|
||||
type: 'string',
|
||||
required: true,
|
||||
default: '',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('xataApi');
|
||||
|
||||
const xataClient = new BaseClient({
|
||||
apiKey: credentials.apiKey as string,
|
||||
branch: (credentials.branch as string) || 'main',
|
||||
databaseURL: credentials.databaseEndpoint as string,
|
||||
});
|
||||
|
||||
const sessionId = this.getNodeParameter('sessionId', itemIndex) as string;
|
||||
|
||||
const table = (credentials.databaseEndpoint as string).match(
|
||||
/https:\/\/[^.]+\.[^.]+\.xata\.sh\/db\/([^\/:]+)/,
|
||||
);
|
||||
|
||||
if (table === null) {
|
||||
throw new NodeOperationError(
|
||||
this.getNode(),
|
||||
'It was not possible to extract the table from the Database Endpoint.',
|
||||
);
|
||||
}
|
||||
|
||||
const memory = new BufferMemory({
|
||||
chatHistory: new XataChatMessageHistory({
|
||||
table: table[1],
|
||||
sessionId,
|
||||
client: xataClient,
|
||||
apiKey: credentials.apiKey as string,
|
||||
}),
|
||||
memoryKey: 'chat_history',
|
||||
returnMessages: true,
|
||||
});
|
||||
return {
|
||||
response: logWrapper(memory, this),
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
<svg width="1600" height="1600" viewBox="0 0 1600 1600" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M1250.12 576.498c-.11 89.997-36 176.267-99.79 239.83l-.01-.007-226.282 225.489c-7.841 7.82-20.58 7.84-27.927-.44-55.015-61.995-85.587-142.175-85.49-225.478.106-89.997 36-176.267 99.787-239.83l.007.007 206.745-206.014c18.63-18.569 49.12-18.702 64.92 2.324 43.99 58.525 68.12 130.089 68.04 204.119zM440.552 817.702c-63.787-63.563-99.682-149.833-99.787-239.83-.087-74.03 24.048-145.594 68.035-204.119 15.803-21.026 46.294-20.893 64.928-2.324l206.741 206.016.006-.007c63.787 63.564 99.681 149.833 99.787 239.831.097 83.302-30.475 163.483-85.49 225.471-7.347 8.28-20.086 8.26-27.927.45L440.558 817.696l-.006.006zM1141.82 1221.19c-16.63 20.39-47.04 20.21-65.63 1.59l-127.698-127.84c-7.836-7.85-7.821-20.56.033-28.39l212.095-211.345c7.84-7.813 20.62-7.859 27.54.784 36.81 45.996 51.29 109.566 40.34 179.551-10.01 64.06-40.65 129.19-86.68 185.65zM514.696 1224.16c-18.594 18.61-49.002 18.79-65.626-1.6-46.036-56.46-76.672-121.58-86.687-185.64-10.943-69.992 3.531-133.562 40.342-179.558 6.916-8.642 19.703-8.597 27.544-.784l212.092 211.352c7.854 7.82 7.868 20.54.033 28.38l-127.698 127.85z" fill="#7D7D87"/></svg>
|
||||
|
After Width: | Height: | Size: 1.2 KiB |
@@ -0,0 +1,84 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import { ZepMemory } from 'langchain/memory/zep';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
export class MemoryZep implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Zep',
|
||||
name: 'memoryZep',
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-icon-not-svg
|
||||
icon: 'file:zep.png',
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'Use Zep Memory',
|
||||
defaults: {
|
||||
name: 'Zep',
|
||||
},
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Memory'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.memoryzep/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [],
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
|
||||
outputs: [NodeConnectionType.AiMemory],
|
||||
outputNames: ['Memory'],
|
||||
credentials: [
|
||||
{
|
||||
name: 'zepApi',
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
properties: [
|
||||
getConnectionHintNoticeField([NodeConnectionType.AiAgent]),
|
||||
{
|
||||
displayName: 'Session ID',
|
||||
name: 'sessionId',
|
||||
type: 'string',
|
||||
required: true,
|
||||
default: '',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = (await this.getCredentials('zepApi')) as {
|
||||
apiKey?: string;
|
||||
apiUrl: string;
|
||||
};
|
||||
|
||||
// TODO: Should it get executed once per item or not?
|
||||
const sessionId = this.getNodeParameter('sessionId', itemIndex) as string;
|
||||
|
||||
const memory = new ZepMemory({
|
||||
sessionId,
|
||||
baseURL: credentials.apiUrl,
|
||||
apiKey: credentials.apiKey,
|
||||
memoryKey: 'chat_history',
|
||||
returnMessages: true,
|
||||
inputKey: 'input',
|
||||
outputKey: 'output',
|
||||
});
|
||||
|
||||
return {
|
||||
response: logWrapper(memory, this),
|
||||
};
|
||||
}
|
||||
}
|
||||
BIN
packages/@n8n/nodes-langchain/nodes/memory/MemoryZep/zep.png
Normal file
|
After Width: | Height: | Size: 3.8 KiB |
@@ -0,0 +1,86 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import { OutputFixingParser } from 'langchain/output_parsers';
|
||||
import type { BaseOutputParser } from 'langchain/schema/output_parser';
|
||||
import type { BaseLanguageModel } from 'langchain/base_language';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
|
||||
export class OutputParserAutofixing implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Auto-fixing Output Parser',
|
||||
name: 'outputParserAutofixing',
|
||||
icon: 'fa:tools',
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'Automatically fix the output if it is not in the correct format',
|
||||
defaults: {
|
||||
name: 'Auto-fixing Output Parser',
|
||||
},
|
||||
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Output Parsers'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.outputparserautofixing/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [
|
||||
{
|
||||
displayName: 'Model',
|
||||
maxConnections: 1,
|
||||
type: NodeConnectionType.AiLanguageModel,
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
displayName: 'Output Parser',
|
||||
maxConnections: 1,
|
||||
required: true,
|
||||
type: NodeConnectionType.AiOutputParser,
|
||||
},
|
||||
],
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
|
||||
outputs: [NodeConnectionType.AiOutputParser],
|
||||
outputNames: ['Output Parser'],
|
||||
properties: [
|
||||
{
|
||||
displayName:
|
||||
'This node wraps another output parser. If the first one fails it calls an LLM to fix the format',
|
||||
name: 'info',
|
||||
type: 'notice',
|
||||
default: '',
|
||||
},
|
||||
getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
|
||||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const model = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiLanguageModel,
|
||||
itemIndex,
|
||||
)) as BaseLanguageModel;
|
||||
const outputParser = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiOutputParser,
|
||||
itemIndex,
|
||||
)) as BaseOutputParser;
|
||||
|
||||
const parser = OutputFixingParser.fromLLM(model, outputParser);
|
||||
|
||||
return {
|
||||
response: logWrapper(parser, this),
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,51 @@
|
||||
import { BaseOutputParser, OutputParserException } from 'langchain/schema/output_parser';
|
||||
|
||||
export class ItemListOutputParser extends BaseOutputParser<string[]> {
|
||||
lc_namespace = ['n8n-nodes-langchain', 'output_parsers', 'list_items'];
|
||||
|
||||
private numberOfItems: number | undefined;
|
||||
|
||||
private separator: string;
|
||||
|
||||
constructor(options: { numberOfItems?: number; separator?: string }) {
|
||||
super();
|
||||
if (options.numberOfItems && options.numberOfItems > 0) {
|
||||
this.numberOfItems = options.numberOfItems;
|
||||
}
|
||||
this.separator = options.separator ?? '\\n';
|
||||
if (this.separator === '\\n') {
|
||||
this.separator = '\n';
|
||||
}
|
||||
}
|
||||
|
||||
async parse(text: string): Promise<string[]> {
|
||||
const response = text
|
||||
.split(this.separator)
|
||||
.map((item) => item.trim())
|
||||
.filter((item) => item);
|
||||
|
||||
if (this.numberOfItems && response.length < this.numberOfItems) {
|
||||
// Only error if to few items got returned, if there are to many we can autofix it
|
||||
throw new OutputParserException(
|
||||
`Wrong number of items returned. Expected ${this.numberOfItems} items but got ${response.length} items instead.`,
|
||||
);
|
||||
}
|
||||
|
||||
return response.slice(0, this.numberOfItems);
|
||||
}
|
||||
|
||||
getFormatInstructions(): string {
|
||||
const instructions = `Your response should be a list of ${
|
||||
this.numberOfItems ? this.numberOfItems + ' ' : ''
|
||||
}items separated by`;
|
||||
|
||||
const numberOfExamples = this.numberOfItems ?? 3;
|
||||
|
||||
const examples: string[] = [];
|
||||
for (let i = 1; i <= numberOfExamples; i++) {
|
||||
examples.push(`item${i}`);
|
||||
}
|
||||
|
||||
return `${instructions} "${this.separator}" (for example: "${examples.join(this.separator)}")`;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,95 @@
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
|
||||
import { ItemListOutputParser } from './ItemListOutputParser';
|
||||
|
||||
export class OutputParserItemList implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Item List Output Parser',
|
||||
name: 'outputParserItemList',
|
||||
icon: 'fa:bars',
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'Return the results as separate items',
|
||||
defaults: {
|
||||
name: 'Item List Output Parser',
|
||||
},
|
||||
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Output Parsers'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.outputparseritemlist/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [],
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
|
||||
outputs: [NodeConnectionType.AiOutputParser],
|
||||
outputNames: ['Output Parser'],
|
||||
properties: [
|
||||
getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
type: 'collection',
|
||||
placeholder: 'Add Option',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Number Of Items',
|
||||
name: 'numberOfItems',
|
||||
type: 'number',
|
||||
default: -1,
|
||||
description:
|
||||
'Defines many many items should be returned maximally. If set to -1, there is no limit.',
|
||||
},
|
||||
// For that to be easily possible the metadata would have to be returned and be able to be read.
|
||||
// Would also be possible with a wrapper but that would be even more hacky and the output types
|
||||
// would not be correct anymore.
|
||||
// {
|
||||
// displayName: 'Parse Output',
|
||||
// name: 'parseOutput',
|
||||
// type: 'boolean',
|
||||
// default: true,
|
||||
// description: 'Whether the output should be automatically be parsed or left RAW',
|
||||
// },
|
||||
{
|
||||
displayName: 'Separator',
|
||||
name: 'separator',
|
||||
type: 'string',
|
||||
default: '\\n',
|
||||
description:
|
||||
'Defines the separator that should be used to split the results into separate items. Defaults to a new line but can be changed depending on the data that should be returned.',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const options = this.getNodeParameter('options', itemIndex, {}) as {
|
||||
numberOfItems?: number;
|
||||
separator?: string;
|
||||
};
|
||||
|
||||
const parser = new ItemListOutputParser(options);
|
||||
|
||||
return {
|
||||
response: logWrapper(parser, this),
|
||||
};
|
||||
}
|
||||
}
|
||||