fix(Token Splitter Node): Cache tokenizer JSONs in memory (#17201)

This commit is contained in:
oleg
2025-07-10 19:08:29 +02:00
committed by GitHub
parent 36b410abdb
commit 2402926573
5 changed files with 115 additions and 54 deletions

View File

@@ -136,7 +136,7 @@ export async function estimateTokensFromStringList(
return 0;
}
-  const encoder = await encodingForModel(model);
+  const encoder = encodingForModel(model);
const encodedListLength = await Promise.all(
list.map(async (text) => {
try {