From a32e6701080226bc0bfbbb97ae750cf5d9dd3c8d Mon Sep 17 00:00:00 2001
From: sub-hub <70351692+sub-hub@users.noreply.github.com>
Date: Thu, 3 Apr 2025 12:17:01 +0900
Subject: [PATCH] reduce max tokenize cache size

---
 src/ts/tokenizer.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/ts/tokenizer.ts b/src/ts/tokenizer.ts
index 6512e54a..3211c878 100644
--- a/src/ts/tokenizer.ts
+++ b/src/ts/tokenizer.ts
@@ -11,7 +11,7 @@
 import { pluginV2 } from "./plugins/plugins";
 import type { GemmaTokenizer } from "@huggingface/transformers";
 import { LRUMap } from 'mnemonist';
 
-const MAX_CACHE_SIZE = 3000;
+const MAX_CACHE_SIZE = 1500;
 
 const encodeCache = new LRUMap(MAX_CACHE_SIZE);
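For context, the `encodeCache` is an LRU map from `mnemonist`: it bounds memory by evicting the least-recently-used entries once it holds `MAX_CACHE_SIZE` items, and this patch halves that bound from 3000 to 1500. Below is a minimal sketch of the read-through caching pattern such a cache typically implements around a tokenizer's encode call; the `tokenizeWithCache` helper and its `encode` parameter are illustrative assumptions, not code from the patched file.

```typescript
import { LRUMap } from 'mnemonist';

// Halved from 3000 to 1500 by this patch to cap the memory held by cached token arrays.
const MAX_CACHE_SIZE = 1500;

// Maps input text to its token ids; the least-recently-used entry is evicted
// automatically once the map reaches MAX_CACHE_SIZE entries.
const encodeCache = new LRUMap<string, number[]>(MAX_CACHE_SIZE);

// Hypothetical helper (the real tokenizer.ts differs); it only illustrates
// read-through caching around an encode call.
function tokenizeWithCache(text: string, encode: (t: string) => number[]): number[] {
    const cached = encodeCache.get(text); // get() also marks the entry as recently used
    if (cached) {
        return cached;
    }
    const tokens = encode(text);
    encodeCache.set(text, tokens); // may evict the oldest entry to stay within capacity
    return tokens;
}
```

With this pattern, shrinking `MAX_CACHE_SIZE` trades a lower hit rate for a smaller worst-case footprint, since each cached entry retains the full token array for its input string.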