Fix: Correct tokenize flow in tokenizer encode function

sub-hub authored 2025-04-21 13:34:01 +09:00, committed by GitHub
parent 33d8ed4568
commit 09228f3f86


@@ -89,9 +89,7 @@ export async function encode(data:string):Promise<(number[]|Uint32Array|Int32Arr
             default:
                 result = await tikJS(data, 'o200k_base'); break;
         }
-    }
-
-    if(db.aiModel === 'custom' && pluginTokenizer){
+    } else if (db.aiModel === 'custom' && pluginTokenizer) {
         switch(pluginTokenizer){
             case 'mistral':
                 result = await tokenizeWebTokenizers(data, 'mistral'); break;
@@ -120,6 +118,8 @@ export async function encode(data:string):Promise<(number[]|Uint32Array|Int32Arr
         }
     }
 
+    // Fallback
+    if (result === undefined) {
     if(modelInfo.tokenizer === LLMTokenizer.NovelList){
         result = await tokenizeWebTokenizers(data, 'novellist');
     } else if(modelInfo.tokenizer === LLMTokenizer.Claude){
@@ -145,7 +145,7 @@ export async function encode(data:string):Promise<(number[]|Uint32Array|Int32Arr
     } else {
         result = await tikJS(data);
     }
-
+    }
     if(db.useTokenizerCaching){
         encodeCache.set(cacheKey, result);
     }
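
For readers who want to see the corrected flow in one place, below is a minimal TypeScript sketch of how `encode` behaves after this change. The stand-in declarations (`db`, `modelInfo`, `pluginTokenizer`, `encodeCache`, the dummy `tikJS`/`tokenizeWebTokenizers` bodies, and the `cacheKey` placeholder) are assumptions made only for illustration and are not the project's real code; only the control flow mirrors the diff: the custom-plugin branch is now an `else if`, and the model-default tokenizer chain runs only while `result` is still `undefined`.

// --- Stand-ins for the project's real values and helpers (assumptions, for illustration only) ---
enum LLMTokenizer { NovelList, Claude }

const db = { aiModel: 'custom', useTokenizerCaching: true };
const modelInfo = { tokenizer: LLMTokenizer.NovelList };
const pluginTokenizer: string | undefined = 'mistral';
const encodeCache = new Map<string, number[] | Uint32Array | Int32Array>();

// Dummy tokenizers that just return code points; the real ones call tiktoken / web-tokenizers.
async function tikJS(data: string, _model?: string): Promise<Uint32Array> {
    return Uint32Array.from(data, c => c.codePointAt(0)!);
}
async function tokenizeWebTokenizers(data: string, _model: string): Promise<number[]> {
    return [...data].map(c => c.codePointAt(0)!);
}

export async function encode(data: string): Promise<number[] | Uint32Array | Int32Array> {
    const cacheKey = data; // placeholder; the real cache-key derivation is not shown in the diff
    let result: number[] | Uint32Array | Int32Array | undefined;

    if (db.aiModel.startsWith('gpt')) {
        // Stand-in for the model-name switch whose default case is tikJS(data, 'o200k_base').
        result = await tikJS(data, 'o200k_base');
    } else if (db.aiModel === 'custom' && pluginTokenizer) {
        // Before this fix, this was a standalone `if` rather than an `else if`.
        switch (pluginTokenizer) {
            case 'mistral':
                result = await tokenizeWebTokenizers(data, 'mistral'); break;
            // ...other plugin tokenizers elided...
        }
    }

    // Fallback: run the model's default tokenizer only when nothing above produced a result.
    // Before the fix this chain ran unconditionally and could overwrite an earlier result.
    if (result === undefined) {
        if (modelInfo.tokenizer === LLMTokenizer.NovelList) {
            result = await tokenizeWebTokenizers(data, 'novellist');
        } else {
            // ...the real file handles Claude and the other LLMTokenizer values here...
            result = await tikJS(data);
        }
    }

    if (db.useTokenizerCaching) {
        encodeCache.set(cacheKey, result);
    }
    return result;
}

With these stand-ins, calling encode('hello') resolves the plugin branch first and only falls through to the default tokenizer when that branch leaves result unset, which is the behavior the commit title describes.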