[fix] tokenize problem

commit 71b02cf104
parent bdf00126cc
Author: kwaroran
Date: 2023-05-29 20:41:33 +09:00

2 changed files with 7 additions and 5 deletions


@@ -171,7 +171,7 @@ export async function supaMemory(chats:OpenAIChat[],currentTokens:number,maxCont
             }
             continue
         }
-        const tokens = await tokenize(cont.content) + 1
+        const tokens = await tokenize(cont.content) + 5
         if((chunkSize + tokens) > maxChunkSize){
             if(stringlizedChat === ''){
                 stringlizedChat += `${cont.role === 'assistant' ? char.type === 'group' ? '' : char.name : db.username}: ${cont.content}\n\n`
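
The one-line change raises the per-message token overhead from 1 to 5. The commit message does not say why, but a plausible reading is that a chat message costs more tokens than its raw content once role labels and message framing are counted (OpenAI's chat format, for instance, adds several framing tokens per message), so `+ 1` undercounted and could let a chunk exceed maxChunkSize. Below is a minimal TypeScript sketch of that accounting; the 5-token constant matches the diff, but the stand-in tokenizer and the helper names are hypothetical, not the project's real implementation.

// Per-message token accounting as in the diff above. The 5-token
// overhead matches the new constant; the tokenizer is a rough
// stand-in, not the repository's real tokenize().
const MESSAGE_OVERHEAD = 5

async function tokenize(text: string): Promise<number> {
    // Rough approximation: ~4 characters per token for English text.
    return Math.ceil(text.length / 4)
}

async function messageTokens(content: string): Promise<number> {
    return (await tokenize(content)) + MESSAGE_OVERHEAD
}

// Chunking decision mirroring the guarded branch in supaMemory:
// start a new chunk when adding this message would overflow.
async function exceedsChunk(chunkSize: number, content: string, maxChunkSize: number): Promise<boolean> {
    return (chunkSize + await messageTokens(content)) > maxChunkSize
}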