[fix] supamemory chat overflow

commit e5c44406fb
parent df7275b15e
Author: kwaroran
Date:   2023-05-17 01:17:45 +09:00


@@ -7,6 +7,8 @@ import { requestChatData } from "./request";
 export async function supaMemory(chats:OpenAIChat[],currentTokens:number,maxContextTokens:number,room:Chat,char:character|groupChat): Promise<{ currentTokens: number; chats: OpenAIChat[]; error?:string; memory?:string}>{
     const db = get(DataBase)
+    console.log("Memory: " + currentTokens)
     if(currentTokens > maxContextTokens){
         let coIndex = -1
         for(let i=0;i<chats.length;i++){
@@ -29,14 +31,8 @@ export async function supaMemory(chats:OpenAIChat[],currentTokens:number,maxCont
             const id = splited.splice(0,1)[0]
             const data = splited.join('\n')
-            for(let i=0;i<chats.length;i++){
-                if(chats[0].memo === id){
-                    break
-                }
-                currentTokens -= (await tokenize(chats[0].content) + 1)
-                chats.splice(0, 1)
-            }
+            let i =0;
+            while(true){
                 if(chats.length === 0){
                     return {
                         currentTokens: currentTokens,
@@ -44,11 +40,19 @@ export async function supaMemory(chats:OpenAIChat[],currentTokens:number,maxCont
error: "SupaMemory: chat ID not found" error: "SupaMemory: chat ID not found"
} }
} }
if(chats[0].memo === id){
break
}
currentTokens -= (await tokenize(chats[0].content) + 1)
chats.splice(0, 1)
i += 1
}
supaMemory = data supaMemory = data
currentTokens += await tokenize(supaMemory) + 1 currentTokens += await tokenize(supaMemory) + 1
} }
if(currentTokens < maxContextTokens){ if(currentTokens < maxContextTokens){
chats.unshift({ chats.unshift({
role: "system", role: "system",
@@ -134,7 +138,6 @@ export async function supaMemory(chats:OpenAIChat[],currentTokens:number,maxCont
             const tokenz = await tokenize(result + '\n\n') + 5
             currentTokens += tokenz
             supaMemory += result + '\n\n'
-            console.log(tokenz)
         }
         chats.unshift({
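
Note on the loop change: in the old code, `chats.splice(0, 1)` shrinks the array while `i` grows, so the condition `i<chats.length` can stop the loop before the saved memory anchor (`memo === id`) is reached, leaving too many untrimmed messages in the prompt and overflowing the context. The new `while(true)` loop only exits when the anchor is found, and returns the "chat ID not found" error as soon as the array empties. The following is a minimal, self-contained sketch of that fixed trimming logic, not the repository's exact code: `ChatMsg` and `countTokens` are simplified stand-ins for the project's OpenAIChat type and async tokenize() helper.

    // Standalone sketch of the fixed trimming loop (illustrative only).
    interface ChatMsg { role: string; content: string; memo?: string }

    // Rough token estimate used only for this sketch.
    const countTokens = (text: string): number => Math.ceil(text.length / 4)

    // Drop messages from the front until the one whose `memo` matches the
    // saved summary anchor `id`; fail loudly if the anchor is never found
    // instead of silently keeping too many messages (the old for-loop bug).
    function trimToAnchor(chats: ChatMsg[], currentTokens: number, id: string):
        { chats: ChatMsg[]; currentTokens: number; error?: string } {
        while (true) {
            if (chats.length === 0) {
                return { chats, currentTokens, error: "chat ID not found" }
            }
            if (chats[0].memo === id) {
                break
            }
            currentTokens -= countTokens(chats[0].content) + 1
            chats.splice(0, 1)
        }
        return { chats, currentTokens }
    }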