[feat] added memory limit line

kwaroran
2023-05-17 02:18:34 +09:00
parent e5c44406fb
commit 59c34956cf
9 changed files with 39 additions and 10 deletions

View File

@@ -80,6 +80,9 @@ export function setDatabase(data:Database){
     if(checkNullish(data.language)){
         data.language = 'en'
     }
+    if(checkNullish(data.swipe)){
+        data.swipe = true
+    }
     if(checkNullish(data.translator)){
         data.translator = ''
     }
@@ -187,6 +190,9 @@ export function setDatabase(data:Database){
     if(checkNullish(data.supaMemoryPrompt)){
         data.supaMemoryPrompt = ''
     }
+    if(checkNullish(data.showMemoryLimit)){
+        data.showMemoryLimit = false
+    }
     if(checkNullish(data.sdConfig)){
         data.sdConfig = {
             width:512,
@@ -399,6 +405,7 @@ export interface Database{
     showUnrecommended:boolean
     elevenLabKey:string
     useExperimental:boolean
+    showMemoryLimit:boolean
 }
@@ -422,6 +429,7 @@ export interface Chat{
     localLore: loreBook[]
     sdData?:string
     supaMemoryData?:string
+    lastMemory?:string
 }
 export interface Message{
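
Note on the setDatabase hunks above: the checkNullish guards backfill newly introduced settings so databases saved before this commit still load with sensible defaults. A minimal TypeScript sketch of that pattern follows; the checkNullish body and the PartialSettings type are assumptions for illustration, not the repository's actual definitions.

// Assumed helper: treats only null/undefined as "missing", so legitimate
// falsy values (false, 0, '') set by the user are kept as-is.
function checkNullish(value: unknown): boolean {
    return value === null || value === undefined
}

// Illustrative subset of the settings object.
type PartialSettings = { swipe?: boolean; showMemoryLimit?: boolean }

// Backfill pattern used for the new fields in this commit.
function applyDefaults(data: PartialSettings): PartialSettings {
    if (checkNullish(data.swipe)) {
        data.swipe = true               // swiping enabled by default
    }
    if (checkNullish(data.showMemoryLimit)) {
        data.showMemoryLimit = false    // memory-limit line hidden by default
    }
    return data
}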

View File

@@ -238,18 +238,21 @@ export async function sendChat(chatProcessIndex = -1):Promise<boolean> {
             chats = sp.chats
             currentTokens = sp.currentTokens
             currentChat.supaMemoryData = sp.memory ?? currentChat.supaMemoryData
+            currentChat.lastMemory = sp.lastId ?? currentChat.lastMemory
         }
         else{
             while(currentTokens > maxContextTokens){
                 if(chats.length <= 1){
                     alertError(language.errors.toomuchtoken)
                     return false
                 }
                 currentTokens -= (await tokenize(chats[0].content) + 1)
                 chats.splice(0, 1)
             }
         }
+        currentChat.lastMemory = chats[0].memo
+        console.log(currentChat.lastMemory)
     }
     let bias:{[key:number]:number} = {}
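
For context on the sendChat hunk above: in the non-supaMemory branch, messages are dropped from the front until the prompt fits the context window, and the memo of the oldest surviving message is then recorded as lastMemory (the line where the memory limit now sits). The sketch below illustrates only that trimming step; ChatMessage and countTokens are simplified stand-ins for the repository's OpenAIChat and tokenize, and the toomuchtoken error handling is omitted.

// Simplified message shape; the real OpenAIChat carries more fields.
interface ChatMessage {
    content: string
    memo?: string
}

// Drop oldest messages until the token budget fits, then remember which
// message now marks the start of the visible context window.
async function trimToContext(
    chats: ChatMessage[],
    currentTokens: number,
    maxContextTokens: number,
    countTokens: (text: string) => Promise<number>
): Promise<{ chats: ChatMessage[]; currentTokens: number; lastMemory?: string }> {
    while (currentTokens > maxContextTokens && chats.length > 1) {
        currentTokens -= (await countTokens(chats[0].content)) + 1
        chats = chats.slice(1)
    }
    // The memo of the first remaining message becomes the memory-limit line.
    return { chats, currentTokens, lastMemory: chats[0]?.memo }
}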

View File

@@ -5,7 +5,7 @@ import { tokenize } from "../tokenizer";
 import { findCharacterbyId } from "../util";
 import { requestChatData } from "./request";

-export async function supaMemory(chats:OpenAIChat[],currentTokens:number,maxContextTokens:number,room:Chat,char:character|groupChat): Promise<{ currentTokens: number; chats: OpenAIChat[]; error?:string; memory?:string}>{
+export async function supaMemory(chats:OpenAIChat[],currentTokens:number,maxContextTokens:number,room:Chat,char:character|groupChat): Promise<{ currentTokens: number; chats: OpenAIChat[]; error?:string; memory?:string;lastId?:string}>{
     const db = get(DataBase)

     console.log("Memory: " + currentTokens)
@@ -147,7 +147,8 @@ export async function supaMemory(chats:OpenAIChat[],currentTokens:number,maxCont
     return {
         currentTokens: currentTokens,
         chats: chats,
-        memory: lastId + '\n' + supaMemory
+        memory: lastId + '\n' + supaMemory,
+        lastId: lastId
     }
 }
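
The supaMemory change above only widens the return type: lastId, which was previously recoverable only from the first line of the memory string, is now returned on its own. Written out as a standalone type for readability; field comments are inferred from the diff, and the OpenAIChat placeholder is a simplification of the repository's type.

// Placeholder for the repository's OpenAIChat type.
type OpenAIChat = { role: 'system' | 'user' | 'assistant'; content: string; memo?: string }

// Return shape of supaMemory after this commit.
type SupaMemoryResult = {
    currentTokens: number   // token count remaining after summarization
    chats: OpenAIChat[]     // messages still sent verbatim
    error?: string
    memory?: string         // persisted as `${lastId}\n${summary}`
    lastId?: string         // id of the last message folded into the summary,
                            // returned separately so sendChat can set
                            // currentChat.lastMemory without parsing memory
}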

View File

@@ -27,7 +27,8 @@ export function messageForm(arg:Message[], loadPages:number){
             role: m.role,
             data: reformatContent(m.data),
             index: i,
-            saying: m.saying
+            saying: m.saying,
+            chatId: m.chatId ?? 'none'
         })
     }
     return a.slice(0, loadPages)
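
The messageForm change carries each message's chatId through to the rendered list, defaulting to 'none' for messages saved before chatId existed. A tiny sketch of that mapping; the StoredMessage type and toFormEntry name are assumptions, and reformatContent is omitted.

// Messages saved by older versions may lack chatId; '??' keeps them valid.
interface StoredMessage {
    role: string
    data: string
    saying?: string
    chatId?: string
}

function toFormEntry(m: StoredMessage, i: number) {
    return {
        role: m.role,
        data: m.data,
        index: i,
        saying: m.saying,
        chatId: m.chatId ?? 'none',   // fallback for pre-chatId messages
    }
}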