diff --git a/src/lang/en.ts b/src/lang/en.ts
index dd11fe9e..790a4525 100644
--- a/src/lang/en.ts
+++ b/src/lang/en.ts
@@ -612,5 +612,10 @@ export const languageEnglish = {
nickname: "Nickname",
useRegexLorebook: "Use Regex",
customPromptTemplateToggle: "Custom Toggles",
- defaultVariables: "Default Variables"
+ defaultVariables: "Default Variables",
+ hypaAllocatedTokens: "Allocated Tokens",
+ hypaChunkSize: "Chunk Size",
+ hypaV2Desc: "HypaMemory V2 is a long-term memory system that use both summarized data and vector search.",
+ supaDesc: "SupaMemory is a long-term memory system that uses summarized data to AI.",
+ hanuraiDesc: "HanuraiMemory is a memory system that uses vector search.",
}
\ No newline at end of file
diff --git a/src/lib/Setting/Pages/OtherBotSettings.svelte b/src/lib/Setting/Pages/OtherBotSettings.svelte
index 020b2a3c..91a395c5 100644
--- a/src/lib/Setting/Pages/OtherBotSettings.svelte
+++ b/src/lib/Setting/Pages/OtherBotSettings.svelte
@@ -238,14 +238,20 @@
{#if $DataBase.hanuraiEnable}
+ {language.hanuraiDesc}
Chunk Size
{:else if $DataBase.supaMemoryType === 'hypaV2'}
- {language.HypaMemory} V2 is Experimental
+ {language.hypaV2Desc}
+ {language.hypaChunkSize}
+
+ {language.hypaAllocatedTokens}
+
{:else if $DataBase.supaMemoryType !== 'none'}
+ {language.supaDesc}
{language.SuperMemory} {language.model}
distilbart-cnn-6-6 (Free/Local)
diff --git a/src/ts/process/memory/hypav2.ts b/src/ts/process/memory/hypav2.ts
index 7b2f46d4..eb4e8761 100644
--- a/src/ts/process/memory/hypav2.ts
+++ b/src/ts/process/memory/hypav2.ts
@@ -67,7 +67,8 @@ export async function hypaMemoryV2(
//this is for the prompt
- let allocatedTokens = 3000
+ let allocatedTokens = db.hypaAllocatedTokens
+ let chunkSize = db.hypaChunkSize
currentTokens += allocatedTokens
currentTokens += 50 //this is for the template prompt
let mainPrompt = ""
@@ -93,13 +94,20 @@ export async function hypaMemoryV2(
while(currentTokens >= maxContextTokens){
- const idx = (Math.floor(chats.length/2))
- const targetId = chats[idx].memo
- const halfData = chats.slice(idx)
+ let idx = 0
+ let targetId = ''
+ const halfData:OpenAIChat[] = []
let halfDataTokens = 0
- for(const chat of halfData){
+ while(halfDataTokens < chunkSize){
+ const chat = chats[idx]
+ if(!chat){
+ break
+ }
halfDataTokens += await tokenizer.tokenizeChat(chat)
+ halfData.push(chat)
+ idx++
+ targetId = chat.memo
}
const stringlizedChat = halfData.map(e => `${e.role}: ${e.content}`).join('\n')
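The core change in hypav2.ts replaces the fixed "summarize from the midpoint of the history onward" step with a loop that accumulates chats until their combined token count reaches the new hypaChunkSize setting. The sketch below isolates that loop so its boundary behaviour is easy to see; it is an illustration under simplifying assumptions, not repository code: OpenAIChat is reduced to the fields the loop touches, and tokenizeChat is a stand-in word counter rather than the real tokenizer.

```ts
// Minimal sketch of the chunk-accumulation loop added in hypav2.ts.
interface OpenAIChat {
    role: string
    content: string
    memo: string
}

// Stand-in for tokenizer.tokenizeChat: counts whitespace-separated words so
// the sketch runs without the real tokenizer.
async function tokenizeChat(chat: OpenAIChat): Promise<number> {
    return chat.content.split(/\s+/).length
}

// Collect chats from the start of the array until their combined token count
// reaches chunkSize or the history runs out. Returns the collected chats,
// their token total, and the memo of the last chat taken (targetId in the diff).
async function collectChunk(chats: OpenAIChat[], chunkSize: number) {
    let idx = 0
    let targetId = ''
    const halfData: OpenAIChat[] = []
    let halfDataTokens = 0
    while (halfDataTokens < chunkSize) {
        const chat = chats[idx]
        if (!chat) {
            break // history exhausted before the chunk filled up
        }
        halfDataTokens += await tokenizeChat(chat)
        halfData.push(chat)
        idx++
        targetId = chat.memo
    }
    return { halfData, halfDataTokens, targetId }
}
```

Two properties of the new loop are worth noting. The budget check runs before each chat is tokenized, so the collected chunk can overshoot chunkSize by up to the size of the last chat added. And unlike the removed code, which always summarized from the midpoint of the chat array to its end, the loop walks the array from the start and stops once the budget is hit, so the amount summarized per pass is governed by hypaChunkSize rather than by the history length.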
diff --git a/src/ts/storage/database.ts b/src/ts/storage/database.ts
index 665b50d5..70d591f1 100644
--- a/src/ts/storage/database.ts
+++ b/src/ts/storage/database.ts
@@ -403,6 +403,8 @@ export function setDatabase(data:Database){
data.customPromptTemplateToggle ??= ''
data.globalChatVariables ??= {}
data.templateDefaultVariables ??= ''
+ data.hypaAllocatedTokens ??= 3000
+ data.hypaChunkSize ??= 3000
changeLanguage(data.language)
DataBase.set(data)
@@ -663,6 +665,8 @@ export interface Database{
customPromptTemplateToggle:string
globalChatVariables:{[key:string]:string}
templateDefaultVariables:string
+ hypaAllocatedTokens:number
+ hypaChunkSize:number
}
export interface customscript{
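
The database.ts hunks register the two new settings on the Database interface and backfill them in setDatabase() with nullish-coalescing assignment, so saves created before this change pick up 3000 (the value allocatedTokens was previously hardcoded to). A minimal sketch of that migration pattern follows; DatabaseSketch and applyHypaDefaults are hypothetical names, and the optional fields stand in for a pre-upgrade save, whereas the real Database interface declares them as required numbers.

```ts
// Hypothetical, trimmed-down model of the ??= backfill in setDatabase().
interface DatabaseSketch {
    hypaAllocatedTokens?: number
    hypaChunkSize?: number
}

function applyHypaDefaults(data: DatabaseSketch): DatabaseSketch {
    // ??= assigns only when the field is null or undefined, so older saves
    // get the default 3000 while any value already set (including 0) is kept.
    data.hypaAllocatedTokens ??= 3000
    data.hypaChunkSize ??= 3000
    return data
}

console.log(applyHypaDefaults({}))
// -> { hypaAllocatedTokens: 3000, hypaChunkSize: 3000 }
console.log(applyHypaDefaults({ hypaChunkSize: 1200 }))
// -> { hypaChunkSize: 1200, hypaAllocatedTokens: 3000 }
```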