[feat] better drive load and supamemory

This commit is contained in:
kwaroran
2023-05-20 04:10:54 +09:00
parent 6035d1e01d
commit 0e1e63e63c
2 changed files with 57 additions and 49 deletions

View File

@@ -3,7 +3,7 @@ import { alertError, alertInput, alertNormal, alertSelect, alertStore } from "..
 import { DataBase, setDatabase, type Database } from "../database";
 import { forageStorage, getUnpargeables, isTauri } from "../globalApi";
 import pako from "pako";
-import { BaseDirectory, readBinaryFile, readDir, writeBinaryFile } from "@tauri-apps/api/fs";
+import { BaseDirectory, exists, readBinaryFile, readDir, writeBinaryFile } from "@tauri-apps/api/fs";
 import { language } from "../../lang";
 import { relaunch } from '@tauri-apps/api/process';
 import { open } from '@tauri-apps/api/shell';
@@ -167,12 +167,17 @@ async function loadDrive(ACCESS_TOKEN:string) {
     let loadedForageKeys = false
     async function checkImageExists(images:string) {
+        if(isTauri){
+            return await exists(`assets/` + images, {dir: BaseDirectory.AppData})
+        }
+        else{
         if(!loadedForageKeys){
             foragekeys = await forageStorage.keys()
             loadedForageKeys = true
         }
         return foragekeys.includes('assets/' + images)
+        }
     }
     const fileNames = files.map((d) => {
         return d.name
     })
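
Put together, the drive-load change makes checkImageExists branch on the platform: the Tauri build now asks the filesystem directly through the newly imported `exists`, and only the web build still enumerates forage keys. Below is a consolidated sketch of the resulting function; `isTauri`, `forageStorage`, and the `foragekeys` cache come from the surrounding loadDrive scope shown in the diff, the rest is assumed context.

import { BaseDirectory, exists } from "@tauri-apps/api/fs";
import { forageStorage, isTauri } from "../globalApi";

let foragekeys: string[] = []
let loadedForageKeys = false

// Checks whether an asset already exists locally:
// native builds hit the AppData filesystem, web builds consult a
// one-time snapshot of the localforage keys.
async function checkImageExists(images: string): Promise<boolean> {
    if (isTauri) {
        return await exists(`assets/` + images, { dir: BaseDirectory.AppData })
    }
    if (!loadedForageKeys) {
        foragekeys = await forageStorage.keys()
        loadedForageKeys = true
    }
    return foragekeys.includes('assets/' + images)
}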

View File

@@ -25,6 +25,7 @@ export async function supaMemory(chats:OpenAIChat[],currentTokens:number,maxCont
     }
     let supaMemory = ''
+    let lastId = ''
     if(room.supaMemoryData && room.supaMemoryData.length > 4){
         const splited = room.supaMemoryData.split('\n')
@@ -41,6 +42,7 @@ export async function supaMemory(chats:OpenAIChat[],currentTokens:number,maxCont
             }
         }
         if(chats[0].memo === id){
+            lastId = id
             break
         }
         currentTokens -= (await tokenize(chats[0].content) + 1)
@@ -64,8 +66,6 @@ export async function supaMemory(chats:OpenAIChat[],currentTokens:number,maxCont
         }
     }
-    let lastId = ''
     async function summarize(stringlizedChat:string){
@@ -87,7 +87,7 @@ export async function supaMemory(chats:OpenAIChat[],currentTokens:number,maxCont
             body: JSON.stringify({
                 "model": db.supaMemoryType === 'curie' ? "text-curie-001" : "text-davinci-003",
                 "prompt": promptbody,
-                "max_tokens": 500,
+                "max_tokens": 600,
                 "temperature": 0
             })
         })
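
For context, this max_tokens bump sits inside the OpenAI text-completion call that summarize() issues; raising it from 500 to 600 gives the summary a little more room before it is cut off. A minimal sketch of that request follows, assuming the standard completions endpoint and an API key field named db.openAIKey — only the JSON body actually appears in this hunk.

// Sketch only: endpoint, headers, and db.openAIKey are assumptions;
// the request body is the one shown in the diff.
const res = await fetch("https://api.openai.com/v1/completions", {
    method: "POST",
    headers: {
        "Content-Type": "application/json",
        "Authorization": "Bearer " + db.openAIKey
    },
    body: JSON.stringify({
        "model": db.supaMemoryType === 'curie' ? "text-curie-001" : "text-davinci-003",
        "prompt": promptbody,
        "max_tokens": 600,
        "temperature": 0
    })
})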
@@ -128,20 +128,21 @@ export async function supaMemory(chats:OpenAIChat[],currentTokens:number,maxCont
         return result
     }
+    if(supaMemory.split('\n\n').length >= 4){
+        const result = await summarize(supaMemory)
+        if(typeof(result) !== 'string'){
+            return result
+        }
+        currentTokens -= await tokenize(supaMemory)
+        currentTokens += await tokenize(result + '\n\n')
+        supaMemory = result + '\n\n'
+    }
     while(currentTokens > maxContextTokens){
+        const beforeToken = currentTokens
         let maxChunkSize = maxContextTokens > 3500 ? 1200 : Math.floor(maxContextTokens / 3)
-        while((currentTokens - (maxChunkSize * 0.7)) > maxContextTokens){
-            maxChunkSize = Math.floor(maxChunkSize * 0.7)
+        let summarized = false
+        let chunkSize = 0
+        let stringlizedChat = ''
+        let spiceLen = 0
+        while(true){
+            const cont = chats[spiceLen]
+            if(!cont){
+                currentTokens = beforeToken
+                stringlizedChat = ''
+                chunkSize = 0
+                spiceLen = 0
+                if(summarized){
             if(maxChunkSize < 500){
                 return {
                     currentTokens: currentTokens,
@@ -149,31 +150,40 @@ export async function supaMemory(chats:OpenAIChat[],currentTokens:number,maxCont
error: "Not Enough Tokens" error: "Not Enough Tokens"
} }
} }
maxChunkSize = maxChunkSize * 0.7
}
else{
const result = await summarize(supaMemory)
if(typeof(result) !== 'string'){
return result
} }
let chunkSize = 0 console.log(currentTokens)
let stringlizedChat = '' currentTokens -= await tokenize(supaMemory)
currentTokens += await tokenize(result + '\n\n')
console.log(currentTokens)
while(true){ supaMemory = result + '\n\n'
const cont = chats[0] summarized = true
if(!cont){ if(currentTokens <= maxContextTokens){
return { break
currentTokens: currentTokens,
chats: chats,
error: "Not Enough Tokens"
} }
} }
continue
}
const tokens = await tokenize(cont.content) + 1 const tokens = await tokenize(cont.content) + 1
if((chunkSize + tokens) > maxChunkSize){ if((chunkSize + tokens) > maxChunkSize){
lastId = cont.memo lastId = cont.memo
break break
} }
stringlizedChat += `${cont.role === 'assistant' ? char.type === 'group' ? '' : char.name : db.username}: ${cont.content}\n\n` stringlizedChat += `${cont.role === 'assistant' ? char.type === 'group' ? '' : char.name : db.username}: ${cont.content}\n\n`
chats.splice(0, 1) spiceLen += 1
currentTokens -= tokens currentTokens -= tokens
chunkSize += tokens chunkSize += tokens
} }
chats.splice(0, spiceLen)
if(stringlizedChat !== ''){
const result = await summarize(stringlizedChat) const result = await summarize(stringlizedChat)
if(typeof(result) !== 'string'){ if(typeof(result) !== 'string'){
@@ -183,16 +193,9 @@ export async function supaMemory(chats:OpenAIChat[],currentTokens:number,maxCont
             const tokenz = await tokenize(result + '\n\n') + 5
             currentTokens += tokenz
             supaMemory += result.replace(/\n+/g,'\n') + '\n\n'
-        if(supaMemory.split('\n\n').length >= 4){
-            const result = await summarize(supaMemory)
-            if(typeof(result) !== 'string'){
-                return result
-            }
-            currentTokens -= await tokenize(supaMemory)
-            currentTokens += await tokenize(result + '\n\n')
-            supaMemory = result + '\n\n'
-        }
+        }
     }
     chats.unshift({
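
Taken together, the supaMemory hunks restructure the trimming loop. The old version shrank maxChunkSize up front and spliced messages out of `chats` one by one, so a failed pass could not be undone; the new version counts consumed messages in spiceLen, snapshots the token count in beforeToken, and when it runs out of messages it rolls back and either re-summarizes the accumulated supaMemory (first attempt) or retries with a 0.7x smaller chunk, erroring out only once maxChunkSize drops below 500. The compaction of supaMemory that used to run after every chunk now runs once before the loop. The sketch below restates that control flow in isolation; summarize() and tokenize() are stand-ins for the real helpers and the error object is replaced by a thrown Error.

type Chat = { role: string, content: string, memo: string }

// Restated control flow of the new loop (not the actual implementation).
async function compress(
    chats: Chat[], currentTokens: number, maxContextTokens: number,
    supaMemory: string, maxChunkSize: number,
    summarize: (text: string) => Promise<string>,
    tokenize: (text: string) => Promise<number>
) {
    let lastId = ''
    while (currentTokens > maxContextTokens) {
        const beforeToken = currentTokens   // checkpoint so a failed pass can be rolled back
        let summarized = false
        let chunkSize = 0
        let stringlizedChat = ''
        let spiceLen = 0                    // messages consumed so far, spliced only at the end
        while (true) {
            const cont = chats[spiceLen]
            if (!cont) {
                // Ran out of messages before the budget fit: roll everything back.
                currentTokens = beforeToken
                stringlizedChat = ''
                chunkSize = 0
                spiceLen = 0
                if (summarized) {
                    // Memory is already compressed, so retry with a smaller chunk.
                    if (maxChunkSize < 500) throw new Error("Not Enough Tokens")
                    maxChunkSize = maxChunkSize * 0.7
                } else {
                    // First failure: compress the existing memory block instead.
                    const result = await summarize(supaMemory)
                    currentTokens -= await tokenize(supaMemory)
                    currentTokens += await tokenize(result + '\n\n')
                    supaMemory = result + '\n\n'
                    summarized = true
                    if (currentTokens <= maxContextTokens) break
                }
                continue
            }
            const tokens = await tokenize(cont.content) + 1
            if (chunkSize + tokens > maxChunkSize) {
                lastId = cont.memo          // remember where summarization stopped
                break
            }
            stringlizedChat += `${cont.role}: ${cont.content}\n\n`
            spiceLen += 1
            currentTokens -= tokens
            chunkSize += tokens
        }
        chats.splice(0, spiceLen)           // drop the consumed prefix in one step
        if (stringlizedChat !== '') {
            const result = await summarize(stringlizedChat)
            currentTokens += await tokenize(result + '\n\n') + 5
            supaMemory += result.replace(/\n+/g, '\n') + '\n\n'
        }
    }
    return { supaMemory, currentTokens, lastId, chats }
}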