From c810a64a0f202c6bc9813e106d3b061237ef0a27 Mon Sep 17 00:00:00 2001
From: kwaroran
Date: Sat, 13 Apr 2024 17:28:55 +0900
Subject: [PATCH 1/5] Fix condition in supaMemory.ts to use greater than instead of less than

---
 src/ts/process/memory/supaMemory.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/ts/process/memory/supaMemory.ts b/src/ts/process/memory/supaMemory.ts
index b3761fa8..09ec59ac 100644
--- a/src/ts/process/memory/supaMemory.ts
+++ b/src/ts/process/memory/supaMemory.ts
@@ -283,7 +283,7 @@ export async function supaMemory(
     while(currentTokens > maxContextTokens){
         const beforeToken = currentTokens
         let maxChunkSize = Math.floor(maxContextTokens / 3)
-        if(db.maxSupaChunkSize > maxChunkSize){
+        if(db.maxSupaChunkSize < maxChunkSize){
             maxChunkSize = db.maxSupaChunkSize
         }
         let summarized = false

From 3f2aa381674c04a477a981d7e91a6c9cc91f6684 Mon Sep 17 00:00:00 2001
From: kwaroran
Date: Sat, 13 Apr 2024 17:29:12 +0900
Subject: [PATCH 2/5] Update version numbers to 1.95.1

---
 src-tauri/tauri.conf.json  | 2 +-
 src/ts/storage/database.ts | 2 +-
 version.json               | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json
index 4e0d726f..32905218 100644
--- a/src-tauri/tauri.conf.json
+++ b/src-tauri/tauri.conf.json
@@ -8,7 +8,7 @@
   },
   "package": {
     "productName": "RisuAI",
-    "version": "1.95.0"
+    "version": "1.95.1"
   },
   "tauri": {
     "allowlist": {
diff --git a/src/ts/storage/database.ts b/src/ts/storage/database.ts
index 4805075a..488fd42f 100644
--- a/src/ts/storage/database.ts
+++ b/src/ts/storage/database.ts
@@ -15,7 +15,7 @@ import type { OobaChatCompletionRequestParams } from '../model/ooba';
 
 export const DataBase = writable({} as any as Database)
 export const loadedStore = writable(false)
-export let appVer = "1.95.0"
+export let appVer = "1.95.1"
 export let webAppSubVer = ''
 
 export function setDatabase(data:Database){
diff --git a/version.json b/version.json
index 9094c3b4..d6f4e88c 100644
--- a/version.json
+++ b/version.json
@@ -1 +1 @@
-{"version":"1.95.0"}
\ No newline at end of file
+{"version":"1.95.1"}
\ No newline at end of file

From 706e208f5352181c9b9592a614caa9ad16a01d9f Mon Sep 17 00:00:00 2001
From: kwaroran
Date: Mon, 15 Apr 2024 23:51:57 +0900
Subject: [PATCH 3/5] Refactor loadLoreBookPrompt function in lorebook.ts

---
 src/ts/process/lorebook.ts | 143 +++++++++++++------------------
 1 file changed, 48 insertions(+), 95 deletions(-)

diff --git a/src/ts/process/lorebook.ts b/src/ts/process/lorebook.ts
index 4c4e605f..1f4e57cc 100644
--- a/src/ts/process/lorebook.ts
+++ b/src/ts/process/lorebook.ts
@@ -52,7 +52,6 @@ const rmRegex = / |\n/g
 
 
 export async function loadLoreBookPrompt(){
-
     const selectedID = get(selectedCharID)
     const db = get(DataBase)
     const char = db.characters[selectedID]
@@ -65,9 +64,6 @@ export async function loadLoreBookPrompt(){
     const loreDepth = char.loreSettings?.scanDepth ?? db.loreBookDepth
     const loreToken = char.loreSettings?.tokenBudget ?? db.loreBookToken
     const fullWordMatching = char.loreSettings?.fullWordMatching ??
false - if(char.lorePlus){ - return await loadLoreBookPlusPrompt() - } let activatiedPrompt: string[] = [] @@ -185,116 +181,73 @@ export async function loadLoreBookPrompt(){ let sactivated:string[] = [] + let decoratedArray:{ + depth:number, + pos:string, + prompt:string + }[] = [] activatiedPrompt = activatiedPrompt.filter((v) => { - //deprecated three @ for special prompt - if(v.startsWith("@@@end")){ - sactivated.push(v.replace('@@@end','').trim()) + const decorated = decoratorParser(v) + if(decorated.decorators['dont_activate']){ return false } - if(v.startsWith('@@end')){ - sactivated.push(v.replace('@@end','').trim()) + if(decorated.decorators['depth'] && decorated.decorators['depth'][0] === '0'){ + sactivated.push(decorated.prompt) return false } + if(decorated.decorators['position']){ + decoratedArray.push({ + depth: -1, + pos: decorated.decorators['position'][0], + prompt: decorated.prompt + }) + return false + } + if(decorated.decorators) return true }) return { act: activatiedPrompt.reverse().join('\n\n'), - special_act: sactivated.reverse().join('\n\n') + special_act: sactivated.reverse().join('\n\n'), + decorated: decoratedArray } } -export async function loadLoreBookPlusPrompt(){ - const selectedID = get(selectedCharID) - const db = get(DataBase) - const char = db.characters[selectedID] - const page = char.chatPage - const characterLore = char.globalLore ?? [] - const chatLore = char.chats[page].localLore ?? [] - const fullLore = characterLore.concat(chatLore).concat(getModuleLorebooks()).filter((v) => { return v.content }) - const currentChat = char.chats[page].message - const loreDepth = char.loreSettings?.scanDepth ?? db.loreBookDepth - const loreToken = char.loreSettings?.tokenBudget ?? db.loreBookToken +const supportedDecorators = ['depth','dont_activate','position'] +export function decoratorParser(prompt:string){ + const split = prompt.split('\n') + let decorators:{[name:string]:string[]} = {} - interface formatedLorePlus{ - content: string - simularity:number - } - - let formatedLores:formatedLorePlus[] = [] - let activatiedPrompt: string[] = [] - const hypaProcesser = new HypaProcesser('MiniLM') - - - const formatedChatMain = currentChat.slice(currentChat.length - loreDepth,currentChat.length).map((msg) => { - return msg.data - }).join('||').replace(rmRegex,'').toLocaleLowerCase() - const chatVec = await hypaProcesser.testText(formatedChatMain) - - - for(const lore of fullLore){ - let key = (lore.key ?? 
'').replace(rmRegex, '').toLocaleLowerCase().split(',') - key.push(lore.comment) - - let vec:number[] - - if(lore.loreCache && lore.loreCache.key === lore.content){ - const vect = lore.loreCache.data[0] - const v = Buffer.from(vect, 'base64') - const f = new Float32Array(v.buffer) - vec = Array.from(f) - } - else{ - vec = await hypaProcesser.testText(lore.content) - lore.loreCache = { - key: lore.content, - data: [Buffer.from(new Float32Array(vec).buffer).toString('base64')] + let fallbacking = false + for(let i=0;i { - return b.simularity - a.simularity - }) - - let i=0; - while(i < formatedLores.length){ - const lore = formatedLores[i] - const totalTokens = await tokenize(activatiedPrompt.concat([lore.content]).join('\n\n')) - if(totalTokens > loreToken){ - break - } - activatiedPrompt.push(lore.content) - i++ - } - - - let sactivated:string[] = [] - activatiedPrompt = activatiedPrompt.filter((v) => { - //deprecated three @ for special prompt - if(v.startsWith("@@@end")){ - sactivated.push(v.replace('@@@end','').trim()) - return false - } - if(v.startsWith('@@end')){ - sactivated.push(v.replace('@@end','').trim()) - return false - } - return true - }) - return { - act: activatiedPrompt.reverse().join('\n\n'), - special_act: sactivated.reverse().join('\n\n') + prompt: '', + decorators: decorators } - } export async function importLoreBook(mode:'global'|'local'|'sglobal'){ From d93fd7b191719d89da8d5cdba3985c2d13dfbf81 Mon Sep 17 00:00:00 2001 From: kwaroran Date: Mon, 15 Apr 2024 23:58:57 +0900 Subject: [PATCH 4/5] Refactor loadLoreBookPrompt function in lorebook.ts --- src/ts/process/request.ts | 104 +++++++++++++++++++------------------- 1 file changed, 53 insertions(+), 51 deletions(-) diff --git a/src/ts/process/request.ts b/src/ts/process/request.ts index bc0d32e8..6288eb69 100644 --- a/src/ts/process/request.ts +++ b/src/ts/process/request.ts @@ -1796,64 +1796,66 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model' let reader = res.body.getReader() const decoder = new TextDecoder() const parser = createParser(async (e) => { - if(e.type === 'event'){ - switch(e.event){ - case 'content_block_delta': { - if(e.data){ - text += JSON.parse(e.data).delta?.text - controller.enqueue({ - "0": text - }) - } - break - } - case 'error': { - if(e.data){ - const errormsg:string = JSON.parse(e.data).error?.message - if(errormsg && errormsg.toLocaleLowerCase().includes('overload') && db.antiClaudeOverload){ - console.log('Overload detected, retrying...') - reader.cancel() - rerequesting = true - await sleep(2000) - body.max_tokens -= await tokenize(text) - if(body.max_tokens < 0){ - body.max_tokens = 0 - } - if(body.messages.at(-1)?.role !== 'assistant'){ - body.messages.push({ - role: 'assistant', - content: '' - }) - } - body.messages[body.messages.length-1].content += text - const res = await fetchNative(replacerURL, { - body: JSON.stringify(body), - headers: { - "Content-Type": "application/json", - "x-api-key": apiKey, - "anthropic-version": "2023-06-01", - "accept": "application/json", - }, - method: "POST", - chatId: arg.chatId + try { + if(e.type === 'event'){ + switch(e.event){ + case 'content_block_delta': { + if(e.data){ + text += JSON.parse(e.data).delta?.text + controller.enqueue({ + "0": text }) - if(res.status !== 200){ - breakError = 'Error: ' + await textifyReadableStream(res.body) + } + break + } + case 'error': { + if(e.data){ + const errormsg:string = JSON.parse(e.data).error?.message + if(errormsg && errormsg.toLocaleLowerCase().includes('overload') 
&& db.antiClaudeOverload){
+                            console.log('Overload detected, retrying...')
+                            reader.cancel()
+                            rerequesting = true
+                            await sleep(2000)
+                            body.max_tokens -= await tokenize(text)
+                            if(body.max_tokens < 0){
+                                body.max_tokens = 0
+                            }
+                            if(body.messages.at(-1)?.role !== 'assistant'){
+                                body.messages.push({
+                                    role: 'assistant',
+                                    content: ''
+                                })
+                            }
+                            body.messages[body.messages.length-1].content += text
+                            const res = await fetchNative(replacerURL, {
+                                body: JSON.stringify(body),
+                                headers: {
+                                    "Content-Type": "application/json",
+                                    "x-api-key": apiKey,
+                                    "anthropic-version": "2023-06-01",
+                                    "accept": "application/json",
+                                },
+                                method: "POST",
+                                chatId: arg.chatId
+                            })
+                            if(res.status !== 200){
+                                breakError = 'Error: ' + await textifyReadableStream(res.body)
+                                break
+                            }
+                            reader = res.body.getReader()
+                            rerequesting = false
+                            break
+                        }
+                        text += "Error:" + JSON.parse(e.data).error?.message
+                        controller.enqueue({
+                            "0": text
+                        })
+                    }
+                    break
+                }
+            }
+        }
+        } catch (error) {}
     })
     while(true){
         if(rerequesting){

From ccb45ce2c531f74001cc5eddb6b8420d8897cb90 Mon Sep 17 00:00:00 2001
From: kwaroran
Date: Tue, 16 Apr 2024 00:00:05 +0900
Subject: [PATCH 5/5] Fix error handling in requestChatDataMain function

---
 src/ts/process/request.ts | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/src/ts/process/request.ts b/src/ts/process/request.ts
index 6288eb69..f141942e 100644
--- a/src/ts/process/request.ts
+++ b/src/ts/process/request.ts
@@ -1917,10 +1917,16 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
                 result: JSON.stringify(res.data.error)
             }
         }
+        const resText = res?.data?.content?.[0]?.text
+        if(!resText){
+            return {
+                type: 'fail',
+                result: JSON.stringify(res.data)
+            }
+        }
         return {
             type: 'success',
-            result: res.data.content[0].text
-
+            result: resText
         }
     }
     else if(raiModel.startsWith('claude')){
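
The one-line change in PATCH 1/5 makes db.maxSupaChunkSize act as an upper bound on the summarization chunk size rather than a lower bound: the configured value now only ever shrinks the maxContextTokens / 3 default. A minimal TypeScript sketch of that effect follows; the example values are assumptions standing in for the database fields, and this is illustrative code, not code from the repository.

// Effect of PATCH 1/5, sketched with assumed example values (illustrative only).
const maxContextTokens = 4096
const maxSupaChunkSize = 1200 // stands in for db.maxSupaChunkSize

let maxChunkSize = Math.floor(maxContextTokens / 3) // 1365
// Old condition (">") picked the larger value, so the setting could raise the chunk size.
// New condition ("<") picks the smaller value, so the setting can only cap it.
if (maxSupaChunkSize < maxChunkSize) {
    maxChunkSize = maxSupaChunkSize
}
// Equivalent one-liner:
const capped = Math.min(Math.floor(maxContextTokens / 3), maxSupaChunkSize)
console.log(maxChunkSize, capped) // 1200 1200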
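
PATCH 4/5 wraps the whole body of the streaming-event callback in try { ... } catch (error) {} so that one malformed event, or an error thrown during the overload retry, no longer rejects the stream. Reduced to its core, the pattern is sketched below; the event shape and handler are assumptions for the example, not the repository's createParser code.

// Sketch of the try/catch-per-event pattern from PATCH 4/5 (illustrative only).
type SseEvent = { type: 'event'; event?: string; data?: string }

function makeHandler(onText: (t: string) => void) {
    return (e: SseEvent) => {
        try {
            if (e.type === 'event' && e.event === 'content_block_delta' && e.data) {
                // JSON.parse throws on malformed data; the catch below absorbs it.
                onText(JSON.parse(e.data).delta?.text ?? '')
            }
        } catch (error) {
            // Swallowed, as in the patch: the stream keeps running.
        }
    }
}

const handler = makeHandler(console.log)
handler({ type: 'event', event: 'content_block_delta', data: '{"delta":{"text":"hi"}}' }) // logs "hi"
handler({ type: 'event', event: 'content_block_delta', data: 'not json' })                // ignored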
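
PATCH 5/5 stops indexing res.data.content[0].text directly: the text is read through optional chaining, and a missing field produces a 'fail' result that carries the raw payload instead of throwing. A self-contained sketch of the same guard is below; the result type and response shape are assumed for the example and are not types from the RisuAI codebase.

// Sketch of the guard added in PATCH 5/5 (illustrative only, names are assumptions).
type ChatResult =
    | { type: 'success'; result: string }
    | { type: 'fail'; result: string }

function extractClaudeText(res: { data?: { content?: { text?: string }[] } }): ChatResult {
    // Optional chaining yields undefined instead of throwing when any level is missing.
    const resText = res?.data?.content?.[0]?.text
    if (!resText) {
        // Return the raw payload so the caller can see why parsing failed.
        return { type: 'fail', result: JSON.stringify(res.data) }
    }
    return { type: 'success', result: resText }
}

console.log(extractClaudeText({ data: { content: [{ text: 'hello' }] } })) // success
console.log(extractClaudeText({ data: { content: [] } }))                  // fail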