Merge branch 'kwaroran:main' into main
@@ -8,7 +8,7 @@
},
"package": {
"productName": "RisuAI",
-"version": "1.95.0"
+"version": "1.95.1"
},
"tauri": {
"allowlist": {
@@ -52,7 +52,6 @@ const rmRegex = / |\n/g

export async function loadLoreBookPrompt(){

const selectedID = get(selectedCharID)
const db = get(DataBase)
const char = db.characters[selectedID]
@@ -65,9 +64,6 @@ export async function loadLoreBookPrompt(){
const loreDepth = char.loreSettings?.scanDepth ?? db.loreBookDepth
const loreToken = char.loreSettings?.tokenBudget ?? db.loreBookToken
const fullWordMatching = char.loreSettings?.fullWordMatching ?? false
-if(char.lorePlus){
-    return await loadLoreBookPlusPrompt()
-}

let activatiedPrompt: string[] = []
@@ -185,116 +181,73 @@ export async function loadLoreBookPrompt(){

let sactivated:string[] = []
let decoratedArray:{
depth:number,
pos:string,
prompt:string
}[] = []
activatiedPrompt = activatiedPrompt.filter((v) => {
//deprecated three @ for special prompt
if(v.startsWith("@@@end")){
sactivated.push(v.replace('@@@end','').trim())
const decorated = decoratorParser(v)
if(decorated.decorators['dont_activate']){
return false
}
if(v.startsWith('@@end')){
sactivated.push(v.replace('@@end','').trim())
if(decorated.decorators['depth'] && decorated.decorators['depth'][0] === '0'){
sactivated.push(decorated.prompt)
return false
}
if(decorated.decorators['position']){
decoratedArray.push({
depth: -1,
pos: decorated.decorators['position'][0],
prompt: decorated.prompt
})
return false
}
if(decorated.decorators)
return true
})

return {
act: activatiedPrompt.reverse().join('\n\n'),
-special_act: sactivated.reverse().join('\n\n')
+special_act: sactivated.reverse().join('\n\n'),
+decorated: decoratedArray
}
}
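For reference, a rough sketch of the shape loadLoreBookPrompt() now returns; the type name and the example position value are hypothetical, not taken from this commit:

// Hypothetical type describing the return value assembled above.
type LoreBookPromptResult = {
    act: string                 // normally activated entries, joined by '\n\n'
    special_act: string         // entries forced to depth 0 via @@end or a depth decorator of '0'
    decorated: {                // entries routed by a @@position decorator
        depth: number           // -1 in this hunk; routed by position, not depth
        pos: string             // e.g. a hypothetical 'after_desc'
        prompt: string
    }[]
}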
export async function loadLoreBookPlusPrompt(){
const selectedID = get(selectedCharID)
const db = get(DataBase)
const char = db.characters[selectedID]
const page = char.chatPage
const characterLore = char.globalLore ?? []
const chatLore = char.chats[page].localLore ?? []
const fullLore = characterLore.concat(chatLore).concat(getModuleLorebooks()).filter((v) => { return v.content })
const currentChat = char.chats[page].message
const loreDepth = char.loreSettings?.scanDepth ?? db.loreBookDepth
const loreToken = char.loreSettings?.tokenBudget ?? db.loreBookToken
const supportedDecorators = ['depth','dont_activate','position']
export function decoratorParser(prompt:string){
const split = prompt.split('\n')
let decorators:{[name:string]:string[]} = {}

interface formatedLorePlus{
content: string
simularity:number
}

let formatedLores:formatedLorePlus[] = []
let activatiedPrompt: string[] = []
const hypaProcesser = new HypaProcesser('MiniLM')

const formatedChatMain = currentChat.slice(currentChat.length - loreDepth,currentChat.length).map((msg) => {
return msg.data
}).join('||').replace(rmRegex,'').toLocaleLowerCase()
const chatVec = await hypaProcesser.testText(formatedChatMain)

for(const lore of fullLore){
let key = (lore.key ?? '').replace(rmRegex, '').toLocaleLowerCase().split(',')
key.push(lore.comment)

let vec:number[]

if(lore.loreCache && lore.loreCache.key === lore.content){
const vect = lore.loreCache.data[0]
const v = Buffer.from(vect, 'base64')
const f = new Float32Array(v.buffer)
vec = Array.from(f)
}
else{
vec = await hypaProcesser.testText(lore.content)
lore.loreCache = {
key: lore.content,
data: [Buffer.from(new Float32Array(vec).buffer).toString('base64')]
let fallbacking = false
for(let i=0;i<split.length;i++){
const line = split[i].trim()
if(line.startsWith('@@')){
const data = line.startsWith('@@@') ? line.replace('@@@','') : line.replace('@@','')
const name = data.split(' ')[0]
const values = data.replace(name,'').trim().split(',')
if(!supportedDecorators.includes(name)){
fallbacking = true
continue
}
if((!line.startsWith('@@@')) || fallbacking){
decorators[name] = values
}
}
else if(line === '@@end' || line === '@@@end'){
decorators['depth'] = ['0']
}
else{
return {
prompt: split.slice(i).join('\n').trim(),
decorators: decorators
}
}

formatedLores.push({
content: lore.content,
simularity: hypaProcesser.similarityCheck(chatVec, vec)
})
}

formatedLores.sort((a, b) => {
return b.simularity - a.simularity
})

let i=0;
while(i < formatedLores.length){
const lore = formatedLores[i]
const totalTokens = await tokenize(activatiedPrompt.concat([lore.content]).join('\n\n'))
if(totalTokens > loreToken){
break
}
activatiedPrompt.push(lore.content)
i++
}

let sactivated:string[] = []
activatiedPrompt = activatiedPrompt.filter((v) => {
//deprecated three @ for special prompt
if(v.startsWith("@@@end")){
sactivated.push(v.replace('@@@end','').trim())
return false
}
if(v.startsWith('@@end')){
sactivated.push(v.replace('@@end','').trim())
return false
}
return true
})

return {
act: activatiedPrompt.reverse().join('\n\n'),
special_act: sactivated.reverse().join('\n\n')
prompt: '',
decorators: decorators
}

}

export async function importLoreBook(mode:'global'|'local'|'sglobal'){
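To make the decorator syntax concrete, a minimal usage sketch of decoratorParser; the entry text and the 'after_desc' position value are hypothetical, and decoratorParser is assumed to be imported from the module above:

// Hypothetical lorebook entry using the decorator lines parsed above.
const entry = [
    '@@position after_desc',   // assumed position value, for illustration only
    '@@depth 0',               // same effect as an @@end header line
    'Alice is a senior researcher at the lab.'
].join('\n')

const parsed = decoratorParser(entry)
// parsed.decorators -> { position: ['after_desc'], depth: ['0'] }
// parsed.prompt     -> 'Alice is a senior researcher at the lab.'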
@@ -283,7 +283,7 @@ export async function supaMemory(
while(currentTokens > maxContextTokens){
const beforeToken = currentTokens
let maxChunkSize = Math.floor(maxContextTokens / 3)
-if(db.maxSupaChunkSize > maxChunkSize){
+if(db.maxSupaChunkSize < maxChunkSize){
maxChunkSize = db.maxSupaChunkSize
}
let summarized = false
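With the `<` comparison, db.maxSupaChunkSize acts as an upper bound on the chunk size rather than something that can raise it; a small sketch with hypothetical numbers:

// Hypothetical values, for illustration of the capping logic above.
const maxContextTokens = 3000
const maxSupaChunkSize = 600                          // stand-in for db.maxSupaChunkSize

let maxChunkSize = Math.floor(maxContextTokens / 3)   // 1000
if(maxSupaChunkSize < maxChunkSize){
    maxChunkSize = maxSupaChunkSize                   // capped to 600, never raised above it
}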
@@ -1804,64 +1804,66 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
let reader = res.body.getReader()
const decoder = new TextDecoder()
const parser = createParser(async (e) => {
if(e.type === 'event'){
switch(e.event){
case 'content_block_delta': {
if(e.data){
text += JSON.parse(e.data).delta?.text
controller.enqueue({
"0": text
})
}
break
}
case 'error': {
if(e.data){
const errormsg:string = JSON.parse(e.data).error?.message
if(errormsg && errormsg.toLocaleLowerCase().includes('overload') && db.antiClaudeOverload){
console.log('Overload detected, retrying...')
reader.cancel()
rerequesting = true
await sleep(2000)
body.max_tokens -= await tokenize(text)
if(body.max_tokens < 0){
body.max_tokens = 0
}
if(body.messages.at(-1)?.role !== 'assistant'){
body.messages.push({
role: 'assistant',
content: ''
})
}
body.messages[body.messages.length-1].content += text
const res = await fetchNative(replacerURL, {
body: JSON.stringify(body),
headers: {
"Content-Type": "application/json",
"x-api-key": apiKey,
"anthropic-version": "2023-06-01",
"accept": "application/json",
},
method: "POST",
chatId: arg.chatId
try {
if(e.type === 'event'){
switch(e.event){
case 'content_block_delta': {
if(e.data){
text += JSON.parse(e.data).delta?.text
controller.enqueue({
"0": text
})
if(res.status !== 200){
breakError = 'Error: ' + await textifyReadableStream(res.body)
}
break
}
case 'error': {
if(e.data){
const errormsg:string = JSON.parse(e.data).error?.message
if(errormsg && errormsg.toLocaleLowerCase().includes('overload') && db.antiClaudeOverload){
console.log('Overload detected, retrying...')
reader.cancel()
rerequesting = true
await sleep(2000)
body.max_tokens -= await tokenize(text)
if(body.max_tokens < 0){
body.max_tokens = 0
}
if(body.messages.at(-1)?.role !== 'assistant'){
body.messages.push({
role: 'assistant',
content: ''
})
}
body.messages[body.messages.length-1].content += text
const res = await fetchNative(replacerURL, {
body: JSON.stringify(body),
headers: {
"Content-Type": "application/json",
"x-api-key": apiKey,
"anthropic-version": "2023-06-01",
"accept": "application/json",
},
method: "POST",
chatId: arg.chatId
})
if(res.status !== 200){
breakError = 'Error: ' + await textifyReadableStream(res.body)
break
}
reader = res.body.getReader()
rerequesting = false
break
}
reader = res.body.getReader()
rerequesting = false
break
text += "Error:" + JSON.parse(e.data).error?.message
controller.enqueue({
"0": text
})
}
text += "Error:" + JSON.parse(e.data).error?.message
controller.enqueue({
"0": text
})
break
}
break
}
}
}
} catch (error) {}
})
while(true){
if(rerequesting){
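The retry branch above amounts to continuing a partial completion. A condensed sketch of that pattern, with simplified types; the body shape is assumed to match the Anthropic request built above:

// Condensed sketch of the overload retry: fold the partial text into an
// assistant turn, shrink max_tokens by what was already generated, and let
// the caller re-POST the body and resume reading the new stream.
type ClaudeBody = { max_tokens: number, messages: { role: string, content: string }[] }

async function prepareOverloadRetry(body: ClaudeBody, partialText: string,
        tokenize: (s: string) => Promise<number>): Promise<ClaudeBody> {
    body.max_tokens = Math.max(0, body.max_tokens - await tokenize(partialText))
    if(body.messages.at(-1)?.role !== 'assistant'){
        body.messages.push({ role: 'assistant', content: '' })
    }
    body.messages[body.messages.length - 1].content += partialText
    return body
}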
@@ -1923,10 +1925,16 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
result: JSON.stringify(res.data.error)
}
}
+const resText = res?.data?.content?.[0]?.text
+if(!resText){
+    return {
+        type: 'fail',
+        result: JSON.stringify(res.data)
+    }
+}
return {
type: 'success',
-result: res.data.content[0].text
+result: resText
}
}
else if(raiModel.startsWith('claude')){
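The added lines reduce to an optional-chaining guard with an explicit failure result when no content block is present; a minimal sketch, assuming the same res.data shape as above:

// Minimal sketch of the guard added above; the res.data shape is assumed.
function readClaudeText(res: { data: { content?: { text?: string }[] } }) {
    const resText = res?.data?.content?.[0]?.text
    return resText
        ? { type: 'success' as const, result: resText }
        : { type: 'fail' as const, result: JSON.stringify(res.data) }
}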
@@ -15,7 +15,7 @@ import type { OobaChatCompletionRequestParams } from '../model/ooba';

export const DataBase = writable({} as any as Database)
export const loadedStore = writable(false)
-export let appVer = "1.95.0"
+export let appVer = "1.95.1"
export let webAppSubVer = ''

export function setDatabase(data:Database){

@@ -1 +1 @@
-{"version":"1.95.0"}
+{"version":"1.95.1"}