Update 1.13.0 (#74)
@@ -8,7 +8,7 @@
 },
 "package": {
 "productName": "RisuAI",
-"version": "1.12.0"
+"version": "1.13.0"
 },
 "tauri": {
 "allowlist": {
@@ -23,9 +23,6 @@
 <span class="text-neutral-200">{language.emotionPrompt}</span>
 <input class="text-neutral-200 mb-4 p-2 bg-transparent input-text focus:bg-selected text-sm"bind:value={$DataBase.emotionPrompt2} placeholder="Leave it blank to use default">
 
-<span class="text-neutral-200">{language.SuperMemory} Prompt <Help key="experimental"/></span>
-<input class="text-neutral-200 mb-4 p-2 bg-transparent input-text focus:bg-selected text-sm"bind:value={$DataBase.supaMemoryPrompt} placeholder="Leave it blank to use default">
-
 <span class="text-neutral-200">{language.requestretrys}</span>
 <input class="text-neutral-200 mb-4 p-2 bg-transparent input-text focus:bg-selected text-sm" type="number" min={0} max="20" bind:value={$DataBase.requestRetrys}>
 
@@ -57,3 +57,21 @@
 <span class="text-neutral-200 mt-4 text-lg font-bold">TTS</span>
 <span class="text-neutral-200 mt-2">ElevenLabs API key</span>
 <input class="text-neutral-200 mb-4 p-2 bg-transparent input-text focus:bg-selected text-sm" bind:value={$DataBase.elevenLabKey}>
+
+
+<span class="text-neutral-200 mt-4 text-lg font-bold">SupaMemory</span>
+<span class="text-neutral-200 mt-4">{language.SuperMemory} {language.model}</span>
+<select class="bg-transparent input-text mt-2 mb-2 text-gray-200 appearance-none text-sm" bind:value={$DataBase.supaMemoryType}>
+<option value="none" class="bg-darkbg appearance-none">None</option>
+<option value="davinci" class="bg-darkbg appearance-none">OpenAI Davinci</option>
+<option value="curie" class="bg-darkbg appearance-none">OpenAI Curie</option>
+<option value="subModel" class="bg-darkbg appearance-none">{language.submodel} ({language.unrecommended})</option>
+</select>
+{#if $DataBase.supaMemoryType === 'davinci' || $DataBase.supaMemoryType === 'curie'}
+<span class="text-neutral-200">{language.SuperMemory} OpenAI Key</span>
+<input class="text-neutral-200 mb-4 p-2 bg-transparent input-text focus:bg-selected text-sm" bind:value={$DataBase.supaMemoryKey}>
+{/if}
+{#if $DataBase.supaMemoryType !== 'none'}
+<span class="text-neutral-200">{language.SuperMemory} Prompt</span>
+<input class="text-neutral-200 mb-4 p-2 bg-transparent input-text focus:bg-selected text-sm"bind:value={$DataBase.supaMemoryPrompt} placeholder="recommended to leave it blank to use default">
+{/if}
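
The block above is the new SupaMemory section of the advanced settings page: a `<select>` bound to `$DataBase.supaMemoryType` plus two conditionally rendered inputs. As a reading aid, a small TypeScript sketch (not part of the patch; names are illustrative) of which extra inputs the template shows for each option value:

```ts
// Illustrative only: mirrors the {#if} conditions in the Svelte template above.
type SupaMemoryType = 'none' | 'davinci' | 'curie' | 'subModel'

function visibleSupaMemoryInputs(type: SupaMemoryType): string[] {
    const inputs: string[] = []
    if (type === 'davinci' || type === 'curie') {
        inputs.push('SuperMemory OpenAI Key')   // bound to $DataBase.supaMemoryKey
    }
    if (type !== 'none') {
        inputs.push('SuperMemory Prompt')       // bound to $DataBase.supaMemoryPrompt
    }
    return inputs
}
```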
@@ -224,11 +224,11 @@
 <span class="text-neutral-200 ml-2">{language.jailbreakToggle}</span>
 </div>
 
-{#if $DataBase.useExperimental}
+{#if $DataBase.supaMemoryType !== 'none'}
 <div class="flex mt-2 items-center">
 <Check bind:check={currentChar.data.supaMemory}/>
-<span class="text-neutral-200 ml-2">{language.ToggleSuperMemory} <Help key="experimental"/></span>
+<span class="text-neutral-200 ml-2">{language.ToggleSuperMemory}</span>
 </div>
 {/if}
 {:else if subMenu === 1}
 <h2 class="mb-2 text-2xl font-bold mt-2">{language.characterDisplay}</h2>
@@ -518,7 +518,7 @@
 <span class="text-gray-400 mb-6 text-sm">{tokens.localNote} {language.tokens}</span>
 
 {#if currentChar.data.chats[currentChar.data.chatPage].supaMemoryData && currentChar.data.chats[currentChar.data.chatPage].supaMemoryData.length > 4}
-<span class="text-neutral-200">{language.SuperMemory} <Help key="experimental"/></span>
+<span class="text-neutral-200">{language.SuperMemory}</span>
 <textarea class="bg-transparent input-text mt-2 mb-2 text-gray-200 text-xs resize-none h-20 focus:bg-selected" autocomplete="off" bind:value={currentChar.data.chats[currentChar.data.chatPage].supaMemoryData}></textarea>
 {/if}
 {#if $DataBase.showUnrecommended || currentChar.data.personality.length > 3}
@@ -644,7 +644,7 @@
 
 {:else}
 {#if currentChar.data.chats[currentChar.data.chatPage].supaMemoryData && currentChar.data.chats[currentChar.data.chatPage].supaMemoryData.length > 4}
-<span class="text-neutral-200">{language.SuperMemory} <Help key="experimental"/></span>
+<span class="text-neutral-200">{language.SuperMemory}</span>
 <textarea class="bg-transparent input-text mt-2 mb-2 text-gray-200 text-xs resize-none h-20 focus:bg-selected" autocomplete="off" bind:value={currentChar.data.chats[currentChar.data.chatPage].supaMemoryData}></textarea>
 {/if}
 {#if $DataBase.useExperimental}
@@ -7,7 +7,7 @@ import { cloneDeep } from 'lodash';
 
 export const DataBase = writable({} as any as Database)
 export const loadedStore = writable(false)
-export let appVer = '1.12.0'
+export let appVer = '1.13.0'
 
 
 export function setDatabase(data:Database){
@@ -193,6 +193,12 @@ export function setDatabase(data:Database){
 if(checkNullish(data.showMemoryLimit)){
 data.showMemoryLimit = false
 }
+if(checkNullish(data.supaMemoryKey)){
+data.supaMemoryKey = ""
+}
+if(checkNullish(data.supaMemoryType)){
+data.supaMemoryType = "none"
+}
 if(checkNullish(data.sdConfig)){
 data.sdConfig = {
 width:512,
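
These two new guards follow the file's existing migration pattern: `setDatabase` backfills fields that older save files do not have yet. A minimal sketch of the same idea, assuming `checkNullish(v)` is essentially a null/undefined check:

```ts
// Sketch only; the real code uses checkNullish() as in the hunk above.
function backfillSupaMemoryDefaults(data: { supaMemoryKey?: string, supaMemoryType?: string }) {
    data.supaMemoryKey ??= ""        // same effect as the checkNullish guard
    data.supaMemoryType ??= "none"   // SupaMemory stays off until a summarizer model is chosen
    return data
}
```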
@@ -409,7 +415,9 @@ export interface Database{
 showMemoryLimit:boolean
 roundIcons:boolean
 useStreaming:boolean
-palmAPI:string
+palmAPI:string,
+supaMemoryKey:string
+supaMemoryType:string
 }
 
 
@@ -28,7 +28,7 @@ export async function ParseMarkdown(data:string, char:(character | groupChat) =
 if(char.additionalAssets){
 for(const asset of char.additionalAssets){
 const assetPath = await getFileSrc(asset[1])
-data = data.replaceAll(`{{raw::${asset[0]}}}`, assetPath).replaceAll(`{{img::${asset[0]}}}`,`<img src="${asset[0]}" />`)
+data = data.replaceAll(`{{raw::${asset[0]}}}`, assetPath).replaceAll(`{{img::${asset[0]}}}`,`<img src="${assetPath}" />`)
 }
 }
 }
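
The one-line change above fixes the `{{img::...}}` macro: the generated `<img>` previously used the raw asset name (`asset[0]`) as its `src`, while `{{raw::...}}` already used the resolved `assetPath`. A hedged sketch of the corrected behaviour (the asset tuple and the resolved URL here are made up for illustration):

```ts
// Illustrative values; in the app, assetPath comes from getFileSrc(asset[1]).
const asset: [string, string] = ['portrait', 'assets/portrait.png']
const assetPath = 'blob:example-resolved-url'

let data = 'Look: {{img::portrait}} and {{raw::portrait}}'
data = data
    .replaceAll(`{{raw::${asset[0]}}}`, assetPath)
    .replaceAll(`{{img::${asset[0]}}}`, `<img src="${assetPath}" />`) // was src="${asset[0]}" before this commit
```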
@@ -229,7 +229,7 @@ export async function sendChat(chatProcessIndex = -1):Promise<boolean> {
 currentTokens += (await tokenize(systemMsg) + 1)
 }
 
-if(nowChatroom.supaMemory){
+if(nowChatroom.supaMemory && db.supaMemoryType !== 'none'){
 const sp = await supaMemory(chats, currentTokens, maxContextTokens, currentChat, nowChatroom)
 if(sp.error){
 alertError(sp.error)
@@ -30,7 +30,7 @@ export async function requestChatData(arg:requestDataArgument, model:'model'|'su
 let trys = 0
 while(true){
 const da = await requestChatDataMain(arg, model)
-if(da.type === 'success'){
+if(da.type === 'success' || da.type === 'streaming'){
 return da
 }
 trys += 1
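
The retry wrapper previously only accepted `'success'`, so a `'streaming'` result would presumably fall through and be retried even though the response was already being streamed. A minimal sketch of the new exit condition (the result type here is an assumption, not copied from the repository):

```ts
// Assumed shapes, for illustration only.
type ChatResult =
    | { type: 'success', result: string }
    | { type: 'streaming', result: ReadableStream<string> }
    | { type: 'fail', result: string }

async function withRetries(run: () => Promise<ChatResult>, maxRetries: number): Promise<ChatResult> {
    let trys = 0
    while (true) {
        const da = await run()
        // A streamed reply cannot be replayed, so it is returned just like a success.
        if (da.type === 'success' || da.type === 'streaming') {
            return da
        }
        trys += 1
        if (trys > maxRetries) {
            return da
        }
    }
}
```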
@@ -95,45 +95,60 @@ export async function supaMemory(chats:OpenAIChat[],currentTokens:number,maxCont
 "[Summarize the ongoing role story. It must also remove redundancy and unnecessary content from the prompt so that gpt3 and other sublanguage models]\n"
 : db.supaMemoryPrompt
 
-const promptbody = stringlizedChat + '\n\n' + supaPrompt + "\n\nOutput:"
+let result = ''
 
-const da = await fetch("https://api.openai.com/v1/completions",{
-headers: {
-"Content-Type": "application/json",
-"Authorization": "Bearer " + db.openAIKey
-},
-method: "POST",
-body: JSON.stringify({
-"model": "text-davinci-003",
-"prompt": promptbody,
-"max_tokens": 500,
-"temperature": 0
-})
-})
-
-// const promptbody:OpenAIChat[] = [
-// {
-// role: "user",
-// content: stringlizedChat
-// },
-// {
-// role: "system",
-// content: supaPrompt
-// }
-// ]
-// const da = await requestChatData({
-// formated: promptbody,
-// bias: {}
-// }, 'submodel')
-
-const result = (await da.json()).choices[0].text.trim()
-if(da.status < 200 || da.status >= 300){
-return {
-currentTokens: currentTokens,
-chats: chats,
-error: "SupaMemory: HTTP: " + await da.text()
-}
-}
+if(db.supaMemoryType !== 'subModel'){
+const promptbody = stringlizedChat + '\n\n' + supaPrompt + "\n\nOutput:"
+
+const da = await fetch("https://api.openai.com/v1/completions",{
+headers: {
+"Content-Type": "application/json",
+"Authorization": "Bearer " + db.openAIKey
+},
+method: "POST",
+body: JSON.stringify({
+"model": db.supaMemoryType === 'curie' ? "text-curie-001" : "text-davinci-003",
+"prompt": promptbody,
+"max_tokens": 500,
+"temperature": 0
+})
+})
+
+if(da.status < 200 || da.status >= 300){
+return {
+currentTokens: currentTokens,
+chats: chats,
+error: "SupaMemory: HTTP: " + await da.text()
+}
+}
+result = (await da.json()).choices[0].text.trim()
+}
+else {
+const promptbody:OpenAIChat[] = [
+{
+role: "user",
+content: stringlizedChat
+},
+{
+role: "system",
+content: supaPrompt
+}
+]
+const da = await requestChatData({
+formated: promptbody,
+bias: {}
+}, 'submodel')
+if(da.type === 'fail' || da.type === 'streaming'){
+return {
+currentTokens: currentTokens,
+chats: chats,
+error: "SupaMemory: HTTP: " + da.result
+}
+}
+result = da.result
+}
+
+
 
 const tokenz = await tokenize(result + '\n\n') + 5
 currentTokens += tokenz
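
The rewritten block above is easier to follow when condensed: the previously commented-out `requestChatData` path becomes the live `'subModel'` branch, and the OpenAI completions path now picks `text-curie-001` or `text-davinci-003` from `db.supaMemoryType`. A condensed sketch under assumptions (error handling is simplified to exceptions; `db` and `requestChatData` are stand-ins for the module's own imports):

```ts
// Condensed sketch of the new control flow; not a drop-in replacement.
declare const db: { supaMemoryType: string, openAIKey: string }
declare function requestChatData(
    arg: { formated: { role: string, content: string }[], bias: Record<string, number> },
    model: 'model' | 'submodel'
): Promise<{ type: string, result: string }>

async function summarizeChunk(stringlizedChat: string, supaPrompt: string): Promise<string> {
    if (db.supaMemoryType !== 'subModel') {
        // 'davinci' and 'curie' call the OpenAI completions endpoint directly.
        const res = await fetch('https://api.openai.com/v1/completions', {
            method: 'POST',
            headers: { 'Content-Type': 'application/json', 'Authorization': 'Bearer ' + db.openAIKey },
            body: JSON.stringify({
                model: db.supaMemoryType === 'curie' ? 'text-curie-001' : 'text-davinci-003',
                prompt: stringlizedChat + '\n\n' + supaPrompt + '\n\nOutput:',
                max_tokens: 500,
                temperature: 0
            })
        })
        if (!res.ok) {
            throw new Error('SupaMemory: HTTP: ' + await res.text())
        }
        return (await res.json()).choices[0].text.trim()
    }
    // 'subModel' reuses the app's own request pipeline instead of the completions endpoint.
    const da = await requestChatData({
        formated: [
            { role: 'user', content: stringlizedChat },
            { role: 'system', content: supaPrompt }
        ],
        bias: {}
    }, 'submodel')
    if (da.type === 'fail' || da.type === 'streaming') {
        throw new Error('SupaMemory: ' + da.result)
    }
    return da.result
}
```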
@@ -1 +1 @@
-{"version":"1.12.0"}
+{"version":"1.13.0"}