[feat] added hypamemory toggle

Author: kwaroran
Date: 2023-06-29 00:59:08 +09:00
parent e6dc0dcb06
commit 68fbb99c42
4 changed files with 14 additions and 12 deletions

View File

@@ -75,4 +75,9 @@
{#if $DataBase.supaMemoryType !== 'none'}
    <span class="text-neutral-200">{language.SuperMemory} Prompt</span>
    <input class="text-neutral-200 mb-4 p-2 bg-transparent input-text focus:bg-selected text-sm" bind:value={$DataBase.supaMemoryPrompt} placeholder="recommended to leave it blank to use default">
{/if}
+{#if ($DataBase.supaMemoryType === 'davinci' || $DataBase.supaMemoryType === 'curie') && $DataBase.useExperimental}
+    <div class="flex">
+        <Check bind:check={$DataBase.hypaMemory} name='Enable HypaMemory'/> <Help key="experimental" />
+    </div>
+{/if}
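The toggle added above only appears when a completion-based summarizer (davinci or curie) is selected and experimental features are enabled. As a plain TypeScript restatement of that template condition (the helper name is hypothetical, not part of this commit):

// Hypothetical helper restating the visibility condition from the template above.
function canShowHypaMemoryToggle(db: { supaMemoryType: string, useExperimental: boolean }): boolean {
    return (db.supaMemoryType === 'davinci' || db.supaMemoryType === 'curie') && db.useExperimental
}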

View File

@@ -296,7 +296,9 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
    }
    if(nowChatroom.supaMemory && db.supaMemoryType !== 'none'){
-       const sp = await supaMemory(chats, currentTokens, maxContextTokens, currentChat, nowChatroom, tokenizer)
+       const sp = await supaMemory(chats, currentTokens, maxContextTokens, currentChat, nowChatroom, tokenizer, {
+           asHyper: db.hypaMemory
+       })
        if(sp.error){
            alertError(sp.error)
            return false
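The changed call above threads the new setting into supaMemory through a seventh options argument. A minimal sketch of the shape that argument appears to take, based only on this diff (the interface name is hypothetical):

// Hypothetical options type for supaMemory's new seventh parameter,
// inferred from the single field used in this commit.
interface SupaMemoryArg {
    asHyper?: boolean   // true when the user enabled db.hypaMemory
}

// The call site then mirrors the change above:
// await supaMemory(chats, currentTokens, maxContextTokens, currentChat, nowChatroom, tokenizer, { asHyper: db.hypaMemory })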

View File

@@ -6,6 +6,7 @@ import { requestChatData } from "../request";
import { cloneDeep } from "lodash";
import { HypaProcesser } from "./hypamemory";
import { stringlizeChat } from "../stringlize";
+import { globalFetch } from "src/ts/storage/globalApi";
export async function supaMemory(
    chats:OpenAIChat[],
@@ -154,28 +155,21 @@ export async function supaMemory(
    if(db.supaMemoryType !== 'subModel'){
        const promptbody = stringlizedChat + '\n\n' + supaPrompt + "\n\nOutput:"
-       const da = await fetch("https://api.openai.com/v1/completions",{
+       const da = await globalFetch("https://api.openai.com/v1/completions",{
            headers: {
                "Content-Type": "application/json",
                "Authorization": "Bearer " + db.supaMemoryKey
            },
            method: "POST",
-           body: JSON.stringify({
+           body: {
                "model": db.supaMemoryType === 'curie' ? "text-curie-001" : "text-davinci-003",
                "prompt": promptbody,
                "max_tokens": 600,
                "temperature": 0
-           })
+           }
        })
-       if(da.status < 200 || da.status >= 300){
-           return {
-               currentTokens: currentTokens,
-               chats: chats,
-               error: "SupaMemory: HTTP: " + await da.text()
-           }
-       }
-       result = (await da.json()).choices[0].text.trim()
+       result = (await da.data).choices[0].text.trim()
    }
    else {
        const promptbody:OpenAIChat[] = [
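The rewrite above swaps the raw fetch call for globalFetch, drops JSON.stringify around the body, removes the manual HTTP status check, and reads the parsed response from da.data. That implies globalFetch serializes a plain-object body and resolves to an object carrying parsed JSON on a data field. A minimal sketch of a wrapper with that contract, stated as an assumption; the real implementation in src/ts/storage/globalApi may differ (for example, proxying or the usePlainFetch setting):

// A minimal sketch of a globalFetch-style wrapper matching how it is called above.
// This is an assumed contract, not RisuAI's actual implementation.
async function globalFetchSketch(
    url: string,
    arg: { method?: string, headers?: Record<string, string>, body?: unknown }
): Promise<{ ok: boolean, data: any }> {
    const res = await fetch(url, {
        method: arg.method ?? "POST",
        headers: arg.headers,
        body: JSON.stringify(arg.body)   // the caller passes a plain object; the wrapper serializes it
    })
    return { ok: res.ok, data: await res.json() }
}

One consequence of the change is that the rewritten code reads da.data without checking a status flag, so a failed request now surfaces when .choices is accessed instead of through the removed error branch.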

View File

@@ -529,6 +529,7 @@ export interface Database{
    useChatSticker:boolean,
    useAdditionalAssetsPreview:boolean,
    usePlainFetch:boolean
+   hypaMemory:boolean
}
interface hordeConfig{
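The Database interface gains a persisted hypaMemory flag. Whether older saves get a default value is not shown in this diff; a purely illustrative defaulting step might look like the following (the function name is hypothetical):

// Hypothetical defaulting for saves created before this field existed;
// the actual default handling (if any) is outside this diff.
function ensureHypaMemoryField(db: { hypaMemory?: boolean }): void {
    db.hypaMemory ??= false
}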