diff --git a/src/lib/Setting/Pages/OtherBotSettings.svelte b/src/lib/Setting/Pages/OtherBotSettings.svelte
index 09ece2ba..1553c8c8 100644
--- a/src/lib/Setting/Pages/OtherBotSettings.svelte
+++ b/src/lib/Setting/Pages/OtherBotSettings.svelte
@@ -75,4 +75,9 @@
{#if $DataBase.supaMemoryType !== 'none'}
{language.SuperMemory} Prompt
+{/if}
+{#if ($DataBase.supaMemoryType === 'davinci' || $DataBase.supaMemoryType === 'curie') && $DataBase.useExperimental}
+
+
+
{/if}
\ No newline at end of file
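
The new block in OtherBotSettings.svelte (its body is elided above) only renders for the completion-model summarizers ('davinci' / 'curie') and only when experimental features are enabled; together with the hypaMemory flag added to the Database interface below, it is presumably where the HypaMemory opt-in lives. A minimal TypeScript sketch of the same visibility condition, using a hypothetical helper name:

    // Sketch only: mirrors the Svelte condition above. `canShowHypaMemorySection` is not a
    // real function in this repo; `Database` comes from src/ts/storage/database.ts.
    function canShowHypaMemorySection(db: Database): boolean {
        return (db.supaMemoryType === 'davinci' || db.supaMemoryType === 'curie')
            && db.useExperimental
    }
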
diff --git a/src/ts/process/index.ts b/src/ts/process/index.ts
index e7269dd7..ba1338f8 100644
--- a/src/ts/process/index.ts
+++ b/src/ts/process/index.ts
@@ -296,7 +296,9 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
}
if(nowChatroom.supaMemory && db.supaMemoryType !== 'none'){
- const sp = await supaMemory(chats, currentTokens, maxContextTokens, currentChat, nowChatroom, tokenizer)
+ const sp = await supaMemory(chats, currentTokens, maxContextTokens, currentChat, nowChatroom, tokenizer, {
+ asHyper: db.hypaMemory
+ })
if(sp.error){
alertError(sp.error)
return false
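
The call site now passes a seventh options argument, { asHyper: db.hypaMemory }. The matching parameter on supaMemory is not visible in the hunk below, so the shape sketched here is an assumption inferred from this call site; only the option name asHyper is taken from the diff itself.

    // Assumed options type for supaMemory's new trailing argument; the alias name is illustrative.
    type SupaMemoryOptions = {
        asHyper?: boolean   // when true, summarization presumably goes through the HypaProcesser (HypaMemory) path
    }

    // Call shape, matching the change above:
    //   await supaMemory(chats, currentTokens, maxContextTokens, currentChat, nowChatroom, tokenizer,
    //                    { asHyper: db.hypaMemory })
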
diff --git a/src/ts/process/memory/supaMemory.ts b/src/ts/process/memory/supaMemory.ts
index 924f8d57..5c6bbc9e 100644
--- a/src/ts/process/memory/supaMemory.ts
+++ b/src/ts/process/memory/supaMemory.ts
@@ -6,6 +6,7 @@ import { requestChatData } from "../request";
import { cloneDeep } from "lodash";
import { HypaProcesser } from "./hypamemory";
import { stringlizeChat } from "../stringlize";
+import { globalFetch } from "src/ts/storage/globalApi";
export async function supaMemory(
chats:OpenAIChat[],
@@ -154,28 +155,21 @@ export async function supaMemory(
if(db.supaMemoryType !== 'subModel'){
const promptbody = stringlizedChat + '\n\n' + supaPrompt + "\n\nOutput:"
- const da = await fetch("https://api.openai.com/v1/completions",{
+ const da = await globalFetch("https://api.openai.com/v1/completions",{
headers: {
"Content-Type": "application/json",
"Authorization": "Bearer " + db.supaMemoryKey
},
method: "POST",
- body: JSON.stringify({
+ body: {
"model": db.supaMemoryType === 'curie' ? "text-curie-001" : "text-davinci-003",
"prompt": promptbody,
"max_tokens": 600,
"temperature": 0
- })
+ }
})
- if(da.status < 200 || da.status >= 300){
- return {
- currentTokens: currentTokens,
- chats: chats,
- error: "SupaMemory: HTTP: " + await da.text()
- }
- }
- result = (await da.json()).choices[0].text.trim()
+ result = (await da.data).choices[0].text.trim()
}
else {
const promptbody:OpenAIChat[] = [
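
The direct fetch call and its manual HTTP status check are replaced by globalFetch from src/ts/storage/globalApi, which accepts a plain object body and resolves to a result whose data field is already parsed (hence JSON.stringify and da.json() disappear). The sketch below shows the assumed result shape and an equivalent request with an explicit failure guard kept; the ok field and the helper name are assumptions, since only data is actually relied on in the diff above.

    // Assumed shape of what globalFetch resolves to; only `data` is used in the diff above.
    interface GlobalFetchResult {
        ok: boolean
        data: any
    }

    // Hypothetical helper illustrating the same request with a failure guard retained.
    async function summarizeWithCompletions(promptbody: string, db: Database): Promise<string | null> {
        const da: GlobalFetchResult = await globalFetch("https://api.openai.com/v1/completions", {
            headers: {
                "Content-Type": "application/json",
                "Authorization": "Bearer " + db.supaMemoryKey
            },
            method: "POST",
            body: {
                model: db.supaMemoryType === 'curie' ? "text-curie-001" : "text-davinci-003",
                prompt: promptbody,
                max_tokens: 600,
                temperature: 0
            }
        })
        if (!da.ok || !da.data?.choices?.[0]?.text) {
            return null   // caller can turn this into the usual SupaMemory error result
        }
        return da.data.choices[0].text.trim()
    }
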
diff --git a/src/ts/storage/database.ts b/src/ts/storage/database.ts
index b944c9e5..2c258a5d 100644
--- a/src/ts/storage/database.ts
+++ b/src/ts/storage/database.ts
@@ -529,6 +529,7 @@ export interface Database{
useChatSticker:boolean,
useAdditionalAssetsPreview:boolean,
usePlainFetch:boolean
+ hypaMemory:boolean
}
interface hordeConfig{
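
hypaMemory is added as a required boolean on the Database interface, so saves created before this change will not carry the field; a small backfill along these lines keeps older databases valid (the helper name is hypothetical, and defaulting to false leaves SupaMemory behaviour unchanged unless the user opts in):

    // Hypothetical migration/default for databases that predate the new flag.
    function applyHypaMemoryDefault(db: Database): Database {
        db.hypaMemory = db.hypaMemory ?? false
        return db
    }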