hypav2 update

incomplete
LightningHyperBlaze45654
2024-06-11 21:06:59 -07:00
parent 9f431f7dba
commit ff1793a123
2 changed files with 45 additions and 31 deletions


@@ -273,6 +273,19 @@
</div>
{:else if $DataBase.supaMemoryType === 'hypaV2'}
<span class="mb-2 text-textcolor2 text-sm text-wrap break-words max-w-full">{language.hypaV2Desc}</span>
<span class="text-textcolor mt-4">{language.SuperMemory} {language.model}</span>
<SelectInput className="mt-2 mb-2" bind:value={$DataBase.supaMemoryType}>
<OptionInput value="distilbart" >distilbart-cnn-6-6 (Free/Local)</OptionInput>
<OptionInput value="instruct35" >OpenAI 3.5 Turbo Instruct</OptionInput>
<OptionInput value="subModel" >{language.submodel}</OptionInput>
</SelectInput>
<span class="text-textcolor">{language.SuperMemory} Prompt</span>
<TextInput size="sm" marginBottom bind:value={$DataBase.supaMemoryPrompt} placeholder="Leave it blank to use default"/>
<span class="text-textcolor">{language.HypaMemory} Model</span>
<SelectInput className="mt-2 mb-2" bind:value={$DataBase.hypaModel}>
<OptionInput value="MiniLM" >MiniLM-L6-v2 (Free / Local)</OptionInput>
<OptionInput value="ada" >OpenAI Ada (Davinci / Curie Only)</OptionInput>
</SelectInput>
<span class="text-textcolor">{language.hypaChunkSize}</span>
<NumberInput size="sm" marginBottom bind:value={$DataBase.hypaChunkSize} min={100} />
<span class="text-textcolor">{language.hypaAllocatedTokens}</span>


@@ -17,37 +17,6 @@ export interface HypaV2Data{
}
async function summary(stringlizedChat:string):Promise<{
    success:boolean
    data:string
}>{
    const promptbody:OpenAIChat[] = [
        {
            role: "user",
            content: stringlizedChat
        },
        {
            role: "system",
            content: "Summarize this roleplay scene in a coherent narrative format for future reference. Summarize what happened, focusing on events and interactions between them. If someone or something is new or changed, include a brief characterization of them."
        }
    ]
    const da = await requestChatData({
        formated: promptbody,
        bias: {},
        useStreaming: false,
        noMultiGen: true
    }, 'model')
    if(da.type === 'fail' || da.type === 'streaming' || da.type === 'multiline'){
        return {
            data: "Hypamemory HTTP: " + da.result,
            success: false
        }
    }
    return {
        data: da.result,
        success: true
    }
}
export async function hypaMemoryV2(
    chats:OpenAIChat[],
@@ -111,6 +80,38 @@ export async function hypaMemoryV2(
        targetId = chat.memo
    }
    async function summary(stringlizedChat:string):Promise<{
        success:boolean
        data:string
    }>{
        const promptbody:OpenAIChat[] = [
            {
                role: "user",
                content: stringlizedChat
            },
            {
                role: "system",
                content: "Summarize this roleplay scene in a coherent narrative format for future reference. Summarize what happened, focusing on events and interactions between them. If someone or something is new or changed, include a brief characterization of them."
            }
        ]
        const da = await requestChatData({
            formated: promptbody,
            bias: {},
            useStreaming: false,
            noMultiGen: true
        }, 'model')
        if(da.type === 'fail' || da.type === 'streaming' || da.type === 'multiline'){
            return {
                data: "Hypamemory HTTP: " + da.result,
                success: false
            }
        }
        return {
            data: da.result,
            success: true
        }
    }
    const stringlizedChat = halfData.map(e => `${e.role}: ${e.content}`).join('\n')
    const summaryData = await summary(stringlizedChat)