[feat] prompt template memory

This commit is contained in:
kwaroran
2023-09-09 08:57:29 +09:00
parent a122e1cc5d
commit 5a0547c688
4 changed files with 32 additions and 5 deletions

View File

@@ -263,6 +263,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
//await tokenize current
let currentTokens = db.maxResponse
let supaMemoryCardUsed = false
//for unexpected error
currentTokens += 50
@@ -377,6 +378,10 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
await tokenizeChatArray(chats)
break
}
case 'memory':{
supaMemoryCardUsed = true
break
}
}
}
}
@@ -388,7 +393,6 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
}
}
}
const examples = exampleMessage(currentChar, db.username)
@@ -487,6 +491,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
return [risuChatParser(v[0].replaceAll("\\n","\n"), {chara: currentChar}),v[1]]
})
let memories:OpenAIChat[] = []
@@ -499,7 +504,16 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
if(v.memo !== 'supaMemory' && v.memo !== 'hypaMemory'){
v.removable = true
}
else if(supaMemoryCardUsed){
memories.push(v)
return {
role: 'system',
content: '',
} as const
}
return v
}).filter((v) => {
return v.content !== ''
})
@@ -669,6 +683,16 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
pushPrompts(chats)
break
}
case 'memory':{
let pmt = cloneDeep(memories)
if(card.innerFormat && pmt.length > 0){
for(let i=0;i<pmt.length;i++){
pmt[i].content = risuChatParser(card.innerFormat, {chara: currentChar}).replace('{{slot}}', pmt[i].content)
}
}
pushPrompts(pmt)
}
}
}
}

View File

@@ -8,7 +8,7 @@ export interface ProomptPlain {
}
export interface ProomptTyped {
type: 'persona'|'description'|'lorebook'|'postEverything'
type: 'persona'|'description'|'lorebook'|'postEverything'|'memory'
innerFormat?: string
}