Add memo to detect supaMemory model requests in the plugin

Bo26fhmC5M
2024-12-04 13:46:35 +09:00
parent dbd7485c50
commit ecc2817a7b


@@ -87,7 +87,7 @@ async function summary(stringlizedChat: string): Promise<{ success: boolean; dat
     let parsedPrompt = parseChatML(supaPrompt.replaceAll('{{slot}}', stringlizedChat))
-    const promptbody: OpenAIChat[] = parsedPrompt ?? [
+    const promptbody: OpenAIChat[] = (parsedPrompt ?? [
         {
             role: "user",
             content: stringlizedChat
@@ -96,7 +96,10 @@ async function summary(stringlizedChat: string): Promise<{ success: boolean; dat
role: "system", role: "system",
content: supaPrompt content: supaPrompt
} }
]; ]).map(message => ({
...message,
memo: "supaPrompt"
}));
console.log("Using submodel: ", db.subModel, "for supaMemory model"); console.log("Using submodel: ", db.subModel, "for supaMemory model");
const da = await requestChatData({ const da = await requestChatData({
formated: promptbody, formated: promptbody,
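
With this change, every message built for the supaMemory summary request carries memo: "supaPrompt". A minimal sketch of how plugin code could branch on that marker follows; the OpenAIChat shape and the helper name are illustrative assumptions based only on the fields visible in this diff, not the project's actual plugin API.

// Sketch only: this interface mirrors the fields seen in the diff above,
// not the repository's real type definition.
interface OpenAIChat {
    role: string;
    content: string;
    memo?: string;
}

// Hypothetical helper: a request whose messages carry memo === "supaPrompt"
// originated from the supaMemory summarizer, so a plugin can detect it.
function isSupaMemoryRequest(messages: OpenAIChat[]): boolean {
    return messages.some(message => message.memo === "supaPrompt");
}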