fix: re‑implement prompt text storage without memo field (#860)

# PR Checklist
- [ ] Have you checked if it works normally in all models? *Ignore this
if it doesn't use models.*
- [x] Have you checked if it works normally in all web, local, and node
hosted versions? If it doesn't, have you blocked it in those versions?
- [x] Have you added type definitions?

# Description
## Related PRs

- This PR replaces the previous implementation in
[#848](https://github.com/kwaroran/RisuAI/pull/848).

---

## Problem

- After [#848](https://github.com/kwaroran/RisuAI/pull/848), input token
counts and `\n\n` spacing changed (e.g. custom API & OpenAI compatible
models).
- Before, I used the `memo` field in each message to help get **the
final, parsed and pre-processed prompt template** from the `formated`
array for storage.
- But the `memo` field is used in `pushPrompts()` to decide when to
merge strings inside `formated`:

```js
// Existing code
if (chat.role === 'system') {
    const endf = formated.at(-1)
    if (endf && endf.role === 'system' && endf.memo === chat.memo && endf.name === chat.name) {
        formated[formated.length - 1].content += '\n\n' + chat.content
    }
}
```
- Therefore, relying on `memo` produced wrong results.


## Solution
- Rolled back all `memo` logic (for a clean diff, compare with 040652d,
the last version before the memo logic was added).
- Added `pushPromptInfoBody()` (independent of global `formated`):

```js
function pushPromptInfoBody(role, fmt, promptBody) {
    if (!fmt.trim()) return;
    promptBody.push({
        role: role,
        content: risuChatParser(fmt),
    });
}
```
- Prompt text is now collected in a single `switch (card.type)` block,
without needing to filter with `memo` or other external fields.


## Result

Prompt text is still stored per message without side effects.


## Considerations (commit `8dc05cc`)

- Extracting only the original `globalNote` is hard unless parser order
is changed.
1. When `positionParser` is called first, sections marked as `position`
are inserted early, making it impossible to extract only the prompt
template afterward.
2. If `risuChatParser` is called separately **just to save the prompt
text**, the saved prompt may differ from the actual prompt sent,
especially in cases involving `{{random}}` values.
- Reordering the function calls might help, but since it could change
the prompt that is actually sent and cause differences from the previous
output, it may be better to leave things as they are.
- Personally, I tend to avoid using `position` in main system prompts;
adding it often makes the main prompt structure a bit messy (Using
`position` is very useful for things that change frequently, such as
bots or personas)
- Global notes are typically used for individual bots, rather than as
prompts shared globally, so skipping them here has no functional
drawback, I think.
- The **Custom Inner Format** fields such as authorNote, persona,
description, and memory mainly serve as labels, so things like
`{{random}}` are rarely used here. For this reason, I used option **2**
(calling `risuChatParser` separately just to save the prompt text) in
this context.

## Final Note

Sorry for any inconvenience! If you have questions or feedback, please
let me know.
This commit is contained in:
kwaroran
2025-05-24 20:26:42 +09:00
committed by GitHub

View File

@@ -400,15 +400,13 @@ export async function sendChat(chatProcessIndex = -1,arg:{
if(currentChat.note){
unformated.authorNote.push({
role: 'system',
content: risuChatParser(currentChat.note, {chara: currentChar}),
memo: 'authornote'
content: risuChatParser(currentChat.note, {chara: currentChar})
})
}
else if(getAuthorNoteDefaultText() !== ''){
unformated.authorNote.push({
role: 'system',
content: risuChatParser(getAuthorNoteDefaultText(), {chara: currentChar}),
memo: 'authornote'
content: risuChatParser(getAuthorNoteDefaultText(), {chara: currentChar})
})
}
@@ -438,8 +436,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{
unformated.description.push({
role: 'system',
content: description,
memo: 'description',
content: description
})
if(nowChatroom.type === 'group'){
@@ -460,8 +457,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{
for(const lorebook of normalActives){
unformated.lorebook.push({
role: lorebook.role,
content: risuChatParser(lorebook.prompt, {chara: currentChar}),
memo: 'lore',
content: risuChatParser(lorebook.prompt, {chara: currentChar})
})
}
@@ -485,8 +481,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{
if(DBState.db.personaPrompt){
unformated.personaPrompt.push({
role: 'system',
content: risuChatParser(getPersonaPrompt(), {chara: currentChar}),
memo: 'persona',
content: risuChatParser(getPersonaPrompt(), {chara: currentChar})
})
}
@@ -511,8 +506,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{
for(const lorebook of postEverythingLorebooks){
unformated.postEverything.push({
role: lorebook.role,
content: risuChatParser(lorebook.prompt, {chara: currentChar}),
memo: 'postEverything',
content: risuChatParser(lorebook.prompt, {chara: currentChar})
})
}
@@ -1100,10 +1094,15 @@ export async function sendChat(chatProcessIndex = -1,arg:{
}
}
type MemoType = 'persona' | 'description' | 'authornote' | 'supaMemory';
const promptBodyMap: Record<MemoType, string[]> = { persona: [], description: [], authornote: [], supaMemory: [] };
function pushPromptInfoBody(memo: MemoType, fmt: string) {
promptBodyMap[memo].push(risuChatParser(fmt));
let promptBodyformatedForChatStore: OpenAIChat[] = []
function pushPromptInfoBody(role: "function" | "system" | "user" | "assistant", fmt: string, promptBody: OpenAIChat[]) {
if(!fmt.trim()){
return
}
promptBody.push({
role: role,
content: risuChatParser(fmt),
})
}
if(promptTemplate){
@@ -1118,7 +1117,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{
pmt[i].content = risuChatParser(positionParser(card.innerFormat), {chara: currentChar}).replace('{{slot}}', pmt[i].content)
if(DBState.db.promptInfoInsideChat && DBState.db.promptTextInfoInsideChat){
pushPromptInfoBody(card.type, card.innerFormat)
pushPromptInfoBody(pmt[i].role, card.innerFormat, promptBodyformatedForChatStore)
}
}
}
@@ -1133,7 +1132,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{
pmt[i].content = risuChatParser(positionParser(card.innerFormat), {chara: currentChar}).replace('{{slot}}', pmt[i].content)
if(DBState.db.promptInfoInsideChat && DBState.db.promptTextInfoInsideChat){
pushPromptInfoBody(card.type, card.innerFormat)
pushPromptInfoBody(pmt[i].role, card.innerFormat, promptBodyformatedForChatStore)
}
}
}
@@ -1148,7 +1147,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{
pmt[i].content = risuChatParser(positionParser(card.innerFormat), {chara: currentChar}).replace('{{slot}}', pmt[i].content || card.defaultText || '')
if(DBState.db.promptInfoInsideChat && DBState.db.promptTextInfoInsideChat){
pushPromptInfoBody(card.type, card.innerFormat)
pushPromptInfoBody(pmt[i].role, card.innerFormat, promptBodyformatedForChatStore)
}
}
}
@@ -1206,6 +1205,10 @@ export async function sendChat(chatProcessIndex = -1,arg:{
content: content
}
if(DBState.db.promptInfoInsideChat && DBState.db.promptTextInfoInsideChat && card.type2 !== 'globalNote'){
pushPromptInfoBody(prompt.role, prompt.content, promptBodyformatedForChatStore)
}
pushPrompts([prompt])
break
}
@@ -1267,7 +1270,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{
pmt[i].content = risuChatParser(card.innerFormat, {chara: currentChar}).replace('{{slot}}', pmt[i].content)
if(DBState.db.promptInfoInsideChat && DBState.db.promptTextInfoInsideChat){
pushPromptInfoBody('supaMemory', card.innerFormat)
pushPromptInfoBody(pmt[i].role, card.innerFormat, promptBodyformatedForChatStore)
}
}
}
@@ -1306,6 +1309,13 @@ export async function sendChat(chatProcessIndex = -1,arg:{
return v
})
if(DBState.db.promptInfoInsideChat && DBState.db.promptTextInfoInsideChat){
promptBodyformatedForChatStore = promptBodyformatedForChatStore.map((v) => {
v.content = v.content.trim()
return v
})
}
if(currentChar.depth_prompt && currentChar.depth_prompt.prompt && currentChar.depth_prompt.prompt.length > 0){
//depth_prompt
@@ -1318,6 +1328,11 @@ export async function sendChat(chatProcessIndex = -1,arg:{
formated = await runLuaEditTrigger(currentChar, 'editRequest', formated)
if(DBState.db.promptInfoInsideChat && DBState.db.promptTextInfoInsideChat){
promptBodyformatedForChatStore = await runLuaEditTrigger(currentChar, 'editRequest', promptBodyformatedForChatStore)
promptInfo.promptText = promptBodyformatedForChatStore
}
//token rechecking
let inputTokens = 0
@@ -1388,29 +1403,6 @@ export async function sendChat(chatProcessIndex = -1,arg:{
return true
}
function isPromptMemo(m: string): m is MemoType {
return ['persona', 'description', 'authornote', 'supaMemory'].includes(m);
}
if(DBState.db.promptInfoInsideChat && DBState.db.promptTextInfoInsideChat){
const promptBodyInfo: OpenAIChat[] = formated.flatMap(format => {
if (isPromptMemo(format.memo)) {
return promptBodyMap[format.memo].map(content => ({
role: format.role,
content,
}))
}
if (format.memo == null) {
return [format]
}
return []
})
promptInfo.promptText = promptBodyInfo
}
let result = ''
let emoChanged = false
let resendChat = false