Merge branch 'main' into patch-2

This commit is contained in:
kwaroran
2025-05-17 01:09:01 +09:00
committed by GitHub
7 changed files with 194 additions and 13 deletions

View File

@@ -1,12 +1,12 @@
import { get, writable } from "svelte/store";
import { type character, type MessageGenerationInfo, type Chat, changeToPreset, setCurrentChat } from "../storage/database.svelte";
import { type character, type MessageGenerationInfo, type Chat, type MessagePresetInfo, changeToPreset, setCurrentChat } from "../storage/database.svelte";
import { DBState } from '../stores.svelte';
import { CharEmotion, selectedCharID } from "../stores.svelte";
import { ChatTokenizer, tokenize, tokenizeNum } from "../tokenizer";
import { language } from "../../lang";
import { alertError, alertToast } from "../alert";
import { loadLoreBookV3Prompt } from "./lorebook.svelte";
import { findCharacterbyId, getAuthorNoteDefaultText, getPersonaPrompt, getUserName, isLastCharPunctuation, trimUntilPunctuation } from "../util";
import { findCharacterbyId, getAuthorNoteDefaultText, getPersonaPrompt, getUserName, isLastCharPunctuation, trimUntilPunctuation, parseToggleSyntax } from "../util";
import { requestChatData } from "./request";
import { stableDiff } from "./stableDiff";
import { processScript, processScriptFull, risuChatParser } from "./scripts";
@@ -30,7 +30,7 @@ import { runLuaEditTrigger } from "./lua";
import { getGlobalChatVar, parseChatML } from "../parser.svelte";
import { getModelInfo, LLMFlags } from "../model/modellist";
import { hypaMemoryV3 } from "./memory/hypav3";
import { getModuleAssets } from "./modules";
import { getModuleAssets, getModuleToggles } from "./modules";
import { getFileSrc, readImage } from "../globalApi.svelte";
export interface OpenAIChat{
@@ -186,6 +186,39 @@ export async function sendChat(chatProcessIndex = -1,arg:{
return v
})
// ─────────────────────────────────────────────────────────
// Snapshot preset name & toggles before sending a message.
// Ensures correct metadata is recorded, even if presets
// change immediately after clicking "send".
//
// Used later in promptInfo assembly (e.g. promptInfo.promptText)
// ─────────────────────────────────────────────────────────
let promptInfo: MessagePresetInfo = {}
let initialPresetNameForPromptInfo = null
let initialPromptTogglesForPromptInfo: {
key: string,
value: string,
}[] = []
if(DBState.db.promptInfoInsideChat){
initialPresetNameForPromptInfo = DBState.db.botPresets[DBState.db.botPresetsId]?.name ?? ''
initialPromptTogglesForPromptInfo = parseToggleSyntax(DBState.db.customPromptTemplateToggle + getModuleToggles())
.flatMap(toggle => {
const raw = DBState.db.globalChatVariables[`toggle_${toggle.key}`]
if (toggle.type === 'select' || toggle.type === 'text') {
return [{ key: toggle.value, value: toggle.options[raw] }];
}
if (raw === '1') {
return [{ key: toggle.value, value: 'ON' }];
}
return [];
})
promptInfo = {
promptName: initialPresetNameForPromptInfo,
promptToggles: initialPromptTogglesForPromptInfo,
}
}
// ─────────────────────────────────────────────────────────────
let currentChar:character
let caculatedChatTokens = 0
@@ -367,13 +400,15 @@ export async function sendChat(chatProcessIndex = -1,arg:{
if(currentChat.note){
unformated.authorNote.push({
role: 'system',
content: risuChatParser(currentChat.note, {chara: currentChar})
content: risuChatParser(currentChat.note, {chara: currentChar}),
memo: 'authornote'
})
}
else if(getAuthorNoteDefaultText() !== ''){
unformated.authorNote.push({
role: 'system',
content: risuChatParser(getAuthorNoteDefaultText(), {chara: currentChar})
content: risuChatParser(getAuthorNoteDefaultText(), {chara: currentChar}),
memo: 'authornote'
})
}
@@ -403,7 +438,8 @@ export async function sendChat(chatProcessIndex = -1,arg:{
unformated.description.push({
role: 'system',
content: description
content: description,
memo: 'description',
})
if(nowChatroom.type === 'group'){
@@ -424,7 +460,8 @@ export async function sendChat(chatProcessIndex = -1,arg:{
for(const lorebook of normalActives){
unformated.lorebook.push({
role: lorebook.role,
content: risuChatParser(lorebook.prompt, {chara: currentChar})
content: risuChatParser(lorebook.prompt, {chara: currentChar}),
memo: 'lore',
})
}
@@ -448,7 +485,8 @@ export async function sendChat(chatProcessIndex = -1,arg:{
if(DBState.db.personaPrompt){
unformated.personaPrompt.push({
role: 'system',
content: risuChatParser(getPersonaPrompt(), {chara: currentChar})
content: risuChatParser(getPersonaPrompt(), {chara: currentChar}),
memo: 'persona',
})
}
@@ -473,7 +511,8 @@ export async function sendChat(chatProcessIndex = -1,arg:{
for(const lorebook of postEverythingLorebooks){
unformated.postEverything.push({
role: lorebook.role,
content: risuChatParser(lorebook.prompt, {chara: currentChar})
content: risuChatParser(lorebook.prompt, {chara: currentChar}),
memo: 'postEverything',
})
}
@@ -1061,6 +1100,12 @@ export async function sendChat(chatProcessIndex = -1,arg:{
}
}
// Prompt-section categories whose rendered bodies are captured for
// per-message prompt metadata (assembled into promptInfo.promptText).
type MemoType = 'persona' | 'description' | 'authornote' | 'supaMemory';

// Accumulator: parsed card text, bucketed by prompt section.
const promptBodyMap: Record<MemoType, string[]> = {
    persona: [],
    description: [],
    authornote: [],
    supaMemory: [],
};

// Parse a template card's format string and record the result under
// the given section bucket.
function pushPromptInfoBody(memo: MemoType, format: string) {
    const parsed = risuChatParser(format);
    promptBodyMap[memo].push(parsed);
}
if(promptTemplate){
const template = promptTemplate
@@ -1071,6 +1116,10 @@ export async function sendChat(chatProcessIndex = -1,arg:{
if(card.innerFormat && pmt.length > 0){
for(let i=0;i<pmt.length;i++){
pmt[i].content = risuChatParser(positionParser(card.innerFormat), {chara: currentChar}).replace('{{slot}}', pmt[i].content)
if(DBState.db.promptInfoInsideChat && DBState.db.promptTextInfoInsideChat){
pushPromptInfoBody(card.type, card.innerFormat)
}
}
}
@@ -1082,6 +1131,10 @@ export async function sendChat(chatProcessIndex = -1,arg:{
if(card.innerFormat && pmt.length > 0){
for(let i=0;i<pmt.length;i++){
pmt[i].content = risuChatParser(positionParser(card.innerFormat), {chara: currentChar}).replace('{{slot}}', pmt[i].content)
if(DBState.db.promptInfoInsideChat && DBState.db.promptTextInfoInsideChat){
pushPromptInfoBody(card.type, card.innerFormat)
}
}
}
@@ -1093,6 +1146,10 @@ export async function sendChat(chatProcessIndex = -1,arg:{
if(card.innerFormat && pmt.length > 0){
for(let i=0;i<pmt.length;i++){
pmt[i].content = risuChatParser(positionParser(card.innerFormat), {chara: currentChar}).replace('{{slot}}', pmt[i].content || card.defaultText || '')
if(DBState.db.promptInfoInsideChat && DBState.db.promptTextInfoInsideChat){
pushPromptInfoBody(card.type, card.innerFormat)
}
}
}
@@ -1208,6 +1265,10 @@ export async function sendChat(chatProcessIndex = -1,arg:{
if(card.innerFormat && pmt.length > 0){
for(let i=0;i<pmt.length;i++){
pmt[i].content = risuChatParser(card.innerFormat, {chara: currentChar}).replace('{{slot}}', pmt[i].content)
if(DBState.db.promptInfoInsideChat && DBState.db.promptTextInfoInsideChat){
pushPromptInfoBody('supaMemory', card.innerFormat)
}
}
}
@@ -1327,6 +1388,29 @@ export async function sendChat(chatProcessIndex = -1,arg:{
return true
}
// Type guard: narrows an arbitrary message memo string to one of the
// prompt-section categories tracked in promptBodyMap.
function isPromptMemo(m: string): m is MemoType {
    switch (m) {
        case 'persona':
        case 'description':
        case 'authornote':
        case 'supaMemory':
            return true;
        default:
            return false;
    }
}
if(DBState.db.promptInfoInsideChat && DBState.db.promptTextInfoInsideChat){
const promptBodyInfo: OpenAIChat[] = formated.flatMap(format => {
if (isPromptMemo(format.memo)) {
return promptBodyMap[format.memo].map(content => ({
role: format.role,
content,
}))
}
if (format.memo == null) {
return [format]
}
return []
})
promptInfo.promptText = promptBodyInfo
}
let result = ''
let emoChanged = false
let resendChat = false
@@ -1353,6 +1437,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{
saying: currentChar.chaId,
time: Date.now(),
generationInfo,
promptInfo,
})
}
DBState.db.characters[selectedChar].chats[selectedChat].isStreaming = true
@@ -1432,7 +1517,8 @@ export async function sendChat(chatProcessIndex = -1,arg:{
data: result,
saying: currentChar.chaId,
time: Date.now(),
generationInfo
generationInfo,
promptInfo,
}
if(inlayResult.promise){
const p = await inlayResult.promise
@@ -1445,7 +1531,8 @@ export async function sendChat(chatProcessIndex = -1,arg:{
data: result,
saying: currentChar.chaId,
time: Date.now(),
generationInfo
generationInfo,
promptInfo,
})
const ind = DBState.db.characters[selectedChar].chats[selectedChat].message.length - 1
if(inlayResult.promise){

View File

@@ -1,6 +1,6 @@
import { getChatVar, hasher, setChatVar, getGlobalChatVar, type simpleCharacterArgument } from "../parser.svelte";
import { getChatVar, hasher, setChatVar, getGlobalChatVar, type simpleCharacterArgument, risuChatParser } from "../parser.svelte";
import { LuaEngine, LuaFactory } from "wasmoon";
import { getCurrentCharacter, getCurrentChat, getDatabase, setCurrentChat, setDatabase, type Chat, type character, type groupChat } from "../storage/database.svelte";
import { getCurrentCharacter, getCurrentChat, getDatabase, setDatabase, type Chat, type character, type groupChat } from "../storage/database.svelte";
import { get } from "svelte/store";
import { ReloadGUIPointer, selectedCharID } from "../stores.svelte";
import { alertSelect, alertError, alertInput, alertNormal } from "../alert";
@@ -14,6 +14,7 @@ import { getModuleTriggers } from "./modules";
import { Mutex } from "../mutex";
import { tokenize } from "../tokenizer";
import { fetchNative } from "../globalApi.svelte";
import { getPersonaPrompt, getUserName } from '../util';
let luaFactory:LuaFactory
let LuaSafeIds = new Set<string>()
@@ -460,6 +461,26 @@ export async function runLua(code:string, arg:{
return char.firstMessage
})
luaEngine.global.set('getPersonaName', (id:string) => {
if(!LuaSafeIds.has(id)){
return
}
return getUserName()
})
luaEngine.global.set('getPersonaDescription', (id:string) => {
if(!LuaSafeIds.has(id)){
return
}
const db = getDatabase()
const selectedChar = get(selectedCharID)
const char = db.characters[selectedChar]
return risuChatParser(getPersonaPrompt(), { chara: char })
})
luaEngine.global.set('getBackgroundEmbedding', async (id:string) => {
if(!LuaSafeIds.has(id)){
return

View File

@@ -1023,6 +1023,9 @@ export interface Database{
}[]
igpPrompt:string
useTokenizerCaching:boolean
showMenuHypaMemoryModal:boolean
promptInfoInsideChat:boolean
promptTextInfoInsideChat:boolean
}
interface SeparateParameters{
@@ -1501,6 +1504,7 @@ export interface Message{
chatId?:string
time?: number
generationInfo?: MessageGenerationInfo
promptInfo?: MessagePresetInfo
name?:string
otherUser?:boolean
}
@@ -1513,6 +1517,12 @@ export interface MessageGenerationInfo{
maxContext?: number
}
// Snapshot of the preset state taken when a message is sent, stored on
// the Message so the originating preset/toggles can be displayed even if
// settings change afterwards. All fields are optional: they are only
// populated when the corresponding promptInfo settings are enabled.
export interface MessagePresetInfo{
    // Name of the bot preset active at send time ('' if unnamed)
    promptName?: string,
    // Resolved toggle labels and their display values (e.g. 'ON')
    promptToggles?: {key: string, value: string}[],
    // Rendered prompt sections; presumably only filled when
    // promptTextInfoInsideChat is enabled — confirm against sendChat
    promptText?: OpenAIChat[],
}
interface AINsettings{
top_p: number,
rep_pen: number,
@@ -1879,6 +1889,7 @@ import type { Parameter } from '../process/request';
import type { HypaModel } from '../process/memory/hypamemory';
import type { SerializableHypaV3Data } from '../process/memory/hypav3';
import { defaultHotkeys, type Hotkey } from '../defaulthotkeys';
import type { OpenAIChat } from '../process/index.svelte';
export async function downloadPreset(id:number, type:'json'|'risupreset'|'return' = 'json'){
saveCurrentPreset()