diff --git a/src/lang/en.ts b/src/lang/en.ts index 5b47a1b2..8a389823 100644 --- a/src/lang/en.ts +++ b/src/lang/en.ts @@ -241,6 +241,7 @@ export const languageEnglish = { finallyOption1Desc: "This will enable advanced tools, but it will make the UI more complex. Recommended for advanced users.", finallyOption2: "No", finallyOption2Desc: "This will disable advanced tools, and make the UI more simple. Recommended for new users.", + igpPrompt: "If IGP Prompt is not blank, it will be executed as an emotion model after the main model finishes, appending its result to the main model's response." }, triggerDesc: { v2Header: "Header", @@ -1113,4 +1114,5 @@ export const languageEnglish = { fallbackWhenBlankResponse: "Fallback When Blank Response", doNotChangeFallbackModels: "Do Not Change Fallback Models on Preset Change", customModels: "Custom Models", + igpPrompt: "IGP Prompt" } diff --git a/src/ts/model/modellist.ts b/src/ts/model/modellist.ts index 35b3413b..f83eb636 100644 --- a/src/ts/model/modellist.ts +++ b/src/ts/model/modellist.ts @@ -988,7 +988,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-2.5-pro-exp-03-25', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.geminiBlockOff,LLMFlags.hasImageInput, LLMFlags.hasImageOutput, LLMFlags.poolSupported, LLMFlags.hasAudioInput, LLMFlags.hasVideoInput, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], + flags: [LLMFlags.geminiBlockOff,LLMFlags.hasImageInput, LLMFlags.poolSupported, LLMFlags.hasAudioInput, LLMFlags.hasVideoInput, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole, LLMFlags.geminiThinking], parameters: ['temperature', 'top_k', 'top_p', 'presence_penalty', 'frequency_penalty'], tokenizer: LLMTokenizer.GoogleCloud, recommended: true diff --git a/src/ts/process/index.svelte.ts b/src/ts/process/index.svelte.ts index 8ef7d485..08e02c07 100644 --- a/src/ts/process/index.svelte.ts +++ b/src/ts/process/index.svelte.ts @@ -27,7 +27,7 @@ 
import { runImageEmbedding } from "./transformers"; import { hanuraiMemory } from "./memory/hanuraiMemory"; import { hypaMemoryV2 } from "./memory/hypav2"; import { runLuaEditTrigger } from "./lua"; -import { parseChatML } from "../parser.svelte"; +import { getGlobalChatVar, parseChatML } from "../parser.svelte"; import { getModelInfo, LLMFlags } from "../model/modellist"; import { hypaMemoryV3 } from "./memory/hypav3"; import { getModuleAssets } from "./modules"; @@ -1494,6 +1494,18 @@ export async function sendChat(chatProcessIndex = -1,arg:{ }) } + const igp = risuChatParser(DBState.db.igpPrompt ?? "") + + if(igp){ + const igpFormated = parseChatML(igp) + const rq = await requestChatData({ + formated: igpFormated, + bias: {} + },'emotion', abortSignal) + + DBState.db.characters[selectedChar].chats[selectedChat].message[DBState.db.characters[selectedChar].chats[selectedChat].message.length - 1].data += rq + } + if(resendChat){ doingChat.set(false) return await sendChat(chatProcessIndex, { diff --git a/src/ts/storage/database.svelte.ts b/src/ts/storage/database.svelte.ts index e3fffd39..70976ae7 100644 --- a/src/ts/storage/database.svelte.ts +++ b/src/ts/storage/database.svelte.ts @@ -980,6 +980,7 @@ export interface Database{ params: string flags: LLMFlags[] }[] + igpPrompt:string } interface SeparateParameters{