Add IGP prompt

This commit is contained in:
kwaroran
2025-03-27 19:13:10 +09:00
parent 294b0e2a52
commit 02401d4d09
4 changed files with 17 additions and 2 deletions

View File

@@ -241,6 +241,7 @@ export const languageEnglish = {
finallyOption1Desc: "This will enable advanced tools, but it will make the UI more complex. Recommended for advanced users.",
finallyOption2: "No",
finallyOption2Desc: "This will disable advanced tools, and make the UI more simple. Recommended for new users.",
igpPrompt: "If the IGP Prompt is not blank, it will be executed as an emotion model after the main model request completes, and its result will be appended to the main model's response."
},
triggerDesc: {
v2Header: "Header",
@@ -1113,4 +1114,5 @@ export const languageEnglish = {
fallbackWhenBlankResponse: "Fallback When Blank Response",
doNotChangeFallbackModels: "Do Not Change Fallback Models on Preset Change",
customModels: "Custom Models",
igpPrompt: "IGP Prompt"
}

View File

@@ -988,7 +988,7 @@ export const LLMModels: LLMModel[] = [
id: 'gemini-2.5-pro-exp-03-25',
provider: LLMProvider.GoogleCloud,
format: LLMFormat.GoogleCloud,
flags: [LLMFlags.geminiBlockOff,LLMFlags.hasImageInput, LLMFlags.hasImageOutput, LLMFlags.poolSupported, LLMFlags.hasAudioInput, LLMFlags.hasVideoInput, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
flags: [LLMFlags.geminiBlockOff,LLMFlags.hasImageInput, LLMFlags.poolSupported, LLMFlags.hasAudioInput, LLMFlags.hasVideoInput, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole, LLMFlags.geminiThinking],
parameters: ['temperature', 'top_k', 'top_p', 'presence_penalty', 'frequency_penalty'],
tokenizer: LLMTokenizer.GoogleCloud,
recommended: true

View File

@@ -27,7 +27,7 @@ import { runImageEmbedding } from "./transformers";
import { hanuraiMemory } from "./memory/hanuraiMemory";
import { hypaMemoryV2 } from "./memory/hypav2";
import { runLuaEditTrigger } from "./lua";
import { parseChatML } from "../parser.svelte";
import { getGlobalChatVar, parseChatML } from "../parser.svelte";
import { getModelInfo, LLMFlags } from "../model/modellist";
import { hypaMemoryV3 } from "./memory/hypav3";
import { getModuleAssets } from "./modules";
@@ -1494,6 +1494,18 @@ export async function sendChat(chatProcessIndex = -1,arg:{
})
}
const igp = risuChatParser(DBState.db.igpPrompt ?? "")
if(igp){
const igpFormated = parseChatML(igp)
const rq = await requestChatData({
formated: igpFormated,
bias: {}
},'emotion', abortSignal)
DBState.db.characters[selectedChar].chats[selectedChat].message[DBState.db.characters[selectedChar].chats[selectedChat].message.length - 1].data += rq
}
if(resendChat){
doingChat.set(false)
return await sendChat(chatProcessIndex, {

View File

@@ -980,6 +980,7 @@ export interface Database{
params: string
flags: LLMFlags[]
}[]
igpPrompt:string
}
interface SeparateParameters{