diff --git a/src/lang/en.ts b/src/lang/en.ts
index a034db13..a553509d 100644
--- a/src/lang/en.ts
+++ b/src/lang/en.ts
@@ -843,4 +843,5 @@ export const languageEnglish = {
     showPromptComparison: "Show Prompt Comparison",
     hypaV3Desc: "HypaMemory V3 is a long-term memory system that use both summarized data and vector search.",
     inlayErrorResponse: "Inlay Error Response",
+    triggerEffRunAxLLM: "Run Auxiliary Model",
 }
\ No newline at end of file
diff --git a/src/lang/ko.ts b/src/lang/ko.ts
index 9d3cfa0b..9c221a44 100644
--- a/src/lang/ko.ts
+++ b/src/lang/ko.ts
@@ -767,4 +767,5 @@ export const languageKorean = {
     "translateBeforeHTMLFormatting": "HTML 포맷 전 번역",
     "retranslate": "다시 번역",
     "loading": "로딩중",
+    "triggerEffRunAxLLM": "보조 모델 실행",
 }
\ No newline at end of file
diff --git a/src/lib/SideBars/Scripts/TriggerData.svelte b/src/lib/SideBars/Scripts/TriggerData.svelte
index 1f75fabb..b3906fec 100644
--- a/src/lib/SideBars/Scripts/TriggerData.svelte
+++ b/src/lib/SideBars/Scripts/TriggerData.svelte
@@ -304,6 +304,13 @@
                     index: ''
                 }
             }
+            else if(effect.type === 'runAxLLM'){
+                value.effect[i] = {
+                    type: 'runAxLLM',
+                    value: '',
+                    inputVar: ''
+                }
+            }
         }}>
             {language.triggerEffSetVar}
             {language.triggerEffImperson}
@@ -319,6 +326,7 @@
             {language.runImgGen}
             {language.cutChat}
             {language.modifyChat}
+            {language.triggerEffRunAxLLM}
 
         {#if (value.type !== 'start' && (effect.type === 'systemprompt' || effect.type === 'stop')) ||
@@ -333,7 +341,8 @@
             effect.type === 'showAlert' ||
             effect.type === 'sendAIprompt' ||
             effect.type === 'extractRegex' ||
-            effect.type === 'runImgGen'
+            effect.type === 'runImgGen' ||
+            effect.type === 'runAxLLM'
         )
         }
             {language.triggerLowLevelOnly}
@@ -459,6 +468,14 @@
             {/if}
 
+
+            {#if effect.type === 'runAxLLM'}
+                {language.prompt}
+
+
+                {language.resultStoredVar}
+
+            {/if}
         {/each}
diff --git a/src/ts/process/lua.ts b/src/ts/process/lua.ts
index c7e1f636..9b7daf2f 100644
--- a/src/ts/process/lua.ts
+++ b/src/ts/process/lua.ts
@@ -419,6 +419,64 @@ export async function runLua(code:string, arg:{
         return true
     })
 
+    luaEngine.global.set('axLLMMain', async (id:string, promptStr:string) => {
+        let prompt:{
+            role: string,
+            content: string
+        }[] = JSON.parse(promptStr)
+        if(!LuaLowLevelIds.has(id)){
+            return
+        }
+        let promptbody:OpenAIChat[] = prompt.map((dict) => {
+            let role:'system'|'user'|'assistant' = 'assistant'
+            switch(dict['role']){
+                case 'system':
+                case 'sys':
+                    role = 'system'
+                    break
+                case 'user':
+                    role = 'user'
+                    break
+                case 'assistant':
+                case 'bot':
+                case 'char':{
+                    role = 'assistant'
+                    break
+                }
+            }
+
+            return {
+                content: dict['content'] ?? '',
+                role: role,
+            }
+        })
+        const result = await requestChatData({
+            formated: promptbody,
+            bias: {},
+            useStreaming: false,
+            noMultiGen: true,
+        }, 'otherAx')
+
+        if(result.type === 'fail'){
+            return JSON.stringify({
+                success: false,
+                result: 'Error: ' + result.result
+            })
+        }
+
+        if(result.type === 'streaming' || result.type === 'multiline'){
+            return JSON.stringify({
+                success: false,
+                result: result.result
+            })
+        }
+
+        return JSON.stringify({
+            success: true,
+            result: result.result
+        })
+    })
+
     await luaEngine.doString(luaCodeWarper(code))
     luaEngineState.code = code
 }
@@ -538,6 +596,10 @@ function LLM(id, prompt)
     return json.decode(LLMMain(id, json.encode(prompt)):await())
 end
 
+function axLLM(id, prompt)
+    return json.decode(axLLMMain(id, json.encode(prompt)):await())
+end
+
 local editRequestFuncs = {}
 local editDisplayFuncs = {}
 local editInputFuncs = {}
diff --git a/src/ts/process/triggers.ts b/src/ts/process/triggers.ts
index ac4d24a8..1a4dee16 100644
--- a/src/ts/process/triggers.ts
+++ b/src/ts/process/triggers.ts
@@ -25,7 +25,7 @@ export interface triggerscript{
 
 export type triggerCondition = triggerConditionsVar|triggerConditionsExists|triggerConditionsChatIndex
 
-export type triggerEffect = triggerCode|triggerEffectCutChat|triggerEffectModifyChat|triggerEffectImgGen|triggerEffectRegex|triggerEffectRunLLM|triggerEffectCheckSimilarity|triggerEffectSendAIprompt|triggerEffectShowAlert|triggerEffectSetvar|triggerEffectSystemPrompt|triggerEffectImpersonate|triggerEffectCommand|triggerEffectStop|triggerEffectRunTrigger
+export type triggerEffect = triggerCode|triggerEffectCutChat|triggerEffectModifyChat|triggerEffectImgGen|triggerEffectRegex|triggerEffectRunLLM|triggerEffectCheckSimilarity|triggerEffectSendAIprompt|triggerEffectShowAlert|triggerEffectSetvar|triggerEffectSystemPrompt|triggerEffectImpersonate|triggerEffectCommand|triggerEffectStop|triggerEffectRunTrigger|triggerEffectRunAxLLM
 
 export type triggerConditionsVar = {
     type:'var'|'value'
@@ -138,6 +138,12 @@ export interface triggerEffectRunLLM{
     inputVar: string
 }
 
+export interface triggerEffectRunAxLLM{
+    type: 'runAxLLM',
+    value: string,
+    inputVar: string
+}
+
 export type additonalSysPrompt = {
     start:string,
     historyend: string,
@@ -504,6 +510,7 @@ export async function runTrigger(char:character,mode:triggerMode, arg:{
                 setVar(effect.inputVar, res)
                 break
             }
+
             case 'triggerlua':{
                 const triggerCodeResult = await runLua(effect.code,{
                     lowLevelAccess: trigger.lowLevelAccess,
@@ -520,6 +527,33 @@
                 chat = getCurrentChat()
                 break
             }
+
+            case 'runAxLLM':{
+                if(!trigger.lowLevelAccess){
+                    break
+                }
+                const effectValue = risuChatParser(effect.value,{chara:char})
+                const varName = effect.inputVar
+                let promptbody:OpenAIChat[] = parseChatML(effectValue)
+                if(!promptbody){
+                    promptbody = [{role:'user', content:effectValue}]
+                }
+                const result = await requestChatData({
+                    formated: promptbody,
+                    bias: {},
+                    useStreaming: false,
+                    noMultiGen: true,
+                }, 'otherAx')
+
+                if(result.type === 'fail' || result.type === 'streaming' || result.type === 'multiline'){
+                    setVar(varName, 'Error: ' + result.result)
+                }
+                else{
+                    setVar(varName, result.result)
+                }
+
+                break
+            }
         }
     }
 }
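Usage note: the sketch below shows how the new axLLM helper could be called from a low-level Lua trigger script. It is a minimal example, not part of the patch; summarizeWithAuxModel and the triggerId parameter are illustrative names, assuming the script receives the same low-level trigger id that the existing LLM() helper expects. What the patch itself guarantees is that axLLM(id, prompt) JSON-encodes the prompt table, awaits axLLMMain (which routes the request to the auxiliary model via requestChatData(..., 'otherAx')), and returns a decoded table of the form { success, result }.

-- Minimal sketch (not part of the diff): triggerId is assumed to be the
-- low-level trigger id handed to the Lua script, the same id LLM() uses.
local function summarizeWithAuxModel(triggerId, textToSummarize)
    -- axLLM() json-encodes this table, awaits axLLMMain, and json-decodes
    -- the reply into { success = boolean, result = string }.
    local res = axLLM(triggerId, {
        { role = "system", content = "Summarize the user's text in one sentence." },
        -- 'sys', 'bot' and 'char' are also accepted role aliases
        { role = "user", content = textToSummarize },
    })

    if res.success then
        return res.result                      -- text returned by the auxiliary model
    else
        return "axLLM failed: " .. res.result  -- on failure, result carries the error text
    end
end

The UI-level runAxLLM effect added in triggers.ts behaves analogously: it requires lowLevelAccess, parses the effect value with parseChatML (falling back to a single user message), and stores the auxiliary model's reply, or an 'Error: ...' string on failure, in the variable named by inputVar.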