feat: add trigger effect RunAxLLM

Bo26fhmC5M
2025-02-08 17:15:45 +09:00
parent 89c34dfdd7
commit fb941148e0
5 changed files with 117 additions and 2 deletions
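This adds a runAxLLM trigger effect that sends a prompt to the auxiliary ('otherAx') model through requestChatData and stores the response, or an error string, in the chat variable named by inputVar; the effect only runs for triggers with lowLevelAccess enabled. A matching axLLM helper is exposed to low-level-access Lua trigger scripts. As a rough sketch based on the triggerEffectRunAxLLM interface added below (the prompt text and variable name are illustrative, not from the diff), an effect entry could look like:

const effect: triggerEffectRunAxLLM = {
    type: 'runAxLLM',
    value: 'Summarize the last reply in one sentence.', // run through risuChatParser, then parseChatML (falls back to a single user message)
    inputVar: 'axSummary' // chat variable that receives the model output or an error string
}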


@@ -419,6 +419,64 @@ export async function runLua(code:string, arg:{
        return true
    })

    luaEngine.global.set('axLLMMain', async (id:string, promptStr:string) => {
        let prompt:{
            role: string,
            content: string
        }[] = JSON.parse(promptStr)
        if(!LuaLowLevelIds.has(id)){
            return
        }
        let promptbody:OpenAIChat[] = prompt.map((dict) => {
            let role:'system'|'user'|'assistant' = 'assistant'
            switch(dict['role']){
                case 'system':
                case 'sys':
                    role = 'system'
                    break
                case 'user':
                    role = 'user'
                    break
                case 'assistant':
                case 'bot':
                case 'char':{
                    role = 'assistant'
                    break
                }
            }
            return {
                content: dict['content'] ?? '',
                role: role,
            }
        })
        const result = await requestChatData({
            formated: promptbody,
            bias: {},
            useStreaming: false,
            noMultiGen: true,
        }, 'otherAx')

        if(result.type === 'fail'){
            return JSON.stringify({
                success: false,
                result: 'Error: ' + result.result
            })
        }
        if(result.type === 'streaming' || result.type === 'multiline'){
            return JSON.stringify({
                success: false,
                result: result.result
            })
        }
        return JSON.stringify({
            success: true,
            result: result.result
        })
    })

    await luaEngine.doString(luaCodeWarper(code))
    luaEngineState.code = code
}
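For reference, a minimal sketch (not part of the diff) of the JSON contract the axLLMMain bridge above uses; the prompt content is illustrative:

// Input from the Lua side: a JSON-encoded array of { role, content } entries.
// 'sys' is accepted as an alias for 'system'; 'bot' and 'char' map to 'assistant'.
const examplePromptStr = JSON.stringify([
    { role: 'sys', content: 'Answer briefly.' },
    { role: 'user', content: 'Hello?' }
])
// Return value: a JSON-encoded object of the shape { success: boolean, result: string },
// where success is false and result carries an error message when the request fails
// or yields a streaming/multiline result.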
@@ -538,6 +596,10 @@ function LLM(id, prompt)
    return json.decode(LLMMain(id, json.encode(prompt)):await())
end
function axLLM(id, prompt)
    return json.decode(axLLMMain(id, json.encode(prompt)):await())
end
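-- Illustrative usage sketch, not part of this commit: how a low-level-access
-- trigger script could call the new axLLM wrapper above. The triggerId
-- parameter name is an assumption for the example.
local function exampleAxLLMUsage(triggerId)
    local res = axLLM(triggerId, {
        { role = "system", content = "Answer in one short sentence." },
        { role = "user",   content = "Hello?" }
    })
    -- res.success is true when the request succeeded; res.result then holds the
    -- model output, otherwise an error message.
    return res
end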
local editRequestFuncs = {}
local editDisplayFuncs = {}
local editInputFuncs = {}


@@ -25,7 +25,7 @@ export interface triggerscript{
export type triggerCondition = triggerConditionsVar|triggerConditionsExists|triggerConditionsChatIndex
-export type triggerEffect = triggerCode|triggerEffectCutChat|triggerEffectModifyChat|triggerEffectImgGen|triggerEffectRegex|triggerEffectRunLLM|triggerEffectCheckSimilarity|triggerEffectSendAIprompt|triggerEffectShowAlert|triggerEffectSetvar|triggerEffectSystemPrompt|triggerEffectImpersonate|triggerEffectCommand|triggerEffectStop|triggerEffectRunTrigger
+export type triggerEffect = triggerCode|triggerEffectCutChat|triggerEffectModifyChat|triggerEffectImgGen|triggerEffectRegex|triggerEffectRunLLM|triggerEffectCheckSimilarity|triggerEffectSendAIprompt|triggerEffectShowAlert|triggerEffectSetvar|triggerEffectSystemPrompt|triggerEffectImpersonate|triggerEffectCommand|triggerEffectStop|triggerEffectRunTrigger|triggerEffectRunAxLLM
export type triggerConditionsVar = {
    type:'var'|'value'
@@ -138,6 +138,12 @@ export interface triggerEffectRunLLM{
    inputVar: string
}

export interface triggerEffectRunAxLLM{
    type: 'runAxLLM',
    value: string,
    inputVar: string
}

export type additonalSysPrompt = {
    start:string,
    historyend: string,
@@ -504,6 +510,7 @@ export async function runTrigger(char:character,mode:triggerMode, arg:{
                setVar(effect.inputVar, res)
                break
            }
            case 'triggerlua':{
                const triggerCodeResult = await runLua(effect.code,{
                    lowLevelAccess: trigger.lowLevelAccess,
@@ -520,6 +527,33 @@ export async function runTrigger(char:character,mode:triggerMode, arg:{
                chat = getCurrentChat()
                break
            }
            case 'runAxLLM':{
                if(!trigger.lowLevelAccess){
                    break
                }
                const effectValue = risuChatParser(effect.value,{chara:char})
                const varName = effect.inputVar
                let promptbody:OpenAIChat[] = parseChatML(effectValue)
                if(!promptbody){
                    promptbody = [{role:'user', content:effectValue}]
                }
                const result = await requestChatData({
                    formated: promptbody,
                    bias: {},
                    useStreaming: false,
                    noMultiGen: true,
                }, 'otherAx')

                if(result.type === 'fail' || result.type === 'streaming' || result.type === 'multiline'){
                    setVar(varName, 'Error: ' + result.result)
                }
                else{
                    setVar(varName, result.result)
                }
                break
            }
        }
    }
}