This commit is contained in:
Kwaroran
2025-02-15 21:32:11 +09:00
27 changed files with 1264 additions and 496 deletions

View File

@@ -419,6 +419,64 @@ export async function runLua(code:string, arg:{
return true
})
luaEngine.global.set('axLLMMain', async (id:string, promptStr:string) => {
let prompt:{
role: string,
content: string
}[] = JSON.parse(promptStr)
if(!LuaLowLevelIds.has(id)){
return
}
let promptbody:OpenAIChat[] = prompt.map((dict) => {
let role:'system'|'user'|'assistant' = 'assistant'
switch(dict['role']){
case 'system':
case 'sys':
role = 'system'
break
case 'user':
role = 'user'
break
case 'assistant':
case 'bot':
case 'char':{
role = 'assistant'
break
}
}
return {
content: dict['content'] ?? '',
role: role,
}
})
const result = await requestChatData({
formated: promptbody,
bias: {},
useStreaming: false,
noMultiGen: true,
}, 'otherAx')
if(result.type === 'fail'){
return JSON.stringify({
success: false,
result: 'Error: ' + result.result
})
}
if(result.type === 'streaming' || result.type === 'multiline'){
return JSON.stringify({
success: false,
result: result.result
})
}
return JSON.stringify({
success: true,
result: result.result
})
})
await luaEngine.doString(luaCodeWarper(code))
luaEngineState.code = code
}
@@ -538,6 +596,10 @@ function LLM(id, prompt)
return json.decode(LLMMain(id, json.encode(prompt)):await())
end
-- Send a prompt table to the auxiliary ("ax") LLM bridge and return the
-- decoded response table. The prompt is JSON-encoded for the host call,
-- and the host returns a promise that must be awaited before decoding.
function axLLM(id, prompt)
    local encoded = json.encode(prompt)
    local raw = axLLMMain(id, encoded):await()
    return json.decode(raw)
end
local editRequestFuncs = {}
local editDisplayFuncs = {}
local editInputFuncs = {}

View File

@@ -106,7 +106,7 @@ export class HypaProcesser{
if(!gf.ok){
throw gf.data
throw JSON.stringify(gf.data)
}
const result:number[][] = []

View File

@@ -421,6 +421,12 @@ export async function hypaMemoryV3(
continue;
}
if (db.hypaV3Settings.doNotSummarizeUserMessage && chat.role === "user") {
console.log(`[HypaV3] Skipping user role at index ${i}`);
continue;
}
toSummarize.push(chat);
}
@@ -436,23 +442,25 @@ export async function hypaMemoryV3(
}
// Attempt summarization
const summarizeResult = await retryableSummarize(toSummarize);
if (toSummarize.length > 0) {
const summarizeResult = await retryableSummarize(toSummarize);
if (!summarizeResult.success) {
return {
currentTokens,
chats,
error: `[HypaV3] Summarization failed after maximum retries: ${summarizeResult.data}`,
memory: toSerializableHypaV3Data(data),
};
if (!summarizeResult.success) {
return {
currentTokens,
chats,
error: `[HypaV3] Summarization failed after maximum retries: ${summarizeResult.data}`,
memory: toSerializableHypaV3Data(data),
};
}
data.summaries.push({
text: summarizeResult.data,
chatMemos: new Set(toSummarize.map((chat) => chat.memo)),
isImportant: false,
});
}
data.summaries.push({
text: summarizeResult.data,
chatMemos: new Set(toSummarize.map((chat) => chat.memo)),
isImportant: false,
});
currentTokens -= toSummarizeTokens;
startIdx = endIdx;
}
@@ -469,6 +477,37 @@ export async function hypaMemoryV3(
availableMemoryTokens
);
// Early return if no summaries
if (data.summaries.length === 0) {
// Generate final memory prompt
const memory = encapsulateMemoryPrompt("");
const newChats: OpenAIChat[] = [
{
role: "system",
content: memory,
memo: "supaMemory",
},
...chats.slice(startIdx),
];
console.log(
"[HypaV3] Exiting function:",
"\nCurrent Tokens:",
currentTokens,
"\nAll chats, including memory prompt:",
newChats,
"\nMemory Data:",
data
);
return {
currentTokens,
chats: newChats,
memory: toSerializableHypaV3Data(data),
};
}
const selectedSummaries: Summary[] = [];
const randomMemoryRatio =
1 -

View File

@@ -24,6 +24,7 @@ export interface RisuModule{
backgroundEmbedding?:string
assets?:[string,string,string][]
namespace?:string
customModuleToggle?:string
}
export async function exportModule(module:RisuModule, arg:{
@@ -267,7 +268,20 @@ function getModuleByIds(ids:string[]){
modules.push(module)
}
}
return modules
return deduplicateModuleById(modules)
}
/**
 * Remove duplicate modules from the given list, keeping only the first
 * occurrence of each module id and preserving the original order.
 *
 * Uses a Set for the seen-id lookup: the original `ids.includes(...)`
 * scan inside the loop was O(n²) over the module count.
 *
 * @param modules - modules possibly containing repeated ids
 * @returns a new array with one entry per unique id
 */
function deduplicateModuleById(modules:RisuModule[]){
    const seen = new Set<string>()
    const newModules:RisuModule[] = []
    for(const module of modules){
        if(seen.has(module.id)){
            continue
        }
        seen.add(module.id)
        newModules.push(module)
    }
    return newModules
}
let lastModules = ''
@@ -279,6 +293,10 @@ export function getModules(){
if (currentChat){
ids = ids.concat(currentChat.modules ?? [])
}
if(db.moduleIntergration){
const intList = db.moduleIntergration.split(',').map((s) => s.trim())
ids = ids.concat(intList)
}
const idsJoined = ids.join('-')
if(lastModules === idsJoined){
return lastModuleData
@@ -352,6 +370,20 @@ export function getModuleRegexScripts() {
return customscripts
}
export function getModuleToggles() {
const modules = getModules()
let costomModuleToggles: string = ''
for (const module of modules) {
if(!module){
continue
}
if (module.customModuleToggle) {
costomModuleToggles += '\n' + module.customModuleToggle + '\n'
}
}
return costomModuleToggles
}
export async function applyModule() {
const sel = await alertModuleSelect()
if (!sel) {
@@ -425,4 +457,9 @@ export function moduleUpdate(){
ReloadGUIPointer.set(get(ReloadGUIPointer) + 1)
lastModuleIds = ids
}
}
// Invalidate the module cache. getModules() compares the joined id string
// against lastModules and returns the cached lastModuleData on a match, so
// clearing both forces the next getModules() call to rebuild the list.
export function refreshModules(){
lastModules = ''
lastModuleData = []
}

View File

@@ -152,6 +152,12 @@ export interface triggerEffectRunLLM{
inputVar: string
}
// Trigger effect descriptor for running a prompt through the auxiliary
// ("ax") LLM route.
// NOTE(review): field semantics inferred from the sibling runLLM effect
// (which does `setVar(effect.inputVar, res)`) — 'value' presumably holds
// the prompt text and 'inputVar' names the chat variable that receives
// the result; confirm against the runTrigger handler for 'runAxLLM'.
export interface triggerEffectRunAxLLM{
type: 'runAxLLM',
value: string,
inputVar: string
}
export type additonalSysPrompt = {
start:string,
historyend: string,
@@ -1033,6 +1039,7 @@ export async function runTrigger(char:character,mode:triggerMode, arg:{
setVar(effect.inputVar, res)
break
}
case 'triggerlua':{
const triggerCodeResult = await runLua(effect.code,{
lowLevelAccess: trigger.lowLevelAccess,