refactor: Add noMultiGen flag to requestChatDataMain functions

This commit is contained in:
kwaroran
2024-05-27 12:34:07 +09:00
parent 66bbee9918
commit f822765d95
4 changed files with 9 additions and 2 deletions

View File

@@ -35,6 +35,7 @@ async function summary(stringlizedChat:string):Promise<{
     formated: promptbody,
     bias: {},
     useStreaming: false,
+    noMultiGen: true
 }, 'model')
 if(da.type === 'fail' || da.type === 'streaming' || da.type === 'multiline'){
     return {

View File

@@ -264,7 +264,9 @@ export async function supaMemory(
 ]
 const da = await requestChatData({
     formated: promptbody,
-    bias: {}
+    bias: {},
+    useStreaming: false,
+    noMultiGen: true
 }, 'submodel')
 if(da.type === 'fail' || da.type === 'streaming' || da.type === 'multiline'){
     return {

View File

@@ -34,7 +34,9 @@ export async function stableDiff(currentChar:character,prompt:string){
     currentChar: currentChar,
     temperature: 0.2,
     maxTokens: 300,
-    bias: {}
+    bias: {},
+    useStreaming: false,
+    noMultiGen: true
 }, 'submodel')

View File

@@ -463,6 +463,8 @@ async function translateLLM(text:string, arg:{to:string}){
     ],
     bias: {},
     useStreaming: false,
+    noMultiGen: true,
+    maxTokens: 1000,
 }, 'submodel')
 if(rq.type === 'fail' || rq.type === 'streaming' || rq.type === 'multiline'){