Fix max tokens: send `max_completion_tokens` instead of `max_tokens` for gpt4o1 models

This commit is contained in:
kwaroran
2024-09-13 08:25:34 +09:00
parent be47141a25
commit 567a6e3dea

View File

@@ -505,6 +505,11 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
})
if(aiModel.startsWith('gpt4o1')){
body.max_completion_tokens = body.max_tokens
delete body.max_tokens
}
if(db.generationSeed > 0){
body.seed = db.generationSeed
}