Fix Mistral top_p and fix seperateParameters

This commit is contained in:
Kwaroran
2024-12-11 03:44:07 +09:00
parent 91e27f3a84
commit 64e759e187
3 changed files with 7 additions and 8 deletions

View File

@@ -708,7 +708,7 @@ export const LLMModels: LLMModel[] = [
format: LLMFormat.Mistral,
flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
recommended: true,
parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
tokenizer: LLMTokenizer.Mistral
},
{
@@ -719,7 +719,7 @@ export const LLMModels: LLMModel[] = [
format: LLMFormat.Mistral,
flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
recommended: true,
parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
tokenizer: LLMTokenizer.Mistral
},
{
@@ -729,7 +729,7 @@ export const LLMModels: LLMModel[] = [
provider: LLMProvider.Mistral,
format: LLMFormat.Mistral,
flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
tokenizer: LLMTokenizer.Mistral
},
{
@@ -739,7 +739,7 @@ export const LLMModels: LLMModel[] = [
provider: LLMProvider.Mistral,
format: LLMFormat.Mistral,
flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
tokenizer: LLMTokenizer.Mistral
},
{
@@ -749,7 +749,7 @@ export const LLMModels: LLMModel[] = [
provider: LLMProvider.Mistral,
format: LLMFormat.Mistral,
flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
recommended: true,
tokenizer: LLMTokenizer.Mistral
},

View File

@@ -520,10 +520,9 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise<requestDat
body: applyParameters({
model: requestModel,
messages: reformatedChat,
top_p: db.top_p,
safe_prompt: false,
max_tokens: arg.maxTokens,
}, ['temperature', 'presence_penalty', 'frequency_penalty'], {}, arg.mode ),
}, ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'], {}, arg.mode ),
headers: {
"Authorization": "Bearer " + db.mistralKey,
},

View File

@@ -453,7 +453,7 @@ export function setDatabase(data:Database){
data.vertexClientEmail ??= ''
data.vertexPrivateKey ??= ''
data.seperateParametersEnabled ??= false
data.seperateParameters = {
data.seperateParameters ??= {
memory: {},
emotion: {},
translate: {},