Fix mistral top_p and fix seperateParameters

Kwaroran
2024-12-11 03:44:07 +09:00
parent 91e27f3a84
commit 64e759e187
3 changed files with 7 additions and 8 deletions


@@ -708,7 +708,7 @@ export const LLMModels: LLMModel[] = [
         format: LLMFormat.Mistral,
         flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
         recommended: true,
-        parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
+        parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
         tokenizer: LLMTokenizer.Mistral
     },
     {
@@ -719,7 +719,7 @@ export const LLMModels: LLMModel[] = [
         format: LLMFormat.Mistral,
         flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
         recommended: true,
-        parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
+        parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
         tokenizer: LLMTokenizer.Mistral
     },
     {
@@ -729,7 +729,7 @@ export const LLMModels: LLMModel[] = [
         provider: LLMProvider.Mistral,
         format: LLMFormat.Mistral,
         flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
-        parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
+        parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
         tokenizer: LLMTokenizer.Mistral
     },
     {
@@ -739,7 +739,7 @@ export const LLMModels: LLMModel[] = [
         provider: LLMProvider.Mistral,
         format: LLMFormat.Mistral,
         flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
-        parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
+        parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
         tokenizer: LLMTokenizer.Mistral
     },
     {
@@ -749,7 +749,7 @@ export const LLMModels: LLMModel[] = [
         provider: LLMProvider.Mistral,
         format: LLMFormat.Mistral,
         flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
-        parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
+        parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
         recommended: true,
         tokenizer: LLMTokenizer.Mistral
     },
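
For context, the parameters array on each LLMModel entry appears to act as a per-model whitelist of the sampling settings that get forwarded to the provider, so omitting 'top_p' would have meant a user's top_p value was silently dropped for these Mistral models. Below is a minimal TypeScript sketch of that gating idea; LLMModelSketch, UserSettingsSketch, and buildBody are hypothetical simplifications for illustration, not the repository's actual types or request code.

// Hypothetical, simplified shapes: only the parameter names visible in the diff above are taken from the source.
type ParamName = 'temperature' | 'presence_penalty' | 'frequency_penalty' | 'top_p'

interface LLMModelSketch {
    parameters: ParamName[]
}

type UserSettingsSketch = Record<ParamName, number>

// Copy only the parameters listed for the model into the request body,
// so adding 'top_p' to the Mistral entries lets the user's value through.
function buildBody(model: LLMModelSketch, settings: UserSettingsSketch): Partial<Record<ParamName, number>> {
    const body: Partial<Record<ParamName, number>> = {}
    for (const p of model.parameters) {
        body[p] = settings[p]
    }
    return body
}

// Example: with 'top_p' whitelisted, the request now carries all four values.
const mistralEntry: LLMModelSketch = {
    parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p']
}
console.log(buildBody(mistralEntry, { temperature: 0.8, presence_penalty: 0, frequency_penalty: 0, top_p: 0.95 }))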