From 64e759e18791c965f81878b65a6998fdc5a8f4ac Mon Sep 17 00:00:00 2001
From: Kwaroran
Date: Wed, 11 Dec 2024 03:44:07 +0900
Subject: [PATCH] Fix mistral top_p and fix seperateParameters

---
 src/ts/model/modellist.ts         | 10 +++++-----
 src/ts/process/request.ts         |  3 +--
 src/ts/storage/database.svelte.ts |  2 +-
 3 files changed, 7 insertions(+), 8 deletions(-)

diff --git a/src/ts/model/modellist.ts b/src/ts/model/modellist.ts
index bbca5d7d..baa1b626 100644
--- a/src/ts/model/modellist.ts
+++ b/src/ts/model/modellist.ts
@@ -708,7 +708,7 @@ export const LLMModels: LLMModel[] = [
         format: LLMFormat.Mistral,
         flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
         recommended: true,
-        parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
+        parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
         tokenizer: LLMTokenizer.Mistral
     },
     {
@@ -719,7 +719,7 @@ export const LLMModels: LLMModel[] = [
         format: LLMFormat.Mistral,
         flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
         recommended: true,
-        parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
+        parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
         tokenizer: LLMTokenizer.Mistral
     },
     {
@@ -729,7 +729,7 @@ export const LLMModels: LLMModel[] = [
         provider: LLMProvider.Mistral,
         format: LLMFormat.Mistral,
         flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
-        parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
+        parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
         tokenizer: LLMTokenizer.Mistral
     },
     {
@@ -739,7 +739,7 @@ export const LLMModels: LLMModel[] = [
         provider: LLMProvider.Mistral,
         format: LLMFormat.Mistral,
         flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
-        parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
+        parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
         tokenizer: LLMTokenizer.Mistral
     },
     {
@@ -749,7 +749,7 @@ export const LLMModels: LLMModel[] = [
         provider: LLMProvider.Mistral,
         format: LLMFormat.Mistral,
         flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
-        parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
+        parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
         recommended: true,
         tokenizer: LLMTokenizer.Mistral
     },
diff --git a/src/ts/process/request.ts b/src/ts/process/request.ts
index 95502cf1..df15704b 100644
--- a/src/ts/process/request.ts
+++ b/src/ts/process/request.ts
@@ -520,10 +520,9 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise
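
Note: the patch is truncated after the request.ts hunk header above; the hunk bodies for src/ts/process/request.ts and src/ts/storage/database.svelte.ts are not shown, and the stat block says they amount to one insertion/two deletions and one insertion/one deletion respectively. As a rough TypeScript sketch of what the per-model `parameters` whitelist in modellist.ts is for — everything below other than the parameter names and the model entry shape is a hypothetical reconstruction, not code from this repository:

// Hypothetical sketch, not the patch's actual code. It illustrates how a
// per-model `parameters` whitelist can gate which sampler settings are
// copied into the outgoing request body.
type ParameterName = 'temperature' | 'presence_penalty' | 'frequency_penalty' | 'top_p'

interface ModelEntry {
    id: string
    parameters: ParameterName[]
}

interface SamplerSettings {
    temperature?: number
    presence_penalty?: number
    frequency_penalty?: number
    top_p?: number
}

// Copies only whitelisted parameters, so a model whose list omits
// 'top_p' never receives a top_p value even if the user sets one.
function applyWhitelistedParameters(
    model: ModelEntry,
    settings: SamplerSettings,
    body: Record<string, unknown>
): Record<string, unknown> {
    for (const name of model.parameters) {
        const value = settings[name]
        if (value !== undefined) {
            body[name] = value
        }
    }
    return body
}

// Example: with 'top_p' now in the whitelist, the value is forwarded.
const mistralLarge: ModelEntry = {
    id: 'mistral-large-latest',
    parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
}
const body = applyWhitelistedParameters(mistralLarge, { temperature: 0.7, top_p: 0.9 }, {})
// body is now { temperature: 0.7, top_p: 0.9 }

Under that reading, the modellist.ts half of the patch is the whole top_p fix: before it, a user-configured top_p was silently dropped from requests to the five Mistral entries because 'top_p' was missing from their whitelists.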