Fix mistral top_p and fix seperateParameters

This commit is contained in:
Kwaroran
2024-12-11 03:44:07 +09:00
parent 91e27f3a84
commit 64e759e187
3 changed files with 7 additions and 8 deletions

View File

@@ -708,7 +708,7 @@ export const LLMModels: LLMModel[] = [
    format: LLMFormat.Mistral,
    flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
    recommended: true,
-   parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
+   parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
    tokenizer: LLMTokenizer.Mistral
},
{
@@ -719,7 +719,7 @@ export const LLMModels: LLMModel[] = [
    format: LLMFormat.Mistral,
    flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
    recommended: true,
-   parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
+   parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
    tokenizer: LLMTokenizer.Mistral
},
{
@@ -729,7 +729,7 @@ export const LLMModels: LLMModel[] = [
    provider: LLMProvider.Mistral,
    format: LLMFormat.Mistral,
    flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
-   parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
+   parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
    tokenizer: LLMTokenizer.Mistral
},
{
@@ -739,7 +739,7 @@ export const LLMModels: LLMModel[] = [
    provider: LLMProvider.Mistral,
    format: LLMFormat.Mistral,
    flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
-   parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
+   parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
    tokenizer: LLMTokenizer.Mistral
},
{
@@ -749,7 +749,7 @@ export const LLMModels: LLMModel[] = [
    provider: LLMProvider.Mistral,
    format: LLMFormat.Mistral,
    flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
-   parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
+   parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
    recommended: true,
    tokenizer: LLMTokenizer.Mistral
},

View File

@@ -520,10 +520,9 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise<requestDat
    body: applyParameters({
        model: requestModel,
        messages: reformatedChat,
-       top_p: db.top_p,
        safe_prompt: false,
        max_tokens: arg.maxTokens,
-   }, ['temperature', 'presence_penalty', 'frequency_penalty'], {}, arg.mode ),
+   }, ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'], {}, arg.mode ),
    headers: {
        "Authorization": "Bearer " + db.mistralKey,
    },

View File

@@ -453,7 +453,7 @@ export function setDatabase(data:Database){
    data.vertexClientEmail ??= ''
    data.vertexPrivateKey ??= ''
    data.seperateParametersEnabled ??= false
-   data.seperateParameters = {
+   data.seperateParameters ??= {
        memory: {},
        emotion: {},
        translate: {},