Fix deepseek chat
@@ -1289,7 +1289,7 @@ export const LLMModels: LLMModel[] = [
         flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.requiresAlternateRole, LLMFlags.mustStartWithUserInput, LLMFlags.hasPrefill, LLMFlags.deepSeekPrefix, LLMFlags.hasStreaming],
         parameters: ['frequency_penalty', 'presence_penalty','temperature', 'top_p'],
         tokenizer: LLMTokenizer.DeepSeek,
-        endpoint: 'https://api.deepseek.com/beta',
+        endpoint: 'https://api.deepseek.com/beta/chat/completions',
         keyIdentifier: 'deepseek',
         recommended: true
     },
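
For context, the corrected value points at DeepSeek's OpenAI-compatible chat completions route instead of the bare beta base URL. Below is a minimal sketch of a request against the new endpoint, assuming the client simply POSTs an OpenAI-style chat payload with a bearer key; the request code, model name, and payload fields are assumptions and are not part of this diff.

// Hypothetical sketch of a call to the corrected endpoint.
// Field names follow the OpenAI-compatible chat format that DeepSeek exposes;
// everything outside the endpoint string itself is an assumption for illustration.
const endpoint = 'https://api.deepseek.com/beta/chat/completions';

async function chat(apiKey: string, prompt: string): Promise<string> {
    const res = await fetch(endpoint, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            'Authorization': `Bearer ${apiKey}`,
        },
        body: JSON.stringify({
            model: 'deepseek-chat',
            messages: [{ role: 'user', content: prompt }],
        }),
    });
    const data = await res.json();
    return data.choices[0].message.content;
}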