Add new LLMs
@@ -507,6 +507,20 @@ export const LLMModels: LLMModel[] = [
         parameters: OpenAIParameters,
         tokenizer: LLMTokenizer.tiktokenO200Base
     },
+    {
+        id: 'gpt41',
+        internalID: 'gpt-4.1',
+        name: 'GPT 4.1',
+        provider: LLMProvider.OpenAI,
+        format: LLMFormat.OpenAICompatible,
+        flags: [
+            LLMFlags.hasImageInput,
+            LLMFlags.hasFullSystemPrompt,
+            LLMFlags.hasStreaming,
+        ],
+        parameters: OpenAIParameters,
+        tokenizer: LLMTokenizer.tiktokenO200Base
+    },
     {
         id: 'o1',
         internalID: 'o1',
@@ -539,6 +553,38 @@ export const LLMModels: LLMModel[] = [
         parameters: ['reasoning_effort'],
         tokenizer: LLMTokenizer.tiktokenO200Base
     },
+    {
+        id: 'o3',
+        internalID: 'o3',
+        name: 'o3',
+        provider: LLMProvider.OpenAI,
+        format: LLMFormat.OpenAICompatible,
+        flags: [
+            LLMFlags.hasStreaming,
+            LLMFlags.OAICompletionTokens,
+            LLMFlags.hasFullSystemPrompt,
+            LLMFlags.hasImageInput,
+            LLMFlags.DeveloperRole
+        ],
+        parameters: ['reasoning_effort'],
+        tokenizer: LLMTokenizer.tiktokenO200Base
+    },
+    {
+        id: 'o4-mini',
+        internalID: 'o4-mini',
+        name: 'o4-mini',
+        provider: LLMProvider.OpenAI,
+        format: LLMFormat.OpenAICompatible,
+        flags: [
+            LLMFlags.hasStreaming,
+            LLMFlags.OAICompletionTokens,
+            LLMFlags.hasFullSystemPrompt,
+            LLMFlags.hasImageInput,
+            LLMFlags.DeveloperRole
+        ],
+        parameters: ['reasoning_effort'],
+        tokenizer: LLMTokenizer.tiktokenO200Base
+    },
 
     {
         name: "Claude 3.5 Sonnet",
@@ -991,8 +1037,26 @@ export const LLMModels: LLMModel[] = [
         flags: [LLMFlags.geminiBlockOff,LLMFlags.hasImageInput, LLMFlags.poolSupported, LLMFlags.hasAudioInput, LLMFlags.hasVideoInput, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole, LLMFlags.geminiThinking],
         parameters: ['temperature', 'top_k', 'top_p', 'presence_penalty', 'frequency_penalty'],
         tokenizer: LLMTokenizer.GoogleCloud,
+    },
+    {
+        name: "Gemini Pro 2.5 Preview (05/06)",
+        id: 'gemini-2.5-pro-preview-05-06',
+        provider: LLMProvider.GoogleCloud,
+        format: LLMFormat.GoogleCloud,
+        flags: [LLMFlags.geminiBlockOff,LLMFlags.hasImageInput, LLMFlags.poolSupported, LLMFlags.hasAudioInput, LLMFlags.hasVideoInput, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole, LLMFlags.geminiThinking],
+        parameters: ['temperature', 'top_k', 'top_p', 'presence_penalty', 'frequency_penalty'],
+        tokenizer: LLMTokenizer.GoogleCloud,
         recommended: true
     },
+    {
+        name: "Gemini Flash 2.5 Preview (04/17)",
+        id: 'gemini-2.5-flash-preview-04-17',
+        provider: LLMProvider.GoogleCloud,
+        format: LLMFormat.GoogleCloud,
+        flags: [LLMFlags.geminiBlockOff,LLMFlags.hasImageInput, LLMFlags.poolSupported, LLMFlags.hasAudioInput, LLMFlags.hasVideoInput, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole, LLMFlags.geminiThinking],
+        parameters: ['temperature', 'top_k', 'top_p', 'presence_penalty', 'frequency_penalty'],
+        tokenizer: LLMTokenizer.GoogleCloud,
+    },
     {
         name: "Gemini Flash 2.0 Thinking 1219",
         id: 'gemini-2.0-flash-thinking-exp-1219',
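
For context, the new entries follow the same shape as the existing items in LLMModels. A rough sketch of that shape, inferred only from the fields visible in this diff (the optionality of internalID and recommended is a guess, and OpenAIParameters is assumed to be a shared list of standard sampler parameter names):

interface LLMModel {
    id: string                // key used to select the model internally
    internalID?: string       // model name sent to the provider API, e.g. 'gpt-4.1'
    name: string              // display name shown in the model picker
    provider: LLMProvider     // e.g. LLMProvider.OpenAI or LLMProvider.GoogleCloud
    format: LLMFormat         // request format, e.g. LLMFormat.OpenAICompatible
    flags: LLMFlags[]         // capability flags such as hasImageInput or hasStreaming
    parameters: string[]      // tunable request parameters exposed for this model
    tokenizer: LLMTokenizer   // tokenizer used for token counting, e.g. tiktokenO200Base
    recommended?: boolean     // marks the entry as a suggested default
}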