fix: remove geminiBlockOff flag from LLM models
This commit is contained in:
@@ -788,7 +788,7 @@ export const LLMModels: LLMModel[] = [
     id: 'gemini-exp-1121',
     provider: LLMProvider.GoogleCloud,
     format: LLMFormat.GoogleCloud,
-    flags: [LLMFlags.geminiBlockOff,LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
+    flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
     parameters: ['temperature', 'top_k', 'top_p'],
     tokenizer: LLMTokenizer.GoogleCloud,
   },
@@ -797,7 +797,7 @@ export const LLMModels: LLMModel[] = [
     id: 'gemini-exp-1206',
     provider: LLMProvider.GoogleCloud,
     format: LLMFormat.GoogleCloud,
-    flags: [LLMFlags.geminiBlockOff,LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
+    flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
     parameters: ['temperature', 'top_k', 'top_p'],
     tokenizer: LLMTokenizer.GoogleCloud
   },
|
|||||||
Reference in New Issue
Block a user