fix: remove geminiBlockOff flag from LLM models

Author: Kwaroran
Date: 2025-01-07 00:14:58 +09:00
parent 708faeaea8
commit 64b9b89220


@@ -788,7 +788,7 @@ export const LLMModels: LLMModel[] = [
         id: 'gemini-exp-1121',
         provider: LLMProvider.GoogleCloud,
         format: LLMFormat.GoogleCloud,
-        flags: [LLMFlags.geminiBlockOff,LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
+        flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
         parameters: ['temperature', 'top_k', 'top_p'],
         tokenizer: LLMTokenizer.GoogleCloud,
     },
@@ -797,7 +797,7 @@ export const LLMModels: LLMModel[] = [
         id: 'gemini-exp-1206',
         provider: LLMProvider.GoogleCloud,
         format: LLMFormat.GoogleCloud,
-        flags: [LLMFlags.geminiBlockOff,LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
+        flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
         parameters: ['temperature', 'top_k', 'top_p'],
         tokenizer: LLMTokenizer.GoogleCloud
     },
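For context, the sketch below shows what a model-level flag like geminiBlockOff plausibly gates in a Gemini request builder: when present, every safety category is pinned to BLOCK_NONE; with the flag removed, the configured threshold is forwarded instead. This is a minimal illustration under assumptions; only the LLMFlags member names come from the diff above, while SafetySetting, buildSafetySettings, and the threshold logic are hypothetical, not the repository's actual implementation.

    // Enum members as they appear in the flags arrays above.
    enum LLMFlags {
        geminiBlockOff,
        hasImageInput,
        hasFirstSystemPrompt,
        poolSupported,
        hasStreaming,
        requiresAlternateRole,
    }

    // Hypothetical shape of a Gemini safetySettings entry.
    interface SafetySetting {
        category: string;
        threshold: string;
    }

    // Hypothetical helper illustrating the behavioral difference.
    function buildSafetySettings(flags: LLMFlags[], userThreshold: string): SafetySetting[] {
        const categories = [
            'HARM_CATEGORY_HARASSMENT',
            'HARM_CATEGORY_HATE_SPEECH',
            'HARM_CATEGORY_SEXUALLY_EXPLICIT',
            'HARM_CATEGORY_DANGEROUS_CONTENT',
        ];
        const threshold = flags.includes(LLMFlags.geminiBlockOff)
            ? 'BLOCK_NONE'   // flag present: safety filters forced off for this model
            : userThreshold; // flag removed: respect the user's configured threshold
        return categories.map((category) => ({ category, threshold }));
    }

Under these assumptions, a user-selected threshold such as 'BLOCK_ONLY_HIGH' would now be sent unchanged for gemini-exp-1121 and gemini-exp-1206, rather than being overridden to BLOCK_NONE.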