Add GeminiBlockOff
@@ -17,7 +17,8 @@ export enum LLMFlags{
     hasVideoInput,
     OAICompletionTokens,
     DeveloperRole,
-    geminiThinking
+    geminiThinking,
+    geminiBlockOff
 }
 
 export enum LLMProvider{
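For context on how the new enum member is consumed: a model advertises the capability through its flags array, and request code gates behaviour on it with a plain membership check. A minimal sketch of that check (the helper name supportsGeminiBlockOff is illustrative only, not part of this commit):

// Sketch: gate Gemini "block off" behaviour on the new flag.
// `modelInfo.flags` mirrors the LLMModel entries changed below.
function supportsGeminiBlockOff(modelInfo: { flags: LLMFlags[] }): boolean {
    return modelInfo.flags.includes(LLMFlags.geminiBlockOff)
}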
@@ -787,7 +788,7 @@ export const LLMModels: LLMModel[] = [
         id: 'gemini-exp-1121',
         provider: LLMProvider.GoogleCloud,
         format: LLMFormat.GoogleCloud,
-        flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
+        flags: [LLMFlags.geminiBlockOff,LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
         parameters: ['temperature', 'top_k', 'top_p'],
         tokenizer: LLMTokenizer.GoogleCloud,
     },
@@ -796,7 +797,7 @@ export const LLMModels: LLMModel[] = [
         id: 'gemini-exp-1206',
         provider: LLMProvider.GoogleCloud,
         format: LLMFormat.GoogleCloud,
-        flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
+        flags: [LLMFlags.geminiBlockOff,LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
         parameters: ['temperature', 'top_k', 'top_p'],
         tokenizer: LLMTokenizer.GoogleCloud
     },
@@ -805,7 +806,7 @@ export const LLMModels: LLMModel[] = [
         id: 'gemini-2.0-flash-exp',
         provider: LLMProvider.GoogleCloud,
         format: LLMFormat.GoogleCloud,
-        flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasAudioInput, LLMFlags.hasVideoInput, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
+        flags: [LLMFlags.geminiBlockOff,LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasAudioInput, LLMFlags.hasVideoInput, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
         parameters: ['temperature', 'top_k', 'top_p', 'presence_penalty', 'frequency_penalty'],
         tokenizer: LLMTokenizer.GoogleCloud,
         recommended: true
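The same one-line change applies to any Gemini entry that should run with safety blocking disabled: prepend the flag to its flags array. A sketch with a placeholder id (not a model added by this commit; the other fields mirror the entries above):

{
    id: 'gemini-placeholder-model', // hypothetical id, for illustration only
    provider: LLMProvider.GoogleCloud,
    format: LLMFormat.GoogleCloud,
    flags: [LLMFlags.geminiBlockOff, LLMFlags.hasImageInput, LLMFlags.hasStreaming],
    parameters: ['temperature', 'top_k', 'top_p'],
    tokenizer: LLMTokenizer.GoogleCloud,
},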
@@ -1625,6 +1625,12 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise
         }
     ]
 
+    if(arg.modelInfo.flags.includes(LLMFlags.geminiBlockOff)){
+        for(let i=0;i<uncensoredCatagory.length;i++){
+            uncensoredCatagory[i].threshold = "OFF"
+        }
+    }
+
     let para:Parameter[] = ['temperature', 'top_p', 'top_k', 'presence_penalty', 'frequency_penalty']
 
     para = para.filter((v) => {
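The added branch in requestGoogleCloudVertex overwrites every threshold in uncensoredCatagory with "OFF" before the safety settings are sent. A self-contained sketch of the effect, assuming the array holds standard Gemini safetySettings entries (the exact category list and default thresholds in the surrounding code may differ):

// Assumed shape of the entries in uncensoredCatagory.
type SafetySetting = { category: string, threshold: string }

const uncensoredCatagory: SafetySetting[] = [
    { category: "HARM_CATEGORY_HARASSMENT", threshold: "BLOCK_NONE" },
    { category: "HARM_CATEGORY_HATE_SPEECH", threshold: "BLOCK_NONE" },
    { category: "HARM_CATEGORY_SEXUALLY_EXPLICIT", threshold: "BLOCK_NONE" },
    { category: "HARM_CATEGORY_DANGEROUS_CONTENT", threshold: "BLOCK_NONE" },
]

// With LLMFlags.geminiBlockOff set on the model, every category is switched
// to "OFF", which disables safety blocking entirely on Gemini models that
// accept that threshold value.
for (let i = 0; i < uncensoredCatagory.length; i++) {
    uncensoredCatagory[i].threshold = "OFF"
}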