Add OpenAI Response API
This commit is contained in:
@@ -62,7 +62,8 @@ export enum LLMFormat{
|
||||
Kobold,
|
||||
Ollama,
|
||||
Horde,
|
||||
AWSBedrockClaude
|
||||
AWSBedrockClaude,
|
||||
OpenAIResponseAPI
|
||||
}
|
||||
|
||||
export enum LLMTokenizer{
|
||||
@@ -1391,6 +1392,21 @@ for(let model of LLMModels){
|
||||
model.fullName ??= model.provider !== LLMProvider.AsIs ? `${ProviderNames.get(model.provider) ?? ''} ${model.name}`.trim() : model.name
|
||||
}
|
||||
|
||||
for(let i=0; i<LLMModels.length; i++){
|
||||
if(LLMModels[i].provider === LLMProvider.OpenAI && LLMModels[i].format === LLMFormat.OpenAICompatible){
|
||||
LLMModels.push({
|
||||
...LLMModels[i],
|
||||
format: LLMFormat.OpenAIResponseAPI,
|
||||
flags: [...LLMModels[i].flags, LLMFlags.hasPrefill],
|
||||
id: `${LLMModels[i].id}-response-api`,
|
||||
name: `${LLMModels[i].name} (Response API)`,
|
||||
fullName: `${LLMModels[i].fullName ?? LLMModels[i].name} (Response API)`,
|
||||
recommended: false
|
||||
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export function getModelInfo(id: string): LLMModel{
|
||||
|
||||
const db = getDatabase()
|
||||
|
||||
Reference in New Issue
Block a user