Add OAI response API

kwaroran
2025-03-20 12:08:34 +09:00
parent fff4ec74af
commit ad4f52239b
6 changed files with 226 additions and 1 deletion


@@ -62,7 +62,8 @@ export enum LLMFormat{
     Kobold,
     Ollama,
     Horde,
-    AWSBedrockClaude
+    AWSBedrockClaude,
+    OpenAIResponseAPI
 }
 export enum LLMTokenizer{
@@ -1391,6 +1392,21 @@ for(let model of LLMModels){
     model.fullName ??= model.provider !== LLMProvider.AsIs ? `${ProviderNames.get(model.provider) ?? ''} ${model.name}`.trim() : model.name
 }
+for(let i=0; i<LLMModels.length; i++){
+    if(LLMModels[i].provider === LLMProvider.OpenAI && LLMModels[i].format === LLMFormat.OpenAICompatible){
+        LLMModels.push({
+            ...LLMModels[i],
+            format: LLMFormat.OpenAIResponseAPI,
+            flags: [...LLMModels[i].flags, LLMFlags.hasPrefill],
+            id: `${LLMModels[i].id}-response-api`,
+            name: `${LLMModels[i].name} (Response API)`,
+            fullName: `${LLMModels[i].fullName ?? LLMModels[i].name} (Response API)`,
+            recommended: false
+        })
+    }
+}
 export function getModelInfo(id: string): LLMModel{
     const db = getDatabase()
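
For reference, a rough sketch of the entry the new loop appends, assuming a hypothetical OpenAI-compatible base model; the concrete id, name, and flag values below are illustrative only, not taken from the actual model list:

// Hypothetical base entry; fullName is assumed to have been filled in by the
// preceding `model.fullName ??= ...` loop.
const base = {
    id: 'gpt-4o',
    name: 'GPT-4o',
    fullName: 'OpenAI GPT-4o',
    provider: LLMProvider.OpenAI,
    format: LLMFormat.OpenAICompatible,
    flags: [],
    recommended: true
}

// Clone pushed onto LLMModels by the new loop: the same entry switched to the
// Response API format, with the hasPrefill flag added, a distinct id for
// getModelInfo lookups, and recommended turned off so it does not appear in
// the recommended list.
const responseApiVariant = {
    ...base,
    format: LLMFormat.OpenAIResponseAPI,
    flags: [...base.flags, LLMFlags.hasPrefill],
    id: 'gpt-4o-response-api',
    name: 'GPT-4o (Response API)',
    fullName: 'OpenAI GPT-4o (Response API)',
    recommended: false
}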