Add custommodel
@@ -1450,6 +1450,24 @@ export function getModelInfo(id: string): LLMModel{
             tokenizer: LLMTokenizer.Unknown
         }
     }
+    if(id.startsWith('xcustom:::')){
+        const customModels = db?.customModels || []
+        const found = customModels.find((model) => model.id === id)
+        if(found){
+            return {
+                id: found.id,
+                name: found.name,
+                shortName: found.name,
+                fullName: found.name,
+                internalID: found.internalId,
+                provider: LLMProvider.AsIs,
+                format: found.format,
+                flags: found.flags,
+                parameters: ['temperature', 'top_p', 'frequency_penalty', 'presence_penalty', 'repetition_penalty', 'min_p', 'top_a', 'top_k', 'thinking_tokens'],
+                tokenizer: found.tokenizer
+            }
+        }
+    }
 
     return {
         id,
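As a reading aid, the lookup added above behaves roughly like the standalone sketch below. The CustomModelSketch shape and the numeric stand-ins for the LLMFormat/LLMFlags/LLMTokenizer enums are assumptions made only so the sketch compiles on its own; in the real code these come from '../model/modellist' and the Database type.

    // Minimal sketch of the xcustom::: resolution path, not the actual implementation.
    // Enum-typed fields are stubbed with numbers so this compiles standalone.
    interface CustomModelSketch {
        id: string          // always carries the 'xcustom:::' prefix
        internalId: string
        name: string
        format: number      // LLMFormat in the real code
        flags: number[]     // LLMFlags[] in the real code
        tokenizer: number   // LLMTokenizer in the real code
    }

    function resolveCustomModel(id: string, customModels: CustomModelSketch[]) {
        if (!id.startsWith('xcustom:::')) {
            return null
        }
        const found = customModels.find((model) => model.id === id)
        if (!found) {
            return null
        }
        // All display names reuse the user-supplied name, and the provider is fixed
        // to "AsIs" in the diff so the request layer passes the model through untouched.
        return {
            id: found.id,
            name: found.name,
            shortName: found.name,
            fullName: found.name,
            internalID: found.internalId,
            format: found.format,
            flags: found.flags,
            tokenizer: found.tokenizer,
        }
    }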
@@ -55,6 +55,7 @@ interface RequestDataArgumentExtended extends requestDataArgument{
     modelInfo?:LLMModel
     customURL?:string
     mode?:ModelModeExtended
+    key?:string
 }
 
 type requestDataResponse = {
@@ -515,6 +516,11 @@ export async function requestChatDataMain(arg:requestDataArgument, model:ModelMo
         targ.modelInfo.format = db.customAPIFormat
         targ.customURL = db.forceReplaceUrl
     }
+    if(targ.aiModel.startsWith('xcustom:::')){
+        const found = db.customModels.find(m => m.id === targ.aiModel)
+        targ.customURL = found?.url
+        targ.key = found?.key
+    }
 
     if(db.seperateModelsForAxModels && !arg.staticModel){
         if(db.seperateModels[model]){
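The same prefix check reappears here to copy the per-model endpoint and API key onto the request arguments. A hedged sketch of just that step, with targ and the custom-model entry narrowed to the fields the hunk touches:

    // Sketch only; field names follow the diff, everything else is trimmed away.
    interface CustomModelOverride { id: string; url: string; key: string }

    function applyCustomModelOverrides(
        targ: { aiModel: string; customURL?: string; key?: string },
        customModels: CustomModelOverride[]
    ) {
        if (targ.aiModel.startsWith('xcustom:::')) {
            const found = customModels.find(m => m.id === targ.aiModel)
            // Optional chaining leaves these undefined when the id is not found,
            // so the provider-specific code still falls back to its stored defaults.
            targ.customURL = found?.url
            targ.key = found?.key
        }
        return targ
    }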
@@ -775,7 +781,7 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise<requestDat
             max_tokens: arg.maxTokens,
         }, ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'], {}, arg.mode ),
         headers: {
-            "Authorization": "Bearer " + db.mistralKey,
+            "Authorization": "Bearer " + (arg.key ?? db.mistralKey),
         },
         abortSignal: arg.abortSignal,
         chatId: arg.chatId
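This and every Authorization change in the hunks that follow apply the same pattern: the per-model key set above takes precedence via nullish coalescing, and the provider-wide key stored in the database remains the fallback whenever arg.key is null or undefined. A one-line sketch of the rule:

    // The fallback rule used in the remaining Authorization headers (sketch).
    const authHeader = (argKey: string | undefined, storedKey: string) =>
        "Bearer " + (argKey ?? storedKey)

    // authHeader(undefined, "stored-provider-key")       -> "Bearer stored-provider-key"
    // authHeader("per-model-key", "stored-provider-key") -> "Bearer per-model-key"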
@@ -978,7 +984,7 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise<requestDat
     }
 
     let headers = {
-        "Authorization": "Bearer " + (aiModel === 'reverse_proxy' ? db.proxyKey : (aiModel === 'openrouter' ? db.openrouterKey : db.openAIKey)),
+        "Authorization": "Bearer " + (arg.key ?? (aiModel === 'reverse_proxy' ? db.proxyKey : (aiModel === 'openrouter' ? db.openrouterKey : db.openAIKey))),
         "Content-Type": "application/json"
     }
 
@@ -1375,7 +1381,7 @@ async function requestOpenAILegacyInstruct(arg:RequestDataArgumentExtended):Prom
         },
         headers: {
             "Content-Type": "application/json",
-            "Authorization": "Bearer " + db.openAIKey,
+            "Authorization": "Bearer " + (arg.key ?? db.openAIKey)
         },
         chatId: arg.chatId
     });
@@ -1511,7 +1517,7 @@ async function requestOpenAIResponseAPI(arg:RequestDataArgumentExtended):Promise
         url: "https://api.openai.com/v1/responses",
         body: body,
         headers: {
-            "Authorization": "Bearer " + db.openAIKey,
+            "Authorization": "Bearer " + (arg.key ?? db.openAIKey),
             "Content-Type": "application/json"
         }
     })
@@ -1526,7 +1532,7 @@ async function requestOpenAIResponseAPI(arg:RequestDataArgumentExtended):Promise
         body: body,
         headers: {
             "Content-Type": "application/json",
-            "Authorization": "Bearer " + db.openAIKey,
+            "Authorization": "Bearer " + (arg.key ?? db.openAIKey),
         },
         chatId: arg.chatId
     });
@@ -1642,7 +1648,7 @@ async function requestNovelAI(arg:RequestDataArgumentExtended):Promise<requestDa
     const da = await globalFetch(aiModel === 'novelai_kayra' ? "https://text.novelai.net/ai/generate" : "https://api.novelai.net/ai/generate", {
         body: body,
         headers: {
-            "Authorization": "Bearer " + db.novelai.token
+            "Authorization": "Bearer " + (arg.key ?? db.novelai.token)
         },
         abortSignal,
         chatId: arg.chatId
@@ -2803,7 +2809,7 @@ async function requestCohere(arg:RequestDataArgumentExtended):Promise<requestDat
         url: arg.customURL ?? 'https://api.cohere.com/v1/chat',
         body: body,
         headers: {
-            "Authorization": "Bearer " + db.cohereAPIKey,
+            "Authorization": "Bearer " + (arg.key ?? db.cohereAPIKey),
             "Content-Type": "application/json"
         }
     })
@@ -2813,7 +2819,7 @@ async function requestCohere(arg:RequestDataArgumentExtended):Promise<requestDat
     const res = await globalFetch(arg.customURL ?? 'https://api.cohere.com/v1/chat', {
         method: "POST",
         headers: {
-            "Authorization": "Bearer " + db.cohereAPIKey,
+            "Authorization": "Bearer " + (arg.key ?? db.cohereAPIKey),
             "Content-Type": "application/json"
         },
         body: body
@@ -512,6 +512,7 @@ export function setDatabase(data:Database){
         translate: data.fallbackModels.translate.filter((v) => v !== ''),
         otherAx: data.fallbackModels.otherAx.filter((v) => v !== '')
     }
+    data.customModels ??= []
     changeLanguage(data.language)
     setDatabaseLite(data)
 }
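The ??= line is the migration step: it only assigns when customModels is still null or undefined, so databases saved before this commit get an empty list while an existing list is left untouched. A tiny sketch of the operator's behavior, using a throwaway shape rather than the real Database type:

    // Nullish-coalescing assignment only fires on null/undefined, never on [].
    const data: { customModels?: string[] } = {}
    data.customModels ??= []        // assigns: []
    data.customModels ??= ['kept?'] // no-op: the array above is not nullish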
@@ -968,6 +969,17 @@ export interface Database{
     }
     doNotChangeFallbackModels: boolean
     fallbackWhenBlankResponse: boolean
+    customModels: {
+        id: string
+        internalId: string
+        url: string
+        format: LLMFormat
+        tokenizer: LLMTokenizer
+        key: string
+        name: string
+        params: string
+        flags: LLMFlags[]
+    }[]
 }
 
 interface SeparateParameters{
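For orientation, a customModels entry under this interface would look roughly like the object below. The values are invented for illustration, and the enum-typed fields are stubbed with numbers because the real LLMFormat/LLMTokenizer/LLMFlags members are defined elsewhere in the codebase:

    // Illustrative entry only; values are made up, enum fields stubbed with numbers.
    const exampleCustomModel = {
        id: 'xcustom:::my-local-model',   // the prefix is what the lookups above key on
        internalId: 'my-local-model',
        url: 'http://localhost:8080/v1/chat/completions',  // hypothetical endpoint
        format: 0,        // an LLMFormat member in the real code
        tokenizer: 0,     // an LLMTokenizer member in the real code
        key: '',          // per-model API key sent as the Bearer token
        name: 'My Local Model',
        params: '',
        flags: [] as number[],  // LLMFlags[] in the real code
    }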
@@ -1784,7 +1796,7 @@ import type { RisuModule } from '../process/modules';
 import type { SerializableHypaV2Data } from '../process/memory/hypav2';
 import { decodeRPack, encodeRPack } from '../rpack/rpack_bg';
 import { DBState, selectedCharID } from '../stores.svelte';
-import { LLMFlags, LLMFormat } from '../model/modellist';
+import { LLMFlags, LLMFormat, LLMTokenizer } from '../model/modellist';
 import type { Parameter } from '../process/request';
 import type { HypaModel } from '../process/memory/hypamemory';
 import type { SerializableHypaV3Data } from '../process/memory/hypav3';