Add custommodel

This commit is contained in:
Kwaroran
2025-03-23 15:21:31 +09:00
parent bcb8a28034
commit 3535c29c10
7 changed files with 174 additions and 11 deletions

View File

@@ -1112,4 +1112,5 @@ export const languageEnglish = {
     fallbackModel: "Fallback Model",
     fallbackWhenBlankResponse: "Fallback When Blank Response",
     doNotChangeFallbackModels: "Do Not Change Fallback Models on Preset Change",
+    customModels: "Custom Models",
 }

View File

@@ -14,6 +14,8 @@
 import { Capacitor } from "@capacitor/core";
 import { capStorageInvestigation } from "src/ts/storage/mobileStorage";
 import Arcodion from "src/lib/UI/Arcodion.svelte";
+import { PlusIcon, TrashIcon } from "lucide-svelte";
+import { v4 } from "uuid";
 let estaStorage:{
     key:string,
@@ -254,6 +256,117 @@
     {/each}
 </Arcodion>
+{#snippet CustomFlagButton(index:number,name:string,flag:number)}
+    <Button className="mt-2" onclick={(e) => {
+        if(DBState.db.customModels[index].flags.includes(flag)){
+            DBState.db.customModels[index].flags = DBState.db.customModels[index].flags.filter((f) => f !== flag)
+        }
+        else{
+            DBState.db.customModels[index].flags.push(flag)
+        }
+    }} styled={DBState.db.customModels[index].flags.includes(flag) ? 'primary' : 'outlined'}>
+        {name}
+    </Button>
+{/snippet}
+<Arcodion styled name={language.customModels} className="overflow-x-auto">
+    {#each DBState.db.customModels as model, index}
+        <Arcodion styled name={model.name ?? "Unnamed"}>
+            <span class="text-textcolor">{language.name}</span>
+            <TextInput size={"sm"} bind:value={DBState.db.customModels[index].name}/>
+            <span class="text-textcolor">{language.proxyRequestModel}</span>
+            <TextInput size={"sm"} bind:value={DBState.db.customModels[index].internalId}/>
+            <span class="text-textcolor">URL</span>
+            <TextInput size={"sm"} bind:value={DBState.db.customModels[index].url}/>
+            <span class="text-textcolor">{language.tokenizer}</span>
+            <SelectInput size={"sm"} value={DBState.db.customModels[index].tokenizer.toString()} onchange={(e) => {
+                DBState.db.customModels[index].tokenizer = parseInt(e.currentTarget.value)
+            }}>
+                <OptionInput value="0">tiktokenCl100kBase</OptionInput>
+                <OptionInput value="1">tiktokenO200Base</OptionInput>
+                <OptionInput value="2">Mistral</OptionInput>
+                <OptionInput value="3">Llama</OptionInput>
+                <OptionInput value="4">NovelAI</OptionInput>
+                <OptionInput value="5">Claude</OptionInput>
+                <OptionInput value="6">NovelList</OptionInput>
+                <OptionInput value="7">Llama3</OptionInput>
+                <OptionInput value="8">Gemma</OptionInput>
+                <OptionInput value="9">GoogleCloud</OptionInput>
+                <OptionInput value="10">Cohere</OptionInput>
+                <OptionInput value="12">DeepSeek</OptionInput>
+            </SelectInput>
+            <span class="text-textcolor">{language.format}</span>
+            <SelectInput size={"sm"} value={DBState.db.customModels[index].format.toString()} onchange={(e) => {
+                DBState.db.customModels[index].format = parseInt(e.currentTarget.value)
+            }}>
+                <OptionInput value="0">OpenAICompatible</OptionInput>
+                <OptionInput value="1">OpenAILegacyInstruct</OptionInput>
+                <OptionInput value="2">Anthropic</OptionInput>
+                <OptionInput value="3">AnthropicLegacy</OptionInput>
+                <OptionInput value="4">Mistral</OptionInput>
+                <OptionInput value="5">GoogleCloud</OptionInput>
+                <OptionInput value="6">VertexAIGemini</OptionInput>
+                <OptionInput value="7">NovelList</OptionInput>
+                <OptionInput value="8">Cohere</OptionInput>
+                <OptionInput value="9">NovelAI</OptionInput>
+                <OptionInput value="11">OobaLegacy</OptionInput>
+                <OptionInput value="13">Ooba</OptionInput>
+                <OptionInput value="14">Kobold</OptionInput>
+                <OptionInput value="17">AWSBedrockClaude</OptionInput>
+                <OptionInput value="18">OpenAIResponseAPI</OptionInput>
+            </SelectInput>
+            <span class="text-textcolor">{language.proxyAPIKey}</span>
+            <TextInput size={"sm"} bind:value={DBState.db.customModels[index].key}/>
+            <span class="text-textcolor">{language.additionalParams}</span>
+            <TextInput size={"sm"} bind:value={DBState.db.customModels[index].params}/>
+            <Arcodion styled name={language.flags}>
+                {@render CustomFlagButton(index,'hasImageInput', 0)}
+                {@render CustomFlagButton(index,'hasImageOutput', 1)}
+                {@render CustomFlagButton(index,'hasAudioInput', 2)}
+                {@render CustomFlagButton(index,'hasAudioOutput', 3)}
+                {@render CustomFlagButton(index,'hasPrefill', 4)}
+                {@render CustomFlagButton(index,'hasCache', 5)}
+                {@render CustomFlagButton(index,'hasFullSystemPrompt', 6)}
+                {@render CustomFlagButton(index,'hasFirstSystemPrompt', 7)}
+                {@render CustomFlagButton(index,'hasStreaming', 8)}
+                {@render CustomFlagButton(index,'requiresAlternateRole', 9)}
+                {@render CustomFlagButton(index,'mustStartWithUserInput', 10)}
+                {@render CustomFlagButton(index,'hasVideoInput', 12)}
+                {@render CustomFlagButton(index,'OAICompletionTokens', 13)}
+                {@render CustomFlagButton(index,'DeveloperRole', 14)}
+                {@render CustomFlagButton(index,'geminiThinking', 15)}
+                {@render CustomFlagButton(index,'geminiBlockOff', 16)}
+                {@render CustomFlagButton(index,'deepSeekPrefix', 17)}
+                {@render CustomFlagButton(index,'deepSeekThinkingInput', 18)}
+                {@render CustomFlagButton(index,'deepSeekThinkingOutput', 19)}
+            </Arcodion>
+        </Arcodion>
+    {/each}
+    <div class="flex items-center mt-4">
+        <Button onclick={() => {
+            DBState.db.customModels.push({
+                internalId: "",
+                url: "",
+                tokenizer: 0,
+                format: 0,
+                id: 'xcustom:::' + v4(),
+                key: "",
+                name: "",
+                params: "",
+                flags: [],
+            })
+        }}>
+            <PlusIcon />
+        </Button>
+        <Button onclick={() => {
+            DBState.db.customModels.pop()
+        }}>
+            <TrashIcon />
+        </Button>
+    </div>
+</Arcodion>
 <Button
     className="mt-4"
     onclick={async () => {

View File

@@ -9,6 +9,7 @@
     help?: (keyof (typeof language.help))|'';
     disabled?: boolean;
     children?: import('svelte').Snippet;
+    className?: string;
 }
 let {
@@ -16,7 +17,8 @@
     styled = false,
     help = '',
     disabled = false,
-    children
+    children,
+    className = ""
 }: Props = $props();
 </script>
 {#if disabled}
@@ -35,7 +37,7 @@
     <Help key={help} />
     {/if}</button>
 {#if open}
-    <div class="flex flex-col border border-selected p-2 rounded-b-md">
+    <div class={"flex flex-col border border-selected p-2 rounded-b-md " + className}>
         {@render children?.()}
     </div>
 {/if}

View File

@@ -74,6 +74,17 @@
     {/await}
 </Arcodion>
+{#if DBState?.db.customModels?.length > 0}
+    <Arcodion name={language.customModels}>
+        {#each DBState.db.customModels as model}
+            <button class="hover:bg-selected px-6 py-2 text-lg" onclick={() => {changeModel(model.id)}}>{model.name ?? "Unnamed"}</button>
+        {/each}
+    </Arcodion>
+{/if}
 {#if blankable}
     <button class="hover:bg-selected px-6 py-2 text-lg" onclick={() => {changeModel('')}}>{language.none}</button>
 {/if}

View File

@@ -1450,6 +1450,24 @@ export function getModelInfo(id: string): LLMModel{
             tokenizer: LLMTokenizer.Unknown
         }
     }
+    if(id.startsWith('xcustom:::')){
+        const customModels = db?.customModels || []
+        const found = customModels.find((model) => model.id === id)
+        if(found){
+            return {
+                id: found.id,
+                name: found.name,
+                shortName: found.name,
+                fullName: found.name,
+                internalID: found.internalId,
+                provider: LLMProvider.AsIs,
+                format: found.format,
+                flags: found.flags,
+                parameters: ['temperature', 'top_p', 'frequency_penalty', 'presence_penalty', 'repetition_penalty', 'min_p', 'top_a', 'top_k', 'thinking_tokens'],
+                tokenizer: found.tokenizer
+            }
+        }
+    }
     return {
         id,

View File

@@ -55,6 +55,7 @@ interface RequestDataArgumentExtended extends requestDataArgument{
     modelInfo?:LLMModel
     customURL?:string
     mode?:ModelModeExtended
+    key?:string
 }
 type requestDataResponse = {
@@ -515,6 +516,11 @@ export async function requestChatDataMain(arg:requestDataArgument, model:ModelMo
         targ.modelInfo.format = db.customAPIFormat
         targ.customURL = db.forceReplaceUrl
     }
+    if(targ.aiModel.startsWith('xcustom:::')){
+        const found = db.customModels.find(m => m.id === targ.aiModel)
+        targ.customURL = found?.url
+        targ.key = found?.key
+    }
     if(db.seperateModelsForAxModels && !arg.staticModel){
         if(db.seperateModels[model]){
@@ -775,7 +781,7 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise<requestDat
             max_tokens: arg.maxTokens,
         }, ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'], {}, arg.mode ),
         headers: {
-            "Authorization": "Bearer " + db.mistralKey,
+            "Authorization": "Bearer " + (arg.key ?? db.mistralKey),
         },
         abortSignal: arg.abortSignal,
         chatId: arg.chatId
@@ -978,7 +984,7 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise<requestDat
     }
     let headers = {
-        "Authorization": "Bearer " + (aiModel === 'reverse_proxy' ? db.proxyKey : (aiModel === 'openrouter' ? db.openrouterKey : db.openAIKey)),
+        "Authorization": "Bearer " + (arg.key ?? (aiModel === 'reverse_proxy' ? db.proxyKey : (aiModel === 'openrouter' ? db.openrouterKey : db.openAIKey))),
         "Content-Type": "application/json"
     }
@@ -1375,7 +1381,7 @@ async function requestOpenAILegacyInstruct(arg:RequestDataArgumentExtended):Prom
         },
         headers: {
             "Content-Type": "application/json",
-            "Authorization": "Bearer " + db.openAIKey,
+            "Authorization": "Bearer " + (arg.key ?? db.openAIKey)
         },
         chatId: arg.chatId
     });
@@ -1511,7 +1517,7 @@ async function requestOpenAIResponseAPI(arg:RequestDataArgumentExtended):Promise
url: "https://api.openai.com/v1/responses", url: "https://api.openai.com/v1/responses",
body: body, body: body,
headers: { headers: {
"Authorization": "Bearer " + db.openAIKey, "Authorization": "Bearer " + (arg.key ?? db.openAIKey),
"Content-Type": "application/json" "Content-Type": "application/json"
} }
}) })
@@ -1526,7 +1532,7 @@ async function requestOpenAIResponseAPI(arg:RequestDataArgumentExtended):Promise
         body: body,
         headers: {
             "Content-Type": "application/json",
-            "Authorization": "Bearer " + db.openAIKey,
+            "Authorization": "Bearer " + (arg.key ?? db.openAIKey),
         },
         chatId: arg.chatId
     });
@@ -1642,7 +1648,7 @@ async function requestNovelAI(arg:RequestDataArgumentExtended):Promise<requestDa
     const da = await globalFetch(aiModel === 'novelai_kayra' ? "https://text.novelai.net/ai/generate" : "https://api.novelai.net/ai/generate", {
         body: body,
         headers: {
-            "Authorization": "Bearer " + db.novelai.token
+            "Authorization": "Bearer " + (arg.key ?? db.novelai.token)
         },
         abortSignal,
         chatId: arg.chatId
@@ -2803,7 +2809,7 @@ async function requestCohere(arg:RequestDataArgumentExtended):Promise<requestDat
         url: arg.customURL ?? 'https://api.cohere.com/v1/chat',
         body: body,
         headers: {
-            "Authorization": "Bearer " + db.cohereAPIKey,
+            "Authorization": "Bearer " + (arg.key ?? db.cohereAPIKey),
             "Content-Type": "application/json"
         }
     })
@@ -2813,7 +2819,7 @@ async function requestCohere(arg:RequestDataArgumentExtended):Promise<requestDat
     const res = await globalFetch(arg.customURL ?? 'https://api.cohere.com/v1/chat', {
         method: "POST",
         headers: {
-            "Authorization": "Bearer " + db.cohereAPIKey,
+            "Authorization": "Bearer " + (arg.key ?? db.cohereAPIKey),
             "Content-Type": "application/json"
         },
         body: body

View File

@@ -512,6 +512,7 @@ export function setDatabase(data:Database){
         translate: data.fallbackModels.translate.filter((v) => v !== ''),
         otherAx: data.fallbackModels.otherAx.filter((v) => v !== '')
     }
+    data.customModels ??= []
     changeLanguage(data.language)
     setDatabaseLite(data)
 }
@@ -968,6 +969,17 @@ export interface Database{
     }
     doNotChangeFallbackModels: boolean
     fallbackWhenBlankResponse: boolean
+    customModels: {
+        id: string
+        internalId: string
+        url: string
+        format: LLMFormat
+        tokenizer: LLMTokenizer
+        key: string
+        name: string
+        params: string
+        flags: LLMFlags[]
+    }[]
 }
 interface SeparateParameters{
@@ -1784,7 +1796,7 @@ import type { RisuModule } from '../process/modules';
 import type { SerializableHypaV2Data } from '../process/memory/hypav2';
 import { decodeRPack, encodeRPack } from '../rpack/rpack_bg';
 import { DBState, selectedCharID } from '../stores.svelte';
-import { LLMFlags, LLMFormat } from '../model/modellist';
+import { LLMFlags, LLMFormat, LLMTokenizer } from '../model/modellist';
 import type { Parameter } from '../process/request';
 import type { HypaModel } from '../process/memory/hypamemory';
 import type { SerializableHypaV3Data } from '../process/memory/hypav3';
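
Taken together, the commit stores custom model entries in the database, tags each with an 'xcustom:::'-prefixed id, and lets the request layer swap in the entry's url and key ("Bearer " + (arg.key ?? db.someKey)). The standalone TypeScript sketch below (not part of the commit) mirrors that lookup; the types are simplified stand-ins for the project's LLMFormat/LLMTokenizer/LLMFlags enums, and the example values are hypothetical.

// Simplified stand-in for the customModels entries added to the Database interface.
type CustomModel = {
    id: string          // 'xcustom:::' + uuid, assigned by the settings page
    internalId: string  // model name sent to the endpoint
    url: string         // request URL override
    format: number      // LLMFormat in the real code
    tokenizer: number   // LLMTokenizer in the real code
    key: string         // API key override
    name: string
    params: string
    flags: number[]     // LLMFlags[] in the real code
}

const CUSTOM_PREFIX = 'xcustom:::'

// Mirrors the branches added to getModelInfo and requestChatDataMain:
// a custom id resolves to its stored entry, whose url and key then take
// precedence over the per-provider defaults in the request functions.
function resolveCustomModel(customModels: CustomModel[], id: string): CustomModel | undefined {
    if (!id.startsWith(CUSTOM_PREFIX)) {
        return undefined
    }
    return customModels.find((model) => model.id === id)
}

// Hypothetical entry shaped like the one the new settings UI pushes.
const example: CustomModel = {
    id: CUSTOM_PREFIX + '123e4567-e89b-12d3-a456-426614174000',
    internalId: 'my-model',
    url: 'https://example.com/v1/chat/completions',
    format: 0,     // OpenAICompatible in the format selector
    tokenizer: 0,  // tiktokenCl100kBase in the tokenizer selector
    key: '',
    name: 'My Model',
    params: '',
    flags: [8],    // hasStreaming in the flag buttons
}

console.log(resolveCustomModel([example], example.id)?.url)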