diff --git a/src/lib/Setting/Pages/BotSettings.svelte b/src/lib/Setting/Pages/BotSettings.svelte
index d5decb19..36431235 100644
--- a/src/lib/Setting/Pages/BotSettings.svelte
+++ b/src/lib/Setting/Pages/BotSettings.svelte
@@ -27,6 +27,7 @@
import PromptSettings from "./PromptSettings.svelte";
import { openPresetList } from "src/ts/stores.svelte";
import { selectSingleFile } from "src/ts/util";
+ import { LLMFormat } from "src/ts/model/modellist";
let tokens = $state({
mainPrompt: 0,
@@ -133,39 +134,27 @@
<span class="text-textcolor">{language.proxyAPIKey}</span>
<span class="text-textcolor">{language.proxyRequestModel}</span>
- <SelectInput className="mt-2 mb-4" bind:value={DBState.db.proxyRequestModel}>
-     <OptionInput value="">None</OptionInput>
-     <OptionInput value="gpt35">GPT 3.5</OptionInput>
-     <OptionInput value="gpt35_16k">GPT 3.5 16k</OptionInput>
-     <OptionInput value="gpt4">GPT-4</OptionInput>
-     <OptionInput value="gpt4o">GPT-4o</OptionInput>
-     <OptionInput value="gpt4_32k">GPT-4 32k</OptionInput>
-     <OptionInput value="gpt4_turbo">GPT-4 Turbo</OptionInput>
-     <OptionInput value="gpt4_1106">GPT-4 Turbo 1106</OptionInput>
-     <OptionInput value="gptvi4_1106">GPT-4 Turbo 1106 Vision</OptionInput>
-     <OptionInput value="gpt35_0301">GPT-3.5 0301</OptionInput>
-     <OptionInput value="gpt4_0301">GPT-4 0301</OptionInput>
-     <OptionInput value="gpt35_0613">GPT-3.5 0613</OptionInput>
-     <OptionInput value="gpt4_0613">GPT-4 0613</OptionInput>
-     <OptionInput value="claude-2.1">claude-2.1</OptionInput>
-     <OptionInput value="claude-2.0">claude-2.0</OptionInput>
-     <OptionInput value="claude-2">claude-2</OptionInput>
-     <OptionInput value="claude-v1.3">claude-v1.3</OptionInput>
-     <OptionInput value="claude-v1.3-100k">claude-v1.3-100k</OptionInput>
-     <OptionInput value="claude-v1.2">claude-v1.2</OptionInput>
-     <OptionInput value="claude-instant-v1.1">claude-instant-v1.1</OptionInput>
-     <OptionInput value="claude-instant-v1.1-100k">claude-instant-v1.1-100k</OptionInput>
-     <OptionInput value="claude-3-opus-20240229">claude-3-opus-20240229</OptionInput>
-     <OptionInput value="claude-3-sonnet-20240229">claude-3-sonnet-20240229</OptionInput>
-     <OptionInput value="claude-3-5-sonnet-20240620">claude-3-5-sonnet-20240620</OptionInput>
-     <OptionInput value="claude-3-5-sonnet-20241022">claude-3-5-sonnet-20241022</OptionInput>
-     <OptionInput value="custom">Custom</OptionInput>
- </SelectInput>
+ <TextInput bind:value={DBState.db.customProxyRequestModel} marginBottom={true}/>
+ <span class="text-textcolor">{language.format}</span>
+ <SelectInput className="mt-2 mb-4" value={DBState.db.customAPIFormat.toString()} onchange={(e) => {
+     DBState.db.customAPIFormat = parseInt(e.currentTarget.value)
+ }}>
+     <OptionInput value={LLMFormat.OpenAICompatible.toString()}>
+         OpenAI Compatible
+     </OptionInput>
+     <OptionInput value={LLMFormat.Anthropic.toString()}>
+         Anthropic Claude
+     </OptionInput>
+     <OptionInput value={LLMFormat.Mistral.toString()}>
+         Mistral
+     </OptionInput>
+     <OptionInput value={LLMFormat.GoogleCloud.toString()}>
+         Google Cloud
+     </OptionInput>
+     <OptionInput value={LLMFormat.Cohere.toString()}>
+         Cohere
+     </OptionInput>
+ </SelectInput>
- {#if DBState.db.proxyRequestModel === 'custom'}
-     <TextInput bind:value={DBState.db.customProxyRequestModel} marginBottom={true}/>
- {:else}
-
- {/if}
{/if}
{#if DBState.db.aiModel.startsWith('risullm')}
<span class="text-textcolor">Risu {language.apiKey}</span>
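
For readers skimming the diff: the UI no longer stores a proxy model id such as `gpt4` or `custom`; it stores a numeric `LLMFormat` value in `DBState.db.customAPIFormat`, while the free-text model name lives in `customProxyRequestModel`. Below is a minimal TypeScript sketch of the round-trip through the select, assuming `LLMFormat` is a plain numeric enum (the real enum in `src/ts/model/modellist` has more members):

```ts
// Hypothetical, trimmed-down stand-in for the real LLMFormat enum.
enum LLMFormat {
    OpenAICompatible,
    Anthropic,
    Mistral,
    GoogleCloud,
    Cohere,
}

// Option values are produced with .toString(), so the onchange handler can
// recover the enum member with parseInt(), as in the hunk above.
const optionValue = LLMFormat.Anthropic.toString();      // "1"
const parsedFormat = parseInt(optionValue) as LLMFormat; // LLMFormat.Anthropic

console.log(parsedFormat === LLMFormat.Anthropic); // true
```
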
diff --git a/src/ts/process/request.ts b/src/ts/process/request.ts
index 0932b9c5..d9e88bf8 100644
--- a/src/ts/process/request.ts
+++ b/src/ts/process/request.ts
@@ -1,19 +1,16 @@
-import { get } from "svelte/store";
import type { MultiModal, OpenAIChat, OpenAIChatFull } from "./index.svelte";
import { getCurrentCharacter, getDatabase, type character } from "../storage/database.svelte";
import { pluginProcess } from "../plugins/plugins";
import { language } from "../../lang";
-import { stringlizeAINChat, stringlizeChat, getStopStrings, unstringlizeAIN, unstringlizeChat } from "./stringlize";
+import { stringlizeAINChat, getStopStrings, unstringlizeAIN, unstringlizeChat } from "./stringlize";
import { addFetchLog, fetchNative, globalFetch, isNodeServer, isTauri, textifyReadableStream } from "../globalApi.svelte";
import { sleep } from "../util";
import { NovelAIBadWordIds, stringlizeNAIChat } from "./models/nai";
import { strongBan, tokenize, tokenizeNum } from "../tokenizer";
-import { runGGUFModel } from "./models/local";
import { risuChatParser } from "../parser.svelte";
import { SignatureV4 } from "@smithy/signature-v4";
import { HttpRequest } from "@smithy/protocol-http";
import { Sha256 } from "@aws-crypto/sha256-js";
-import { v4 } from "uuid";
import { supportsInlayImage } from "./files/image";
import { Capacitor } from "@capacitor/core";
import { getFreeOpenRouterModel } from "../model/openrouter";
@@ -47,9 +44,9 @@ interface requestDataArgument{
interface RequestDataArgumentExtended extends requestDataArgument{
aiModel?:string
multiGen?:boolean
- realAIModel?:string
abortSignal?:AbortSignal
modelInfo?:LLMModel
+ customURL?:string
}
type requestDataResponse = {
@@ -199,19 +196,12 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'|'submodel', abortSignal:AbortSignal = null):Promise<requestDataResponse> {
targ.biasString = arg.biasString ?? []
targ.aiModel = (model === 'model' ? db.aiModel : db.subModel)
targ.multiGen = ((db.genTime > 1 && targ.aiModel.startsWith('gpt') && (!arg.continue)) && (!arg.noMultiGen))
- targ.realAIModel = targ.aiModel
targ.abortSignal = abortSignal
targ.modelInfo = getModelInfo(targ.aiModel)
if(targ.aiModel === 'reverse_proxy'){
- if(db.proxyRequestModel === 'custom' && db.customProxyRequestModel.startsWith('claude')){
- targ.realAIModel = db.customProxyRequestModel
- }
- if(db.proxyRequestModel.startsWith('claude')){
- targ.realAIModel = db.proxyRequestModel
- }
- if(db.forceProxyAsOpenAI){
- targ.realAIModel = 'reverse_proxy'
- }
+ targ.modelInfo.internalID = db.customProxyRequestModel
+ targ.modelInfo.format = db.customAPIFormat
+ targ.customURL = db.forceReplaceUrl
}
const format = targ.modelInfo.format
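
The old branch tracked a separate `realAIModel` string and special-cased Claude proxies; the new branch instead rewrites `targ.modelInfo` so the existing format dispatch treats the reverse proxy like any other backend. A hedged sketch of that idea follows, with invented handler names rather than the real request functions:

```ts
// Hypothetical shapes; the real LLMModel/LLMFormat live in src/ts/model/modellist.
enum LLMFormat { OpenAICompatible, Anthropic, Mistral, GoogleCloud, Cohere }
interface ModelInfo { internalID: string; format: LLMFormat }

// With the reverse-proxy override applied, one switch on modelInfo.format
// replaces the old realAIModel string checks.
function pickHandler(info: ModelInfo): string {
    switch (info.format) {
        case LLMFormat.Anthropic: return "anthropic handler (placeholder)";
        case LLMFormat.Cohere:    return "cohere handler (placeholder)";
        default:                  return "openai-compatible handler (placeholder)";
    }
}

// reverse_proxy case: user-typed model name plus chosen format, as in the hunk above.
console.log(pickHandler({ internalID: "claude-3-5-sonnet-20241022", format: LLMFormat.Anthropic }));
```
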
@@ -384,7 +374,7 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise<requestDataResponse>{
logit_bias: (logit_bias.length > 0) ? logit_bias.join("<<|>>") : undefined,
logit_bias_values: (logit_bias_values.length > 0) ? logit_bias_values.join("|") : undefined,
};
- const response = await globalFetch(api_server_url + '/api', {
+ const response = await globalFetch(arg.customURL ?? api_server_url + '/api', {
method: 'POST',
headers: headers,
body: send_body,
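
The same override pattern applies at call sites that previously hard-coded an endpoint: a user-supplied replacement URL (`db.forceReplaceUrl`, carried on `arg.customURL`) wins, otherwise the built-in endpoint is used. A small sketch of the fallback, with illustrative names and URLs; only the `customURL ?? default` pattern is taken from the diff:

```ts
// Sketch of the endpoint override used above.
function resolveEndpoint(customURL: string | undefined, apiServerUrl: string): string {
    // `??` falls back only when customURL is null or undefined,
    // not when it is an empty string.
    return customURL ?? apiServerUrl + "/api";
}

console.log(resolveEndpoint(undefined, "http://127.0.0.1:5001"));      // "http://127.0.0.1:5001/api"
console.log(resolveEndpoint("https://my-proxy.example/v1", "unused")); // "https://my-proxy.example/v1"
```
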
@@ -1771,7 +1762,7 @@ async function requestCohere(arg:RequestDataArgumentExtended):Promise<requestDataResponse>{