Merge branch 'main' of https://github.com/kwaroran/RisuAI
@@ -29,7 +29,7 @@
 },
 "productName": "RisuAI",
 "mainBinaryName": "RisuAI",
-"version": "156.0.0",
+"version": "158.1.0",
 "identifier": "co.aiclient.risu",
 "plugins": {
 "updater": {
@@ -22,7 +22,8 @@

 let didFirstSetup: boolean = $derived(DBState.db?.didFirstSetup)
 let gridOpen = $state(false)
+let aprilFools = $state(new Date().getMonth() === 3 && new Date().getDate() === 1)
+let aprilFoolsPage = $state(0)
 </script>

 <main class="flex bg-bg w-full h-full max-w-100vw text-textcolor" ondragover={(e) => {
@@ -39,7 +40,60 @@
 checkCharOrder()
 }
 }}>
-{#if !$loadedStore}
+{#if aprilFools}
+
+<div class="bg-[#212121] w-full h-screen min-h-screen text-black flex relative">
+<div class="w-full max-w-3xl mx-auto py-8 px-4 flex justify-center items-center">
+<!-- svelte-ignore a11y_no_static_element_interactions -->
+<div class="flex flex-col w-full items-center text-[#bbbbbb]">
+{#if aprilFoolsPage === 0}
+<h1 class="text-3xl text-white font-bold mb-6">What can I help you?</h1>
+<textarea class="resize-none w-full placeholder-[#bbbbbb] bg-[#303030] rounded-3xl h-[110px] py-4 px-6 mb-6" placeholder="Ask me" onkeydown={(e) => {
+if(e.key === 'Enter'){
+aprilFoolsPage = 1
+}
+}}></textarea>
+<!-- svelte-ignore a11y_click_events_have_key_events -->
+<div class="flex gap-1.5" onclick={() => {
+aprilFoolsPage = 1
+}}>
+<button class="rounded-full border border-[#bbbbbb15] px-4 py-2">
+<span class="text-[#bbbbbb]">🔍</span>
+Search
+</button>
+<button class="rounded-full border border-[#bbbbbb15] px-4 py-2">
+<span class="text-[#bbbbbb]">🎮</span>
+Games
+</button>
+<button class="rounded-full border border-[#bbbbbb15] px-4 py-2">
+<span class="text-[#bbbbbb]">🎨</span>
+Roleplay
+</button>
+<button class="rounded-full border border-[#bbbbbb15] px-4 py-2">
+More
+</button>
+</div>
+{:else}
+<h1 class="text-3xl text-white font-bold mb-6">
+Happy April Fools!
+</h1>
+<p class="text-[#bbbbbb] mb-6">
+<!-- svelte-ignore a11y_missing_attribute -->
+<!-- svelte-ignore a11y_click_events_have_key_events -->
+Go <a class="text-blue-500 cursor-pointer" onclick={() => {
+aprilFoolsPage = 0
+aprilFools = false
+}}>
+Back to normal
+</a>
+</p>
+
+{/if}
+</div>
+</div>
+<span class="absolute top-4 left-4 font-bold text-[#bbbbbb] text-md md:text-lg">RisyGTP-9</span>
+</div>
+{:else if !$loadedStore}
 <div class="w-full h-full flex justify-center items-center text-textcolor text-xl bg-gray-900 flex-col">
 <div class="flex flex-row items-center">
 <svg class="animate-spin -ml-1 mr-3 h-5 w-5 text-textcolor" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24">
@@ -1077,7 +1077,7 @@ export const languageEnglish = {
 playMessageOnTranslateEnd: "Play Audio on Translate Completion",
 seperateModelsForAxModels: "Seperate Models for Auxiliary Models",
 axModelsDef: "Ax Models Definition",
-doNotChangeSeperateModels: "Do Not Change Seperate Models",
+doNotChangeSeperateModels: "Do Not Change Seperate Models on Preset Change",
 tools: "Tools",
 action: "Action",
 hotkey: "Hotkey",
@@ -1108,4 +1108,9 @@ export const languageEnglish = {
 focusInput: "Focus Input",
 },
 screenTooSmall: "Screen is too small to show the interface.",
+advancedModelSettings: "Advanced Model Settings",
+fallbackModel: "Fallback Model",
+fallbackWhenBlankResponse: "Fallback When Blank Response",
+doNotChangeFallbackModels: "Do Not Change Fallback Models on Preset Change",
+customModels: "Custom Models",
 }
@@ -14,6 +14,8 @@
 import { Capacitor } from "@capacitor/core";
 import { capStorageInvestigation } from "src/ts/storage/mobileStorage";
 import Arcodion from "src/lib/UI/Arcodion.svelte";
+import { PlusIcon, TrashIcon } from "lucide-svelte";
+import { v4 } from "uuid";

 let estaStorage:{
 key:string,
@@ -211,11 +213,6 @@
 <Help key="unrecommended" unrecommended/>
 </Check>
 </div>
-<div class="flex items-center mt-4">
-<Check bind:check={DBState.db.doNotChangeSeperateModels} name={language.doNotChangeSeperateModels}>
-<Help key="unrecommended" unrecommended/>
-</Check>
-</div>
 <div class="flex items-center mt-4">
 <Check bind:check={DBState.db.claudeRetrivalCaching} name={language.claudeCachingRetrival}>
 <Help key="unrecommended" unrecommended/>
@@ -259,6 +256,117 @@
 {/each}
 </Arcodion>

+{#snippet CustomFlagButton(index:number,name:string,flag:number)}
+<Button className="mt-2" onclick={(e) => {
+if(DBState.db.customModels[index].flags.includes(flag)){
+DBState.db.customModels[index].flags = DBState.db.customModels[index].flags.filter((f) => f !== flag)
+}
+else{
+DBState.db.customModels[index].flags.push(flag)
+}
+}} styled={DBState.db.customModels[index].flags.includes(flag) ? 'primary' : 'outlined'}>
+{name}
+</Button>
+{/snippet}
+
+<Arcodion styled name={language.customModels} className="overflow-x-auto">
+
+{#each DBState.db.customModels as model, index}
+<Arcodion styled name={model.name ?? "Unnamed"}>
+<span class="text-textcolor">{language.name}</span>
+<TextInput size={"sm"} bind:value={DBState.db.customModels[index].name}/>
+<span class="text-textcolor">{language.proxyRequestModel}</span>
+<TextInput size={"sm"} bind:value={DBState.db.customModels[index].internalId}/>
+<span class="text-textcolor">URL</span>
+<TextInput size={"sm"} bind:value={DBState.db.customModels[index].url}/>
+<span class="text-textcolor">{language.tokenizer}</span>
+<SelectInput size={"sm"} value={DBState.db.customModels[index].tokenizer.toString()} onchange={(e) => {
+DBState.db.customModels[index].tokenizer = parseInt(e.currentTarget.value)
+}}>
+<OptionInput value="0">tiktokenCl100kBase</OptionInput>
+<OptionInput value="1">tiktokenO200Base</OptionInput>
+<OptionInput value="2">Mistral</OptionInput>
+<OptionInput value="3">Llama</OptionInput>
+<OptionInput value="4">NovelAI</OptionInput>
+<OptionInput value="5">Claude</OptionInput>
+<OptionInput value="6">NovelList</OptionInput>
+<OptionInput value="7">Llama3</OptionInput>
+<OptionInput value="8">Gemma</OptionInput>
+<OptionInput value="9">GoogleCloud</OptionInput>
+<OptionInput value="10">Cohere</OptionInput>
+<OptionInput value="12">DeepSeek</OptionInput>
+</SelectInput>
+<span class="text-textcolor">{language.format}</span>
+<SelectInput size={"sm"} value={DBState.db.customModels[index].format.toString()} onchange={(e) => {
+DBState.db.customModels[index].format = parseInt(e.currentTarget.value)
+}}>
+<OptionInput value="0">OpenAICompatible</OptionInput>
+<OptionInput value="1">OpenAILegacyInstruct</OptionInput>
+<OptionInput value="2">Anthropic</OptionInput>
+<OptionInput value="3">AnthropicLegacy</OptionInput>
+<OptionInput value="4">Mistral</OptionInput>
+<OptionInput value="5">GoogleCloud</OptionInput>
+<OptionInput value="6">VertexAIGemini</OptionInput>
+<OptionInput value="7">NovelList</OptionInput>
+<OptionInput value="8">Cohere</OptionInput>
+<OptionInput value="9">NovelAI</OptionInput>
+<OptionInput value="11">OobaLegacy</OptionInput>
+<OptionInput value="13">Ooba</OptionInput>
+<OptionInput value="14">Kobold</OptionInput>
+<OptionInput value="17">AWSBedrockClaude</OptionInput>
+<OptionInput value="18">OpenAIResponseAPI</OptionInput>
+</SelectInput>
+<span class="text-textcolor">{language.proxyAPIKey}</span>
+<TextInput size={"sm"} bind:value={DBState.db.customModels[index].key}/>
+<span class="text-textcolor">{language.additionalParams}</span>
+<TextInput size={"sm"} bind:value={DBState.db.customModels[index].params}/>
+<Arcodion styled name={language.flags}>
+{@render CustomFlagButton(index,'hasImageInput', 0)}
+{@render CustomFlagButton(index,'hasImageOutput', 1)}
+{@render CustomFlagButton(index,'hasAudioInput', 2)}
+{@render CustomFlagButton(index,'hasAudioOutput', 3)}
+{@render CustomFlagButton(index,'hasPrefill', 4)}
+{@render CustomFlagButton(index,'hasCache', 5)}
+{@render CustomFlagButton(index,'hasFullSystemPrompt', 6)}
+{@render CustomFlagButton(index,'hasFirstSystemPrompt', 7)}
+{@render CustomFlagButton(index,'hasStreaming', 8)}
+{@render CustomFlagButton(index,'requiresAlternateRole', 9)}
+{@render CustomFlagButton(index,'mustStartWithUserInput', 10)}
+{@render CustomFlagButton(index,'hasVideoInput', 12)}
+{@render CustomFlagButton(index,'OAICompletionTokens', 13)}
+{@render CustomFlagButton(index,'DeveloperRole', 14)}
+{@render CustomFlagButton(index,'geminiThinking', 15)}
+{@render CustomFlagButton(index,'geminiBlockOff', 16)}
+{@render CustomFlagButton(index,'deepSeekPrefix', 17)}
+{@render CustomFlagButton(index,'deepSeekThinkingInput', 18)}
+{@render CustomFlagButton(index,'deepSeekThinkingOutput', 19)}
+</Arcodion>
+</Arcodion>
+{/each}
+<div class="flex items-center mt-4">
+<Button onclick={() => {
+DBState.db.customModels.push({
+internalId: "",
+url: "",
+tokenizer: 0,
+format: 0,
+id: 'xcustom:::' + v4(),
+key: "",
+name: "",
+params: "",
+flags: [],
+})
+}}>
+<PlusIcon />
+</Button>
+<Button onclick={() => {
+DBState.db.customModels.pop()
+}}>
+<TrashIcon />
+</Button>
+</div>
+</Arcodion>
+
 <Button
 className="mt-4"
 onclick={async () => {
@@ -1,5 +1,5 @@
 <script lang="ts">
-import { ArrowLeft, PlusIcon } from "lucide-svelte";
+import { ArrowLeft, PlusIcon, TrashIcon } from "lucide-svelte";
 import { language } from "src/lang";
 import PromptDataItem from "src/lib/UI/PromptDataItem.svelte";
 import { tokenizePreset, type PromptItem } from "src/ts/process/prompt";
@@ -174,6 +174,7 @@
 </div>

 {#if DBState.db.seperateModelsForAxModels}
+<Check bind:check={DBState.db.doNotChangeSeperateModels} name={language.doNotChangeSeperateModels}></Check>
 <Arcodion name={language.axModelsDef} styled>
 <span class="text-textcolor mt-4">
 Memory
@@ -200,4 +201,46 @@
 </Arcodion>
 {/if}

+{#snippet fallbackModelList(arg:'model'|'memory'|'translate'|'emotion'|'otherAx')}
+{#each DBState.db.fallbackModels[arg] as model, i}
+<span class="text-textcolor mt-4">
+{language.model} {i + 1}
+</span>
+<ModelList bind:value={DBState.db.fallbackModels[arg][i]} blankable />
+{/each}
+<div class="flex gap-2">
+<button class="bg-selected text-white p-2 rounded-md" onclick={() => {
+let value = DBState.db.fallbackModels[arg] ?? []
+value.push('')
+DBState.db.fallbackModels[arg] = value
+}}><PlusIcon /></button>
+<button class="bg-red-500 text-white p-2 rounded-md" onclick={() => {
+let value = DBState.db.fallbackModels[arg] ?? []
+value.pop()
+DBState.db.fallbackModels[arg] = value
+}}><TrashIcon /></button>
+</div>
+{/snippet}
+
+<Arcodion name={language.fallbackModel} styled>
+<Check bind:check={DBState.db.fallbackWhenBlankResponse} name={language.fallbackWhenBlankResponse} className="mt-4"/>
+<Check bind:check={DBState.db.doNotChangeFallbackModels} name={language.doNotChangeFallbackModels} className="mt-4"/>
+
+<Arcodion name={language.model} styled>
+{@render fallbackModelList('model')}
+</Arcodion>
+<Arcodion name={"Memory"} styled>
+{@render fallbackModelList('memory')}
+</Arcodion>
+<Arcodion name={"Translations"} styled>
+{@render fallbackModelList('translate')}
+</Arcodion>
+<Arcodion name={"Emotion"} styled>
+{@render fallbackModelList('emotion')}
+</Arcodion>
+<Arcodion name={"OtherAx"} styled>
+{@render fallbackModelList('otherAx')}
+</Arcodion>
+</Arcodion>
+
 {/if}
@@ -9,6 +9,7 @@
 help?: (keyof (typeof language.help))|'';
 disabled?: boolean;
 children?: import('svelte').Snippet;
+className?: string;
 }

 let {
@@ -16,7 +17,8 @@
 styled = false,
 help = '',
 disabled = false,
-children
+children,
+className = ""
 }: Props = $props();
 </script>
 {#if disabled}
@@ -35,7 +37,7 @@
 <Help key={help} />
 {/if}</button>
 {#if open}
-<div class="flex flex-col border border-selected p-2 rounded-b-md">
+<div class={"flex flex-col border border-selected p-2 rounded-b-md " + className}>
 {@render children?.()}
 </div>
 {/if}
@@ -74,6 +74,17 @@
 {/await}
 </Arcodion>

+{#if DBState?.db.customModels?.length > 0}
+<Arcodion name={language.customModels}>
+{#each DBState.db.customModels as model}
+<button class="hover:bg-selected px-6 py-2 text-lg" onclick={() => {changeModel(model.id)}}>{model.name ?? "Unnamed"}</button>
+{/each}
+</Arcodion>
+
+{/if}
+
+
+
 {#if blankable}
 <button class="hover:bg-selected px-6 py-2 text-lg" onclick={() => {changeModel('')}}>{language.none}</button>
 {/if}
@@ -44,6 +44,7 @@

 </script>

+
 <!-- svelte-ignore a11y_no_noninteractive_element_interactions -->
 <!-- svelte-ignore a11y_click_events_have_key_events -->
 <h2 class="text-4xl text-textcolor mb-0 mt-6 font-black relative" class:text-bordered={specialDay === 'newYear'} onclick={onClick}>
@@ -1050,36 +1050,6 @@ export const LLMModels: LLMModel[] = [
 parameters: ['temperature', 'top_k', 'top_p'],
 tokenizer: LLMTokenizer.GoogleCloud
 },
-{
-name: "Gemini Exp 1121",
-id: 'gemini-exp-1121-vertex',
-internalID: 'gemini-exp-1121',
-provider: LLMProvider.VertexAI,
-format: LLMFormat.VertexAIGemini,
-flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.requiresAlternateRole],
-parameters: ['temperature', 'top_k', 'top_p'],
-tokenizer: LLMTokenizer.Gemma
-},
-{
-name: "Gemini Pro 1.5",
-id: 'gemini-1.5-pro-latest-vertex',
-internalID: 'gemini-1.5-pro-latest',
-provider: LLMProvider.VertexAI,
-format: LLMFormat.VertexAIGemini,
-flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.requiresAlternateRole],
-parameters: ['temperature', 'top_k', 'top_p'],
-tokenizer: LLMTokenizer.Gemma
-},
-{
-name: "Gemini Flash 1.5",
-id: 'gemini-1.5-flash-vertex',
-internalID: 'gemini-1.5-flash',
-provider: LLMProvider.VertexAI,
-format: LLMFormat.VertexAIGemini,
-flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.requiresAlternateRole],
-parameters: ['temperature', 'top_k', 'top_p'],
-tokenizer: LLMTokenizer.Gemma
-},
 {
 name: "Gemini Exp 1114",
 id: 'gemini-exp-1114',
@@ -1405,6 +1375,17 @@ for(let i=0; i<LLMModels.length; i++){

 })
 }
+if(LLMModels[i].provider === LLMProvider.GoogleCloud){
+LLMModels.push({
+...LLMModels[i],
+id: `${LLMModels[i].id}-vertex`,
+name: `${LLMModels[i].name} Vertex`,
+fullName: `${LLMModels[i].fullName ?? LLMModels[i].name} Vertex`,
+flags: [...LLMModels[i].flags],
+recommended: false,
+provider: LLMProvider.VertexAI
+})
+}
 }

 export function getModelInfo(id: string): LLMModel{
@@ -1450,6 +1431,24 @@ export function getModelInfo(id: string): LLMModel{
 tokenizer: LLMTokenizer.Unknown
 }
 }
+if(id.startsWith('xcustom:::')){
+const customModels = db?.customModels || []
+const found = customModels.find((model) => model.id === id)
+if(found){
+return {
+id: found.id,
+name: found.name,
+shortName: found.name,
+fullName: found.name,
+internalID: found.internalId,
+provider: LLMProvider.AsIs,
+format: found.format,
+flags: found.flags,
+parameters: ['temperature', 'top_p', 'frequency_penalty', 'presence_penalty', 'repetition_penalty', 'min_p', 'top_a', 'top_k', 'thinking_tokens'],
+tokenizer: found.tokenizer
+}
+}
+}

 return {
 id,
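Read together, the hunks above replace the hand-maintained Gemini "-vertex" entries with an automatic pass: every GoogleCloud model gains a Vertex twin, and getModelInfo now also resolves user-defined "xcustom:::" IDs against db.customModels. A small illustrative sketch of the cloning effect (not part of the commit; it reuses the 'gemini-exp-1114' entry visible above):

    // Illustrative sketch only: what the auto-cloning loop above produces for one GoogleCloud entry.
    const base = LLMModels.find(m => m.id === 'gemini-exp-1114')
    const vertexTwin = {
        ...base,
        id: `${base.id}-vertex`,                          // e.g. 'gemini-exp-1114-vertex'
        name: `${base.name} Vertex`,                      // e.g. 'Gemini Exp 1114 Vertex'
        fullName: `${base.fullName ?? base.name} Vertex`,
        flags: [...base.flags],                           // copied so later edits don't alias the original
        recommended: false,
        provider: LLMProvider.VertexAI
    }
    // The real loop pushes this twin into LLMModels, which is why the hard-coded
    // '-vertex' Gemini entries removed in the first hunk are no longer needed.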
@@ -1310,6 +1310,12 @@ export async function sendChat(chatProcessIndex = -1,arg:{
 previewBody: arg.previewPrompt
 }, 'model', abortSignal)

+console.log(req)
+if(req.model){
+generationInfo.model = getGenerationModelString(req.model)
+console.log(generationInfo.model, req.model)
+}
+
 if(arg.previewPrompt && req.type === 'success'){
 previewBody = req.result
 return true
@@ -1,13 +1,13 @@
 import { getDatabase } from "src/ts/storage/database.svelte";

-export function getGenerationModelString(){
+export function getGenerationModelString(name?:string){
 const db = getDatabase()
-switch (db.aiModel){
+switch (name ?? db.aiModel){
 case 'reverse_proxy':
 return 'custom-' + (db.reverseProxyOobaMode ? 'ooba' : db.customProxyRequestModel)
 case 'openrouter':
 return 'openrouter-' + db.openrouterRequestModel
 default:
-return db.aiModel
+return name ?? db.aiModel
 }
 }
@@ -45,6 +45,7 @@ interface requestDataArgument{
 extractJson?:string
 imageResponse?:boolean
 previewBody?:boolean
+staticModel?: string
 }

 interface RequestDataArgumentExtended extends requestDataArgument{
@@ -54,6 +55,7 @@ interface RequestDataArgumentExtended extends requestDataArgument{
 modelInfo?:LLMModel
 customURL?:string
 mode?:ModelModeExtended
+key?:string
 }

 type requestDataResponse = {
@@ -64,18 +66,21 @@ type requestDataResponse = {
 emotion?: string
 },
 failByServerError?: boolean
+model?: string
 }|{
 type: "streaming",
 result: ReadableStream<StreamResponseChunk>,
 special?: {
 emotion?: string
 }
+model?: string
 }|{
 type: "multiline",
 result: ['user'|'char',string][],
 special?: {
 emotion?: string
 }
+model?: string
 }

 interface StreamResponseChunk{[key:string]:string}
@@ -264,87 +269,112 @@ function applyParameters(data: { [key: string]: any }, parameters: Parameter[],

 export async function requestChatData(arg:requestDataArgument, model:ModelModeExtended, abortSignal:AbortSignal=null):Promise<requestDataResponse> {
 const db = getDatabase()
-let trys = 0
+const fallBackModels:string[] = safeStructuredClone(db?.fallbackModels?.[model] ?? [])
-while(true){
+fallBackModels.push('')

-if(pluginV2.replacerbeforeRequest.size > 0){
+const originalFormated = safeStructuredClone(arg.formated)
-for(const replacer of pluginV2.replacerbeforeRequest){
+for(let fallbackIndex=0;fallbackIndex<fallBackModels.length;fallbackIndex++){
-arg.formated = await replacer(arg.formated, model)
+let trys = 0
-}
+arg.formated = safeStructuredClone(originalFormated)

+if(fallbackIndex !== 0 && !fallBackModels[fallbackIndex]){
+continue
 }

-try{
+while(true){
-const currentChar = getCurrentCharacter()
-if(currentChar?.type !== 'group'){
-const perf = performance.now()
-const d = await runTrigger(currentChar, 'request', {
-chat: getCurrentChat(),
-displayMode: true,
-displayData: JSON.stringify(arg.formated)
-})

-const got = JSON.parse(d.displayData)
+if(pluginV2.replacerbeforeRequest.size > 0){
-if(!got || !Array.isArray(got)){
+for(const replacer of pluginV2.replacerbeforeRequest){
-throw new Error('Invalid return')
+arg.formated = await replacer(arg.formated, model)
 }
-arg.formated = got
-console.log('Trigger time', performance.now() - perf)
 }
-}
-catch(e){
-console.error(e)
-}

+try{
+const currentChar = getCurrentCharacter()
+if(currentChar?.type !== 'group'){
+const perf = performance.now()
+const d = await runTrigger(currentChar, 'request', {
+chat: getCurrentChat(),
+displayMode: true,
+displayData: JSON.stringify(arg.formated)
+})

-const da = await requestChatDataMain(arg, model, abortSignal)
+const got = JSON.parse(d.displayData)
+if(!got || !Array.isArray(got)){
-if(da.type === 'success' && pluginV2.replacerafterRequest.size > 0){
+throw new Error('Invalid return')
-for(const replacer of pluginV2.replacerafterRequest){
-da.result = await replacer(da.result, model)
-}
-}

-if(da.type === 'success' && db.banCharacterset?.length > 0){
-let failed = false
-for(const set of db.banCharacterset){
-console.log(set)
-const checkRegex = new RegExp(`\\p{Script=${set}}`, 'gu')

-if(checkRegex.test(da.result)){
-trys += 1
-if(trys > db.requestRetrys){
-return {
-type: 'fail',
-result: 'Banned character found, retry limit reached'
-}
 }
+arg.formated = got
+console.log('Trigger time', performance.now() - perf)
+}
+}
+catch(e){
+console.error(e)
+}

-failed = true
+const da = await requestChatDataMain({
+...arg,
+staticModel: fallBackModels[fallbackIndex]
+}, model, abortSignal)

+if(da.type === 'success' && pluginV2.replacerafterRequest.size > 0){
+for(const replacer of pluginV2.replacerafterRequest){
+da.result = await replacer(da.result, model)
+}
+}

+if(da.type === 'success' && db.banCharacterset?.length > 0){
+let failed = false
+for(const set of db.banCharacterset){
+console.log(set)
+const checkRegex = new RegExp(`\\p{Script=${set}}`, 'gu')

+if(checkRegex.test(da.result)){
+trys += 1
+failed = true
+break
+}
+}

+if(failed){
+continue
+}
+}

+if(da.type === 'success' && fallbackIndex !== fallBackModels.length-1 && db.fallbackWhenBlankResponse){
+if(da.result.trim() === ''){
 break
 }
 }

-if(failed){
+if(da.type !== 'fail' || da.noRetry){
-continue
+return {
+...da,
+model: fallBackModels[fallbackIndex]
+}
+}

+if(da.failByServerError){
+await sleep(1000)
+if(db.antiServerOverloads){
+trys -= 0.5 // reduce trys by 0.5, so that it will retry twice as much
+}
+}

+trys += 1
+if(trys > db.requestRetrys){
+if(fallbackIndex === fallBackModels.length-1 || da.model === 'custom'){
+return da
+}
+break
 }
 }
+}


-if(da.type !== 'fail' || da.noRetry){
+return {
-return da
+type: 'fail',
-}
+result: "All models failed"

-if(da.failByServerError){
-await sleep(1000)
-if(db.antiServerOverloads){
-trys -= 0.5 // reduce trys by 0.5, so that it will retry twice as much
-}
-}

-trys += 1
-if(trys > db.requestRetrys){
-return da
-}
 }
 }

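The hunk above rewrites requestChatData around a fallback-model loop. As a reading aid, here is a condensed, illustrative sketch of the control flow it implements; the plugin replacers, the request trigger, the banned-character check, and the server-error backoff are omitted, and the helper names are the ones visible in the hunk:

    // Illustrative sketch only: the retry/fallback skeleton of the new requestChatData.
    async function requestChatDataSketch(arg: requestDataArgument, model: ModelModeExtended, abortSignal: AbortSignal = null): Promise<requestDataResponse> {
        const db = getDatabase()
        // clone the configured fallback list and append '', which makes requestChatDataMain use the normally selected model
        const fallBackModels: string[] = [...(db.fallbackModels?.[model] ?? []), '']
        for (let fallbackIndex = 0; fallbackIndex < fallBackModels.length; fallbackIndex++) {
            if (fallbackIndex !== 0 && !fallBackModels[fallbackIndex]) continue // skip blank slots after the first
            let trys = 0
            while (true) {
                // staticModel pins the model used for this attempt inside requestChatDataMain
                const da = await requestChatDataMain({ ...arg, staticModel: fallBackModels[fallbackIndex] }, model, abortSignal)
                if (da.type === 'success' && db.fallbackWhenBlankResponse && da.result.trim() === ''
                    && fallbackIndex !== fallBackModels.length - 1) {
                    break // blank answer: move on to the next fallback model
                }
                if (da.type !== 'fail' || da.noRetry) {
                    return { ...da, model: fallBackModels[fallbackIndex] } // report which model actually answered
                }
                trys += 1
                if (trys > db.requestRetrys) {
                    if (fallbackIndex === fallBackModels.length - 1 || da.model === 'custom') return da
                    break // retries exhausted on this model: try the next fallback
                }
            }
        }
        return { type: 'fail', result: 'All models failed' }
    }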
@@ -475,7 +505,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:ModelMo
 targ.useStreaming = db.useStreaming && arg.useStreaming
 targ.continue = arg.continue ?? false
 targ.biasString = arg.biasString ?? []
-targ.aiModel = (model === 'model' ? db.aiModel : db.subModel)
+targ.aiModel = arg.staticModel ? arg.staticModel : (model === 'model' ? db.aiModel : db.subModel)
 targ.multiGen = ((db.genTime > 1 && targ.aiModel.startsWith('gpt') && (!arg.continue)) && (!arg.noMultiGen))
 targ.abortSignal = abortSignal
 targ.modelInfo = getModelInfo(targ.aiModel)
@@ -486,8 +516,13 @@ export async function requestChatDataMain(arg:requestDataArgument, model:ModelMo
 targ.modelInfo.format = db.customAPIFormat
 targ.customURL = db.forceReplaceUrl
 }
+if(targ.aiModel.startsWith('xcustom:::')){
+const found = db.customModels.find(m => m.id === targ.aiModel)
+targ.customURL = found?.url
+targ.key = found?.key
+}

-if(db.seperateModelsForAxModels){
+if(db.seperateModelsForAxModels && !arg.staticModel){
 if(db.seperateModels[model]){
 targ.aiModel = db.seperateModels[model]
 targ.modelInfo = getModelInfo(targ.aiModel)
@@ -512,6 +547,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:ModelMo
 return requestPlugin(targ)
 case LLMFormat.Ooba:
 return requestOoba(targ)
+case LLMFormat.VertexAIGemini:
 case LLMFormat.GoogleCloud:
 return requestGoogleCloudVertex(targ)
 case LLMFormat.Kobold:
@@ -746,7 +782,7 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise<requestDat
 max_tokens: arg.maxTokens,
 }, ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'], {}, arg.mode ),
 headers: {
-"Authorization": "Bearer " + db.mistralKey,
+"Authorization": "Bearer " + (arg.key ?? db.mistralKey),
 },
 abortSignal: arg.abortSignal,
 chatId: arg.chatId
@@ -919,7 +955,7 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise<requestDat
 }

 let replacerURL = aiModel === 'openrouter' ? "https://openrouter.ai/api/v1/chat/completions" :
-(aiModel === 'reverse_proxy') ? (arg.customURL) : ('https://api.openai.com/v1/chat/completions')
+(arg.customURL) ?? ('https://api.openai.com/v1/chat/completions')

 if(arg.modelInfo?.endpoint){
 replacerURL = arg.modelInfo.endpoint
@@ -949,7 +985,7 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise<requestDat
 }

 let headers = {
-"Authorization": "Bearer " + (aiModel === 'reverse_proxy' ? db.proxyKey : (aiModel === 'openrouter' ? db.openrouterKey : db.openAIKey)),
+"Authorization": "Bearer " + (arg.key ?? (aiModel === 'reverse_proxy' ? db.proxyKey : (aiModel === 'openrouter' ? db.openrouterKey : db.openAIKey))),
 "Content-Type": "application/json"
 }

@@ -1135,8 +1171,23 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise<requestDat
 }
 }

-if(aiModel === 'reverse_proxy'){
+if(aiModel === 'reverse_proxy' || aiModel.startsWith('xcustom:::')){
-const additionalParams = db.additionalParams
+let additionalParams = aiModel === 'reverse_proxy' ? db.additionalParams : []
+
+if(aiModel.startsWith('xcustom:::')){
+const found = db.customModels.find(m => m.id === aiModel)
+const params = found?.params
+if(params){
+const lines = params.split('\n')
+for(const line of lines){
+const split = line.split('=')
+if(split.length >= 2){
+additionalParams.push([split[0], split.slice(1).join('=')])
+}
+}
+}
+}
+
 for(let i=0;i<additionalParams.length;i++){
 let key = additionalParams[i][0]
 let value = additionalParams[i][1]
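The additionalParams branch above treats a custom model's params field as newline-separated key=value pairs, keeping everything after the first '=' as the value. A small self-contained example of that parsing (the parameter names and values here are made up for illustration):

    // Illustrative only: parsing a customModels[].params string the same way as the hunk above.
    const params = 'temperature=0.7\nstop=###\nlogit_bias={"50256":-100}' // hypothetical contents
    const additionalParams: [string, string][] = []
    for (const line of params.split('\n')) {
        const split = line.split('=')
        if (split.length >= 2) {
            additionalParams.push([split[0], split.slice(1).join('=')]) // the value may itself contain '='
        }
    }
    // additionalParams => [['temperature', '0.7'], ['stop', '###'], ['logit_bias', '{"50256":-100}']]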
@@ -1346,7 +1397,7 @@ async function requestOpenAILegacyInstruct(arg:RequestDataArgumentExtended):Prom
 },
 headers: {
 "Content-Type": "application/json",
-"Authorization": "Bearer " + db.openAIKey,
+"Authorization": "Bearer " + (arg.key ?? db.openAIKey)
 },
 chatId: arg.chatId
 });
@@ -1482,7 +1533,7 @@ async function requestOpenAIResponseAPI(arg:RequestDataArgumentExtended):Promise
 url: "https://api.openai.com/v1/responses",
 body: body,
 headers: {
-"Authorization": "Bearer " + db.openAIKey,
+"Authorization": "Bearer " + (arg.key ?? db.openAIKey),
 "Content-Type": "application/json"
 }
 })
@@ -1497,7 +1548,7 @@ async function requestOpenAIResponseAPI(arg:RequestDataArgumentExtended):Promise
 body: body,
 headers: {
 "Content-Type": "application/json",
-"Authorization": "Bearer " + db.openAIKey,
+"Authorization": "Bearer " + (arg.key ?? db.openAIKey),
 },
 chatId: arg.chatId
 });
@@ -1613,7 +1664,7 @@ async function requestNovelAI(arg:RequestDataArgumentExtended):Promise<requestDa
 const da = await globalFetch(aiModel === 'novelai_kayra' ? "https://text.novelai.net/ai/generate" : "https://api.novelai.net/ai/generate", {
 body: body,
 headers: {
-"Authorization": "Bearer " + db.novelai.token
+"Authorization": "Bearer " + (arg.key ?? db.novelai.token)
 },
 abortSignal,
 chatId: arg.chatId
@@ -1873,13 +1924,15 @@ async function requestPlugin(arg:RequestDataArgumentExtended):Promise<requestDat
 if(!d){
 return {
 type: 'fail',
-result: (language.errors.unknownModel)
+result: (language.errors.unknownModel),
+model: 'custom'
 }
 }
 else if(!d.success){
 return {
 type: 'fail',
-result: d.content instanceof ReadableStream ? await (new Response(d.content)).text() : d.content
+result: d.content instanceof ReadableStream ? await (new Response(d.content)).text() : d.content,
+model: 'custom'
 }
 }
 else if(d.content instanceof ReadableStream){
@@ -1896,20 +1949,23 @@ async function requestPlugin(arg:RequestDataArgumentExtended):Promise<requestDat

 return {
 type: 'streaming',
-result: d.content.pipeThrough(piper)
+result: d.content.pipeThrough(piper),
+model: 'custom'
 }
 }
 else{
 return {
 type: 'success',
-result: d.content
+result: d.content,
+model: 'custom'
 }
 }
 } catch (error) {
 console.error(error)
 return {
 type: 'fail',
-result: `Plugin Error from ${db.currentPluginProvider}: ` + JSON.stringify(error)
+result: `Plugin Error from ${db.currentPluginProvider}: ` + JSON.stringify(error),
+model: 'custom'
 }
 }
 }
@@ -2439,15 +2495,10 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise
 const imgHTML = new Image()
 const id = crypto.randomUUID()
 imgHTML.src = `data:${part.inlineData.mimeType};base64,${part.inlineData.data}`
-console.log('decoding', part.inlineData.mimeType, part.inlineData.data, id)
-console.log('writing')
 await writeInlayImage(imgHTML, {
 id: id
 })
-console.log(JSON.stringify(rDatas))
 rDatas[rDatas.length-1] += (`\n{{inlayeddata::${id}}}\n`)
-console.log(JSON.stringify(rDatas))
-console.log('done', id)
 }
 }
 }
@@ -2774,7 +2825,7 @@ async function requestCohere(arg:RequestDataArgumentExtended):Promise<requestDat
 url: arg.customURL ?? 'https://api.cohere.com/v1/chat',
 body: body,
 headers: {
-"Authorization": "Bearer " + db.cohereAPIKey,
+"Authorization": "Bearer " + (arg.key ?? db.cohereAPIKey),
 "Content-Type": "application/json"
 }
 })
@@ -2784,7 +2835,7 @@ async function requestCohere(arg:RequestDataArgumentExtended):Promise<requestDat
 const res = await globalFetch(arg.customURL ?? 'https://api.cohere.com/v1/chat', {
 method: "POST",
 headers: {
-"Authorization": "Bearer " + db.cohereAPIKey,
+"Authorization": "Bearer " + (arg.key ?? db.cohereAPIKey),
 "Content-Type": "application/json"
 },
 body: body
@@ -2817,7 +2868,7 @@ async function requestClaude(arg:RequestDataArgumentExtended):Promise<requestDat
 const db = getDatabase()
 const aiModel = arg.aiModel
 const useStreaming = arg.useStreaming
-let replacerURL = (aiModel === 'reverse_proxy') ? (arg.customURL) : ('https://api.anthropic.com/v1/messages')
+let replacerURL = arg.customURL ?? ('https://api.anthropic.com/v1/messages')
 let apiKey = (aiModel === 'reverse_proxy') ? db.proxyKey : db.claudeAPIKey
 const maxTokens = arg.maxTokens
 if(aiModel === 'reverse_proxy' && db.autofillRequestUrl){
@@ -12,7 +12,7 @@ import { defaultColorScheme, type ColorScheme } from '../gui/colorscheme';
 import type { PromptItem, PromptSettings } from '../process/prompt';
 import type { OobaChatCompletionRequestParams } from '../model/ooba';

-export let appVer = "156.0.0"
+export let appVer = "158.1.0"
 export let webAppSubVer = ''


@@ -498,6 +498,21 @@ export function setDatabase(data:Database){
 data.doNotChangeSeperateModels ??= false
 data.modelTools ??= []
 data.hotkeys ??= structuredClone(defaultHotkeys)
+data.fallbackModels ??= {
+memory: [],
+emotion: [],
+translate: [],
+otherAx: [],
+model: []
+}
+data.fallbackModels = {
+model: data.fallbackModels.model.filter((v) => v !== ''),
+memory: data.fallbackModels.memory.filter((v) => v !== ''),
+emotion: data.fallbackModels.emotion.filter((v) => v !== ''),
+translate: data.fallbackModels.translate.filter((v) => v !== ''),
+otherAx: data.fallbackModels.otherAx.filter((v) => v !== '')
+}
+data.customModels ??= []
 changeLanguage(data.language)
 setDatabaseLite(data)
 }
@@ -945,6 +960,26 @@ export interface Database{
 doNotChangeSeperateModels:boolean
 modelTools: string[]
 hotkeys:Hotkey[]
+fallbackModels: {
+memory: string[],
+emotion: string[],
+translate: string[],
+otherAx: string[]
+model: string[]
+}
+doNotChangeFallbackModels: boolean
+fallbackWhenBlankResponse: boolean
+customModels: {
+id: string
+internalId: string
+url: string
+format: LLMFormat
+tokenizer: LLMTokenizer
+key: string
+name: string
+params: string
+flags: LLMFlags[]
+}[]
 }

 interface SeparateParameters{
@@ -1288,6 +1323,14 @@ export interface botPreset{
 otherAx: string
 }
 modelTools?:string[]
+fallbackModels?: {
+memory: string[],
+emotion: string[],
+translate: string[],
+otherAx: string[]
+model: string[]
+}
+fallbackWhenBlankResponse?: boolean
 }


@@ -1608,6 +1651,8 @@ export function saveCurrentPreset(){
 seperateModelsForAxModels: db.doNotChangeSeperateModels ? false : db.seperateModelsForAxModels ?? false,
 seperateModels: db.doNotChangeSeperateModels ? null : safeStructuredClone(db.seperateModels),
 modelTools: safeStructuredClone(db.modelTools),
+fallbackModels: safeStructuredClone(db.fallbackModels),
+fallbackWhenBlankResponse: db.fallbackWhenBlankResponse ?? false,
 }
 db.botPresets = pres
 setDatabase(db)
@@ -1729,6 +1774,16 @@ export function setPreset(db:Database, newPres: botPreset){
 otherAx: ''
 }
 }
+if(!db.doNotChangeFallbackModels){
+db.fallbackModels = safeStructuredClone(newPres.fallbackModels) ?? {
+memory: [],
+emotion: [],
+translate: [],
+otherAx: [],
+model: []
+}
+db.fallbackWhenBlankResponse = newPres.fallbackWhenBlankResponse ?? false
+}
 db.modelTools = safeStructuredClone(newPres.modelTools ?? [])

 return db
@@ -1741,7 +1796,7 @@ import type { RisuModule } from '../process/modules';
 import type { SerializableHypaV2Data } from '../process/memory/hypav2';
 import { decodeRPack, encodeRPack } from '../rpack/rpack_bg';
 import { DBState, selectedCharID } from '../stores.svelte';
-import { LLMFlags, LLMFormat } from '../model/modellist';
+import { LLMFlags, LLMFormat, LLMTokenizer } from '../model/modellist';
 import type { Parameter } from '../process/request';
 import type { HypaModel } from '../process/memory/hypamemory';
 import type { SerializableHypaV3Data } from '../process/memory/hypav3';
@@ -276,7 +276,7 @@ export async function translateHTML(html: string, reverse:boolean, charArg:simpl
 if(db.translatorType === 'llm'){
 const tr = db.translator || 'en'
 const from = db.translatorInputLanguage
-const r = translateLLM(html, {to: tr, from: from, regenerate})
+const r = await translateLLM(html, {to: tr, from: from, regenerate})
 if(db.playMessageOnTranslateEnd){
 const audio = new Audio(sendSound);
 audio.play();
@@ -1 +1 @@
-{"version":"156.0.0"}
+{"version":"158.1.0"}