From a999d6d78033cec6dd11f35e057841addce8edbf Mon Sep 17 00:00:00 2001
From: kwaroran
Date: Wed, 24 May 2023 08:49:35 +0900
Subject: [PATCH] [feat] added horde support, added spec2 requirements that
 weren't implemented

---
 src/lib/Setting/Pages/BotSettings.svelte |  43 +++------
 src/lib/SideBars/CharConfig.svelte       |  20 ++---
 src/lib/UI/ModelList.svelte              |  35 ++++++++
 src/ts/characterCards.ts                 |  17 ++--
 src/ts/characters.ts                     |  10 ++-
 src/ts/database.ts                       |   5 +-
 src/ts/horde/getModels.ts                |  34 ++++++++
 src/ts/process/index.ts                  |   6 +-
 src/ts/process/request.ts                | 106 ++++++++++++++++++++++-
 9 files changed, 214 insertions(+), 62 deletions(-)
 create mode 100644 src/lib/UI/ModelList.svelte
 create mode 100644 src/ts/horde/getModels.ts

diff --git a/src/lib/Setting/Pages/BotSettings.svelte b/src/lib/Setting/Pages/BotSettings.svelte
index 6e78e188..b1798739 100644
--- a/src/lib/Setting/Pages/BotSettings.svelte
+++ b/src/lib/Setting/Pages/BotSettings.svelte
@@ -6,6 +6,7 @@
     import { customProviderStore, getCurrentPluginMax } from "src/ts/process/plugins";
     import { isTauri } from "src/ts/globalApi";
     import { tokenize } from "src/ts/tokenizer";
+    import ModelList from "src/lib/UI/ModelList.svelte";
     import DropList from "src/lib/SideBars/DropList.svelte";
     import { PlusIcon, TrashIcon } from "lucide-svelte";
     let tokens = {
@@ -36,40 +37,10 @@

{language.chatBot}

{language.model} - - + {language.submodel} - + + {#if $DataBase.aiModel === 'palm2' || $DataBase.subModel === 'palm2'} Palm2 {language.apiKey} @@ -96,6 +67,12 @@ NovelAI Bearer Token +{/if} + +{#if $DataBase.aiModel.startsWith("horde") || $DataBase.subModel.startsWith("horde") } + Horde {language.apiKey} + + {/if} {#if $DataBase.aiModel === 'textgen_webui' || $DataBase.subModel === 'textgen_webui'} TextGen {language.providerURL} diff --git a/src/lib/SideBars/CharConfig.svelte b/src/lib/SideBars/CharConfig.svelte index 9f3bf036..137ffeb2 100644 --- a/src/lib/SideBars/CharConfig.svelte +++ b/src/lib/SideBars/CharConfig.svelte @@ -16,7 +16,7 @@ import RegexData from "./RegexData.svelte"; import { exportChar } from "src/ts/characterCards"; import { getElevenTTSVoices, getWebSpeechTTSVoices } from "src/ts/process/tts"; - import { checkCharOrder } from "src/ts/globalApi"; + import { checkCharOrder } from "src/ts/globalApi"; let subMenu = 0 let subberMenu = 0 @@ -181,10 +181,10 @@ {language.firstMessage} {tokens.firstMsg} {language.tokens} - {language.authorNote} - - {tokens.charaNote} {language.tokens} - + {language.authorNote} + + {tokens.localNote} {language.tokens} + {:else} {language.character} @@ -213,11 +213,6 @@ - {language.chatNotes} - - {tokens.localNote} {language.tokens} - - {/if}
@@ -520,10 +515,7 @@ {language.systemPrompt} - {language.chatNotes} - - {tokens.localNote} {language.tokens} - + {#if currentChar.data.chats[currentChar.data.chatPage].supaMemoryData && currentChar.data.chats[currentChar.data.chatPage].supaMemoryData.length > 4} {language.SuperMemory} diff --git a/src/lib/UI/ModelList.svelte b/src/lib/UI/ModelList.svelte new file mode 100644 index 00000000..0d20bd3a --- /dev/null +++ b/src/lib/UI/ModelList.svelte @@ -0,0 +1,35 @@ + + +{#await getHordeModels()} + +{:then models} + +{/await} \ No newline at end of file diff --git a/src/ts/characterCards.ts b/src/ts/characterCards.ts index 30bf445a..c9e25b8a 100644 --- a/src/ts/characterCards.ts +++ b/src/ts/characterCards.ts @@ -203,7 +203,8 @@ function convertOldTavernAndJSON(charaData:OldTavernChar, imgp:string|undefined characterVersion: 0, personality: charaData.personality ?? '', scenario:charaData.scenario ?? '', - firstMsgIndex: -1 + firstMsgIndex: -1, + replaceGlobalNote: "" } } @@ -381,7 +382,7 @@ async function importSpecv2(card:CharacterCardV2, img?:Uint8Array):Promise // see field `selective`. ignored if selective == false constant?: boolean // if true, always inserted in the prompt (within budget limit) position?: 'before_char' | 'after_char' // whether the entry is placed before or after the character defs - + case_sensitive?:boolean } \ No newline at end of file diff --git a/src/ts/characters.ts b/src/ts/characters.ts index 83774b78..e78c3bc9 100644 --- a/src/ts/characters.ts +++ b/src/ts/characters.ts @@ -275,7 +275,6 @@ export function characterFormatUpdate(index:number|character){ cha.exampleMessage = cha.exampleMessage ?? '' cha.creatorNotes = cha.creatorNotes ?? '' cha.systemPrompt = cha.systemPrompt ?? '' - cha.postHistoryInstructions = cha.postHistoryInstructions ?? '' cha.tags = cha.tags ?? [] cha.creator = cha.creator ?? '' cha.characterVersion = cha.characterVersion ?? 
0
@@ -288,6 +287,12 @@ export function characterFormatUpdate(index:number|character){
             character_version: 0
         }
 
+        if(cha.postHistoryInstructions){
+            cha.chats[cha.chatPage].note += "\n" + cha.postHistoryInstructions
+            cha.chats[cha.chatPage].note = cha.chats[cha.chatPage].note.trim()
+            cha.postHistoryInstructions = null
+        }
+
     }
     if(checkNullish(cha.customscript)){
         cha.customscript = []
@@ -332,7 +337,8 @@ export function createBlankChar():character{
         characterVersion: 0,
         personality:"",
         scenario:"",
-        firstMsgIndex: -1
+        firstMsgIndex: -1,
+        replaceGlobalNote: ""
     }
 }
 
diff --git a/src/ts/database.ts b/src/ts/database.ts
index ba23de50..eac656cf 100644
--- a/src/ts/database.ts
+++ b/src/ts/database.ts
@@ -259,7 +259,9 @@ export interface loreBook{
     mode: 'multiple'|'constant'|'normal',
     alwaysActive: boolean
     selective:boolean
-    extentions?:{}
+    extentions?:{
+        risu_case_sensitive:boolean
+    }
 }
 
 export interface character{
@@ -303,6 +305,7 @@ export interface character{
     supaMemory?:boolean
     additionalAssets?:[string, string][]
     ttsReadOnlyQuoted?:boolean
+    replaceGlobalNote:string
 }
 
diff --git a/src/ts/horde/getModels.ts b/src/ts/horde/getModels.ts
new file mode 100644
index 00000000..b2b4e6d8
--- /dev/null
+++ b/src/ts/horde/getModels.ts
@@ -0,0 +1,34 @@
+import { sleep } from "../util"
+
+let modelList:string[]|'loading' = null
+
+//until horde is ready
+modelList = []
+
+export async function getHordeModels():Promise<string[]> {
+
+    if(modelList === null){
+        try {
+            modelList = 'loading'
+            const models = await fetch("https://stablehorde.net/api/v2/status/models?type=text")
+            modelList = ((await models.json()).map((a) => {
+                return a.name
+            }) as string[])
+            return modelList
+        } catch (error) {
+            modelList = null
+            return []
+        }
+    }
+    else if(modelList === 'loading'){
+        while(true){
+            if(modelList !== 'loading'){
+                return getHordeModels()
+            }
+            await sleep(10)
+        }
+    }
+    else{
+        return modelList
+    }
+}
\ No newline at end of file
diff --git a/src/ts/process/index.ts b/src/ts/process/index.ts
index 93d537e4..26c7b18b 100644
--- a/src/ts/process/index.ts
+++ b/src/ts/process/index.ts
@@ -105,7 +105,7 @@ export async function sendChat(chatProcessIndex = -1):Promise {
     }
 
     if(!currentChar.utilityBot){
-        const mainp = currentChar.systemPrompt.length > 3 ? currentChar.systemPrompt : db.mainPrompt
+        const mainp = currentChar.systemPrompt || db.mainPrompt
 
         unformated.main.push({
             role: 'system',
@@ -121,14 +121,14 @@
 
         unformated.globalNote.push({
             role: 'system',
-            content: replacePlaceholders(db.globalNote, currentChar.name)
+            content: replacePlaceholders(currentChar.replaceGlobalNote || db.globalNote, currentChar.name)
         })
     }
 
     if(currentChat.note !== ''){
         unformated.authorNote.push({
             role: 'system',
-            content: replacePlaceholders(currentChar.postHistoryInstructions, currentChat.note)
+            content: replacePlaceholders(currentChat.note, currentChar.name)
         })
     }
 
diff --git a/src/ts/process/request.ts b/src/ts/process/request.ts
index 438561a0..83dab435 100644
--- a/src/ts/process/request.ts
+++ b/src/ts/process/request.ts
@@ -5,6 +5,8 @@ import { pluginProcess } from "./plugins";
 import { language } from "../../lang";
 import { stringlizeChat, unstringlizeChat } from "./stringlize";
 import { globalFetch, isTauri } from "../globalApi";
+import { alertError } from "../alert";
+import { sleep } from "../util";
 
 interface requestDataArgument{
     formated: OpenAIChat[]
@@ -34,7 +36,7 @@ export async function requestChatData(arg:requestDataArgument, model:'model'|'su
             return da
         }
         trys += 1
-        if(trys > db.requestRetrys){
+        if(trys > db.requestRetrys || model.startsWith('horde')){
             return da
         }
     }
@@ -411,7 +413,107 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
                 }
             }
         }
-        default:{
+        default:{
+            if(aiModel.startsWith("horde:::")){
+                const realModel = aiModel.split(":::")[1].trim()
+
+                const workers = ((await (await fetch("https://stablehorde.net/api/v2/workers")).json()) as {id:string,models:string[]}[]).filter((a) => {
+                    if(a && a.models && a.id){
+                        console.log(a)
+                        return a.models.includes(realModel)
+                    }
+                    return false
+                }).map((a) => {
+                    return a.id
+                })
+
+                const argument = {
+                    "prompt": "string",
+                    "params": {
+                        "n": 1,
+                        "frmtadsnsp": false,
+                        "frmtrmblln": false,
+                        "frmtrmspch": false,
+                        "frmttriminc": false,
+                        "max_context_length": 200,
+                        "max_length": 20,
+                        "rep_pen": 3,
+                        "rep_pen_range": 0,
+                        "rep_pen_slope": 10,
+                        "singleline": false,
+                        "temperature": db.temperature / 25,
+                        "tfs": 1,
+                        "top_a": 1,
+                        "top_k": 100,
+                        "top_p": 1,
+                        "typical": 1,
+                        "sampler_order": [
+                            0
+                        ]
+                    },
+                    "trusted_workers": false,
+                    "slow_workers": true,
+                    "worker_blacklist": false,
+                    "dry_run": false
+                }
+
+                const da = await fetch("https://stablehorde.net/api/v2/generate/text/async", {
+                    body: JSON.stringify(argument),
+                    method: "POST",
+                    headers: {
+                        "content-type": "application/json",
+                        "apikey": db.hordeConfig.apiKey
+                    }
+                })
+
+                if(da.status !== 202){
+                    return {
+                        type: "fail",
+                        result: await da.text()
+                    }
+                }
+
+                const json:{
+                    id:string,
+                    kudos:number,
+                    message:string
+                } = await da.json()
+
+                let warnMessage = ""
+                if(json.message && json.message.startsWith("Warning:")){
+                    warnMessage = "with " + json.message
+                }
+
+                while(true){
+                    await sleep(1000)
+                    const data = await (await fetch("https://stablehorde.net/api/v2/generate/text/status/" + json.id)).json()
+                    if(!data.is_possible){
+                        fetch("https://stablehorde.net/api/v2/generate/text/status/" + json.id, {
+                            method: "DELETE"
+                        })
+                        return {
+                            type: 'fail',
+                            result: "Response not possible" + warnMessage
+                        }
+                    }
+                    if(data.done){
+                        const generations:{text:string}[] = data.generations
+                        if(generations && generations.length > 0){
+                            return {
+                                type: "success",
+                                result: generations[0].text
+                            }
+                        }
+                        return {
+                            type: 'fail',
+                            result: "No Generations when done"
+                        }
+                    }
+                }
+
+
+            }
             return {
                 type: 'fail',
                 result: (language.errors.unknownModel