diff --git a/package.json b/package.json index c1f94594..c280a80d 100644 --- a/package.json +++ b/package.json @@ -18,6 +18,7 @@ "dependencies": { "@adobe/css-tools": "4.3.2", "@aws-crypto/sha256-js": "^5.2.0", + "@breezystack/lamejs": "^1.2.7", "@capacitor/android": "^5.6.0", "@capacitor/core": "^5.6.0", "@capacitor/filesystem": "^5.2.0",
diff --git a/plugins.md b/plugins.md
new file mode 100644
index 00000000..9e194888
--- /dev/null
+++ b/plugins.md
@@ -0,0 +1,162 @@
+
+# Plugins
+
+RisuAI uses a plugin system to allow easy extension of its functionality.
+
+## Creating a Plugin
+
+A plugin is a JavaScript file with a header. For example:
+
+```js
+//@name exampleplugin
+//@display-name Example Plugin
+
+// Plugin code here
+```
+
+## Header Fields
+
+- `@name <name>` - The name of the plugin, used to identify it. Required.
+- `@display-name <display name>` - The display name of the plugin, shown in the UI.
+- `@arg <name> <type>` - An argument definition, declaring an argument that the plugin takes. The type can be `int` or `string`.
+
+## API Reference
+
+### `risuFetch(url: string, arg: GlobalFetchArgs = {}): Promise`
+
+> Note: `nativeFetch` is recommended for fetching URLs. It has the same functionality as `risuFetch`, but with an API closer to `fetch` and more predictable behavior.
+
+Fetches a URL with a native API that has no CORS restrictions.
+
+#### Arguments
+
+- `url: string` - The URL to fetch.
+- `arg: GlobalFetchArgs` - The fetch arguments.
+  - `body: string|Object` - The body to send with the request. If it is an object, it is converted to JSON.
+  - `headers: Record<string, string>` - The headers to send with the request.
+  - `method: string` - The method to use for the request. `GET` and `POST` are supported. Default: `POST`.
+  - `abortSignal: AbortSignal` - The signal used to abort the request.
+  - `rawResponse: boolean` - If true, the response is returned as a Uint8Array. Default: `false`.
+
+#### Returns
+
+- `Promise` - The fetch result.
+  - `ok: boolean` - Whether the request was successful.
+  - `data: any` - The response data, parsed as JSON if possible. If `rawResponse` is true, it is a Uint8Array.
+  - `headers: Record<string, string>` - The response headers.
+
+### `nativeFetch(url: string, arg: NativeFetchArg = {}): Promise`
+
+Fetches a URL with the native API, which has no CORS restrictions. This API is designed as a subset of the [fetch API](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API), except that it has no CORS restrictions and the default method is `POST`.
+
+#### Arguments
+
+- `url: string` - The URL to fetch.
+- `arg: NativeFetchArg` - The fetch arguments.
+  - `body: string|Uint8Array|ArrayBuffer` - The body to send with the request.
+  - `headers: Record<string, string>` - The headers to send with the request.
+  - `method: string` - The method to use for the request. `GET`, `POST`, `PUT`, and `DELETE` are supported. Default: `POST`.
+  - `signal: AbortSignal` - The signal used to abort the request.
+
+#### Returns
+
+- `Promise` - The fetch result.
+  - `body: ReadableStream<Uint8Array>` - The response body.
+  - `headers: Headers` - The response headers.
+  - `status: number` - The response status.
+  - `json: () => Promise<any>` - Returns a promise that resolves to the JSON representation of the response body.
+  - `text: () => Promise<string>` - Returns a promise that resolves to the text representation of the response body.
+  - `arrayBuffer: () => Promise<ArrayBuffer>` - Returns a promise that resolves to the ArrayBuffer representation of the response body.
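+
+For example, a plugin could call a JSON endpoint and read the response like this (a minimal sketch; the URL and payload are placeholders, not a real RisuAI endpoint):
+
+```js
+// Hypothetical endpoint and payload, shown only to illustrate the nativeFetch API.
+const res = await nativeFetch('https://api.example.com/v1/echo', {
+    method: 'POST',
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify({ message: 'hello' })
+})
+
+if (res.status === 200) {
+    // json() consumes the streamed response body and parses it as JSON.
+    const data = await res.json()
+    console.log(data)
+}
+```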
+
+### `getArg(name: string): string|number`
+
+Gets an argument value by name.
+
+#### Arguments
+
+- `name: string` - The argument name. It must be in the format `<plugin name>::<argument name>`, like `exampleplugin::arg1`.
+
+#### Returns
+
+- `string|number` - The argument value.
+
+### `getChar(): character`
+
+Gets the current character.
+
+### `setChar(char: character): void`
+
+Sets the current character.
+
+### `addProvider(type: string, func: (arg:PluginV2ProviderArgument) => Promise<{success:boolean,content:string}>): void`
+
+Adds a provider to the plugin.
+
+#### Arguments
+
+- `type: string` - The provider name.
+- `func: (arg:PluginV2ProviderArgument) => Promise<{success:boolean,content:string}>` - The provider function.
+  - `arg: PluginV2ProviderArgument` - The provider argument.
+    - `prompt_chat: Chat[]` - The chat prompt.
+    - `frequency_penalty?: number` - The frequency penalty.
+    - `min_p?: number` - The minimum p value.
+    - `presence_penalty?: number` - The presence penalty.
+    - `repetition_penalty?: number` - The repetition penalty.
+    - `top_k?: number` - The top k value.
+    - `top_p?: number` - The top p value.
+    - `temperature?: number` - The temperature value.
+    - `mode: string` - The mode, one of `model`, `submodel`, `memory`, `emotion`, `otherAx`, or `translate`.
+  - `Promise<{success:boolean,content:string}>` - The provider result.
+    - `success: boolean` - Whether the provider was successful.
+    - `content: string` - The provider content.
+
+### `addRisuScriptHandler(type: string, func: (content:string) => string|null|undefined|Promise): void`
+
+Adds a Risu script handler to the plugin.
+
+#### Arguments
+
+- `type: string` - The handler type, one of `display`, `output`, `input`, or `process`.
+- `func: (content:string) => string|null|undefined|Promise` - The handler function.
+  - `content: string` - The content to handle.
+  - `string|null|undefined|Promise` - The handler result. If it is a string or a promise of a string, the data is replaced with the result.
+
+### `removeRisuScriptHandler(type: string, func: (content:string) => string|null|undefined|Promise): void`
+
+Removes a Risu script handler from the plugin.
+
+### `addRisuReplacer(type: string, func: ReplacerFunction): void`
+
+Adds a Risu replacer to the plugin.
+
+#### Arguments
+
+- `type: string` - The replacer type, one of `beforeRequest` or `afterRequest`.
+- `func: ReplacerFunction` - The replacer function. Its signature varies depending on the type.
+  - If the type is `afterRequest`, the function should be `(content: string, mode:string) => string`.
+  - If the type is `beforeRequest`, the function should be `(content: Chat[], mode:string) => Chat[]`.
+  - `mode` is one of `model`, `submodel`, `memory`, `emotion`, `otherAx`, or `translate`.
+
+### `removeRisuReplacer(type: string, func: ReplacerFunction): void`
+
+Removes a Risu replacer from the plugin.
+
+### `onUnload(func: () => void): void`
+
+Adds an unload handler to the plugin.
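+
+Putting these pieces together, a minimal V2 plugin might look like this (a sketch only; the plugin name, provider name, and `prefix` argument are made up for illustration):
+
+```js
+//@name echoplugin
+//@display-name Echo Plugin
+//@arg prefix string
+
+// Register a custom provider that simply echoes the last chat message.
+addProvider('echo', async (arg) => {
+    const last = arg.prompt_chat[arg.prompt_chat.length - 1]
+    const prefix = getArg('echoplugin::prefix')
+    return { success: true, content: `${prefix ?? ''}${last?.content ?? ''}` }
+})
+
+// Clean up when the plugin is unloaded.
+onUnload(() => {
+    console.log('echoplugin unloaded')
+})
+```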
+
+## Migration from Plugin V1
+
+The plugin system has been updated to V2. The following changes have been made:
+- Plugins now run in the same context as the main script rather than in a sandbox, so they can access the main script and the DOM.
+- Added the `nativeFetch`, `addRisuScriptHandler`, `removeRisuScriptHandler`, `addRisuReplacer`, `removeRisuReplacer`, and `onUnload` functions.
+- The `method`, `abortSignal`, and `rawResponse` arguments have been added to `risuFetch`.
+- The `min_p`, `top_k`, `top_p`, and `mode` arguments have been added to `addProvider`.
+- The `bias` argument has been removed from `addProvider`. For compatibility, it is still called with an empty array.
+- Plugins no longer terminate themselves automatically. You have to unload the plugin manually using the `onUnload` function.
+- The `addCharaJs` function has been removed. Use `addRisuScriptHandler` instead.
+- The `risuLog` function has been removed. Use `console.log` instead.
+- Many security restrictions have been removed.
+- The `@risu-name`, `@risu-display-name`, and `@risu-arg` headers have been removed. Use `@name`, `@display-name`, and `@arg` instead. If they are not present, the plugin is run as a V1 plugin.
\ No newline at end of file
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 8bb3a889..737a7b4c 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -14,6 +14,9 @@ importers: '@aws-crypto/sha256-js': specifier: ^5.2.0 version: 5.2.0 + '@breezystack/lamejs': + specifier: ^1.2.7 + version: 1.2.7 '@capacitor/android': specifier: ^5.6.0 version: 5.6.0(@capacitor/core@5.6.0) @@ -361,6 +364,9 @@ packages: resolution: {integrity: sha512-acGdbYSfp2WheJoJm/EBBBLh/ID8KDc64ISZ9DYtBmC8/Q204PZJLHyzeB5qMzJ5trcOkybd78M4x2KWsUq++A==} engines: {node: '>=6.9.0'} + '@breezystack/lamejs@1.2.7': + resolution: {integrity: sha512-6wc7ck65ctA75Hq7FYHTtTvGnYs6msgdxiSUICQ+A01nVOWg6rqouZB8IdyteRlfpYYiFovkf67dIeOgWIUzTA==} + '@capacitor/android@5.6.0': resolution: {integrity: sha512-6O7xV6K6c8WvQzKxOe7fnhRyoVpS3TNDXy1FyfhvOvclBvu+1JddSdFvW4e4dSL60s2c00sCzNRgYhm+cn0/dQ==} peerDependencies: @@ -4061,6 +4067,8 @@ snapshots: chalk: 2.4.2 js-tokens: 4.0.0 + '@breezystack/lamejs@1.2.7': {} + '@capacitor/android@5.6.0(@capacitor/core@5.6.0)': dependencies: '@capacitor/core': 5.6.0 diff --git a/src-tauri/src/main.rs b/src-tauri/src/main.rs index 291a28c6..323a3cf0 100644 --- a/src-tauri/src/main.rs +++ b/src-tauri/src/main.rs @@ -333,6 +333,7 @@ async fn streamed_fetch( headers: String, body: String, app: AppHandle, + method: String, ) -> String { //parse headers let headers_json: Value = match serde_json::from_str(&headers) { @@ -358,11 +359,50 @@ async fn streamed_fetch( } let client = reqwest::Client::new(); - let response = client + let builder: reqwest::RequestBuilder; + if method == "POST" { + + let body_decoded = general_purpose::STANDARD.decode(body.as_bytes()).unwrap(); + + builder = client .post(&url) .headers(headers) .timeout(Duration::from_secs(240)) - .body(body) + .body(body_decoded) + } + else if method == "GET" { + builder = client + .get(&url) + .headers(headers) + .timeout(Duration::from_secs(240)); + } + else if method == "PUT" { + + let body_decoded = general_purpose::STANDARD.decode(body.as_bytes()).unwrap(); + + builder = client + .put(&url) + .headers(headers) + .timeout(Duration::from_secs(240)) + .body(body_decoded) + } + else if method == "DELETE" { + + let body_decoded = general_purpose::STANDARD.decode(body.as_bytes()).unwrap(); + + builder = client + .delete(&url) + .headers(headers) + .timeout(Duration::from_secs(240)) + .body(body_decoded) + } + else { + return format!(r#"{{"success":false, "body":"Invalid method"}}"#); + } + + + + let response = builder .send() .await; diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json index 3c36d6c7..0532b386 100644 --- a/src-tauri/tauri.conf.json +++ b/src-tauri/tauri.conf.json @@ -29,7 +29,7 @@ }, "productName": "RisuAI", "mainBinaryName": "RisuAI", - "version": "143.5.0", + "version": "144.1.0", "identifier": "co.aiclient.risu", "plugins": { "updater": { diff
--git a/src/App.svelte b/src/App.svelte index 5f1d529c..b2a760df 100644 --- a/src/App.svelte +++ b/src/App.svelte @@ -36,7 +36,7 @@ await importCharacterProcess({ name: file.name, data: file - }) + }) checkCharOrder() } }}> diff --git a/src/lang/cn.ts b/src/lang/cn.ts index f24a5f20..008ddce3 100644 --- a/src/lang/cn.ts +++ b/src/lang/cn.ts @@ -298,7 +298,7 @@ export const languageChinese = { "singleView": "单角色模式", "SpacedView": "多角色模式", "emphasizedView": "双角色模式", - "pluginWarn": "插件可在隔离环境中运行,但安装恶意插件可能导致问题。", + "pluginWarn": "但安装恶意插件可能导致问题。", "createGroupImg": "产生群组头像", "waifuWidth": "角色对话框宽度", "savebackup": "备份至 Google", diff --git a/src/lang/de.ts b/src/lang/de.ts index 8cf4e451..8d0d8bc6 100644 --- a/src/lang/de.ts +++ b/src/lang/de.ts @@ -210,7 +210,7 @@ export const languageGerman = { singleView: "Einzelansicht", SpacedView: "Mehrere Charakteransicht", emphasizedView: "Doppelte Charakteransicht", - pluginWarn: "Plugins werden in einer isolierten Umgebung ausgeführten, aber das Installieren von Plugins unbekannter Herkunft könnte Probleme verursachen oder sogar schädlichen Code enthalten", + pluginWarn: "Installieren von Plugins unbekannter Herkunft könnte Probleme verursachen oder sogar schädlichen Code enthalten", createGroupImg: "Gruppenicon generieren", waifuWidth: "Breite des Waifu Chat-Bereichs", savebackup: "Erstellen und laden Sie ein Backup auf Google hoch", diff --git a/src/lang/en.ts b/src/lang/en.ts index ae61f590..e51b5b18 100644 --- a/src/lang/en.ts +++ b/src/lang/en.ts @@ -341,7 +341,7 @@ export const languageEnglish = { singleView: "Single View", SpacedView: "Multiple Character View", emphasizedView: "Double Character View", - pluginWarn: "Plugins run in an isolated environment, but installing malicious plugins can cause problems.", + pluginWarn: "Installing malicious plugins can cause problems.", createGroupImg: "Generate group icon", waifuWidth: "Waifu Chat Width", savebackup: "Save Backup to google", @@ -823,4 +823,14 @@ export const languageEnglish = { presetChain: "Preset Chain", legacyMediaFindings: "Legacy Media Findings", staticsDisclaimer: "The statistics are based on the data from after July 2024. the data may not be accurate.", + subtitles: "Subtitles", + subtitlesWarning1: "You must use model with audio/video input to use this feature.", + subtitlesWarning2: "You must use model with streaming feature to use this feature.", + reset: "Reset", + assetMaxDifference: "Asset Max Difference", + sourceLanguage: "Source Language", + destinationLanguage: "Destination Language", + noWebGPU: "Your Browser or OS doesn't support WebGPU. 
this will slow down the performance significantly.", + menuSideBar: "Menu Side Bar", + home: "Home", } \ No newline at end of file diff --git a/src/lang/es.ts b/src/lang/es.ts index 5d351e62..9d36fbcf 100644 --- a/src/lang/es.ts +++ b/src/lang/es.ts @@ -260,7 +260,7 @@ export const languageSpanish = { singleView: "Vista Única", SpacedView: "Vista de Múltiples Personajes", emphasizedView: "Vista de Personajes Doble", - pluginWarn: "Los plugins se ejecutan en un entorno aislado, pero instalar plugins maliciosos puede causar problemas.", + pluginWarn: "Instalar plugins maliciosos puede causar problemas.", createGroupImg: "Generar icono de grupo", waifuWidth: "Ancho del Chat Waifu", savebackup: "Guardar Respaldo en Google", diff --git a/src/lang/ko.ts b/src/lang/ko.ts index a9b055d6..cb8ae195 100644 --- a/src/lang/ko.ts +++ b/src/lang/ko.ts @@ -293,7 +293,7 @@ export const languageKorean = { "singleView": "싱글", "SpacedView": "멀티플", "emphasizedView": "더블", - "pluginWarn": "플러그인은 기본적으로 분리된 환경에서 실행되지만, 악성 플러그인 설치 시 문제가 생길 수 있습니다.", + "pluginWarn": "악성 플러그인 설치 시 문제가 생길 수 있습니다.", "createGroupImg": "그룹 아이콘 자동생성", "waifuWidth": "Waifu 채팅창 넓이", "savebackup": "구글 백업 저장", diff --git a/src/lang/vi.ts b/src/lang/vi.ts index 0b1bb5cf..6d7133a9 100644 --- a/src/lang/vi.ts +++ b/src/lang/vi.ts @@ -181,7 +181,7 @@ export const LanguageVietnamese = { "singleView": "Chế độ xem đơn", "SpacedView": "Xem nhiều ký tự", "emphasizedView": "Chế độ xem nhân vật đôi", - "pluginWarn": "Các plugin chạy trong môi trường biệt lập nhưng việc cài đặt các plugin độc hại có thể gây ra sự cố.", + "pluginWarn": "Các plugin có thể gây ra sự cố khi cài đặt các plugin độc hại.", "createGroupImg": "Tạo biểu tượng nhóm", "waifuWidth": "Chiều rộng trò chuyện Waifu", "savebackup": "Lưu Sao lưu vào google", diff --git a/src/lang/zh-Hant.ts b/src/lang/zh-Hant.ts index dd4811bb..5d074e7d 100644 --- a/src/lang/zh-Hant.ts +++ b/src/lang/zh-Hant.ts @@ -300,7 +300,7 @@ export const languageChineseTraditional = { "singleView": "單角色模式", "SpacedView": "多角色模式", "emphasizedView": "雙角色模式", - "pluginWarn": "外掛程式可在隔離環境中運行,但安裝惡意外掛可能導致問題。", + "pluginWarn": "但安裝惡意外掛可能導致問題。", "createGroupImg": "產生群組頭像", "waifuWidth": "角色對話框寬度", "savebackup": "備份至 Google", diff --git a/src/lib/Mobile/MobileCharacters.svelte b/src/lib/Mobile/MobileCharacters.svelte index e1c1981a..db35cc3c 100644 --- a/src/lib/Mobile/MobileCharacters.svelte +++ b/src/lib/Mobile/MobileCharacters.svelte @@ -7,6 +7,8 @@ import { MessageSquareIcon, PlusIcon } from "lucide-svelte"; const agoFormatter = new Intl.RelativeTimeFormat(navigator.languages, { style: 'short' }); + + let {gridMode = false, endGrid = () => {}} = $props(); function makeAgoText(time:number){ if(time === 0){ @@ -60,6 +62,7 @@ {#if char.name.toLocaleLowerCase().includes($MobileSearch.toLocaleLowerCase())} \ No newline at end of file +{#if gridMode} + +{/if} \ No newline at end of file diff --git a/src/lib/Others/GridCatalog.svelte b/src/lib/Others/GridCatalog.svelte index 01d1be54..a5a18311 100644 --- a/src/lib/Others/GridCatalog.svelte +++ b/src/lib/Others/GridCatalog.svelte @@ -10,13 +10,14 @@ import { language } from "src/lang"; import { parseMultilangString } from "src/ts/util"; import { checkCharOrder } from "src/ts/globalApi.svelte"; + import MobileCharacters from "../Mobile/MobileCharacters.svelte"; interface Props { endGrid?: any; } let { endGrid = () => {} }: Props = $props(); let search = $state('') - let selected = $state(0) + let selected = $state(3) function changeChar(index = -1){ characterFormatUpdate(index) @@ 
-57,22 +58,19 @@
-

- - Catalog -

- + - -
@@ -143,6 +141,8 @@
{/each} + {:else if selected === 3} + {/if}
\ No newline at end of file diff --git a/src/lib/Playground/PlaygroundImageGen.svelte b/src/lib/Playground/PlaygroundImageGen.svelte index 38f1e4bd..059256a8 100644 --- a/src/lib/Playground/PlaygroundImageGen.svelte +++ b/src/lib/Playground/PlaygroundImageGen.svelte @@ -1,34 +1,44 @@ -
-
- - Card - -
-
- -
- -

- Card Title -

- - -

- Lorem ipsum dolor sit amet consectetur adipisicing elit. Voluptates rerum quisquam, temporibus quasi distinctio magnam. -

- - -
- - - - - 5 min read - -
-
-
\ No newline at end of file + + +

{language.imageGeneration}

+ +Prompt + + +Neg. Prompt + + +{#if img} + Generated + Generated +{/if} + + \ No newline at end of file diff --git a/src/lib/Playground/PlaygroundImageTrans.svelte b/src/lib/Playground/PlaygroundImageTrans.svelte new file mode 100644 index 00000000..99a8152e --- /dev/null +++ b/src/lib/Playground/PlaygroundImageTrans.svelte @@ -0,0 +1,15 @@ + + + +{language.destinationLanguage} + + +{language.prompt} + diff --git a/src/lib/Playground/PlaygroundMenu.svelte b/src/lib/Playground/PlaygroundMenu.svelte index ac7756a3..3d007539 100644 --- a/src/lib/Playground/PlaygroundMenu.svelte +++ b/src/lib/Playground/PlaygroundMenu.svelte @@ -14,6 +14,7 @@ import PlaygroundParser from "./PlaygroundParser.svelte"; import ToolConvertion from "./ToolConvertion.svelte"; import { joinMultiuserRoom } from "src/ts/sync/multiuser"; + import PlaygroundSubtitle from "./PlaygroundSubtitle.svelte"; let easterEggTouch = $state(0) @@ -83,6 +84,11 @@ }}>

Parser

+ +{:else if vttB64 && fileB64} +
+
{outputText}
+
+{:else} +
{outputText}
+{/if} + +{#if vttB64 && fileB64} +
+ {#key vttB64} + + {/key} +
+ + {language.download} + + + + +{/if} \ No newline at end of file diff --git a/src/lib/Setting/Pages/AdvancedSettings.svelte b/src/lib/Setting/Pages/AdvancedSettings.svelte index b93d59ae..1cd0b6dd 100644 --- a/src/lib/Setting/Pages/AdvancedSettings.svelte +++ b/src/lib/Setting/Pages/AdvancedSettings.svelte @@ -51,6 +51,9 @@ {language.genTimes} +{language.assetMaxDifference} + + GPT Vision Quality Low diff --git a/src/lib/Setting/Pages/BotSettings.svelte b/src/lib/Setting/Pages/BotSettings.svelte index 7f04094a..6da2252b 100644 --- a/src/lib/Setting/Pages/BotSettings.svelte +++ b/src/lib/Setting/Pages/BotSettings.svelte @@ -271,7 +271,7 @@ {#if submenu === 1 || submenu === -1} {language.maxContextSize} - + {language.maxResponseSize} diff --git a/src/lib/Setting/Pages/DisplaySettings.svelte b/src/lib/Setting/Pages/DisplaySettings.svelte index 1d7ee9dd..2bdd38f1 100644 --- a/src/lib/Setting/Pages/DisplaySettings.svelte +++ b/src/lib/Setting/Pages/DisplaySettings.svelte @@ -363,6 +363,10 @@ +
+ +
+
{ let hasPermission = {state: 'denied'} diff --git a/src/lib/Setting/Pages/LanguageSettings.svelte b/src/lib/Setting/Pages/LanguageSettings.svelte index 7f313af1..171432bc 100644 --- a/src/lib/Setting/Pages/LanguageSettings.svelte +++ b/src/lib/Setting/Pages/LanguageSettings.svelte @@ -121,7 +121,7 @@ {/if} {#if DBState.db.translatorType === 'google'} - Translator Input Language + {language.sourceLanguage} Auto English diff --git a/src/lib/SideBars/Scripts/RegexData.svelte b/src/lib/SideBars/Scripts/RegexData.svelte index 5bff0328..9485360e 100644 --- a/src/lib/SideBars/Scripts/RegexData.svelte +++ b/src/lib/SideBars/Scripts/RegexData.svelte @@ -121,6 +121,7 @@ {language.editProcess} {language.editDisplay} {language.editTranslationDisplay} + {language.disabled} IN: diff --git a/src/lib/SideBars/Sidebar.svelte b/src/lib/SideBars/Sidebar.svelte index ac63849f..9d7dcbe6 100644 --- a/src/lib/SideBars/Sidebar.svelte +++ b/src/lib/SideBars/Sidebar.svelte @@ -23,6 +23,7 @@ FolderOpenIcon, HomeIcon, WrenchIcon, + User2Icon, } from "lucide-svelte"; import { addCharacter, @@ -292,7 +293,79 @@ return false } +{#if DBState.db.menuSideBar} +
+ + + + +
+{:else}
+{/if}
{ + if(e.ctrlKey && e.altKey && e.key === 'o'){ + opened = !opened + } + } + + onMount(() => { + document.addEventListener('keydown', EL) + }) + + onDestroy(() => { + document.removeEventListener('keydown', EL) + }) +
{ diff --git a/src/ts/characters.ts b/src/ts/characters.ts index 8c9f2b66..fd3c6c52 100644 --- a/src/ts/characters.ts +++ b/src/ts/characters.ts @@ -5,12 +5,13 @@ import { language } from "../lang"; import { checkNullish, findCharacterbyId, getUserName, selectMultipleFile, selectSingleFile, sleep } from "./util"; import { v4 as uuidv4 } from 'uuid'; import { MobileGUIStack, OpenRealmStore, selectedCharID } from "./stores.svelte"; -import { checkCharOrder, downloadFile, getFileSrc } from "./globalApi.svelte"; +import { AppendableBuffer, checkCharOrder, downloadFile, getFileSrc } from "./globalApi.svelte"; import { updateInlayScreen } from "./process/inlayScreen"; -import { parseMarkdownSafe } from "./parser.svelte"; +import { checkImageType, parseMarkdownSafe } from "./parser.svelte"; import { translateHTML } from "./translator/translator"; import { doingChat } from "./process/index.svelte"; import { importCharacter } from "./characterCards"; +import { PngChunk } from "./pngChunk"; export function createNewCharacter() { let db = getDatabase() @@ -81,6 +82,41 @@ export async function selectCharImg(charIndex:number) { } const img = selected.data let db = getDatabase() + + const type = checkImageType(img) + console.log(type) + + try { + if(type === 'PNG' && db.characters[charIndex].type === 'character'){ + const gen = PngChunk.readGenerator(img) + const allowedChunk = [ + 'parameters', 'Comment', 'Title', 'Description', 'Author', 'Software', 'Source', 'Disclaimer', 'Warning', 'Copyright', + ] + for await (const chunk of gen){ + if(chunk instanceof AppendableBuffer){ + continue + } + if(!chunk){ + continue + } + if(chunk.value.length > 20_000){ + continue + } + if(allowedChunk.includes(chunk.key)){ + console.log(chunk.key, chunk.value) + db.characters[charIndex].extentions ??= {} + db.characters[charIndex].extentions.pngExif ??= {} + db.characters[charIndex].extentions.pngExif[chunk.key] = chunk.value + } + } + console.log(db.characters[charIndex].extentions) + } + } catch (error) { + console.error(error) + } + + + const imgp = await saveImage(img) dumpCharImage(charIndex) db.characters[charIndex].image = imgp diff --git a/src/ts/globalApi.svelte.ts b/src/ts/globalApi.svelte.ts index 1e971a5e..225db6c9 100644 --- a/src/ts/globalApi.svelte.ts +++ b/src/ts/globalApi.svelte.ts @@ -1809,24 +1809,86 @@ const pipeFetchLog = (fetchLogIndex: number, readableStream: ReadableStream; headers: Headers; status: number }> { +}):Promise<{ + body: ReadableStream; + headers: Headers; + status: number; + json: () => Promise; + text: () => Promise; + arrayBuffer: () => Promise; +}> { + + const jsonizer = (body:ReadableStream) => { + return async () => { + const text = await textifyReadableStream(body) + return JSON.parse(text) + } + } + const textizer = (body:ReadableStream) => { + return async () => { + const text = await textifyReadableStream(body) + return text + } + } + const arrayBufferizer = (body:ReadableStream) => { + return async () => { + const chunks:Uint8Array[] = [] + const reader = body.getReader() + while(true){ + const {done, value} = await reader.read() + if(done){ + break + } + chunks.push(value) + } + const totalLength = chunks.reduce((acc, chunk) => acc + chunk.length, 0) + const arrayBuffer = new Uint8Array(totalLength) + let offset = 0 + for(const chunk of chunks){ + arrayBuffer.set(chunk, offset) + offset += chunk.length + } + return arrayBuffer.buffer + } + } + + arg.method = arg.method ?? 'POST' + let headers = arg.headers ?? 
{} + let realBody:Uint8Array + + if(arg.method === 'GET' || arg.method === 'DELETE'){ + realBody = new Uint8Array(0) + } + if(typeof arg.body === 'string'){ + realBody = new TextEncoder().encode(arg.body) + } + else if(arg.body instanceof Uint8Array){ + realBody = arg.body + } + else if(arg.body instanceof ArrayBuffer){ + realBody = new Uint8Array(arg.body) + } + else{ + throw new Error('Invalid body type') + } + const db = getDatabase() let throughProxy = (!isTauri) && (!isNodeServer) && (!db.usePlainFetch) let fetchLogIndex = addFetchLog({ - body: arg.body, + body: new TextDecoder().decode(realBody), headers: arg.headers, response: 'Streamed Fetch', success: true, url: url, resType: 'stream', - chatId: arg.chatId + chatId: arg.chatId, }) if(isTauri){ fetchIndex++ @@ -1849,7 +1911,8 @@ export async function fetchNative(url:string, arg:{ id: fetchId, url: url, headers: JSON.stringify(headers), - body: arg.body, + body: Buffer.from(realBody).toString('base64'), + method: arg.method }).then((res) => { try { const parsedRes = JSON.parse(res as string) @@ -1868,7 +1931,7 @@ export async function fetchNative(url:string, arg:{ id: fetchId, url: url, headers: headers, - body: Buffer.from(arg.body).toString('base64'), + body: Buffer.from(realBody).toString('base64'), }).then((res) => { if(!res.success){ error = res.error @@ -1918,14 +1981,17 @@ export async function fetchNative(url:string, arg:{ return { body: readableStream, headers: new Headers(resHeaders), - status: status + status: status, + json: jsonizer(readableStream), + text: textizer(readableStream), + arrayBuffer: arrayBufferizer(readableStream) } } else if(throughProxy){ const r = await fetch(hubURL + `/proxy2`, { - body: arg.body, + body: realBody, headers: arg.useRisuTk ? { "risu-header": encodeURIComponent(JSON.stringify(headers)), "risu-url": encodeURIComponent(url), @@ -1936,22 +2002,25 @@ export async function fetchNative(url:string, arg:{ "risu-url": encodeURIComponent(url), "Content-Type": "application/json" }, - method: "POST", + method: arg.method, signal: arg.signal }) return { body: pipeFetchLog(fetchLogIndex, r.body), headers: r.headers, - status: r.status + status: r.status, + json: jsonizer(r.body), + text: textizer(r.body), + arrayBuffer: arrayBufferizer(r.body) } } else{ return await fetch(url, { - body: arg.body, + body: realBody, headers: headers, method: arg.method, - signal: arg.signal + signal: arg.signal, }) } } diff --git a/src/ts/model/modellist.ts b/src/ts/model/modellist.ts index 680ac909..30d4425f 100644 --- a/src/ts/model/modellist.ts +++ b/src/ts/model/modellist.ts @@ -14,7 +14,10 @@ export enum LLMFlags{ requiresAlternateRole, mustStartWithUserInput, poolSupported, - hasVideoInput + hasVideoInput, + OAICompletionTokens, + DeveloperRole, + geminiThinking } export enum LLMProvider{ @@ -409,7 +412,7 @@ export const LLMModels: LLMModel[] = [ flags: [ LLMFlags.hasImageInput, LLMFlags.hasFullSystemPrompt, - LLMFlags.hasStreaming + LLMFlags.hasStreaming, ], parameters: OpenAIParameters, tokenizer: LLMTokenizer.tiktokenO200Base @@ -421,8 +424,8 @@ export const LLMModels: LLMModel[] = [ provider: LLMProvider.OpenAI, format: LLMFormat.OpenAICompatible, flags: [ - LLMFlags.hasFullSystemPrompt, - LLMFlags.hasStreaming + LLMFlags.hasStreaming, + LLMFlags.OAICompletionTokens ], parameters: OpenAIParameters, tokenizer: LLMTokenizer.tiktokenO200Base @@ -434,8 +437,24 @@ export const LLMModels: LLMModel[] = [ provider: LLMProvider.OpenAI, format: LLMFormat.OpenAICompatible, flags: [ + LLMFlags.hasStreaming, + 
LLMFlags.OAICompletionTokens + ], + parameters: OpenAIParameters, + tokenizer: LLMTokenizer.tiktokenO200Base + }, + { + id: 'o1', + internalID: 'o1', + name: 'o1', + provider: LLMProvider.OpenAI, + format: LLMFormat.OpenAICompatible, + flags: [ + LLMFlags.hasStreaming, + LLMFlags.OAICompletionTokens, LLMFlags.hasFullSystemPrompt, - LLMFlags.hasStreaming + LLMFlags.hasImageInput, + LLMFlags.DeveloperRole ], parameters: OpenAIParameters, tokenizer: LLMTokenizer.tiktokenO200Base @@ -759,7 +778,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-1.5-pro-exp-0827', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.GoogleCloud }, @@ -768,7 +787,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-exp-1121', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.GoogleCloud, }, @@ -777,7 +796,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-exp-1206', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.GoogleCloud }, @@ -786,7 +805,17 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-2.0-flash-exp', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasAudioInput, LLMFlags.hasVideoInput, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasAudioInput, LLMFlags.hasVideoInput, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], + parameters: ['temperature', 'top_k', 'top_p', 'presence_penalty', 'frequency_penalty'], + tokenizer: LLMTokenizer.GoogleCloud, + recommended: true + }, + { + name: "Gemini Flash 2.0 Thinking 1219", + id: 'gemini-2.0-flash-thinking-exp-1219', + provider: LLMProvider.GoogleCloud, + format: LLMFormat.GoogleCloud, + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasAudioInput, LLMFlags.hasVideoInput, LLMFlags.hasStreaming, LLMFlags.geminiThinking, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p', 'presence_penalty', 'frequency_penalty'], tokenizer: LLMTokenizer.GoogleCloud, recommended: true @@ -796,7 +825,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-1.5-pro-latest', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], recommended: true, parameters: ['temperature', 'top_k', 'top_p'], tokenizer: 
LLMTokenizer.GoogleCloud @@ -806,7 +835,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-1.5-flash', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], recommended: true, parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.GoogleCloud @@ -817,7 +846,7 @@ export const LLMModels: LLMModel[] = [ internalID: 'gemini-exp-1121', provider: LLMProvider.VertexAI, format: LLMFormat.VertexAIGemini, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.Gemma }, @@ -827,7 +856,7 @@ export const LLMModels: LLMModel[] = [ internalID: 'gemini-1.5-pro-latest', provider: LLMProvider.VertexAI, format: LLMFormat.VertexAIGemini, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.Gemma }, @@ -837,7 +866,7 @@ export const LLMModels: LLMModel[] = [ internalID: 'gemini-1.5-flash', provider: LLMProvider.VertexAI, format: LLMFormat.VertexAIGemini, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.Gemma }, @@ -846,7 +875,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-exp-1114', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.GoogleCloud }, @@ -855,7 +884,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-1.5-pro-002', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.GoogleCloud }, @@ -864,7 +893,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-1.5-flash-002', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.GoogleCloud }, @@ -873,7 +902,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-pro', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.GoogleCloud }, @@ -882,7 +911,7 @@ export const LLMModels: LLMModel[] = [ id: 
'gemini-pro-vision', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.GoogleCloud }, @@ -891,7 +920,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-ultra', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.GoogleCloud }, @@ -900,7 +929,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-ultra-vision', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.GoogleCloud }, diff --git a/src/ts/observer.ts b/src/ts/observer.ts index a40f870c..b868cb9d 100644 --- a/src/ts/observer.ts +++ b/src/ts/observer.ts @@ -8,6 +8,7 @@ function nodeObserve(node:HTMLElement){ const triggerName = node.getAttribute('risu-trigger'); const btnEvent = node.getAttribute('risu-btn'); const observerAdded = node.getAttribute('risu-observer'); + const hlLang = node.getAttribute('x-hl-lang'); if(observerAdded){ return @@ -45,13 +46,65 @@ function nodeObserve(node:HTMLElement){ node.setAttribute('risu-observer', 'true'); return } + + if(hlLang){ + node.addEventListener('contextmenu', (e)=>{ + e.preventDefault(); + const menu = document.createElement('div'); + menu.setAttribute('class', 'fixed z-50 min-w-[160px] py-2 bg-gray-800 rounded-lg border border-gray-700') + + const copyOption = document.createElement('div'); + copyOption.textContent = 'Copy'; + copyOption.setAttribute('class', 'px-4 py-2 text-sm text-gray-300 hover:bg-gray-700 cursor-pointer') + copyOption.addEventListener('click', ()=>{ + navigator.clipboard.writeText(node.getAttribute('x-hl-text')); + menu.remove(); + }) + + const downloadOption = document.createElement('div'); + downloadOption.textContent = 'Download'; + downloadOption.setAttribute('class', 'px-4 py-2 text-sm text-gray-300 hover:bg-gray-700 cursor-pointer') + downloadOption.addEventListener('click', ()=>{ + const a = document.createElement('a'); + a.href = URL.createObjectURL(new Blob([node.getAttribute('x-hl-text')], {type: 'text/plain'})); + a.download = 'code.' 
+ hlLang; + a.click(); + menu.remove(); + }) + + menu.appendChild(copyOption); + menu.appendChild(downloadOption); + + menu.style.left = e.clientX + 'px'; + menu.style.top = e.clientY + 'px'; + + document.body.appendChild(menu); + + document.addEventListener('click', ()=>{ + menu.remove(); + }, {once: true}) + }) + } } export async function startObserveDom(){ + //For codeblock we are using MutationObserver since it doesn't appear well + + const observer = new MutationObserver((mutations) => { + mutations.forEach((mutation) => { + mutation.addedNodes.forEach((node) => { + if(node instanceof HTMLElement){ + nodeObserve(node); + } + }) + }) + }) + //We are using a while loop intead of MutationObserver because MutationObserver is expensive for just a few elements while(true){ document.querySelectorAll('[risu-trigger]').forEach(nodeObserve); document.querySelectorAll('[risu-btn]').forEach(nodeObserve); + document.querySelectorAll('[x-hl-lang]').forEach(nodeObserve); await sleep(100); } } \ No newline at end of file diff --git a/src/ts/parser.svelte.ts b/src/ts/parser.svelte.ts index 12b0528d..2bf650da 100644 --- a/src/ts/parser.svelte.ts +++ b/src/ts/parser.svelte.ts @@ -14,6 +14,7 @@ import { getModuleAssets, getModuleLorebooks } from './process/modules'; import type { OpenAIChat } from './process/index.svelte'; import hljs from 'highlight.js/lib/core' import 'highlight.js/styles/atom-one-dark.min.css' +import { language } from 'src/lang'; const markdownItOptions = { html: true, @@ -117,18 +118,30 @@ async function renderHighlightableMarkdown(data:string) { //import language if not already loaded //we do not refactor this to a function because we want to keep vite to only import the languages that are needed let languageModule:any = null + let shotLang = '' switch(lang){ case 'js': case 'javascript':{ lang = 'javascript' + shotLang = 'js' if(!hljs.getLanguage('javascript')){ languageModule = await import('highlight.js/lib/languages/javascript') } break } + case 'txt': + case 'vtt':{ + shotLang = lang + lang = 'plaintext' + if(!hljs.getLanguage('plaintext')){ + languageModule = await import('highlight.js/lib/languages/plaintext') + } + break + } case 'py': case 'python':{ lang = 'python' + shotLang = 'py' if(!hljs.getLanguage('python')){ languageModule = await import('highlight.js/lib/languages/python') } @@ -136,6 +149,7 @@ async function renderHighlightableMarkdown(data:string) { } case 'css':{ lang = 'css' + shotLang = 'css' if(!hljs.getLanguage('css')){ languageModule = await import('highlight.js/lib/languages/css') } @@ -144,6 +158,7 @@ async function renderHighlightableMarkdown(data:string) { case 'xml': case 'html':{ lang = 'xml' + shotLang = 'xml' if(!hljs.getLanguage('xml')){ languageModule = await import('highlight.js/lib/languages/xml') } @@ -151,6 +166,7 @@ async function renderHighlightableMarkdown(data:string) { } case 'lua':{ lang = 'lua' + shotLang = 'lua' if(!hljs.getLanguage('lua')){ languageModule = await import('highlight.js/lib/languages/lua') } @@ -158,6 +174,7 @@ async function renderHighlightableMarkdown(data:string) { } case 'dart':{ lang = 'dart' + shotLang = 'dart' if(!hljs.getLanguage('dart')){ languageModule = await import('highlight.js/lib/languages/dart') } @@ -165,6 +182,7 @@ async function renderHighlightableMarkdown(data:string) { } case 'java':{ lang = 'java' + shotLang = 'java' if(!hljs.getLanguage('java')){ languageModule = await import('highlight.js/lib/languages/java') } @@ -172,6 +190,7 @@ async function renderHighlightableMarkdown(data:string) { } case 
'rust':{ lang = 'rust' + shotLang = 'rs' if(!hljs.getLanguage('rust')){ languageModule = await import('highlight.js/lib/languages/rust') } @@ -180,6 +199,7 @@ async function renderHighlightableMarkdown(data:string) { case 'c': case 'cpp':{ lang = 'cpp' + shotLang = 'cpp' if(!hljs.getLanguage('cpp')){ languageModule = await import('highlight.js/lib/languages/cpp') } @@ -188,6 +208,7 @@ async function renderHighlightableMarkdown(data:string) { case 'csharp': case 'cs':{ lang = 'csharp' + shotLang = 'cs' if(!hljs.getLanguage('csharp')){ languageModule = await import('highlight.js/lib/languages/csharp') } @@ -196,6 +217,7 @@ async function renderHighlightableMarkdown(data:string) { case 'ts': case 'typescript':{ lang = 'typescript' + shotLang = 'ts' if(!hljs.getLanguage('typescript')){ languageModule = await import('highlight.js/lib/languages/typescript') } @@ -203,6 +225,7 @@ async function renderHighlightableMarkdown(data:string) { } case 'json':{ lang = 'json' + shotLang = 'json' if(!hljs.getLanguage('json')){ languageModule = await import('highlight.js/lib/languages/json') } @@ -210,6 +233,7 @@ async function renderHighlightableMarkdown(data:string) { } case 'yaml':{ lang = 'yaml' + shotLang = 'yml' if(!hljs.getLanguage('yaml')){ languageModule = await import('highlight.js/lib/languages/yaml') } @@ -217,6 +241,7 @@ async function renderHighlightableMarkdown(data:string) { } case 'shell':{ lang = 'shell' + shotLang = 'sh' if(!hljs.getLanguage('shell')){ languageModule = await import('highlight.js/lib/languages/shell') } @@ -224,6 +249,7 @@ async function renderHighlightableMarkdown(data:string) { } case 'bash':{ lang = 'bash' + shotLang = 'sh' if(!hljs.getLanguage('bash')){ languageModule = await import('highlight.js/lib/languages/bash') } @@ -231,6 +257,7 @@ async function renderHighlightableMarkdown(data:string) { } default:{ lang = 'none' + shotLang = 'none' } } if(languageModule){ @@ -244,7 +271,9 @@ async function renderHighlightableMarkdown(data:string) { language: lang, ignoreIllegals: true }).value - rendered = rendered.replace(placeholder, `
${highlighted}
`) + rendered = rendered.replace(placeholder, `
${highlighted}
`) } } catch (error) { @@ -425,6 +454,9 @@ function getClosestMatch(name:string, assetPaths:{[key:string]:{path:string, ext closestDist = dist } } + if(closestDist > DBState.db.assetMaxDifference){ + return null + } return assetPaths[closest] } @@ -464,6 +496,11 @@ export interface simpleCharacterArgument{ triggerscript?: triggerscript[] } +function parseThoughts(data:string){ + return data.replace(/(.+)<\/Thoughts>/gms, (full, txt) => { + return `
${language.cot}${txt}
` + }) +} export async function ParseMarkdown( data:string, @@ -475,25 +512,31 @@ export async function ParseMarkdown( let firstParsed = '' const additionalAssetMode = (mode === 'back') ? 'back' : 'normal' let char = (typeof(charArg) === 'string') ? (findCharacterbyId(charArg)) : (charArg) + if(char && char.type !== 'group'){ data = await parseAdditionalAssets(data, char, additionalAssetMode, 'pre') firstParsed = data } + if(char){ data = (await processScriptFull(char, data, 'editdisplay', chatID, cbsConditions)).data } + if(firstParsed !== data && char && char.type !== 'group'){ data = await parseAdditionalAssets(data, char, additionalAssetMode, 'post') } + data = await parseInlayAssets(data ?? '') + data = parseThoughts(data) + data = encodeStyle(data) if(mode === 'normal'){ data = await renderHighlightableMarkdown(data) } return decodeStyle(DOMPurify.sanitize(data, { - ADD_TAGS: ["iframe", "style", "risu-style", "x-em"], - ADD_ATTR: ["allow", "allowfullscreen", "frameborder", "scrolling", "risu-btn", 'risu-trigger', 'risu-mark'], + ADD_TAGS: ["iframe", "style", "risu-style", "x-em",], + ADD_ATTR: ["allow", "allowfullscreen", "frameborder", "scrolling", "risu-btn", 'risu-trigger', 'risu-mark', 'x-hl-lang', 'x-hl-text'], })) } @@ -1941,6 +1984,7 @@ export function risuChatParser(da:string, arg:{ callStack: arg.callStack, } + da = da.replace(/\<(user|char|bot)\>/gi, '{{$1}}') const isPureMode = () => { return pureModeNest.size > 0 @@ -1963,15 +2007,6 @@ export function risuChatParser(da:string, arg:{ stackType[nested.length] = 1 break } - case '<':{ - if(stackType[nested.length] === 1){ - nested[0] += da[pointer] - break - } - nested.unshift('') - stackType[nested.length] = 2 - break - } case '#':{ //legacy if statement, deprecated if(da[pointer + 1] !== '}' || nested.length === 1 || stackType[nested.length] !== 1){ @@ -2101,79 +2136,6 @@ export function risuChatParser(da:string, arg:{ } break } - case '>':{ - if(stackType[nested.length] === 1){ - nested[0] += da[pointer] - break - } - if(nested.length === 1 || stackType[nested.length] !== 2){ - break - } - const dat = nested.shift() - if(isPureMode() && pureModeType() !== 'pureSyntax' && pureModeType() !== ''){ - nested[0] += `<${dat}>` - break - } - switch(dat){ - case 'Comment':{ - if(arg.runVar){ - break - } - if(!commentMode){ - thinkingMode = false - commentMode = true - commentLatest = nested.map((f) => f) - if(commentLatest[0].endsWith('\n')){ - commentLatest[0] = commentLatest[0].substring(0, commentLatest[0].length - 1) - } - commentV = new Uint8Array(stackType) - } - break - } - case '/Comment':{ - if(commentMode){ - nested = commentLatest - stackType = commentV - commentMode = false - } - break - } - case 'Thoughts':{ - if(!visualize){ - nested[0] += `<${dat}>` - break - } - if(!commentMode){ - thinkingMode = true - commentMode = true - commentLatest = nested.map((f) => f) - if(commentLatest[0].endsWith('\n')){ - commentLatest[0] = commentLatest[0].substring(0, commentLatest[0].length - 1) - } - commentV = new Uint8Array(stackType) - } - break - } - case '/Thoughts':{ - if(!visualize){ - nested[0] += `<${dat}>` - break - } - if(commentMode){ - nested = commentLatest - stackType = commentV - commentMode = false - } - break - } - default:{ - const mc = isPureMode() ? null : smMatcher(dat, matcherObj) - nested[0] += mc ?? 
`<${dat}>` - break - } - } - break - } default:{ nested[0] += da[pointer] break diff --git a/src/ts/plugins/plugins.ts b/src/ts/plugins/plugins.ts index 07d4e1a0..5553c87d 100644 --- a/src/ts/plugins/plugins.ts +++ b/src/ts/plugins/plugins.ts @@ -1,21 +1,16 @@ import { get, writable } from "svelte/store"; import { language } from "../../lang"; import { alertError } from "../alert"; -import { getDatabase, setDatabaseLite } from "../storage/database.svelte"; +import { getCurrentCharacter, getDatabase, setDatabaseLite } from "../storage/database.svelte"; import { checkNullish, selectSingleFile, sleep } from "../util"; import type { OpenAIChat } from "../process/index.svelte"; -import { globalFetch } from "../globalApi.svelte"; +import { fetchNative, globalFetch } from "../globalApi.svelte"; import { selectedCharID } from "../stores.svelte"; import { addAdditionalCharaJS } from "./embedscript"; +import type { ScriptMode } from "../process/scripts"; export const customProviderStore = writable([] as string[]) -interface PluginRequest{ - url: string - header?:{[key:string]:string} - body: any, - res: string -} interface ProviderPlugin{ name:string @@ -23,6 +18,7 @@ interface ProviderPlugin{ script:string arguments:{[key:string]:'int'|'string'|string[]} realArg:{[key:string]:number|string} + version?:1|2 } export type RisuPlugin = ProviderPlugin @@ -37,6 +33,7 @@ export async function importPlugin(){ const jsFile = Buffer.from(f.data).toString('utf-8').replace(/^\uFEFF/gm, ""); const splitedJs = jsFile.split('\n') let name = '' + let version:1|2 = 1 let displayName:string = undefined let arg:{[key:string]:'int'|'string'|string[]} = {} let realArg:{[key:string]:number|string} = {} @@ -49,15 +46,32 @@ export async function importPlugin(){ } name = provied.trim() } + if(line.startsWith('//@name')){ + const provied = line.slice(7) + if(provied === ''){ + alertError('plugin name must be longer than "", did you put it correctly?') + return + } + version = 2 + name = provied.trim() + } if(line.startsWith('//@risu-display-name')){ const provied = line.slice('//@risu-display-name'.length + 1) if(provied === ''){ alertError('plugin display name must be longer than "", did you put it correctly?') return } - name = provied.trim() + displayName = provied.trim() } - if(line.startsWith('//@risu-arg')){ + if(line.startsWith('//@display-name')){ + const provied = line.slice('//@display-name'.length + 1) + if(provied === ''){ + alertError('plugin display name must be longer than "", did you put it correctly?') + return + } + displayName = provied.trim() + } + if(line.startsWith('//@risu-arg') || line.startsWith('//@arg')){ const provied = line.trim().split(' ') if(provied.length < 3){ alertError('plugin argument is incorrect, did you put space in argument name?') @@ -90,7 +104,8 @@ export async function importPlugin(){ script: jsFile, realArg: realArg, arguments: arg, - displayName: displayName + displayName: displayName, + version: version } db.plugins ??= [] @@ -124,11 +139,18 @@ let pluginTranslator = false export async function loadPlugins() { let db = getDatabase() + if(pluginWorker){ pluginWorker.terminate() pluginWorker = null } - if(db.plugins.length > 0){ + + const plugins = safeStructuredClone(db.plugins).filter((a:RisuPlugin) => a.version === 1) + const pluginV2 = safeStructuredClone(db.plugins).filter((a:RisuPlugin) => a.version === 2) + + await loadV2Plugin(pluginV2) + + if(plugins.length > 0){ const da = await fetch("/pluginApi.js") const pluginApiString = await da.text() @@ -267,6 +289,140 @@ export 
 async function loadPlugins() {
 }
 }
+type PluginV2ProviderArgument = {
+    prompt_chat: OpenAIChat[],
+    frequency_penalty: number
+    min_p: number
+    presence_penalty: number
+    repetition_penalty: number
+    top_k: number
+    top_p: number
+    temperature: number
+    mode: string
+}
+
+type EditFunction = (content:string) => string|null|undefined|Promise<string|null|undefined>
+type ReplacerFunction = (content:OpenAIChat[], type:string) => OpenAIChat[]|Promise<OpenAIChat[]>
+
+export const pluginV2 = {
+    providers: new Map<string, (arg:PluginV2ProviderArgument) => Promise<{success:boolean,content:string}>>(),
+    editdisplay: new Set(),
+    editoutput: new Set(),
+    editprocess: new Set(),
+    editinput: new Set(),
+    replacerbeforeRequest: new Set(),
+    replacerafterRequest: new Set<(content:string, type:string) => string|Promise<string>>(),
+    unload: new Set<() => void|Promise<void>>(),
+    loaded: false
+}
+
+export async function loadV2Plugin(plugins:RisuPlugin[]){
+
+    if(pluginV2.loaded){
+        for(const unload of pluginV2.unload){
+            await unload()
+        }
+
+        pluginV2.providers.clear()
+        pluginV2.editdisplay.clear()
+        pluginV2.editoutput.clear()
+        pluginV2.editprocess.clear()
+        pluginV2.editinput.clear()
+    }
+
+    pluginV2.loaded = true
+
+    globalThis.__pluginApis__ = {
+        risuFetch: globalFetch,
+        nativeFetch: fetchNative,
+        getArg: (arg:string) => {
+            const [name, realArg] = arg.split('::')
+            for(const plug of plugins){
+                if(plug.name === name){
+                    return plug.realArg[realArg]
+                }
+            }
+        },
+        getChar: () => {
+            return getCurrentCharacter()
+        },
+        setChar: (char:any) => {
+            const db = getDatabase()
+            const charid = get(selectedCharID)
+            db.characters[charid] = char
+            setDatabaseLite(db)
+        },
+        addProvider: (name:string, func:(arg:PluginV2ProviderArgument) => Promise<{success:boolean,content:string}>) => {
+            let provs = get(customProviderStore)
+            provs.push(name)
+            pluginV2.providers.set(name, func)
+            customProviderStore.set(provs)
+        },
+        addRisuScriptHandler: (name:ScriptMode, func:EditFunction) => {
+            if(pluginV2['edit' + name]){
+                pluginV2['edit' + name].add(func)
+            }
+            else{
+                throw (`script handler named ${name} not found`)
+            }
+        },
+        removeRisuScriptHandler: (name:ScriptMode, func:EditFunction) => {
+            if(pluginV2['edit' + name]){
+                pluginV2['edit' + name].delete(func)
+            }
+            else{
+                throw (`script handler named ${name} not found`)
+            }
+        },
+        addRisuReplacer: (name:string, func:ReplacerFunction) => {
+            if(pluginV2['replacer' + name]){
+                pluginV2['replacer' + name].add(func)
+            }
+            else{
+                throw (`replacer handler named ${name} not found`)
+            }
+        },
+        removeRisuReplacer: (name:string, func:ReplacerFunction) => {
+            if(pluginV2['replacer' + name]){
+                pluginV2['replacer' + name].delete(func)
+            }
+            else{
+                throw (`replacer handler named ${name} not found`)
+            }
+        },
+        onUnload: (func:() => void|Promise<void>) => {
+            pluginV2.unload.add(func)
+        }
+    }
+
+    for(const plugin of plugins){
+        const data = plugin.script
+
+        const realScript = `(async () => {
+            const risuFetch = globalThis.__pluginApis__.risuFetch
+            const nativeFetch = globalThis.__pluginApis__.nativeFetch
+            const getArg = globalThis.__pluginApis__.getArg
+            const printLog = globalThis.__pluginApis__.printLog
+            const getChar = globalThis.__pluginApis__.getChar
+            const setChar = globalThis.__pluginApis__.setChar
+            const addProvider = globalThis.__pluginApis__.addProvider
+            const addRisuEventHandler = globalThis.__pluginApis__.addRisuEventHandler
+            const onUnload = globalThis.__pluginApis__.onUnload
+
+            ${data}
+        })();`
+
+        try {
+            eval(realScript)
+        } catch (error) {
+            console.error(error)
+        }
+
+        console.log('Loaded V2 Plugin', plugin.name)
+
+    }
+}
+
 export async function translatorPlugin(text:string, from:string, to:string) {
     if(!pluginTranslator){
         return false
diff --git a/src/ts/process/index.svelte.ts b/src/ts/process/index.svelte.ts
index 0ce5f932..d1727745 100644
--- a/src/ts/process/index.svelte.ts
+++ b/src/ts/process/index.svelte.ts
@@ -30,6 +30,7 @@ import { hypaMemoryV2 } from "./memory/hypav2";
 import { runLuaEditTrigger } from "./lua";
 import { parseChatML } from "../parser.svelte";
 import { getModelInfo, LLMFlags } from "../model/modellist";
+import { pluginV2 } from "../plugins/plugins";
 
 export interface OpenAIChat{
     role: 'system'|'user'|'assistant'|'function'
@@ -39,6 +40,7 @@ export interface OpenAIChat{
     removable?:boolean
     attr?:string[]
     multimodals?: MultiModal[]
+    thoughts?: string[]
 }
 
 export interface MultiModal{
@@ -752,19 +754,19 @@ export async function sendChat(chatProcessIndex = -1,arg:{
                 break
             }
         }
-        if(usingPromptTemplate && DBState.db.promptSettings.maxThoughtTagDepth !== -1){
-            const depth = ms.length - index
-            if(depth >= DBState.db.promptSettings.maxThoughtTagDepth){
-                formatedChat = formatedChat.replace(/<Thoughts>(.+?)<\/Thoughts>/gm, '')
-            }
-        }
+        let thoughts:string[] = []
+        formatedChat = formatedChat.replace(/<Thoughts>(.+?)<\/Thoughts>/gm, (match, p1) => {
+            thoughts.push(p1)
+            return ''
+        })
 
         const chat:OpenAIChat = {
             role: role,
             content: formatedChat,
             memo: msg.chatId,
             attr: attr,
-            multimodals: multimodal
+            multimodals: multimodal,
+            thoughts: thoughts
         }
         if(chat.multimodals.length === 0){
             delete chat.multimodals
diff --git a/src/ts/process/request.ts b/src/ts/process/request.ts
index 2942bf5d..3aa9d242 100644
--- a/src/ts/process/request.ts
+++ b/src/ts/process/request.ts
@@ -1,6 +1,6 @@
 import type { MultiModal, OpenAIChat, OpenAIChatFull } from "./index.svelte";
 import { getCurrentCharacter, getDatabase, setDatabase, type character } from "../storage/database.svelte";
-import { pluginProcess } from "../plugins/plugins";
+import { pluginProcess, pluginV2 } from "../plugins/plugins";
 import { language } from "../../lang";
 import { stringlizeAINChat, getStopStrings, unstringlizeAIN, unstringlizeChat } from "./stringlize";
 import { addFetchLog, fetchNative, globalFetch, isNodeServer, isTauri, textifyReadableStream } from "../globalApi.svelte";
@@ -105,11 +105,46 @@ function applyParameters(data: { [key: string]: any }, parameters: Parameter[],
     }
 
     for(const parameter of parameters){
+
+        let value = 0
         if(parameter === 'top_k' && arg.ignoreTopKIfZero && db.seperateParameters[ModelMode][parameter] === 0){
             continue
         }
-        let value = db.seperateParameters[ModelMode][parameter]
+        switch(parameter){
+            case 'temperature':{
+                value = db.seperateParameters[ModelMode].temperature === -1000 ? -1000 : (db.seperateParameters[ModelMode].temperature / 100)
+                break
+            }
+            case 'top_k':{
+                value = db.seperateParameters[ModelMode].top_k
+                break
+            }
+            case 'repetition_penalty':{
+                value = db.seperateParameters[ModelMode].repetition_penalty
+                break
+            }
+            case 'min_p':{
+                value = db.seperateParameters[ModelMode].min_p
+                break
+            }
+            case 'top_a':{
+                value = db.seperateParameters[ModelMode].top_a
+                break
+            }
+            case 'top_p':{
+                value = db.seperateParameters[ModelMode].top_p
+                break
+            }
+            case 'frequency_penalty':{
+                value = db.seperateParameters[ModelMode].frequency_penalty === -1000 ? -1000 : (db.seperateParameters[ModelMode].frequency_penalty / 100)
+                break
+            }
+            case 'presence_penalty':{
+                value = db.seperateParameters[ModelMode].presence_penalty === -1000 ? -1000 : (db.seperateParameters[ModelMode].presence_penalty / 100)
+                break
+            }
+        }
 
         if(value === -1000 || value === undefined){
             continue
@@ -174,7 +209,22 @@ export async function requestChatData(arg:requestDataArgument, model:ModelModeEx
     const db = getDatabase()
     let trys = 0
     while(true){
+
+        if(pluginV2.replacerbeforeRequest.size > 0){
+            for(const replacer of pluginV2.replacerbeforeRequest){
+                arg.formated = await replacer(arg.formated, model)
+            }
+        }
+
         const da = await requestChatDataMain(arg, model, abortSignal)
+
+        if(da.type === 'success' && pluginV2.replacerafterRequest.size > 0){
+            for(const replacer of pluginV2.replacerafterRequest){
+                da.result = await replacer(da.result, model)
+            }
+        }
+
+
         if(da.type !== 'fail' || da.noRetry){
             return da
         }
@@ -202,7 +252,7 @@ interface OpenAIImageContents {
 type OpenAIContents = OpenAITextContents|OpenAIImageContents
 
 export interface OpenAIChatExtra {
-    role: 'system'|'user'|'assistant'|'function'
+    role: 'system'|'user'|'assistant'|'function'|'developer'
     content: string|OpenAIContents[]
     memo?:string
     name?:string
@@ -247,7 +297,23 @@ function reformater(formated:OpenAIChat[],modelInfo:LLMModel){
         }
 
         if(newFormated[newFormated.length-1].role === m.role){
+            newFormated[newFormated.length-1].content += '\n' + m.content
+
+            if(m.multimodals){
+                if(!newFormated[newFormated.length-1].multimodals){
+                    newFormated[newFormated.length-1].multimodals = []
+                }
+                newFormated[newFormated.length-1].multimodals.push(...m.multimodals)
+            }
+
+            if(m.thoughts){
+                if(!newFormated[newFormated.length-1].thoughts){
+                    newFormated[newFormated.length-1].thoughts = []
+                }
+                newFormated[newFormated.length-1].thoughts.push(...m.thoughts)
+            }
+
             continue
         }
         else{
@@ -409,15 +475,6 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise
-                formatedChat[i].content = `<system>${formatedChat[i].content}</system>`
-                formatedChat[i].role = 'user'
-            }
-        }
-    }
-    for(let i=0;i<formatedChat.length;i++){
+    formatedChat = formatedChat.map((v) => {
+        if(v.role === 'system'){
+            v.role = 'developer'
+        }
+        return v
+    })
+    }
+    console.log(formatedChat)
 if(arg.modelInfo.format === LLMFormat.Mistral){
     requestModel = aiModel
@@ -500,12 +566,12 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise
+        if (chat.multimodals && chat.multimodals.length > 0 && chat.role === "user") {
+            let geminiParts: GeminiPart[] = [];
+
+            geminiParts.push({
+                text: chat.content,
+            });
+
+            for (const modal of chat.multimodals) {
+                if (
+                    (modal.type === "image" && arg.modelInfo.flags.includes(LLMFlags.hasImageInput)) ||
+                    (modal.type === "audio" && arg.modelInfo.flags.includes(LLMFlags.hasAudioInput)) ||
+                    (modal.type === "video" && arg.modelInfo.flags.includes(LLMFlags.hasVideoInput))
+                ) {
+                    const dataurl = modal.base64;
+                    const base64 = dataurl.split(",")[1];
+                    const mediaType = dataurl.split(";")[0].split(":")[1];
+
+                    geminiParts.push({
+                        inlineData: {
+                            mimeType: mediaType,
+                            data: base64,
+                        }
+                    });
+                }
+            }
+
+            reformatedChat.push({
+                role: "USER",
+                parts: geminiParts,
+            });
+        } else if (prevChat?.role === qRole) {
+            reformatedChat[reformatedChat.length-1].parts[
+                reformatedChat[reformatedChat.length-1].parts.length-1
+            ].text += '\n' + chat.content
+            continue
+        }
+        else if(chat.role === 'system'){
+            if(prevChat?.role === 'USER'){
+                reformatedChat[reformatedChat.length-1].parts[0].text += '\nsystem:' + chat.content
             }
             else{
                 reformatedChat.push({
@@ -1405,78 +1517,32 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise
                 })
             }
         }
+        else if(chat.role === 'assistant' && arg.modelInfo.flags.includes(LLMFlags.geminiThinking)){
+            reformatedChat.push({
+                role: 'MODEL',
+                parts: [chat.thoughts?.length > 0 ? {
+                    text: chat.thoughts.join('\n\n')
+                } : null, {
+                    text: chat.content
+                }]
+            })
+        }
+
+        else if(chat.role === 'assistant' || chat.role === 'user'){
+            reformatedChat.push({
+                role: chat.role === 'user' ? 'USER' : 'MODEL',
+                parts: [{
+                    text: chat.content
+                }]
+            })
+        }
         else{
-            const prevChat = reformatedChat[reformatedChat.length-1]
-            const qRole = 
-                chat.role === 'user' ? 'USER' :
-                chat.role === 'assistant' ? 'MODEL' :
-                chat.role
-
-            if (chat.multimodals && chat.multimodals.length > 0 && chat.role === "user") {
-                let geminiParts: GeminiPart[] = [];
-
-                geminiParts.push({
-                    text: chat.content,
-                });
-
-                for (const modal of chat.multimodals) {
-                    if (
-                        (modal.type === "image" && arg.modelInfo.flags.includes(LLMFlags.hasImageInput)) ||
-                        (modal.type === "audio" && arg.modelInfo.flags.includes(LLMFlags.hasAudioInput)) ||
-                        (modal.type === "video" && arg.modelInfo.flags.includes(LLMFlags.hasVideoInput))
-                    ) {
-                        const dataurl = modal.base64;
-                        const base64 = dataurl.split(",")[1];
-                        const mediaType = dataurl.split(";")[0].split(":")[1];
-
-                        geminiParts.push({
-                            inlineData: {
-                                mimeType: mediaType,
-                                data: base64,
-                            }
-                        });
-                    }
-                }
-
-                reformatedChat.push({
-                    role: "USER",
-                    parts: geminiParts,
-                });
-
-            } else if (prevChat.role === qRole) {
-                reformatedChat[reformatedChat.length-1].parts[0].text += '\n' + chat.content
-                continue
-            }
-            else if(chat.role === 'system'){
-                if(prevChat.role === 'USER'){
-                    reformatedChat[reformatedChat.length-1].parts[0].text += '\nsystem:' + chat.content
-                }
-                else{
-                    reformatedChat.push({
-                        role: "USER",
-                        parts: [{
-                            text: chat.role + ':' + chat.content
-                        }]
-                    })
-                }
-            }
-
-            else if(chat.role === 'assistant' || chat.role === 'user'){
-                reformatedChat.push({
-                    role: chat.role === 'user' ? 'USER' : 'MODEL',
-                    parts: [{
-                        text: chat.content
-                    }]
-                })
-            }
-            else{
-                reformatedChat.push({
-                    role: "USER",
-                    parts: [{
-                        text: chat.role + ':' + chat.content
-                    }]
-                })
-            }
+            reformatedChat.push({
+                role: "USER",
+                parts: [{
+                    text: chat.role + ':' + chat.content
+                }]
+            })
         }
     }
@@ -1649,12 +1715,25 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise
             const data = JSON.parse(reformatted)
-            let r = ''
+            let rDatas:string[] = ['']
             for(const d of data){
-                r += d.candidates[0].content.parts[0].text
+                const parts = d.candidates[0].content?.parts
+                for(let i=0;i 0){
+                        rDatas.push('')
+                    }
+
+                    rDatas[rDatas.length-1] += part.text
+                }
+            }
+
+            if(rDatas.length > 1){
+                const thought = rDatas.splice(rDatas.length-2, 1)[0]
+                rDatas[rDatas.length-1] = `${thought}\n\n${rDatas.join('\n\n')}`
             }
 
             control.enqueue({
-                '0': r
+                '0': rDatas[rDatas.length-1],
             })
         } catch (error) {
             console.log(error)
@@ -1682,13 +1761,22 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise
         }
     }
 
-    let fullRes = ''
-
+    let rDatas:string[] = ['']
     const processDataItem = (data:any) => {
-        if(data?.candidates?.[0]?.content?.parts?.[0]?.text){
-            fullRes += data.candidates[0].content.parts[0].text
+        const parts = data?.candidates?.[0]?.content?.parts
+        if(parts){
+
+            for(let i=0;i 0){
+                rDatas.push('')
+            }
+
+            rDatas[rDatas.length-1] += part.text
+            }
         }
-        else if(data?.errors){
+
+        if(data?.errors){
             return {
                 type: 'fail',
                 result: `${JSON.stringify(data.errors)}`
@@ -1711,9 +1799,15 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise
         processDataItem(res.data)
     }
 
+
+    if(rDatas.length > 1){
+        const thought = rDatas.splice(rDatas.length-2, 1)[0]
+        rDatas[rDatas.length-1] = `${thought}\n\n${rDatas.join('\n\n')}`
+    }
+
     return {
         type: 'success',
-        result: fullRes
+        result: rDatas[rDatas.length-1]
     }
 }
diff --git a/src/ts/process/scripts.ts b/src/ts/process/scripts.ts
index 17607dbe..63ad29af 100644
--- a/src/ts/process/scripts.ts
+++ b/src/ts/process/scripts.ts
@@ -10,6 +10,7 @@ import { runCharacterJS } from "../plugins/embedscript";
 import { getModuleAssets, getModuleRegexScripts } from "./modules";
 import { HypaProcesser } from "./memory/hypamemory";
 import { runLuaEditTrigger } from "./lua";
+import { pluginV2 } from "../plugins/plugins";
 
 const dreg = /{{data}}/g
 const randomness = /\|\|\|/g
@@ -109,6 +110,15 @@ export async function processScriptFull(char:character|groupChat|simpleCharacter
             data,
         })
         data = await runLuaEditTrigger(char, mode, data)
+        if(pluginV2[mode].size > 0){
+            for(const plugin of pluginV2[mode]){
+                const res = await plugin(data)
+                if(res !== null && res !== undefined){
+                    data = res
+                }
+            }
+        }
+
         if(scripts.length === 0){
             cacheScript(scripts, originalData, data, mode)
             return {data, emoChanged}
diff --git a/src/ts/process/transformers.ts b/src/ts/process/transformers.ts
index 6d5c3752..4e238fa2 100644
--- a/src/ts/process/transformers.ts
+++ b/src/ts/process/transformers.ts
@@ -134,7 +134,6 @@ export const runVITS = async (text: string, modelData:string|OnnxModelFiles = 'X
     });
 }
 
-
 export const registerOnnxModel = async ():Promise => {
 
     const id = v4().replace(/-/g, '')
diff --git a/src/ts/storage/database.svelte.ts b/src/ts/storage/database.svelte.ts
index 5232471d..9cc9d167 100644
--- a/src/ts/storage/database.svelte.ts
+++ b/src/ts/storage/database.svelte.ts
@@ -12,7 +12,7 @@ import { defaultColorScheme, type ColorScheme } from '../gui/colorscheme';
 import type { PromptItem, PromptSettings } from '../process/prompt';
 import type { OobaChatCompletionRequestParams } from '../model/ooba';
 
-export let appVer = "143.5.0"
+export let appVer = "144.1.0"
 export let webAppSubVer = ''
 
@@ -464,14 +464,12 @@ export function setDatabase(data:Database){
     }
     data.customFlags ??= []
     data.enableCustomFlags ??= false
+    data.assetMaxDifference ??= 4
     changeLanguage(data.language)
     setDatabaseLite(data)
 }
 
 export function setDatabaseLite(data:Database){
-    if(import.meta.env.DEV){
-        console.trace('setDatabaseLite executed')
-    }
     DBState.db = data
 }
 
@@ -861,6 +859,9 @@ export interface Database{
     presetChain: string
     legacyMediaFindings?:boolean
     geminiStream?:boolean
+    assetMaxDifference:number
+    menuSideBar:boolean
+    pluginV2: RisuPlugin[]
 }
 
 interface SeparateParameters{
diff --git a/src/ts/translator/translator.ts b/src/ts/translator/translator.ts
index b8ce3041..587c4582 100644
--- a/src/ts/translator/translator.ts
+++ b/src/ts/translator/translator.ts
@@ -317,8 +317,15 @@ export async function translateHTML(html: string, reverse:boolean, charArg:simpl
                 return
             }
 
-            // node.textContent = await translate(node.textContent || '', reverse);
-            let translated = await translate(node.textContent || "", reverse);
+            const translateChunks = (node.textContent || '').split(/\n\n+/g);
+            let translatedChunksPromises: Promise<string>[] = [];
+            for (const chunk of translateChunks) {
+                const translatedPromise = translate(chunk, reverse);
+                translatedChunksPromises.push(translatedPromise);
+            }
+
+            const translatedChunks = await Promise.all(translatedChunksPromises);
+            let translated = translatedChunks.join("\n\n");
             if (!reprocessDisplayScript) {
                 node.textContent = translated;
                 return;
diff --git a/version.json b/version.json
index dec9cba6..2b666a0d 100644
--- a/version.json
+++ b/version.json
@@ -1 +1 @@
-{"version":"143.5.0"}
\ No newline at end of file
+{"version":"144.1.0"}
\ No newline at end of file
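
For reference, a minimal sketch of a V2 plugin written against the API wired up by `loadV2Plugin` in this patch. The plugin name, provider name, endpoint URL, and the `output` field of the response are illustrative assumptions only and are not part of this change:

```js
//@name exampleplugin
//@display-name Example Plugin

// Register a custom provider. RisuAI calls it with the formatted prompt and
// sampling parameters; it must resolve to { success, content }.
addProvider('example-provider', async (arg) => {
    // nativeFetch has no CORS restrictions and defaults to POST;
    // the URL below is a placeholder, not a real endpoint.
    const res = await nativeFetch('https://example.invalid/v1/chat', {
        body: JSON.stringify({
            messages: arg.prompt_chat,
            temperature: arg.temperature,
        }),
        headers: { 'Content-Type': 'application/json' },
    })
    const data = await res.json()
    return {
        success: res.status === 200,
        content: data?.output ?? '', // 'output' is an assumed field of the placeholder API
    }
})

// Runs when the plugin is reloaded or removed (collected in pluginV2.unload above).
onUnload(() => {
    console.log('exampleplugin unloaded')
})
```

Because V2 plugins run in the same context as the main script, the same functions are also reachable through `globalThis.__pluginApis__`.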