From e2149ced1b96e6825af9a570bccb8c41c8804fd2 Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Sun, 15 Dec 2024 01:00:48 +0900 Subject: [PATCH 01/33] Refactor risuChatParser --- src/ts/parser.svelte.ts | 83 +---------------------------------------- 1 file changed, 1 insertion(+), 82 deletions(-) diff --git a/src/ts/parser.svelte.ts b/src/ts/parser.svelte.ts index 12b0528d..48f8f3f5 100644 --- a/src/ts/parser.svelte.ts +++ b/src/ts/parser.svelte.ts @@ -1941,6 +1941,7 @@ export function risuChatParser(da:string, arg:{ callStack: arg.callStack, } + da = da.replace(/\<(user|char|bot)\>/gi, '{{$1}}') const isPureMode = () => { return pureModeNest.size > 0 @@ -1963,15 +1964,6 @@ export function risuChatParser(da:string, arg:{ stackType[nested.length] = 1 break } - case '<':{ - if(stackType[nested.length] === 1){ - nested[0] += da[pointer] - break - } - nested.unshift('') - stackType[nested.length] = 2 - break - } case '#':{ //legacy if statement, deprecated if(da[pointer + 1] !== '}' || nested.length === 1 || stackType[nested.length] !== 1){ @@ -2101,79 +2093,6 @@ export function risuChatParser(da:string, arg:{ } break } - case '>':{ - if(stackType[nested.length] === 1){ - nested[0] += da[pointer] - break - } - if(nested.length === 1 || stackType[nested.length] !== 2){ - break - } - const dat = nested.shift() - if(isPureMode() && pureModeType() !== 'pureSyntax' && pureModeType() !== ''){ - nested[0] += `<${dat}>` - break - } - switch(dat){ - case 'Comment':{ - if(arg.runVar){ - break - } - if(!commentMode){ - thinkingMode = false - commentMode = true - commentLatest = nested.map((f) => f) - if(commentLatest[0].endsWith('\n')){ - commentLatest[0] = commentLatest[0].substring(0, commentLatest[0].length - 1) - } - commentV = new Uint8Array(stackType) - } - break - } - case '/Comment':{ - if(commentMode){ - nested = commentLatest - stackType = commentV - commentMode = false - } - break - } - case 'Thoughts':{ - if(!visualize){ - nested[0] += `<${dat}>` - break - } - if(!commentMode){ - thinkingMode = true - commentMode = true - commentLatest = nested.map((f) => f) - if(commentLatest[0].endsWith('\n')){ - commentLatest[0] = commentLatest[0].substring(0, commentLatest[0].length - 1) - } - commentV = new Uint8Array(stackType) - } - break - } - case '/Thoughts':{ - if(!visualize){ - nested[0] += `<${dat}>` - break - } - if(commentMode){ - nested = commentLatest - stackType = commentV - commentMode = false - } - break - } - default:{ - const mc = isPureMode() ? null : smMatcher(dat, matcherObj) - nested[0] += mc ?? 
`<${dat}>` - break - } - } - break - } default:{ nested[0] += da[pointer] break From dcc48d5cb33251b187aaa91a62b07845bc03c085 Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Sun, 15 Dec 2024 19:33:26 +0900 Subject: [PATCH 02/33] Add subtitle --- package.json | 1 + pnpm-lock.yaml | 8 + src/App.svelte | 2 +- src/lang/en.ts | 4 + src/lib/Playground/PlaygroundMenu.svelte | 9 + src/lib/Playground/PlaygroundSubtitle.svelte | 443 +++++++++++++++++++ src/ts/observer.ts | 53 +++ src/ts/parser.svelte.ts | 34 +- src/ts/process/request.ts | 142 +++--- 9 files changed, 611 insertions(+), 85 deletions(-) create mode 100644 src/lib/Playground/PlaygroundSubtitle.svelte diff --git a/package.json b/package.json index c1f94594..c280a80d 100644 --- a/package.json +++ b/package.json @@ -18,6 +18,7 @@ "dependencies": { "@adobe/css-tools": "4.3.2", "@aws-crypto/sha256-js": "^5.2.0", + "@breezystack/lamejs": "^1.2.7", "@capacitor/android": "^5.6.0", "@capacitor/core": "^5.6.0", "@capacitor/filesystem": "^5.2.0", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 8bb3a889..737a7b4c 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -14,6 +14,9 @@ importers: '@aws-crypto/sha256-js': specifier: ^5.2.0 version: 5.2.0 + '@breezystack/lamejs': + specifier: ^1.2.7 + version: 1.2.7 '@capacitor/android': specifier: ^5.6.0 version: 5.6.0(@capacitor/core@5.6.0) @@ -361,6 +364,9 @@ packages: resolution: {integrity: sha512-acGdbYSfp2WheJoJm/EBBBLh/ID8KDc64ISZ9DYtBmC8/Q204PZJLHyzeB5qMzJ5trcOkybd78M4x2KWsUq++A==} engines: {node: '>=6.9.0'} + '@breezystack/lamejs@1.2.7': + resolution: {integrity: sha512-6wc7ck65ctA75Hq7FYHTtTvGnYs6msgdxiSUICQ+A01nVOWg6rqouZB8IdyteRlfpYYiFovkf67dIeOgWIUzTA==} + '@capacitor/android@5.6.0': resolution: {integrity: sha512-6O7xV6K6c8WvQzKxOe7fnhRyoVpS3TNDXy1FyfhvOvclBvu+1JddSdFvW4e4dSL60s2c00sCzNRgYhm+cn0/dQ==} peerDependencies: @@ -4061,6 +4067,8 @@ snapshots: chalk: 2.4.2 js-tokens: 4.0.0 + '@breezystack/lamejs@1.2.7': {} + '@capacitor/android@5.6.0(@capacitor/core@5.6.0)': dependencies: '@capacitor/core': 5.6.0 diff --git a/src/App.svelte b/src/App.svelte index 5f1d529c..b2a760df 100644 --- a/src/App.svelte +++ b/src/App.svelte @@ -36,7 +36,7 @@ await importCharacterProcess({ name: file.name, data: file - }) + }) checkCharOrder() } }}> diff --git a/src/lang/en.ts b/src/lang/en.ts index ae61f590..aaba8e64 100644 --- a/src/lang/en.ts +++ b/src/lang/en.ts @@ -823,4 +823,8 @@ export const languageEnglish = { presetChain: "Preset Chain", legacyMediaFindings: "Legacy Media Findings", staticsDisclaimer: "The statistics are based on the data from after July 2024. the data may not be accurate.", + subtitles: "Subtitles", + subtitlesWarning1: "You must use model with audio/video input to use this feature.", + subtitlesWarning2: "You must use model with streaming feature to use this feature.", + reset: "Reset", } \ No newline at end of file diff --git a/src/lib/Playground/PlaygroundMenu.svelte b/src/lib/Playground/PlaygroundMenu.svelte index ac7756a3..3d007539 100644 --- a/src/lib/Playground/PlaygroundMenu.svelte +++ b/src/lib/Playground/PlaygroundMenu.svelte @@ -14,6 +14,7 @@ import PlaygroundParser from "./PlaygroundParser.svelte"; import ToolConvertion from "./ToolConvertion.svelte"; import { joinMultiuserRoom } from "src/ts/sync/multiuser"; + import PlaygroundSubtitle from "./PlaygroundSubtitle.svelte"; let easterEggTouch = $state(0) @@ -83,6 +84,11 @@ }}>

Parser

+ +{:else if vttB64 && fileB64} +
+
{outputText}
+
+{:else} +
{outputText}
+{/if} + +{#if vttB64 && fileB64} +
+ {#key vttB64} + + {/key} +
+ + {language.download} + + + + +{/if} \ No newline at end of file diff --git a/src/ts/observer.ts b/src/ts/observer.ts index a40f870c..b868cb9d 100644 --- a/src/ts/observer.ts +++ b/src/ts/observer.ts @@ -8,6 +8,7 @@ function nodeObserve(node:HTMLElement){ const triggerName = node.getAttribute('risu-trigger'); const btnEvent = node.getAttribute('risu-btn'); const observerAdded = node.getAttribute('risu-observer'); + const hlLang = node.getAttribute('x-hl-lang'); if(observerAdded){ return @@ -45,13 +46,65 @@ function nodeObserve(node:HTMLElement){ node.setAttribute('risu-observer', 'true'); return } + + if(hlLang){ + node.addEventListener('contextmenu', (e)=>{ + e.preventDefault(); + const menu = document.createElement('div'); + menu.setAttribute('class', 'fixed z-50 min-w-[160px] py-2 bg-gray-800 rounded-lg border border-gray-700') + + const copyOption = document.createElement('div'); + copyOption.textContent = 'Copy'; + copyOption.setAttribute('class', 'px-4 py-2 text-sm text-gray-300 hover:bg-gray-700 cursor-pointer') + copyOption.addEventListener('click', ()=>{ + navigator.clipboard.writeText(node.getAttribute('x-hl-text')); + menu.remove(); + }) + + const downloadOption = document.createElement('div'); + downloadOption.textContent = 'Download'; + downloadOption.setAttribute('class', 'px-4 py-2 text-sm text-gray-300 hover:bg-gray-700 cursor-pointer') + downloadOption.addEventListener('click', ()=>{ + const a = document.createElement('a'); + a.href = URL.createObjectURL(new Blob([node.getAttribute('x-hl-text')], {type: 'text/plain'})); + a.download = 'code.' + hlLang; + a.click(); + menu.remove(); + }) + + menu.appendChild(copyOption); + menu.appendChild(downloadOption); + + menu.style.left = e.clientX + 'px'; + menu.style.top = e.clientY + 'px'; + + document.body.appendChild(menu); + + document.addEventListener('click', ()=>{ + menu.remove(); + }, {once: true}) + }) + } } export async function startObserveDom(){ + //For codeblock we are using MutationObserver since it doesn't appear well + + const observer = new MutationObserver((mutations) => { + mutations.forEach((mutation) => { + mutation.addedNodes.forEach((node) => { + if(node instanceof HTMLElement){ + nodeObserve(node); + } + }) + }) + }) + //We are using a while loop intead of MutationObserver because MutationObserver is expensive for just a few elements while(true){ document.querySelectorAll('[risu-trigger]').forEach(nodeObserve); document.querySelectorAll('[risu-btn]').forEach(nodeObserve); + document.querySelectorAll('[x-hl-lang]').forEach(nodeObserve); await sleep(100); } } \ No newline at end of file diff --git a/src/ts/parser.svelte.ts b/src/ts/parser.svelte.ts index 48f8f3f5..03b56482 100644 --- a/src/ts/parser.svelte.ts +++ b/src/ts/parser.svelte.ts @@ -117,18 +117,30 @@ async function renderHighlightableMarkdown(data:string) { //import language if not already loaded //we do not refactor this to a function because we want to keep vite to only import the languages that are needed let languageModule:any = null + let shotLang = '' switch(lang){ case 'js': case 'javascript':{ lang = 'javascript' + shotLang = 'js' if(!hljs.getLanguage('javascript')){ languageModule = await import('highlight.js/lib/languages/javascript') } break } + case 'txt': + case 'vtt':{ + shotLang = lang + lang = 'plaintext' + if(!hljs.getLanguage('plaintext')){ + languageModule = await import('highlight.js/lib/languages/plaintext') + } + break + } case 'py': case 'python':{ lang = 'python' + shotLang = 'py' if(!hljs.getLanguage('python')){ 
languageModule = await import('highlight.js/lib/languages/python') } @@ -136,6 +148,7 @@ async function renderHighlightableMarkdown(data:string) { } case 'css':{ lang = 'css' + shotLang = 'css' if(!hljs.getLanguage('css')){ languageModule = await import('highlight.js/lib/languages/css') } @@ -144,6 +157,7 @@ async function renderHighlightableMarkdown(data:string) { case 'xml': case 'html':{ lang = 'xml' + shotLang = 'xml' if(!hljs.getLanguage('xml')){ languageModule = await import('highlight.js/lib/languages/xml') } @@ -151,6 +165,7 @@ async function renderHighlightableMarkdown(data:string) { } case 'lua':{ lang = 'lua' + shotLang = 'lua' if(!hljs.getLanguage('lua')){ languageModule = await import('highlight.js/lib/languages/lua') } @@ -158,6 +173,7 @@ async function renderHighlightableMarkdown(data:string) { } case 'dart':{ lang = 'dart' + shotLang = 'dart' if(!hljs.getLanguage('dart')){ languageModule = await import('highlight.js/lib/languages/dart') } @@ -165,6 +181,7 @@ async function renderHighlightableMarkdown(data:string) { } case 'java':{ lang = 'java' + shotLang = 'java' if(!hljs.getLanguage('java')){ languageModule = await import('highlight.js/lib/languages/java') } @@ -172,6 +189,7 @@ async function renderHighlightableMarkdown(data:string) { } case 'rust':{ lang = 'rust' + shotLang = 'rs' if(!hljs.getLanguage('rust')){ languageModule = await import('highlight.js/lib/languages/rust') } @@ -180,6 +198,7 @@ async function renderHighlightableMarkdown(data:string) { case 'c': case 'cpp':{ lang = 'cpp' + shotLang = 'cpp' if(!hljs.getLanguage('cpp')){ languageModule = await import('highlight.js/lib/languages/cpp') } @@ -188,6 +207,7 @@ async function renderHighlightableMarkdown(data:string) { case 'csharp': case 'cs':{ lang = 'csharp' + shotLang = 'cs' if(!hljs.getLanguage('csharp')){ languageModule = await import('highlight.js/lib/languages/csharp') } @@ -196,6 +216,7 @@ async function renderHighlightableMarkdown(data:string) { case 'ts': case 'typescript':{ lang = 'typescript' + shotLang = 'ts' if(!hljs.getLanguage('typescript')){ languageModule = await import('highlight.js/lib/languages/typescript') } @@ -203,6 +224,7 @@ async function renderHighlightableMarkdown(data:string) { } case 'json':{ lang = 'json' + shotLang = 'json' if(!hljs.getLanguage('json')){ languageModule = await import('highlight.js/lib/languages/json') } @@ -210,6 +232,7 @@ async function renderHighlightableMarkdown(data:string) { } case 'yaml':{ lang = 'yaml' + shotLang = 'yml' if(!hljs.getLanguage('yaml')){ languageModule = await import('highlight.js/lib/languages/yaml') } @@ -217,6 +240,7 @@ async function renderHighlightableMarkdown(data:string) { } case 'shell':{ lang = 'shell' + shotLang = 'sh' if(!hljs.getLanguage('shell')){ languageModule = await import('highlight.js/lib/languages/shell') } @@ -224,6 +248,7 @@ async function renderHighlightableMarkdown(data:string) { } case 'bash':{ lang = 'bash' + shotLang = 'sh' if(!hljs.getLanguage('bash')){ languageModule = await import('highlight.js/lib/languages/bash') } @@ -231,6 +256,7 @@ async function renderHighlightableMarkdown(data:string) { } default:{ lang = 'none' + shotLang = 'none' } } if(languageModule){ @@ -244,7 +270,9 @@ async function renderHighlightableMarkdown(data:string) { language: lang, ignoreIllegals: true }).value - rendered = rendered.replace(placeholder, `
${highlighted}
`) + rendered = rendered.replace(placeholder, `
${highlighted}
`) } } catch (error) { @@ -492,8 +520,8 @@ export async function ParseMarkdown( data = await renderHighlightableMarkdown(data) } return decodeStyle(DOMPurify.sanitize(data, { - ADD_TAGS: ["iframe", "style", "risu-style", "x-em"], - ADD_ATTR: ["allow", "allowfullscreen", "frameborder", "scrolling", "risu-btn", 'risu-trigger', 'risu-mark'], + ADD_TAGS: ["iframe", "style", "risu-style", "x-em",], + ADD_ATTR: ["allow", "allowfullscreen", "frameborder", "scrolling", "risu-btn", 'risu-trigger', 'risu-mark', 'x-hl-lang', 'x-hl-text'], })) } diff --git a/src/ts/process/request.ts b/src/ts/process/request.ts index 2942bf5d..adb27280 100644 --- a/src/ts/process/request.ts +++ b/src/ts/process/request.ts @@ -500,12 +500,12 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise 0 && chat.role === "user") { + let geminiParts: GeminiPart[] = []; + + geminiParts.push({ + text: chat.content, + }); + + for (const modal of chat.multimodals) { + if ( + (modal.type === "image" && arg.modelInfo.flags.includes(LLMFlags.hasImageInput)) || + (modal.type === "audio" && arg.modelInfo.flags.includes(LLMFlags.hasAudioInput)) || + (modal.type === "video" && arg.modelInfo.flags.includes(LLMFlags.hasVideoInput)) + ) { + const dataurl = modal.base64; + const base64 = dataurl.split(",")[1]; + const mediaType = dataurl.split(";")[0].split(":")[1]; + + geminiParts.push({ + inlineData: { + mimeType: mediaType, + data: base64, + } + }); + } + } + + reformatedChat.push({ + role: "USER", + parts: geminiParts, + }); + + } else if (prevChat?.role === qRole) { + reformatedChat[reformatedChat.length-1].parts[0].text += '\n' + chat.content + continue + } + else if(chat.role === 'system'){ + if(prevChat?.role === 'USER'){ + reformatedChat[reformatedChat.length-1].parts[0].text += '\nsystem:' + chat.content } else{ reformatedChat.push({ @@ -1405,78 +1441,22 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise }) } } + + else if(chat.role === 'assistant' || chat.role === 'user'){ + reformatedChat.push({ + role: chat.role === 'user' ? 'USER' : 'MODEL', + parts: [{ + text: chat.content + }] + }) + } else{ - const prevChat = reformatedChat[reformatedChat.length-1] - const qRole = - chat.role === 'user' ? 'USER' : - chat.role === 'assistant' ? 
'MODEL' : - chat.role - - if (chat.multimodals && chat.multimodals.length > 0 && chat.role === "user") { - let geminiParts: GeminiPart[] = []; - - geminiParts.push({ - text: chat.content, - }); - - for (const modal of chat.multimodals) { - if ( - (modal.type === "image" && arg.modelInfo.flags.includes(LLMFlags.hasImageInput)) || - (modal.type === "audio" && arg.modelInfo.flags.includes(LLMFlags.hasAudioInput)) || - (modal.type === "video" && arg.modelInfo.flags.includes(LLMFlags.hasVideoInput)) - ) { - const dataurl = modal.base64; - const base64 = dataurl.split(",")[1]; - const mediaType = dataurl.split(";")[0].split(":")[1]; - - geminiParts.push({ - inlineData: { - mimeType: mediaType, - data: base64, - } - }); - } - } - - reformatedChat.push({ - role: "USER", - parts: geminiParts, - }); - - } else if (prevChat.role === qRole) { - reformatedChat[reformatedChat.length-1].parts[0].text += '\n' + chat.content - continue - } - else if(chat.role === 'system'){ - if(prevChat.role === 'USER'){ - reformatedChat[reformatedChat.length-1].parts[0].text += '\nsystem:' + chat.content - } - else{ - reformatedChat.push({ - role: "USER", - parts: [{ - text: chat.role + ':' + chat.content - }] - }) - } - } - - else if(chat.role === 'assistant' || chat.role === 'user'){ - reformatedChat.push({ - role: chat.role === 'user' ? 'USER' : 'MODEL', - parts: [{ - text: chat.content - }] - }) - } - else{ - reformatedChat.push({ - role: "USER", - parts: [{ - text: chat.role + ':' + chat.content - }] - }) - } + reformatedChat.push({ + role: "USER", + parts: [{ + text: chat.role + ':' + chat.content + }] + }) } } From 943e2738d5d19e11d9fcc56d8c748678c0fdda16 Mon Sep 17 00:00:00 2001 From: bangonicdd <157843588+bangonicdd2@users.noreply.github.com> Date: Sun, 15 Dec 2024 21:09:31 +0900 Subject: [PATCH 03/33] suggestion: disabling regex script --- src/lib/SideBars/Scripts/RegexData.svelte | 1 + 1 file changed, 1 insertion(+) diff --git a/src/lib/SideBars/Scripts/RegexData.svelte b/src/lib/SideBars/Scripts/RegexData.svelte index 5bff0328..9485360e 100644 --- a/src/lib/SideBars/Scripts/RegexData.svelte +++ b/src/lib/SideBars/Scripts/RegexData.svelte @@ -121,6 +121,7 @@ {language.editProcess} {language.editDisplay} {language.editTranslationDisplay} + {language.disabled} IN: From 94501dbd7747016ba48bcf234768183791c69e5d Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Mon, 16 Dec 2024 06:05:57 +0900 Subject: [PATCH 04/33] Enhance character image handling with PNG chunk --- src/ts/characters.ts | 40 ++++++++++++++++++++++++++++++++++++++-- version.json | 2 +- 2 files changed, 39 insertions(+), 3 deletions(-) diff --git a/src/ts/characters.ts b/src/ts/characters.ts index 8c9f2b66..fd3c6c52 100644 --- a/src/ts/characters.ts +++ b/src/ts/characters.ts @@ -5,12 +5,13 @@ import { language } from "../lang"; import { checkNullish, findCharacterbyId, getUserName, selectMultipleFile, selectSingleFile, sleep } from "./util"; import { v4 as uuidv4 } from 'uuid'; import { MobileGUIStack, OpenRealmStore, selectedCharID } from "./stores.svelte"; -import { checkCharOrder, downloadFile, getFileSrc } from "./globalApi.svelte"; +import { AppendableBuffer, checkCharOrder, downloadFile, getFileSrc } from "./globalApi.svelte"; import { updateInlayScreen } from "./process/inlayScreen"; -import { parseMarkdownSafe } from "./parser.svelte"; +import { checkImageType, parseMarkdownSafe } from "./parser.svelte"; import { translateHTML } from "./translator/translator"; import { doingChat } from "./process/index.svelte"; import { importCharacter } from 
"./characterCards"; +import { PngChunk } from "./pngChunk"; export function createNewCharacter() { let db = getDatabase() @@ -81,6 +82,41 @@ export async function selectCharImg(charIndex:number) { } const img = selected.data let db = getDatabase() + + const type = checkImageType(img) + console.log(type) + + try { + if(type === 'PNG' && db.characters[charIndex].type === 'character'){ + const gen = PngChunk.readGenerator(img) + const allowedChunk = [ + 'parameters', 'Comment', 'Title', 'Description', 'Author', 'Software', 'Source', 'Disclaimer', 'Warning', 'Copyright', + ] + for await (const chunk of gen){ + if(chunk instanceof AppendableBuffer){ + continue + } + if(!chunk){ + continue + } + if(chunk.value.length > 20_000){ + continue + } + if(allowedChunk.includes(chunk.key)){ + console.log(chunk.key, chunk.value) + db.characters[charIndex].extentions ??= {} + db.characters[charIndex].extentions.pngExif ??= {} + db.characters[charIndex].extentions.pngExif[chunk.key] = chunk.value + } + } + console.log(db.characters[charIndex].extentions) + } + } catch (error) { + console.error(error) + } + + + const imgp = await saveImage(img) dumpCharImage(charIndex) db.characters[charIndex].image = imgp diff --git a/version.json b/version.json index dec9cba6..d607b770 100644 --- a/version.json +++ b/version.json @@ -1 +1 @@ -{"version":"143.5.0"} \ No newline at end of file +{"version":"143.6.0"} \ No newline at end of file From 9bdd354f7ce1629a390f5462f973fa6df747c29f Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Mon, 16 Dec 2024 07:56:15 +0900 Subject: [PATCH 05/33] Add asset max difference setting and update related logic --- src/lang/en.ts | 1 + src/lib/Setting/Pages/AdvancedSettings.svelte | 3 +++ src/ts/parser.svelte.ts | 4 ++++ src/ts/storage/database.svelte.ts | 2 ++ 4 files changed, 10 insertions(+) diff --git a/src/lang/en.ts b/src/lang/en.ts index aaba8e64..83a8a3f9 100644 --- a/src/lang/en.ts +++ b/src/lang/en.ts @@ -827,4 +827,5 @@ export const languageEnglish = { subtitlesWarning1: "You must use model with audio/video input to use this feature.", subtitlesWarning2: "You must use model with streaming feature to use this feature.", reset: "Reset", + assetMaxDifference: "Asset Max Difference", } \ No newline at end of file diff --git a/src/lib/Setting/Pages/AdvancedSettings.svelte b/src/lib/Setting/Pages/AdvancedSettings.svelte index b93d59ae..1cd0b6dd 100644 --- a/src/lib/Setting/Pages/AdvancedSettings.svelte +++ b/src/lib/Setting/Pages/AdvancedSettings.svelte @@ -51,6 +51,9 @@ {language.genTimes} +{language.assetMaxDifference} + + GPT Vision Quality Low diff --git a/src/ts/parser.svelte.ts b/src/ts/parser.svelte.ts index 03b56482..e6503e8c 100644 --- a/src/ts/parser.svelte.ts +++ b/src/ts/parser.svelte.ts @@ -453,6 +453,10 @@ function getClosestMatch(name:string, assetPaths:{[key:string]:{path:string, ext closestDist = dist } } + console.log(closestDist, closest) + if(closestDist > DBState.db.assetMaxDifference){ + return null + } return assetPaths[closest] } diff --git a/src/ts/storage/database.svelte.ts b/src/ts/storage/database.svelte.ts index 272f29a2..95e1e833 100644 --- a/src/ts/storage/database.svelte.ts +++ b/src/ts/storage/database.svelte.ts @@ -461,6 +461,7 @@ export function setDatabase(data:Database){ } data.customFlags ??= [] data.enableCustomFlags ??= false + data.assetMaxDifference ??= 4 changeLanguage(data.language) setDatabaseLite(data) } @@ -857,6 +858,7 @@ export interface Database{ presetChain: string legacyMediaFindings?:boolean geminiStream?:boolean + 
assetMaxDifference:number } interface SeparateParameters{ From 5caa4806efb1e3324321e024656ef2ac9f1a93f7 Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Mon, 16 Dec 2024 07:56:43 +0900 Subject: [PATCH 06/33] Remove debug trace from setDatabaseLite function --- src/ts/storage/database.svelte.ts | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/ts/storage/database.svelte.ts b/src/ts/storage/database.svelte.ts index 95e1e833..46da0e58 100644 --- a/src/ts/storage/database.svelte.ts +++ b/src/ts/storage/database.svelte.ts @@ -467,9 +467,6 @@ export function setDatabase(data:Database){ } export function setDatabaseLite(data:Database){ - if(import.meta.env.DEV){ - console.trace('setDatabaseLite executed') - } DBState.db = data } From 25cd5af07455c34d8a403c581d6907491194942c Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Mon, 16 Dec 2024 07:58:54 +0900 Subject: [PATCH 07/33] Remove debug logging from getClosestMatch function --- src/ts/parser.svelte.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/src/ts/parser.svelte.ts b/src/ts/parser.svelte.ts index e6503e8c..71ae8af0 100644 --- a/src/ts/parser.svelte.ts +++ b/src/ts/parser.svelte.ts @@ -453,7 +453,6 @@ function getClosestMatch(name:string, assetPaths:{[key:string]:{path:string, ext closestDist = dist } } - console.log(closestDist, closest) if(closestDist > DBState.db.assetMaxDifference){ return null } From a092d1f4825de9e37874960206e6b484002c6083 Mon Sep 17 00:00:00 2001 From: shirosaki-hana Date: Tue, 17 Dec 2024 11:30:16 +0900 Subject: [PATCH 08/33] =?UTF-8?q?Fixed=20=E2=80=9CSeparate=20Parameters?= =?UTF-8?q?=E2=80=9D=20scaling=20issue?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixed an issue where the “Temperature” parameter was not scaled correctly when using the “Separate Parameters” feature. --- src/ts/process/request.ts | 37 ++++++++++++++++++++++++++++++++++++- 1 file changed, 36 insertions(+), 1 deletion(-) diff --git a/src/ts/process/request.ts b/src/ts/process/request.ts index adb27280..3fc63ce4 100644 --- a/src/ts/process/request.ts +++ b/src/ts/process/request.ts @@ -105,11 +105,46 @@ function applyParameters(data: { [key: string]: any }, parameters: Parameter[], } for(const parameter of parameters){ + + let value = 0 if(parameter === 'top_k' && arg.ignoreTopKIfZero && db.seperateParameters[ModelMode][parameter] === 0){ continue } - let value = db.seperateParameters[ModelMode][parameter] + switch(parameter){ + case 'temperature':{ + value = db.seperateParameters[ModelMode].temperature === -1000 ? -1000 : (db.seperateParameters[ModelMode].temperature / 100) + break + } + case 'top_k':{ + value = db.seperateParameters[ModelMode].top_k + break + } + case 'repetition_penalty':{ + value = db.seperateParameters[ModelMode].repetition_penalty + break + } + case 'min_p':{ + value = db.seperateParameters[ModelMode].min_p + break + } + case 'top_a':{ + value = db.seperateParameters[ModelMode].top_a + break + } + case 'top_p':{ + value = db.seperateParameters[ModelMode].top_p + break + } + case 'frequency_penalty':{ + value = db.seperateParameters[ModelMode].frequency_penalty === -1000 ? -1000 : (db.seperateParameters[ModelMode].frequency_penalty / 100) + break + } + case 'presence_penalty':{ + value = db.seperateParameters[ModelMode].presence_penalty === -1000 ? 
-1000 : (db.seperateParameters[ModelMode].presence_penalty / 100) + break + } + } if(value === -1000 || value === undefined){ continue From 5df9e68baee1799a39e312d6440b234020cd72b0 Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Wed, 18 Dec 2024 03:26:02 +0900 Subject: [PATCH 09/33] Add o1 and fix some mistakes --- src/lang/en.ts | 3 + src/lib/Playground/PlaygroundImageGen.svelte | 78 ++++---- .../Playground/PlaygroundImageTrans.svelte | 15 ++ src/lib/Playground/PlaygroundSubtitle.svelte | 172 +++++++++++++++--- src/lib/Setting/Pages/BotSettings.svelte | 2 +- src/lib/Setting/Pages/LanguageSettings.svelte | 2 +- src/ts/model/modellist.ts | 26 ++- src/ts/process/request.ts | 11 +- src/ts/process/transformers.ts | 1 - 9 files changed, 233 insertions(+), 77 deletions(-) create mode 100644 src/lib/Playground/PlaygroundImageTrans.svelte diff --git a/src/lang/en.ts b/src/lang/en.ts index 83a8a3f9..dc720fc9 100644 --- a/src/lang/en.ts +++ b/src/lang/en.ts @@ -828,4 +828,7 @@ export const languageEnglish = { subtitlesWarning2: "You must use model with streaming feature to use this feature.", reset: "Reset", assetMaxDifference: "Asset Max Difference", + sourceLanguage: "Source Language", + destinationLanguage: "Destination Language", + noWebGPU: "Your Browser or OS doesn't support WebGPU. this will slow down the performance significantly.", } \ No newline at end of file diff --git a/src/lib/Playground/PlaygroundImageGen.svelte b/src/lib/Playground/PlaygroundImageGen.svelte index 38f1e4bd..059256a8 100644 --- a/src/lib/Playground/PlaygroundImageGen.svelte +++ b/src/lib/Playground/PlaygroundImageGen.svelte @@ -1,34 +1,44 @@ -
-
- - Card - -
-
- -
- -

- Card Title -

- - -

- Lorem ipsum dolor sit amet consectetur adipisicing elit. Voluptates rerum quisquam, temporibus quasi distinctio magnam. -

- - -
- - - - - 5 min read - -
-
-
\ No newline at end of file + + +

{language.imageGeneration}

+ +Prompt + + +Neg. Prompt + + +{#if img} + Generated + Generated +{/if} + + \ No newline at end of file diff --git a/src/lib/Playground/PlaygroundImageTrans.svelte b/src/lib/Playground/PlaygroundImageTrans.svelte new file mode 100644 index 00000000..99a8152e --- /dev/null +++ b/src/lib/Playground/PlaygroundImageTrans.svelte @@ -0,0 +1,15 @@ + + + +{language.destinationLanguage} + + +{language.prompt} + diff --git a/src/lib/Playground/PlaygroundSubtitle.svelte b/src/lib/Playground/PlaygroundSubtitle.svelte index e9504d3f..432d3a40 100644 --- a/src/lib/Playground/PlaygroundSubtitle.svelte +++ b/src/lib/Playground/PlaygroundSubtitle.svelte @@ -14,6 +14,7 @@ import SliderInput from "../UI/GUI/SliderInput.svelte"; import SelectInput from "../UI/GUI/SelectInput.svelte"; import OptionInput from "../UI/GUI/OptionInput.svelte"; + import sendSound from '../../etc/send.mp3' @@ -28,6 +29,39 @@ let vttB64 = $state('') let vobj:TranscribeObj[] = $state([]) let mode = $state('llm') + let sourceLang:string|null = $state(null) + + function getLanguageCodes(){ + let languageCodes:{ + code: string + name: string + }[] = [] + + for(let i=0x41;i<=0x5A;i++){ + for(let j=0x41;j<=0x5A;j++){ + languageCodes.push({ + code: String.fromCharCode(i) + String.fromCharCode(j), + name: '' + }) + } + } + + languageCodes = languageCodes.map(v => { + return { + code: v.code, + name: new Intl.DisplayNames([ + DBState.db.language === 'cn' ? 'zh' : DBState.db.language + ], { + type: 'language', + fallback: 'none' + }).of(v.code) + } + }).filter((a) => { + return a.name + }).sort((a, b) => a.name.localeCompare(b.name)) + + return languageCodes + } @@ -120,6 +154,9 @@ vobj = convertTransToObj(latest) outputText = makeWebVtt(vobj) vttB64 = `data:text/vtt;base64,${Buffer.from(outputText).toString('base64')}` + + const audio = new Audio(sendSound); + audio.play(); } async function runWhisperMode() { @@ -133,13 +170,12 @@ const file = files?.[0] + let requestFile:File = null + if(!file){ outputText = '' return } - - const formData = new FormData() - const videos = [ 'mp4', 'webm', 'mkv', 'avi', 'mov' ] @@ -198,28 +234,100 @@ }) outputText = 'Transcribing audio...\n\n' - formData.append('file', file2) + requestFile = file2 } else{ - formData.append('file', file) + requestFile = file } - formData.append('model', 'whisper-1') - formData.append('response_format', 'vtt') + + if(mode === 'whisperLocal'){ + try { + const {pipeline} = await import('@huggingface/transformers') + let stats:{ + [key:string]:{ + name:string + status:string + file:string + progress?:number + } + } = {} + + const device = ('gpu' in navigator) ? 'webgpu' : 'wasm' + + const transcriber = await pipeline( + "automatic-speech-recognition", + "onnx-community/whisper-large-v3-turbo_timestamped", + { + device: device, + progress_callback: (progress) => { + stats[progress.name + progress.file] = progress + outputText = Object.values(stats).map(v => `${v.name}-${v.file}: ${progress.status} ${v.progress ? 
`[${v.progress.toFixed(2)}%]` : ''}`).join('\n') + }, + dtype: 'q8' + }, + ); + + const audioContext = new AudioContext() + const audioBuffer = await audioContext.decodeAudioData(await requestFile.arrayBuffer()) + const combined = new Float32Array(audioBuffer.getChannelData(0).length) + for(let j = 0; j < audioBuffer.getChannelData(0).length; j++){ + for(let i = 0; i < audioBuffer.numberOfChannels; i++){ + combined[j] += audioBuffer.getChannelData(i)[j] + } + + if(combined[j] > 1){ + combined[j] = 1 + } + if(combined[j] < -1){ + combined[j] = -1 + } + } + + outputText = ('Transcribing... (This may take a while. Do not close the tab.)') + if(device !== 'webgpu'){ + outputText += `\nYour browser or OS do not support WebGPU, so the transcription may be slower.` + } + await sleep(10) + const res1 = await transcriber(combined, { + return_timestamps: true, + language: sourceLang, + }) + const res2 = Array.isArray(res1) ? res1[0] : res1 + const chunks = res2.chunks + + outputText = 'WEBVTT\n\n' + + for(const chunk of chunks){ + outputText += `${chunk.timestamp[0]} --> ${chunk.timestamp[1]}\n${chunk.text}\n\n` + } + + console.log(outputText) + + } catch (error) { + alertError(JSON.stringify(error)) + outputText = '' + return + } + } + else{ + const formData = new FormData() + formData.append('file', requestFile) + formData.append('model', 'whisper-1') + formData.append('response_format', 'vtt') - const d = await fetch('https://api.openai.com/v1/audio/transcriptions', { - method: 'POST', - headers: { - 'Authorization': `Bearer ${DBState.db.openAIKey}` - }, - body: formData + const d = await fetch('https://api.openai.com/v1/audio/transcriptions', { + method: 'POST', + headers: { + 'Authorization': `Bearer ${DBState.db.openAIKey}` + }, + body: formData - }) + }) + outputText = await d.text() + } - const fileBuffer = await file.arrayBuffer() - - outputText = await d.text() const v = await requestChatData({ formated: [{ @@ -254,9 +362,6 @@ outputText = value[firstKey] } - - console.log(outputText) - if(!outputText.trim().endsWith('```')){ outputText = outputText.trim() + '\n```' } @@ -268,12 +373,14 @@ latest = match[3].trim() } + const fileBuffer = await file.arrayBuffer() outputText = latest vttB64 = `data:text/vtt;base64,${Buffer.from(outputText).toString('base64')}` fileB64 = `data:audio/wav;base64,${Buffer.from(fileBuffer).toString('base64')}` vobj = convertWebVTTtoObj(outputText) - + const audio = new Audio(sendSound); + audio.play(); } @@ -355,7 +462,18 @@

{language.subtitles}

-{language.language} +{#if mode === 'whisperLocal'} + {language.sourceLanguage} + + Auto + {#each getLanguageCodes() as lang} + {lang.name} + {/each} + +{/if} + + +{language.destinationLanguage} {language.prompt} @@ -366,27 +484,31 @@ if(mode === 'llm'){ prompt = LLMModePrompt } - if(mode === 'whisper'){ + if(mode === 'whisper' || mode === 'whisperLocal'){ prompt = WhisperModePrompt } }}> LLM Whisper + Whisper Local
-{#if !(modelInfo.flags.includes(LLMFlags.hasAudioInput) && modelInfo.flags.includes(LLMFlags.hasVideoInput))} +{#if !(modelInfo.flags.includes(LLMFlags.hasAudioInput) && modelInfo.flags.includes(LLMFlags.hasVideoInput)) && mode === 'llm'} {language.subtitlesWarning1} {/if} {#if !(modelInfo.flags.includes(LLMFlags.hasStreaming) && DBState.db.useStreaming)} {language.subtitlesWarning2} {/if} +{#if !('gpu' in navigator) && mode === 'whisperLocal'} + {language.noWebGPU} +{/if} {#if !outputText} \ No newline at end of file +{#if gridMode} + +{/if} \ No newline at end of file diff --git a/src/lib/Others/GridCatalog.svelte b/src/lib/Others/GridCatalog.svelte index 01d1be54..a5a18311 100644 --- a/src/lib/Others/GridCatalog.svelte +++ b/src/lib/Others/GridCatalog.svelte @@ -10,13 +10,14 @@ import { language } from "src/lang"; import { parseMultilangString } from "src/ts/util"; import { checkCharOrder } from "src/ts/globalApi.svelte"; + import MobileCharacters from "../Mobile/MobileCharacters.svelte"; interface Props { endGrid?: any; } let { endGrid = () => {} }: Props = $props(); let search = $state('') - let selected = $state(0) + let selected = $state(3) function changeChar(index = -1){ characterFormatUpdate(index) @@ -57,22 +58,19 @@
-

- - Catalog -

- + - -
@@ -143,6 +141,8 @@
                        {/each}
+                    {:else if selected === 3}
+                        <MobileCharacters />
                    {/if}
\ No newline at end of file diff --git a/src/lib/Setting/Pages/DisplaySettings.svelte b/src/lib/Setting/Pages/DisplaySettings.svelte index 1d7ee9dd..2bdd38f1 100644 --- a/src/lib/Setting/Pages/DisplaySettings.svelte +++ b/src/lib/Setting/Pages/DisplaySettings.svelte @@ -363,6 +363,10 @@ +
+ +
+
{ let hasPermission = {state: 'denied'} diff --git a/src/lib/SideBars/Sidebar.svelte b/src/lib/SideBars/Sidebar.svelte index ac63849f..9d7dcbe6 100644 --- a/src/lib/SideBars/Sidebar.svelte +++ b/src/lib/SideBars/Sidebar.svelte @@ -23,6 +23,7 @@ FolderOpenIcon, HomeIcon, WrenchIcon, + User2Icon, } from "lucide-svelte"; import { addCharacter, @@ -292,7 +293,79 @@ return false } +{#if DBState.db.menuSideBar} +
+ + + + +
+{:else}
+{/if}
Date: Thu, 19 Dec 2024 05:40:52 +0900 Subject: [PATCH 15/33] Improve translation handling by processing text in chunks for better performance --- src/ts/translator/translator.ts | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/src/ts/translator/translator.ts b/src/ts/translator/translator.ts index b8ce3041..587c4582 100644 --- a/src/ts/translator/translator.ts +++ b/src/ts/translator/translator.ts @@ -317,8 +317,15 @@ export async function translateHTML(html: string, reverse:boolean, charArg:simpl return } - // node.textContent = await translate(node.textContent || '', reverse); - let translated = await translate(node.textContent || "", reverse); + const translateChunks = (node.textContent || '').split(/\n\n+/g); + let translatedChunksPromises: Promise[] = []; + for (const chunk of translateChunks) { + const translatedPromise = translate(chunk, reverse); + translatedChunksPromises.push(translatedPromise); + } + + const translatedChunks = await Promise.all(translatedChunksPromises); + let translated = translatedChunks.join("\n\n"); if (!reprocessDisplayScript) { node.textContent = translated; return; From 73e94658fdd97ea845c4d711b009975eb8fa539d Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Fri, 20 Dec 2024 02:15:48 +0900 Subject: [PATCH 16/33] Add thoughts --- src/ts/model/modellist.ts | 12 ++++++++++- src/ts/parser.svelte.ts | 12 +++++++++++ src/ts/process/index.svelte.ts | 7 +------ src/ts/process/request.ts | 38 +++++++++++++++++++++++++++++----- 4 files changed, 57 insertions(+), 12 deletions(-) diff --git a/src/ts/model/modellist.ts b/src/ts/model/modellist.ts index bb335228..223dac58 100644 --- a/src/ts/model/modellist.ts +++ b/src/ts/model/modellist.ts @@ -16,7 +16,7 @@ export enum LLMFlags{ poolSupported, hasVideoInput, OAICompletionTokens, - DeveloperRole + DeveloperRole, } export enum LLMProvider{ @@ -809,6 +809,16 @@ export const LLMModels: LLMModel[] = [ tokenizer: LLMTokenizer.GoogleCloud, recommended: true }, + { + name: "Gemini Flash 2.0 Thinking 1219", + id: 'gemini-2.0-flash-thinking-exp-1219', + provider: LLMProvider.GoogleCloud, + format: LLMFormat.GoogleCloud, + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasAudioInput, LLMFlags.hasVideoInput, LLMFlags.hasStreaming], + parameters: ['temperature', 'top_k', 'top_p', 'presence_penalty', 'frequency_penalty'], + tokenizer: LLMTokenizer.GoogleCloud, + recommended: true + }, { name: "Gemini Pro 1.5", id: 'gemini-1.5-pro-latest', diff --git a/src/ts/parser.svelte.ts b/src/ts/parser.svelte.ts index 71ae8af0..2bf650da 100644 --- a/src/ts/parser.svelte.ts +++ b/src/ts/parser.svelte.ts @@ -14,6 +14,7 @@ import { getModuleAssets, getModuleLorebooks } from './process/modules'; import type { OpenAIChat } from './process/index.svelte'; import hljs from 'highlight.js/lib/core' import 'highlight.js/styles/atom-one-dark.min.css' +import { language } from 'src/lang'; const markdownItOptions = { html: true, @@ -495,6 +496,11 @@ export interface simpleCharacterArgument{ triggerscript?: triggerscript[] } +function parseThoughts(data:string){ + return data.replace(/(.+)<\/Thoughts>/gms, (full, txt) => { + return `
${language.cot}${txt}
` + }) +} export async function ParseMarkdown( data:string, @@ -506,18 +512,24 @@ export async function ParseMarkdown( let firstParsed = '' const additionalAssetMode = (mode === 'back') ? 'back' : 'normal' let char = (typeof(charArg) === 'string') ? (findCharacterbyId(charArg)) : (charArg) + if(char && char.type !== 'group'){ data = await parseAdditionalAssets(data, char, additionalAssetMode, 'pre') firstParsed = data } + if(char){ data = (await processScriptFull(char, data, 'editdisplay', chatID, cbsConditions)).data } + if(firstParsed !== data && char && char.type !== 'group'){ data = await parseAdditionalAssets(data, char, additionalAssetMode, 'post') } + data = await parseInlayAssets(data ?? '') + data = parseThoughts(data) + data = encodeStyle(data) if(mode === 'normal'){ data = await renderHighlightableMarkdown(data) diff --git a/src/ts/process/index.svelte.ts b/src/ts/process/index.svelte.ts index c2edba4d..b10d701d 100644 --- a/src/ts/process/index.svelte.ts +++ b/src/ts/process/index.svelte.ts @@ -752,12 +752,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{ break } } - if(usingPromptTemplate && DBState.db.promptSettings.maxThoughtTagDepth !== -1){ - const depth = ms.length - index - if(depth >= DBState.db.promptSettings.maxThoughtTagDepth){ - formatedChat = formatedChat.replace(/(.+?)<\/Thoughts>/gm, '') - } - } + formatedChat = formatedChat.replace(/(.+?)<\/Thoughts>/gm, '') const chat:OpenAIChat = { role: role, diff --git a/src/ts/process/request.ts b/src/ts/process/request.ts index 2120535e..e79ed17c 100644 --- a/src/ts/process/request.ts +++ b/src/ts/process/request.ts @@ -1665,8 +1665,28 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise const data = JSON.parse(reformatted) let r = '' + let r2 = '' + let bump = false for(const d of data){ - r += d.candidates[0].content.parts[0].text + const parts = d.candidates[0].content?.parts + for(let i=0;i${r}\n\n${r2}` } control.enqueue({ '0': r @@ -1697,11 +1717,14 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise } } - let fullRes = '' - + let r = '' + let r2 = '' const processDataItem = (data:any) => { if(data?.candidates?.[0]?.content?.parts?.[0]?.text){ - fullRes += data.candidates[0].content.parts[0].text + r += data.candidates[0].content.parts[0].text + } + if(data?.candidates?.[0]?.content?.parts?.[1]?.text){ + r2 += data.candidates[0].content.parts[1].text } else if(data?.errors){ return { @@ -1726,9 +1749,14 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise processDataItem(res.data) } + + if(r2){ + r = `${r}\n\n${r2}` + } + return { type: 'success', - result: fullRes + result: r } } From 68873452a0a2e0350a94beae4e12915917045d20 Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Fri, 20 Dec 2024 02:16:05 +0900 Subject: [PATCH 17/33] Update version to 143.8.0 --- version.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.json b/version.json index 8b54713a..d42e1c5c 100644 --- a/version.json +++ b/version.json @@ -1 +1 @@ -{"version":"143.7.1"} \ No newline at end of file +{"version":"143.8.0"} \ No newline at end of file From bd14dcd0385bbd93eb9f4844a161488862c4ccfa Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Fri, 20 Dec 2024 02:21:00 +0900 Subject: [PATCH 18/33] Update version to 143.8.0 in configuration and database files --- src-tauri/tauri.conf.json | 2 +- src/ts/storage/database.svelte.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src-tauri/tauri.conf.json 
b/src-tauri/tauri.conf.json index 34dedf62..0f33ad84 100644 --- a/src-tauri/tauri.conf.json +++ b/src-tauri/tauri.conf.json @@ -29,7 +29,7 @@ }, "productName": "RisuAI", "mainBinaryName": "RisuAI", - "version": "143.7.0", + "version": "143.8.0", "identifier": "co.aiclient.risu", "plugins": { "updater": { diff --git a/src/ts/storage/database.svelte.ts b/src/ts/storage/database.svelte.ts index e3a728b5..cd57bdef 100644 --- a/src/ts/storage/database.svelte.ts +++ b/src/ts/storage/database.svelte.ts @@ -12,7 +12,7 @@ import { defaultColorScheme, type ColorScheme } from '../gui/colorscheme'; import type { PromptItem, PromptSettings } from '../process/prompt'; import type { OobaChatCompletionRequestParams } from '../model/ooba'; -export let appVer = "143.7.0" +export let appVer = "143.8.0" export let webAppSubVer = '' From 4afe32a2c26a332b2a322730c513c3ee36aa5959 Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Fri, 20 Dec 2024 02:53:58 +0900 Subject: [PATCH 19/33] Add geminiThinking flag and enhance thoughts handling in chat processing --- src/ts/model/modellist.ts | 3 +- src/ts/process/index.svelte.ts | 10 ++++-- src/ts/process/request.ts | 60 +++++++++++++++++++--------------- 3 files changed, 43 insertions(+), 30 deletions(-) diff --git a/src/ts/model/modellist.ts b/src/ts/model/modellist.ts index 223dac58..f5ce78db 100644 --- a/src/ts/model/modellist.ts +++ b/src/ts/model/modellist.ts @@ -17,6 +17,7 @@ export enum LLMFlags{ hasVideoInput, OAICompletionTokens, DeveloperRole, + geminiThinking } export enum LLMProvider{ @@ -814,7 +815,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-2.0-flash-thinking-exp-1219', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasAudioInput, LLMFlags.hasVideoInput, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasAudioInput, LLMFlags.hasVideoInput, LLMFlags.hasStreaming, LLMFlags.geminiThinking], parameters: ['temperature', 'top_k', 'top_p', 'presence_penalty', 'frequency_penalty'], tokenizer: LLMTokenizer.GoogleCloud, recommended: true diff --git a/src/ts/process/index.svelte.ts b/src/ts/process/index.svelte.ts index b10d701d..e57f8788 100644 --- a/src/ts/process/index.svelte.ts +++ b/src/ts/process/index.svelte.ts @@ -39,6 +39,7 @@ export interface OpenAIChat{ removable?:boolean attr?:string[] multimodals?: MultiModal[] + thoughts?: string[] } export interface MultiModal{ @@ -752,14 +753,19 @@ export async function sendChat(chatProcessIndex = -1,arg:{ break } } - formatedChat = formatedChat.replace(/(.+?)<\/Thoughts>/gm, '') + let thoughts:string[] = [] + formatedChat = formatedChat.replace(/(.+?)<\/Thoughts>/gm, (match, p1) => { + thoughts.push(p1) + return '' + }) const chat:OpenAIChat = { role: role, content: formatedChat, memo: msg.chatId, attr: attr, - multimodals: multimodal + multimodals: multimodal, + thoughts: thoughts } if(chat.multimodals.length === 0){ delete chat.multimodals diff --git a/src/ts/process/request.ts b/src/ts/process/request.ts index e79ed17c..6c7587f1 100644 --- a/src/ts/process/request.ts +++ b/src/ts/process/request.ts @@ -1458,7 +1458,6 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise role: "USER", parts: geminiParts, }); - } else if (prevChat?.role === qRole) { reformatedChat[reformatedChat.length-1].parts[0].text += '\n' + chat.content continue @@ -1476,6 +1475,16 @@ async function 
requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise }) } } + else if(chat.role === 'assistant' && arg.modelInfo.flags.includes(LLMFlags.geminiThinking) && chat.thoughts?.length > 0){ + reformatedChat.push({ + role: 'MODEL', + parts: [{ + text: chat.thoughts.join('\n\n') + }, { + text: chat.content + }] + }) + } else if(chat.role === 'assistant' || chat.role === 'user'){ reformatedChat.push({ @@ -1664,32 +1673,25 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise const data = JSON.parse(reformatted) - let r = '' - let r2 = '' - let bump = false + let rDatas:string[] = [''] for(const d of data){ const parts = d.candidates[0].content?.parts for(let i=0;i 0){ + rDatas.push('') } - if(!bump){ - r += part.text - } - else{ - r2 += part.text - } + rDatas[rDatas.length-1] += part.text } } - console.log(data) - if(r2){ - r = `${r}\n\n${r2}` + if(rDatas.length > 1){ + const thought = rDatas.splice(rDatas.length-2, 1)[0] + rDatas[rDatas.length-1] = `${thought}\n\n${rDatas.join('\n')}` } control.enqueue({ - '0': r + '0': rDatas[rDatas.length-1], }) } catch (error) { console.log(error) @@ -1717,16 +1719,19 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise } } - let r = '' - let r2 = '' + let rDatas:string[] = [''] const processDataItem = (data:any) => { - if(data?.candidates?.[0]?.content?.parts?.[0]?.text){ - r += data.candidates[0].content.parts[0].text + const parts = data?.candidates?.[0]?.content?.parts + for(let i=0;i 0){ + rDatas.push('') + } + + rDatas[rDatas.length-1] += part.text } - if(data?.candidates?.[0]?.content?.parts?.[1]?.text){ - r2 += data.candidates[0].content.parts[1].text - } - else if(data?.errors){ + + if(data?.errors){ return { type: 'fail', result: `${JSON.stringify(data.errors)}` @@ -1750,13 +1755,14 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise } - if(r2){ - r = `${r}\n\n${r2}` + if(rDatas.length > 1){ + const thought = rDatas.splice(rDatas.length-2, 1)[0] + rDatas[rDatas.length-1] = `${thought}\n\n${rDatas.join('\n')}` } return { type: 'success', - result: r + result: rDatas[rDatas.length-1] } } From 37d0870f26ab3fde2f678dd14b24958ddf1287ed Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Fri, 20 Dec 2024 02:54:13 +0900 Subject: [PATCH 20/33] Update version to 143.9.0 in configuration and related files --- src-tauri/tauri.conf.json | 2 +- src/ts/storage/database.svelte.ts | 2 +- version.json | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json index 0f33ad84..fd6fc810 100644 --- a/src-tauri/tauri.conf.json +++ b/src-tauri/tauri.conf.json @@ -29,7 +29,7 @@ }, "productName": "RisuAI", "mainBinaryName": "RisuAI", - "version": "143.8.0", + "version": "143.9.0", "identifier": "co.aiclient.risu", "plugins": { "updater": { diff --git a/src/ts/storage/database.svelte.ts b/src/ts/storage/database.svelte.ts index cd57bdef..6ec13a54 100644 --- a/src/ts/storage/database.svelte.ts +++ b/src/ts/storage/database.svelte.ts @@ -12,7 +12,7 @@ import { defaultColorScheme, type ColorScheme } from '../gui/colorscheme'; import type { PromptItem, PromptSettings } from '../process/prompt'; import type { OobaChatCompletionRequestParams } from '../model/ooba'; -export let appVer = "143.8.0" +export let appVer = "143.9.0" export let webAppSubVer = '' diff --git a/version.json b/version.json index d42e1c5c..4d809053 100644 --- a/version.json +++ b/version.json @@ -1 +1 @@ -{"version":"143.8.0"} \ No newline at end of 
file +{"version":"143.9.0"} \ No newline at end of file From c937a65b1f66111833005d8c44c4f047f661f9a9 Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Fri, 20 Dec 2024 02:55:10 +0900 Subject: [PATCH 21/33] Fix formatting of thoughts in request processing to ensure proper line breaks --- src/ts/process/request.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/ts/process/request.ts b/src/ts/process/request.ts index 6c7587f1..c7260155 100644 --- a/src/ts/process/request.ts +++ b/src/ts/process/request.ts @@ -1688,7 +1688,7 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise if(rDatas.length > 1){ const thought = rDatas.splice(rDatas.length-2, 1)[0] - rDatas[rDatas.length-1] = `${thought}\n\n${rDatas.join('\n')}` + rDatas[rDatas.length-1] = `${thought}\n\n${rDatas.join('\n\n')}` } control.enqueue({ '0': rDatas[rDatas.length-1], @@ -1757,7 +1757,7 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise if(rDatas.length > 1){ const thought = rDatas.splice(rDatas.length-2, 1)[0] - rDatas[rDatas.length-1] = `${thought}\n\n${rDatas.join('\n')}` + rDatas[rDatas.length-1] = `${thought}\n\n${rDatas.join('\n\n')}` } return { From 01da904c9b9e55e9c68e270c0f35c1a6359e7e21 Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Fri, 20 Dec 2024 03:14:06 +0900 Subject: [PATCH 22/33] Gemini change thinking --- src/ts/process/request.ts | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/ts/process/request.ts b/src/ts/process/request.ts index c7260155..d36cc9d9 100644 --- a/src/ts/process/request.ts +++ b/src/ts/process/request.ts @@ -1459,7 +1459,9 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise parts: geminiParts, }); } else if (prevChat?.role === qRole) { - reformatedChat[reformatedChat.length-1].parts[0].text += '\n' + chat.content + reformatedChat[reformatedChat.length-1].parts[ + reformatedChat[reformatedChat.length-1].parts.length-1 + ].text += '\n' + chat.content continue } else if(chat.role === 'system'){ @@ -1475,12 +1477,12 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise }) } } - else if(chat.role === 'assistant' && arg.modelInfo.flags.includes(LLMFlags.geminiThinking) && chat.thoughts?.length > 0){ + else if(chat.role === 'assistant' && arg.modelInfo.flags.includes(LLMFlags.geminiThinking)){ reformatedChat.push({ role: 'MODEL', - parts: [{ + parts: [chat.thoughts?.length > 0 ? 
{ text: chat.thoughts.join('\n\n') - }, { + } : null, { text: chat.content }] }) From 280fd313166439f990560c133b07b309aa79e794 Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Fri, 20 Dec 2024 03:14:35 +0900 Subject: [PATCH 23/33] Update version to 143.9.1 in configuration and related files --- src-tauri/tauri.conf.json | 2 +- src/ts/storage/database.svelte.ts | 2 +- version.json | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json index fd6fc810..0b39f0a6 100644 --- a/src-tauri/tauri.conf.json +++ b/src-tauri/tauri.conf.json @@ -29,7 +29,7 @@ }, "productName": "RisuAI", "mainBinaryName": "RisuAI", - "version": "143.9.0", + "version": "143.9.1", "identifier": "co.aiclient.risu", "plugins": { "updater": { diff --git a/src/ts/storage/database.svelte.ts b/src/ts/storage/database.svelte.ts index 6ec13a54..7ce18d48 100644 --- a/src/ts/storage/database.svelte.ts +++ b/src/ts/storage/database.svelte.ts @@ -12,7 +12,7 @@ import { defaultColorScheme, type ColorScheme } from '../gui/colorscheme'; import type { PromptItem, PromptSettings } from '../process/prompt'; import type { OobaChatCompletionRequestParams } from '../model/ooba'; -export let appVer = "143.9.0" +export let appVer = "143.9.1" export let webAppSubVer = '' diff --git a/version.json b/version.json index 4d809053..a45d30d0 100644 --- a/version.json +++ b/version.json @@ -1 +1 @@ -{"version":"143.9.0"} \ No newline at end of file +{"version":"143.9.1"} \ No newline at end of file From cc4bbdc8e24308e8906d44c99028d0b3e7415767 Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Fri, 20 Dec 2024 04:31:23 +0900 Subject: [PATCH 24/33] Enhance LLM model flags to include requiresAlternateRole and update request processing to handle multimodal and thoughts data --- src/ts/model/modellist.ts | 34 +++++++++++++++++----------------- src/ts/process/request.ts | 16 ++++++++++++++++ 2 files changed, 33 insertions(+), 17 deletions(-) diff --git a/src/ts/model/modellist.ts b/src/ts/model/modellist.ts index f5ce78db..30d4425f 100644 --- a/src/ts/model/modellist.ts +++ b/src/ts/model/modellist.ts @@ -778,7 +778,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-1.5-pro-exp-0827', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.GoogleCloud }, @@ -787,7 +787,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-exp-1121', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.GoogleCloud, }, @@ -796,7 +796,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-exp-1206', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.GoogleCloud }, @@ -805,7 +805,7 @@ export const 
LLMModels: LLMModel[] = [ id: 'gemini-2.0-flash-exp', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasAudioInput, LLMFlags.hasVideoInput, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasAudioInput, LLMFlags.hasVideoInput, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p', 'presence_penalty', 'frequency_penalty'], tokenizer: LLMTokenizer.GoogleCloud, recommended: true @@ -815,7 +815,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-2.0-flash-thinking-exp-1219', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasAudioInput, LLMFlags.hasVideoInput, LLMFlags.hasStreaming, LLMFlags.geminiThinking], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasAudioInput, LLMFlags.hasVideoInput, LLMFlags.hasStreaming, LLMFlags.geminiThinking, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p', 'presence_penalty', 'frequency_penalty'], tokenizer: LLMTokenizer.GoogleCloud, recommended: true @@ -825,7 +825,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-1.5-pro-latest', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], recommended: true, parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.GoogleCloud @@ -835,7 +835,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-1.5-flash', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], recommended: true, parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.GoogleCloud @@ -846,7 +846,7 @@ export const LLMModels: LLMModel[] = [ internalID: 'gemini-exp-1121', provider: LLMProvider.VertexAI, format: LLMFormat.VertexAIGemini, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.Gemma }, @@ -856,7 +856,7 @@ export const LLMModels: LLMModel[] = [ internalID: 'gemini-1.5-pro-latest', provider: LLMProvider.VertexAI, format: LLMFormat.VertexAIGemini, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.Gemma }, @@ -866,7 +866,7 @@ export const LLMModels: LLMModel[] = [ internalID: 'gemini-1.5-flash', provider: LLMProvider.VertexAI, format: LLMFormat.VertexAIGemini, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.Gemma }, @@ -875,7 +875,7 @@ export const LLMModels: 
LLMModel[] = [ id: 'gemini-exp-1114', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.GoogleCloud }, @@ -884,7 +884,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-1.5-pro-002', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.GoogleCloud }, @@ -893,7 +893,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-1.5-flash-002', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.GoogleCloud }, @@ -902,7 +902,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-pro', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.GoogleCloud }, @@ -911,7 +911,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-pro-vision', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.GoogleCloud }, @@ -920,7 +920,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-ultra', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.GoogleCloud }, @@ -929,7 +929,7 @@ export const LLMModels: LLMModel[] = [ id: 'gemini-ultra-vision', provider: LLMProvider.GoogleCloud, format: LLMFormat.GoogleCloud, - flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming], + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole], parameters: ['temperature', 'top_k', 'top_p'], tokenizer: LLMTokenizer.GoogleCloud }, diff --git a/src/ts/process/request.ts b/src/ts/process/request.ts index d36cc9d9..e618f841 100644 --- a/src/ts/process/request.ts +++ b/src/ts/process/request.ts @@ -282,7 +282,23 @@ function reformater(formated:OpenAIChat[],modelInfo:LLMModel){ } if(newFormated[newFormated.length-1].role === m.role){ + newFormated[newFormated.length-1].content += '\n' + m.content + + if(m.multimodals){ + if(!newFormated[newFormated.length-1].multimodals){ + 
newFormated[newFormated.length-1].multimodals = [] + } + newFormated[newFormated.length-1].multimodals.push(...m.multimodals) + } + + if(m.thoughts){ + if(!newFormated[newFormated.length-1].thoughts){ + newFormated[newFormated.length-1].thoughts = [] + } + newFormated[newFormated.length-1].thoughts.push(...m.thoughts) + } + continue } else{ From e5e01d068835fc516c10ca12c17e64e4a36c2078 Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Fri, 20 Dec 2024 18:11:11 +0900 Subject: [PATCH 25/33] Fix error handling in gemini and add dataItem hotkey --- src/lib/UI/PromptDataItem.svelte | 17 ++++++++++++++++- src/ts/process/request.ts | 15 +++++++++------ 2 files changed, 25 insertions(+), 7 deletions(-) diff --git a/src/lib/UI/PromptDataItem.svelte b/src/lib/UI/PromptDataItem.svelte index c1b2656d..1c65920c 100644 --- a/src/lib/UI/PromptDataItem.svelte +++ b/src/lib/UI/PromptDataItem.svelte @@ -8,8 +8,9 @@ import CheckInput from "./GUI/CheckInput.svelte"; import { ArrowDown, ArrowUp, XIcon } from "lucide-svelte"; import TextInput from "./GUI/TextInput.svelte"; - import { DBState } from 'src/ts/stores.svelte'; + import { onDestroy, onMount } from "svelte"; + let opened = $state(false) interface Props { promptItem: PromptItem; @@ -98,6 +99,20 @@ } + const EL = (e:KeyboardEvent) => { + if(e.ctrlKey && e.altKey && e.key === 'o'){ + opened = !opened + } + } + + onMount(() => { + document.addEventListener('keydown', EL) + }) + + onDestroy(() => { + document.removeEventListener('keydown', EL) + }) +
{ diff --git a/src/ts/process/request.ts b/src/ts/process/request.ts index e618f841..f2a674f8 100644 --- a/src/ts/process/request.ts +++ b/src/ts/process/request.ts @@ -1740,13 +1740,16 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise let rDatas:string[] = [''] const processDataItem = (data:any) => { const parts = data?.candidates?.[0]?.content?.parts - for(let i=0;i 0){ - rDatas.push('') - } + if(parts){ + + for(let i=0;i 0){ + rDatas.push('') + } - rDatas[rDatas.length-1] += part.text + rDatas[rDatas.length-1] += part.text + } } if(data?.errors){ From 7cd49fc8c36895923f8eaec500d1ea57355a9304 Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Fri, 20 Dec 2024 23:45:07 +0900 Subject: [PATCH 26/33] Add V2 plugin --- plugins.md | 159 ++++++++++++++++++++++++++ src-tauri/src/main.rs | 4 +- src-tauri/tauri.conf.json | 2 +- src/lang/cn.ts | 2 +- src/lang/de.ts | 2 +- src/lang/en.ts | 2 +- src/lang/es.ts | 2 +- src/lang/ko.ts | 2 +- src/lang/vi.ts | 2 +- src/lang/zh-Hant.ts | 2 +- src/ts/globalApi.svelte.ts | 57 ++++++++-- src/ts/plugins/plugins.ts | 180 ++++++++++++++++++++++++++++-- src/ts/process/index.svelte.ts | 1 + src/ts/process/request.ts | 28 ++++- src/ts/process/scripts.ts | 10 ++ src/ts/storage/database.svelte.ts | 3 +- version.json | 2 +- 17 files changed, 426 insertions(+), 34 deletions(-) create mode 100644 plugins.md diff --git a/plugins.md b/plugins.md new file mode 100644 index 00000000..f4ad9b72 --- /dev/null +++ b/plugins.md @@ -0,0 +1,159 @@ + +# Plugins + +RisuAI uses a plugin system to allow for easy extension of the functionality. + +## Creating a Plugin + +A plugin is a js file with a header. for example: + +```js +//@name exampleplugin +//display-name: Example Plugin + +// Plugin code here +``` + +## Header Fields + +- `@name ` - The name of the plugin. This is used to identify the plugin. required. +- `@display-name ` - The display name of the plugin. This is used to display the plugin in the UI. +- `@arg ` Argument definition. This is used to define the arguments that the plugin takes. The type can be `int` or `string`. + +## API Reference + + +### `risuFetch(url: string, arg: GlobalFetchArgs = {}): Promise` + +> Note: `nativeFetch` is recommended for fetching URLs with POST request, as it has the same functionality as `risuFetch`, but with a similar API to `fetch` with more predictable behavior. + +Fetches a URL with a native API, which doesn't have CORS restrictions. + +#### Arguments + +- `url: string` - The URL to fetch. +- `arg: GlobalFetchArgs` - The fetch arguments. + - `body: string|Object` - The body to send with the request. if it's an object, it will be converted to JSON. + - `headers: Record` - The headers to send with the request. + - `method: string` - The method to use for the request `GET` and `POST` are supported. Default: `POST`. + - `abortSignal: AbortSignal` - The signal to use for aborting the request. + - `rawResponse: boolean` - If true, the response will be returned as Uint8Array. Default: `false`. + +#### Returns + +- `Promise` - The fetch result. + - `ok: boolean` - If the request was successful. + - `data: any` - The response data which is parsed JSON if possible. if `rawResponse` is true, it will be a Uint8Array. + - `headers: Record` - The response headers. + +### `nativeFetch(url: string, arg: NativeFetchArg = {}): Promise` + +Fetches a URL with the native fetch API, which has CORS restrictions. + +#### Arguments + +- `url: string` - The URL to fetch. +- `arg: NativeFetchArg` - The fetch arguments. 
+ - `body: string|Uint8Array|ArrayBuffer` - The body to send with the request. + - `headers: Record` - The headers to send with the request. + - `method: string` - The method to use for the request. only `POST` is supported. Default: `POST`. + - `signal: AbortSignal` - The signal to use for aborting the request. + +#### Returns + +- `Promise` - The fetch result. + - `body: ReadableStream` - The response body. + - `headers: Headers` - The response headers. + - `status: number` - The response status. + - `json: () => Promise` - A function that returns a promise that resolves to the JSON representation of the response body. + - `text: () => Promise` - A function that returns a promise that resolves to the text representation of the response body. + +### `getArg(name: string): string|number` + +Gets the argument value by name. + +#### Arguments + +- `name: string` - The argument name. must be format of `::` like `exampleplugin::arg1`. + +#### Returns + +- `string|number` - The argument value. + +### `getChar(): character` + +Gets the current character. + +### `setChar(char: character): void` + +Sets the current character. + +### `addProvider(type: string, func: (arg:PluginV2ProviderArgument) => Promise<{success:boolean,content:string}>): void` + +Adds a provider to the plugin. + +#### Arguments + +- `type: string` - The provider name. +- `func: (arg:PluginV2ProviderArgument) => Promise<{success:boolean,content:string}>` - The provider function. + - `arg: PluginV2ProviderArgument` - The provider argument. + - `prompt_chat: Chat[]` - The chat prompt. + - `frequency_penalty?: number` - The frequency penalty. + - `min_p?: number` - The minimum p value. + - `presence_penalty?: number` - The presence penalty. + - `repetition_penalty?: number` - The repetition penalty. + - `top_k?: number` - The top k value. + - `top_p?: number` - The top p value. + - `temperature?: number` - The temperature value. + - `mode: string` - The mode. one of `model`, `submodel`, `memory`, `emotion`, `otherAx`, `translate` + - `Promise<{success:boolean,content:string}>` - The provider result. + - `success: boolean` - If the provider was successful. + - `content: string` - The provider content. + +### `addRisuScriptHandler(type: string, func: (content:string) => string|null|undefined|Promise): void` + +Adds a risu script handler to the plugin. + +#### Arguments + +- `type: string` - The handler type. one of `display`, `output`, `input`, `process` +- `func: (content:string) => string|null|undefined|Promise` - The handler function. + - `content: string` - The content to handle. + - `string|null|undefined|Promise` - The handler result. if it is a string or string promise, the data will be replaced with the result. + +### `removeRisuScriptHandler(type: string, func: (content:string) => string|null|undefined|Promise): void` + +Removes a risu script handler from the plugin. + +### `addRisuReplacer(type: string, func: ReplacerFunction): void` + +Adds a risu replacer to the plugin. + +#### Arguments + +- `type: string` - The replacer type. one of `beforeRequest`, `afterRequest`. +- `func: ReplacerFunction` - The replacer function. vary depending on the type. + - If the type is `afterRequest`, the function should be `(content: string) => string`. + - If the type is `beforeRequest`, the function should be `(content: Chat[]) => Chat[]`. + +### `removeRisuReplacer(type: string, func: ReplacerFunction): void` + +Removes a risu replacer from the plugin. + +### `onUnload(func: () => void): void` + +Adds an unload handler to the plugin. 
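
Putting the pieces above together, a minimal V2 plugin that registers a custom provider and a display handler could look like the sketch below. The plugin name, the `endpoint`/`max_len` arguments, the target URL, and the JSON request/response shapes are illustrative placeholders rather than anything defined by RisuAI; only the header fields and API calls come from the reference above.

```js
//@name exampleprovider
//@display-name Example Provider Plugin
//@arg endpoint string
//@arg max_len int

// Register a custom provider that forwards the formatted chat to a
// user-configured HTTP endpoint (the URL comes from the plugin argument).
addProvider('exampleprovider', async (arg) => {
    const res = await nativeFetch(String(getArg('exampleprovider::endpoint')), {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
            messages: arg.prompt_chat,   // formatted chat: [{ role, content, ... }]
            temperature: arg.temperature,
            mode: arg.mode               // 'model', 'submodel', 'memory', ...
        })
    })
    if(res.status !== 200){
        return { success: false, content: 'Request failed with status ' + res.status }
    }
    const data = await res.json()
    // The response field name is an assumption; adapt it to the actual endpoint.
    return { success: true, content: String(data.text ?? '') }
})

// Cap how much of a message is rendered, using the int argument as the limit.
const displayHandler = (content) => {
    const max = Number(getArg('exampleprovider::max_len')) || 4000
    return content.length > max ? content.slice(0, max) : content
}
addRisuScriptHandler('display', displayHandler)

// V2 plugins are never terminated automatically, so clean up on unload.
onUnload(() => {
    removeRisuScriptHandler('display', displayHandler)
})
```

Keeping a reference to the same handler function that is later passed to `removeRisuScriptHandler` inside `onUnload` prevents a reloaded plugin from stacking duplicate handlers.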
+ + +## Migration from Plugin V1 + +The plugin system has been updated to V2. The following changes have been made: + - Now runs in same context as the main script rather than in a sandbox, making it accessible to the main script and DOM. + - Added `nativeFetch`, `addRisuScriptHandler`, `removeRisuScriptHandler`, `addRisuReplacer`, `removeRisuReplacer`, `onUnload` functions. + - `method`, `abortSignal`, `rawResponse` arguments has been added to `risuFetch`. + - `min_p`, `top_k`, `top_p`, `mode` arguments has been added to `addProvider`. + - `bias` argument has been removed from `addProvider`. however for compatibility, it still calls with empty array. + - Now plugin doesn't automatically terminates itself. you have to manually unload the plugin using `onUnload` function. + - `addCharaJs` function has been removed. use `addRisuScriptHandler` instead. + - Many security restrictions have been removed. + - `@risu-name`, `@risu-display-name`, `@risu-arg` headers has been removed. use `@name`, `@display-name`, `@arg` instead. if it's not present, it will be ran as V1 plugin. \ No newline at end of file diff --git a/src-tauri/src/main.rs b/src-tauri/src/main.rs index 291a28c6..652510aa 100644 --- a/src-tauri/src/main.rs +++ b/src-tauri/src/main.rs @@ -357,12 +357,14 @@ async fn streamed_fetch( return format!(r#"{{"success":false,"body":"Invalid header JSON"}}"#); } + let body_decoded = general_purpose::STANDARD.decode(body.as_bytes()).unwrap(); + let client = reqwest::Client::new(); let response = client .post(&url) .headers(headers) .timeout(Duration::from_secs(240)) - .body(body) + .body(body_decoded) .send() .await; diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json index 0b39f0a6..e84f2f19 100644 --- a/src-tauri/tauri.conf.json +++ b/src-tauri/tauri.conf.json @@ -29,7 +29,7 @@ }, "productName": "RisuAI", "mainBinaryName": "RisuAI", - "version": "143.9.1", + "version": "144.0.0", "identifier": "co.aiclient.risu", "plugins": { "updater": { diff --git a/src/lang/cn.ts b/src/lang/cn.ts index f24a5f20..008ddce3 100644 --- a/src/lang/cn.ts +++ b/src/lang/cn.ts @@ -298,7 +298,7 @@ export const languageChinese = { "singleView": "单角色模式", "SpacedView": "多角色模式", "emphasizedView": "双角色模式", - "pluginWarn": "插件可在隔离环境中运行,但安装恶意插件可能导致问题。", + "pluginWarn": "但安装恶意插件可能导致问题。", "createGroupImg": "产生群组头像", "waifuWidth": "角色对话框宽度", "savebackup": "备份至 Google", diff --git a/src/lang/de.ts b/src/lang/de.ts index 8cf4e451..8d0d8bc6 100644 --- a/src/lang/de.ts +++ b/src/lang/de.ts @@ -210,7 +210,7 @@ export const languageGerman = { singleView: "Einzelansicht", SpacedView: "Mehrere Charakteransicht", emphasizedView: "Doppelte Charakteransicht", - pluginWarn: "Plugins werden in einer isolierten Umgebung ausgeführten, aber das Installieren von Plugins unbekannter Herkunft könnte Probleme verursachen oder sogar schädlichen Code enthalten", + pluginWarn: "Installieren von Plugins unbekannter Herkunft könnte Probleme verursachen oder sogar schädlichen Code enthalten", createGroupImg: "Gruppenicon generieren", waifuWidth: "Breite des Waifu Chat-Bereichs", savebackup: "Erstellen und laden Sie ein Backup auf Google hoch", diff --git a/src/lang/en.ts b/src/lang/en.ts index 6e621293..e51b5b18 100644 --- a/src/lang/en.ts +++ b/src/lang/en.ts @@ -341,7 +341,7 @@ export const languageEnglish = { singleView: "Single View", SpacedView: "Multiple Character View", emphasizedView: "Double Character View", - pluginWarn: "Plugins run in an isolated environment, but installing malicious plugins can cause problems.", + 
pluginWarn: "Installing malicious plugins can cause problems.", createGroupImg: "Generate group icon", waifuWidth: "Waifu Chat Width", savebackup: "Save Backup to google", diff --git a/src/lang/es.ts b/src/lang/es.ts index 5d351e62..9d36fbcf 100644 --- a/src/lang/es.ts +++ b/src/lang/es.ts @@ -260,7 +260,7 @@ export const languageSpanish = { singleView: "Vista Única", SpacedView: "Vista de Múltiples Personajes", emphasizedView: "Vista de Personajes Doble", - pluginWarn: "Los plugins se ejecutan en un entorno aislado, pero instalar plugins maliciosos puede causar problemas.", + pluginWarn: "Instalar plugins maliciosos puede causar problemas.", createGroupImg: "Generar icono de grupo", waifuWidth: "Ancho del Chat Waifu", savebackup: "Guardar Respaldo en Google", diff --git a/src/lang/ko.ts b/src/lang/ko.ts index a9b055d6..cb8ae195 100644 --- a/src/lang/ko.ts +++ b/src/lang/ko.ts @@ -293,7 +293,7 @@ export const languageKorean = { "singleView": "싱글", "SpacedView": "멀티플", "emphasizedView": "더블", - "pluginWarn": "플러그인은 기본적으로 분리된 환경에서 실행되지만, 악성 플러그인 설치 시 문제가 생길 수 있습니다.", + "pluginWarn": "악성 플러그인 설치 시 문제가 생길 수 있습니다.", "createGroupImg": "그룹 아이콘 자동생성", "waifuWidth": "Waifu 채팅창 넓이", "savebackup": "구글 백업 저장", diff --git a/src/lang/vi.ts b/src/lang/vi.ts index 0b1bb5cf..6d7133a9 100644 --- a/src/lang/vi.ts +++ b/src/lang/vi.ts @@ -181,7 +181,7 @@ export const LanguageVietnamese = { "singleView": "Chế độ xem đơn", "SpacedView": "Xem nhiều ký tự", "emphasizedView": "Chế độ xem nhân vật đôi", - "pluginWarn": "Các plugin chạy trong môi trường biệt lập nhưng việc cài đặt các plugin độc hại có thể gây ra sự cố.", + "pluginWarn": "Các plugin có thể gây ra sự cố khi cài đặt các plugin độc hại.", "createGroupImg": "Tạo biểu tượng nhóm", "waifuWidth": "Chiều rộng trò chuyện Waifu", "savebackup": "Lưu Sao lưu vào google", diff --git a/src/lang/zh-Hant.ts b/src/lang/zh-Hant.ts index dd4811bb..5d074e7d 100644 --- a/src/lang/zh-Hant.ts +++ b/src/lang/zh-Hant.ts @@ -300,7 +300,7 @@ export const languageChineseTraditional = { "singleView": "單角色模式", "SpacedView": "多角色模式", "emphasizedView": "雙角色模式", - "pluginWarn": "外掛程式可在隔離環境中運行,但安裝惡意外掛可能導致問題。", + "pluginWarn": "但安裝惡意外掛可能導致問題。", "createGroupImg": "產生群組頭像", "waifuWidth": "角色對話框寬度", "savebackup": "備份至 Google", diff --git a/src/ts/globalApi.svelte.ts b/src/ts/globalApi.svelte.ts index 1e971a5e..d2ed95bb 100644 --- a/src/ts/globalApi.svelte.ts +++ b/src/ts/globalApi.svelte.ts @@ -1809,18 +1809,53 @@ const pipeFetchLog = (fetchLogIndex: number, readableStream: ReadableStream; headers: Headers; status: number }> { +}):Promise<{ + body: ReadableStream; + headers: Headers; + status: number; + json: () => Promise; + text: () => Promise; +}> { + + const jsonizer = (body:ReadableStream) => { + return async () => { + const text = await textifyReadableStream(body) + return JSON.parse(text) + } + } + const textizer = (body:ReadableStream) => { + return async () => { + const text = await textifyReadableStream(body) + return text + } + } + let headers = arg.headers ?? 
{} + let realBody:Uint8Array + + if(typeof arg.body === 'string'){ + realBody = new TextEncoder().encode(arg.body) + } + else if(arg.body instanceof Uint8Array){ + realBody = arg.body + } + else if(arg.body instanceof ArrayBuffer){ + realBody = new Uint8Array(arg.body) + } + else{ + throw new Error('Invalid body type') + } + const db = getDatabase() let throughProxy = (!isTauri) && (!isNodeServer) && (!db.usePlainFetch) let fetchLogIndex = addFetchLog({ - body: arg.body, + body: new TextDecoder().decode(realBody), headers: arg.headers, response: 'Streamed Fetch', success: true, @@ -1849,7 +1884,7 @@ export async function fetchNative(url:string, arg:{ id: fetchId, url: url, headers: JSON.stringify(headers), - body: arg.body, + body: Buffer.from(realBody).toString('base64'), }).then((res) => { try { const parsedRes = JSON.parse(res as string) @@ -1868,7 +1903,7 @@ export async function fetchNative(url:string, arg:{ id: fetchId, url: url, headers: headers, - body: Buffer.from(arg.body).toString('base64'), + body: Buffer.from(realBody).toString('base64'), }).then((res) => { if(!res.success){ error = res.error @@ -1918,14 +1953,16 @@ export async function fetchNative(url:string, arg:{ return { body: readableStream, headers: new Headers(resHeaders), - status: status + status: status, + json: jsonizer(readableStream), + text: textizer(readableStream) } } else if(throughProxy){ const r = await fetch(hubURL + `/proxy2`, { - body: arg.body, + body: realBody, headers: arg.useRisuTk ? { "risu-header": encodeURIComponent(JSON.stringify(headers)), "risu-url": encodeURIComponent(url), @@ -1943,12 +1980,14 @@ export async function fetchNative(url:string, arg:{ return { body: pipeFetchLog(fetchLogIndex, r.body), headers: r.headers, - status: r.status + status: r.status, + json: jsonizer(r.body), + text: textizer(r.body) } } else{ return await fetch(url, { - body: arg.body, + body: realBody, headers: headers, method: arg.method, signal: arg.signal diff --git a/src/ts/plugins/plugins.ts b/src/ts/plugins/plugins.ts index 07d4e1a0..5553c87d 100644 --- a/src/ts/plugins/plugins.ts +++ b/src/ts/plugins/plugins.ts @@ -1,21 +1,16 @@ import { get, writable } from "svelte/store"; import { language } from "../../lang"; import { alertError } from "../alert"; -import { getDatabase, setDatabaseLite } from "../storage/database.svelte"; +import { getCurrentCharacter, getDatabase, setDatabaseLite } from "../storage/database.svelte"; import { checkNullish, selectSingleFile, sleep } from "../util"; import type { OpenAIChat } from "../process/index.svelte"; -import { globalFetch } from "../globalApi.svelte"; +import { fetchNative, globalFetch } from "../globalApi.svelte"; import { selectedCharID } from "../stores.svelte"; import { addAdditionalCharaJS } from "./embedscript"; +import type { ScriptMode } from "../process/scripts"; export const customProviderStore = writable([] as string[]) -interface PluginRequest{ - url: string - header?:{[key:string]:string} - body: any, - res: string -} interface ProviderPlugin{ name:string @@ -23,6 +18,7 @@ interface ProviderPlugin{ script:string arguments:{[key:string]:'int'|'string'|string[]} realArg:{[key:string]:number|string} + version?:1|2 } export type RisuPlugin = ProviderPlugin @@ -37,6 +33,7 @@ export async function importPlugin(){ const jsFile = Buffer.from(f.data).toString('utf-8').replace(/^\uFEFF/gm, ""); const splitedJs = jsFile.split('\n') let name = '' + let version:1|2 = 1 let displayName:string = undefined let arg:{[key:string]:'int'|'string'|string[]} = {} let 
realArg:{[key:string]:number|string} = {} @@ -49,15 +46,32 @@ export async function importPlugin(){ } name = provied.trim() } + if(line.startsWith('//@name')){ + const provied = line.slice(7) + if(provied === ''){ + alertError('plugin name must be longer than "", did you put it correctly?') + return + } + version = 2 + name = provied.trim() + } if(line.startsWith('//@risu-display-name')){ const provied = line.slice('//@risu-display-name'.length + 1) if(provied === ''){ alertError('plugin display name must be longer than "", did you put it correctly?') return } - name = provied.trim() + displayName = provied.trim() } - if(line.startsWith('//@risu-arg')){ + if(line.startsWith('//@display-name')){ + const provied = line.slice('//@display-name'.length + 1) + if(provied === ''){ + alertError('plugin display name must be longer than "", did you put it correctly?') + return + } + displayName = provied.trim() + } + if(line.startsWith('//@risu-arg') || line.startsWith('//@arg')){ const provied = line.trim().split(' ') if(provied.length < 3){ alertError('plugin argument is incorrect, did you put space in argument name?') @@ -90,7 +104,8 @@ export async function importPlugin(){ script: jsFile, realArg: realArg, arguments: arg, - displayName: displayName + displayName: displayName, + version: version } db.plugins ??= [] @@ -124,11 +139,18 @@ let pluginTranslator = false export async function loadPlugins() { let db = getDatabase() + if(pluginWorker){ pluginWorker.terminate() pluginWorker = null } - if(db.plugins.length > 0){ + + const plugins = safeStructuredClone(db.plugins).filter((a:RisuPlugin) => a.version === 1) + const pluginV2 = safeStructuredClone(db.plugins).filter((a:RisuPlugin) => a.version === 2) + + await loadV2Plugin(pluginV2) + + if(plugins.length > 0){ const da = await fetch("/pluginApi.js") const pluginApiString = await da.text() @@ -267,6 +289,140 @@ export async function loadPlugins() { } } +type PluginV2ProviderArgument = { + prompt_chat: OpenAIChat[], + frequency_penalty: number + min_p: number + presence_penalty: number + repetition_penalty: number + top_k: number + top_p: number + temperature: number + mode: string +} + +type EditFunction = (content:string) => string|null|undefined|Promise +type ReplacerFunction = (content:OpenAIChat[], type:string) => OpenAIChat[]|Promise + +export const pluginV2 = { + providers: new Map Promise<{success:boolean,content:string}> >(), + editdisplay: new Set(), + editoutput: new Set(), + editprocess: new Set(), + editinput: new Set(), + replacerbeforeRequest: new Set(), + replacerafterRequest: new Set<(content:string, type:string) => string|Promise>(), + unload: new Set<() => void|Promise>(), + loaded: false +} + +export async function loadV2Plugin(plugins:RisuPlugin[]){ + + if(pluginV2.loaded){ + for(const unload of pluginV2.unload){ + await unload() + } + + pluginV2.providers.clear() + pluginV2.editdisplay.clear() + pluginV2.editoutput.clear() + pluginV2.editprocess.clear() + pluginV2.editinput.clear() + } + + pluginV2.loaded = true + + globalThis.__pluginApis__ = { + risuFetch: globalFetch, + nativeFetch: fetchNative, + getArg: (arg:string) => { + const [name, realArg] = arg.split('::') + for(const plug of plugins){ + if(plug.name === name){ + return plug.realArg[realArg] + } + } + }, + getChar: () => { + return getCurrentCharacter() + }, + setChar: (char:any) => { + const db = getDatabase() + const charid = get(selectedCharID) + db.characters[charid] = char + setDatabaseLite(db) + }, + addProvider: (name:string, 
func:(arg:PluginV2ProviderArgument) => Promise<{success:boolean,content:string}>) => { + let provs = get(customProviderStore) + provs.push(name) + pluginV2.providers.set(name, func) + customProviderStore.set(provs) + }, + addRisuScriptHandler: (name:ScriptMode, func:EditFunction) => { + if(pluginV2['edit' + name]){ + pluginV2['edit' + name].add(func) + } + else{ + throw (`script handler named ${name} not found`) + } + }, + removeRisuScriptHandler: (name:ScriptMode, func:EditFunction) => { + if(pluginV2['edit' + name]){ + pluginV2['edit' + name].delete(func) + } + else{ + throw (`script handler named ${name} not found`) + } + }, + addRisuReplacer: (name:string, func:ReplacerFunction) => { + if(pluginV2['replacer' + name]){ + pluginV2['replacer' + name].add(func) + } + else{ + throw (`replacer handler named ${name} not found`) + } + }, + removeRisuReplacer: (name:string, func:ReplacerFunction) => { + if(pluginV2['replacer' + name]){ + pluginV2['replacer' + name].delete(func) + } + else{ + throw (`replacer handler named ${name} not found`) + } + }, + onUnload: (func:() => void|Promise) => { + pluginV2.unload.add(func) + } + } + + for(const plugin of plugins){ + const data = plugin.script + + const realScript = `(async () => { + const risuFetch = globalThis.__pluginApis__.risuFetch + const nativeFetch = globalThis.__pluginApis__.nativeFetch + const getArg = globalThis.__pluginApis__.getArg + const printLog = globalThis.__pluginApis__.printLog + const getChar = globalThis.__pluginApis__.getChar + const setChar = globalThis.__pluginApis__.setChar + const addProvider = globalThis.__pluginApis__.addProvider + const addRisuEventHandler = globalThis.__pluginApis__.addRisuEventHandler + const onUnload = globalThis.__pluginApis__.onUnload + + ${data} + })();` + + try { + eval(realScript) + } catch (error) { + console.error(error) + } + + console.log('Loaded V2 Plugin', plugin.name) + + } +} + export async function translatorPlugin(text:string, from:string, to:string) { if(!pluginTranslator){ return false diff --git a/src/ts/process/index.svelte.ts b/src/ts/process/index.svelte.ts index e57f8788..f9c108c7 100644 --- a/src/ts/process/index.svelte.ts +++ b/src/ts/process/index.svelte.ts @@ -30,6 +30,7 @@ import { hypaMemoryV2 } from "./memory/hypav2"; import { runLuaEditTrigger } from "./lua"; import { parseChatML } from "../parser.svelte"; import { getModelInfo, LLMFlags } from "../model/modellist"; +import { pluginV2 } from "../plugins/plugins"; export interface OpenAIChat{ role: 'system'|'user'|'assistant'|'function' diff --git a/src/ts/process/request.ts b/src/ts/process/request.ts index f2a674f8..3aa9d242 100644 --- a/src/ts/process/request.ts +++ b/src/ts/process/request.ts @@ -1,6 +1,6 @@ import type { MultiModal, OpenAIChat, OpenAIChatFull } from "./index.svelte"; import { getCurrentCharacter, getDatabase, setDatabase, type character } from "../storage/database.svelte"; -import { pluginProcess } from "../plugins/plugins"; +import { pluginProcess, pluginV2 } from "../plugins/plugins"; import { language } from "../../lang"; import { stringlizeAINChat, getStopStrings, unstringlizeAIN, unstringlizeChat } from "./stringlize"; import { addFetchLog, fetchNative, globalFetch, isNodeServer, isTauri, textifyReadableStream } from "../globalApi.svelte"; @@ -209,7 +209,22 @@ export async function requestChatData(arg:requestDataArgument, model:ModelModeEx const db = getDatabase() let trys = 0 while(true){ + + if(pluginV2.replacerbeforeRequest.size > 0){ + for(const replacer of pluginV2.replacerbeforeRequest){ 
+ arg.formated = await replacer(arg.formated, model) + } + } + const da = await requestChatDataMain(arg, model, abortSignal) + + if(da.type === 'success' && pluginV2.replacerafterRequest.size > 0){ + for(const replacer of pluginV2.replacerafterRequest){ + da.result = await replacer(da.result, model) + } + } + + if(da.type !== 'fail' || da.noRetry){ return da } @@ -1379,7 +1394,15 @@ async function requestPlugin(arg:RequestDataArgumentExtended):Promise 0){ + for(const plugin of pluginV2[mode]){ + const res = await plugin(data) + if(res !== null && res !== undefined){ + data = res + } + } + } + if(scripts.length === 0){ cacheScript(scripts, originalData, data, mode) return {data, emoChanged} diff --git a/src/ts/storage/database.svelte.ts b/src/ts/storage/database.svelte.ts index 7ce18d48..7773595f 100644 --- a/src/ts/storage/database.svelte.ts +++ b/src/ts/storage/database.svelte.ts @@ -12,7 +12,7 @@ import { defaultColorScheme, type ColorScheme } from '../gui/colorscheme'; import type { PromptItem, PromptSettings } from '../process/prompt'; import type { OobaChatCompletionRequestParams } from '../model/ooba'; -export let appVer = "143.9.1" +export let appVer = "144.0.0" export let webAppSubVer = '' @@ -857,6 +857,7 @@ export interface Database{ geminiStream?:boolean assetMaxDifference:number menuSideBar:boolean + pluginV2: RisuPlugin[] } interface SeparateParameters{ diff --git a/version.json b/version.json index a45d30d0..ecc9e234 100644 --- a/version.json +++ b/version.json @@ -1 +1 @@ -{"version":"143.9.1"} \ No newline at end of file +{"version":"144.0.0"} \ No newline at end of file From 6860382bf14138f2c978fc99a376d9cae0117381 Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Fri, 20 Dec 2024 23:49:52 +0900 Subject: [PATCH 27/33] Update plugin documentation to include mode parameter for replacer functions --- plugins.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/plugins.md b/plugins.md index f4ad9b72..18609b9b 100644 --- a/plugins.md +++ b/plugins.md @@ -133,8 +133,9 @@ Adds a risu replacer to the plugin. - `type: string` - The replacer type. one of `beforeRequest`, `afterRequest`. - `func: ReplacerFunction` - The replacer function. vary depending on the type. - - If the type is `afterRequest`, the function should be `(content: string) => string`. - - If the type is `beforeRequest`, the function should be `(content: Chat[]) => Chat[]`. + - If the type is `afterRequest`, the function should be `(content: string, mode:string) => string`. + - If the type is `beforeRequest`, the function should be `(content: Chat[], mode:string) => Chat[]`. + - mode is one of `model`, `submodel`, `memory`, `emotion`, `otherAx`, `translate`. ### `removeRisuReplacer(type: string, func: ReplacerFunction): void` From 45e8ca5ed6f1598c81f4f80c71a9438b4cb423e0 Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Sat, 21 Dec 2024 01:15:40 +0900 Subject: [PATCH 28/33] Update documentation for nativeFetch to clarify CORS restrictions and API design --- plugins.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins.md b/plugins.md index 18609b9b..4f0edd52 100644 --- a/plugins.md +++ b/plugins.md @@ -48,7 +48,7 @@ Fetches a URL with a native API, which doesn't have CORS restrictions. ### `nativeFetch(url: string, arg: NativeFetchArg = {}): Promise` -Fetches a URL with the native fetch API, which has CORS restrictions. +Fetches a URL with the native API, which doesn't have CORS restrictions. 
this API is designed as a subset of [fetch api](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API) with a few differences. #### Arguments From 9d0402b7f5a9a6315441208a675751bfa1192370 Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Sat, 21 Dec 2024 01:26:01 +0900 Subject: [PATCH 29/33] Add support for multiple HTTP methods in streamed_fetch and fetchNative --- src-tauri/src/main.rs | 44 +++++++++++++++++++++++++++++++++++--- src/ts/globalApi.svelte.ts | 14 ++++++++---- 2 files changed, 51 insertions(+), 7 deletions(-) diff --git a/src-tauri/src/main.rs b/src-tauri/src/main.rs index 652510aa..323a3cf0 100644 --- a/src-tauri/src/main.rs +++ b/src-tauri/src/main.rs @@ -333,6 +333,7 @@ async fn streamed_fetch( headers: String, body: String, app: AppHandle, + method: String, ) -> String { //parse headers let headers_json: Value = match serde_json::from_str(&headers) { @@ -357,14 +358,51 @@ async fn streamed_fetch( return format!(r#"{{"success":false,"body":"Invalid header JSON"}}"#); } - let body_decoded = general_purpose::STANDARD.decode(body.as_bytes()).unwrap(); - let client = reqwest::Client::new(); - let response = client + let builder: reqwest::RequestBuilder; + if method == "POST" { + + let body_decoded = general_purpose::STANDARD.decode(body.as_bytes()).unwrap(); + + builder = client .post(&url) .headers(headers) .timeout(Duration::from_secs(240)) .body(body_decoded) + } + else if method == "GET" { + builder = client + .get(&url) + .headers(headers) + .timeout(Duration::from_secs(240)); + } + else if method == "PUT" { + + let body_decoded = general_purpose::STANDARD.decode(body.as_bytes()).unwrap(); + + builder = client + .put(&url) + .headers(headers) + .timeout(Duration::from_secs(240)) + .body(body_decoded) + } + else if method == "DELETE" { + + let body_decoded = general_purpose::STANDARD.decode(body.as_bytes()).unwrap(); + + builder = client + .delete(&url) + .headers(headers) + .timeout(Duration::from_secs(240)) + .body(body_decoded) + } + else { + return format!(r#"{{"success":false, body:"Invalid method"}}"#); + } + + + + let response = builder .send() .await; diff --git a/src/ts/globalApi.svelte.ts b/src/ts/globalApi.svelte.ts index d2ed95bb..6c7dbe45 100644 --- a/src/ts/globalApi.svelte.ts +++ b/src/ts/globalApi.svelte.ts @@ -1811,7 +1811,7 @@ const pipeFetchLog = (fetchLogIndex: number, readableStream: ReadableStream { try { const parsedRes = JSON.parse(res as string) @@ -1973,7 +1979,7 @@ export async function fetchNative(url:string, arg:{ "risu-url": encodeURIComponent(url), "Content-Type": "application/json" }, - method: "POST", + method: arg.method, signal: arg.signal }) @@ -1990,7 +1996,7 @@ export async function fetchNative(url:string, arg:{ body: realBody, headers: headers, method: arg.method, - signal: arg.signal + signal: arg.signal, }) } } From 219042f8ad2d8eb98ceacb10d28b0d3d945aa61a Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Sat, 21 Dec 2024 01:27:03 +0900 Subject: [PATCH 30/33] Update plugin documentation to clarify nativeFetch method support and usage --- plugins.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/plugins.md b/plugins.md index 4f0edd52..83bb3965 100644 --- a/plugins.md +++ b/plugins.md @@ -25,7 +25,7 @@ A plugin is a js file with a header. for example: ### `risuFetch(url: string, arg: GlobalFetchArgs = {}): Promise` -> Note: `nativeFetch` is recommended for fetching URLs with POST request, as it has the same functionality as `risuFetch`, but with a similar API to `fetch` with more predictable behavior. 
+> Note: `nativeFetch` is recommended for fetching URLs, as it has the same functionality as `risuFetch`, but with a similar API to `fetch` with more predictable behavior. Fetches a URL with a native API, which doesn't have CORS restrictions. @@ -48,7 +48,7 @@ Fetches a URL with a native API, which doesn't have CORS restrictions. ### `nativeFetch(url: string, arg: NativeFetchArg = {}): Promise` -Fetches a URL with the native API, which doesn't have CORS restrictions. this API is designed as a subset of [fetch api](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API) with a few differences. +Fetches a URL with the native API, which doesn't have CORS restrictions. this API is designed as a subset of [fetch api](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API), except it doesn't have CORS restrictions and default method is `POST`. #### Arguments @@ -56,7 +56,7 @@ Fetches a URL with the native API, which doesn't have CORS restrictions. this AP - `arg: NativeFetchArg` - The fetch arguments. - `body: string|Uint8Array|ArrayBuffer` - The body to send with the request. - `headers: Record` - The headers to send with the request. - - `method: string` - The method to use for the request. only `POST` is supported. Default: `POST`. + - `method: string` - The method to use for the request. `GET`, `POST`, `PUT`, `DELETE` are supported. Default: `POST`. - `signal: AbortSignal` - The signal to use for aborting the request. #### Returns From 709f45fa590356a8ef65868ebbe55c3156844a5a Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Sat, 21 Dec 2024 01:29:45 +0900 Subject: [PATCH 31/33] Add arrayBuffer support to fetchNative and update documentation --- plugins.md | 1 + src/ts/globalApi.svelte.ts | 28 ++++++++++++++++++++++++++-- 2 files changed, 27 insertions(+), 2 deletions(-) diff --git a/plugins.md b/plugins.md index 83bb3965..2034382d 100644 --- a/plugins.md +++ b/plugins.md @@ -67,6 +67,7 @@ Fetches a URL with the native API, which doesn't have CORS restrictions. this AP - `status: number` - The response status. - `json: () => Promise` - A function that returns a promise that resolves to the JSON representation of the response body. - `text: () => Promise` - A function that returns a promise that resolves to the text representation of the response body. + - `arrayBuffer: () => Promise` - A function that returns a promise that resolves to the ArrayBuffer representation of the response body. ### `getArg(name: string): string|number` diff --git a/src/ts/globalApi.svelte.ts b/src/ts/globalApi.svelte.ts index 6c7dbe45..225db6c9 100644 --- a/src/ts/globalApi.svelte.ts +++ b/src/ts/globalApi.svelte.ts @@ -1821,6 +1821,7 @@ export async function fetchNative(url:string, arg:{ status: number; json: () => Promise; text: () => Promise; + arrayBuffer: () => Promise; }> { const jsonizer = (body:ReadableStream) => { @@ -1835,6 +1836,27 @@ export async function fetchNative(url:string, arg:{ return text } } + const arrayBufferizer = (body:ReadableStream) => { + return async () => { + const chunks:Uint8Array[] = [] + const reader = body.getReader() + while(true){ + const {done, value} = await reader.read() + if(done){ + break + } + chunks.push(value) + } + const totalLength = chunks.reduce((acc, chunk) => acc + chunk.length, 0) + const arrayBuffer = new Uint8Array(totalLength) + let offset = 0 + for(const chunk of chunks){ + arrayBuffer.set(chunk, offset) + offset += chunk.length + } + return arrayBuffer.buffer + } + } arg.method = arg.method ?? 
'POST' @@ -1961,7 +1983,8 @@ export async function fetchNative(url:string, arg:{ headers: new Headers(resHeaders), status: status, json: jsonizer(readableStream), - text: textizer(readableStream) + text: textizer(readableStream), + arrayBuffer: arrayBufferizer(readableStream) } @@ -1988,7 +2011,8 @@ export async function fetchNative(url:string, arg:{ headers: r.headers, status: r.status, json: jsonizer(r.body), - text: textizer(r.body) + text: textizer(r.body), + arrayBuffer: arrayBufferizer(r.body) } } else{ From e1f5a8ef089b03c21eceb2ba328742aa875c022a Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Sat, 21 Dec 2024 01:29:57 +0900 Subject: [PATCH 32/33] Bump version to 144.1.0 in configuration and related files --- src-tauri/tauri.conf.json | 2 +- src/ts/storage/database.svelte.ts | 2 +- version.json | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json index e84f2f19..0532b386 100644 --- a/src-tauri/tauri.conf.json +++ b/src-tauri/tauri.conf.json @@ -29,7 +29,7 @@ }, "productName": "RisuAI", "mainBinaryName": "RisuAI", - "version": "144.0.0", + "version": "144.1.0", "identifier": "co.aiclient.risu", "plugins": { "updater": { diff --git a/src/ts/storage/database.svelte.ts b/src/ts/storage/database.svelte.ts index 7773595f..5249b235 100644 --- a/src/ts/storage/database.svelte.ts +++ b/src/ts/storage/database.svelte.ts @@ -12,7 +12,7 @@ import { defaultColorScheme, type ColorScheme } from '../gui/colorscheme'; import type { PromptItem, PromptSettings } from '../process/prompt'; import type { OobaChatCompletionRequestParams } from '../model/ooba'; -export let appVer = "144.0.0" +export let appVer = "144.1.0" export let webAppSubVer = '' diff --git a/version.json b/version.json index ecc9e234..2b666a0d 100644 --- a/version.json +++ b/version.json @@ -1 +1 @@ -{"version":"144.0.0"} \ No newline at end of file +{"version":"144.1.0"} \ No newline at end of file From bd7b58a477337197e48d84020518d11da023609e Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Sat, 21 Dec 2024 01:32:54 +0900 Subject: [PATCH 33/33] Remove risuLog function and recommend using console.log instead --- plugins.md | 1 + 1 file changed, 1 insertion(+) diff --git a/plugins.md b/plugins.md index 2034382d..9e194888 100644 --- a/plugins.md +++ b/plugins.md @@ -157,5 +157,6 @@ The plugin system has been updated to V2. The following changes have been made: - `bias` argument has been removed from `addProvider`. however for compatibility, it still calls with empty array. - Now plugin doesn't automatically terminates itself. you have to manually unload the plugin using `onUnload` function. - `addCharaJs` function has been removed. use `addRisuScriptHandler` instead. + - `risuLog` function has been removed. use `console.log` instead. - Many security restrictions have been removed. - `@risu-name`, `@risu-display-name`, `@risu-arg` headers has been removed. use `@name`, `@display-name`, `@arg` instead. if it's not present, it will be ran as V1 plugin. \ No newline at end of file
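
To make the migration checklist above concrete, a trivial plugin rewritten for V2 might look like the following sketch; the plugin name, argument, and log messages are illustrative placeholders, and only the headers and API calls are taken from this document.

```js
//@name migratedplugin
//@display-name Migrated Plugin
//@arg greeting string
// The headers above replace //@risu-name, //@risu-display-name and //@risu-arg;
// a file without them is still loaded as a V1 plugin.

// risuLog() no longer exists in V2; plain console.log works because the plugin
// now runs in the same context as the main script and the DOM.
console.log('greeting argument:', getArg('migratedplugin::greeting'))

// V2 plugins are not terminated automatically, so release anything you set up.
onUnload(() => {
    console.log('migratedplugin unloaded')
})
```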