Merge branch 'kwaroran:main' into main
.github/workflows/github-actions-builder.yml (vendored)
@@ -70,7 +70,7 @@ jobs:
 run: rustup target add x86_64-apple-darwin
 - if: matrix.settings.platform == 'macos-latest'
 run: rustup target add aarch64-apple-darwin
-- uses: tauri-apps/tauri-action@v0
+- uses: tauri-apps/tauri-action@v0.5.16
 env:
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 TAURI_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
@@ -29,7 +29,7 @@
 },
 "productName": "RisuAI",
 "mainBinaryName": "RisuAI",
-"version": "143.0.1",
+"version": "143.5.0",
 "identifier": "co.aiclient.risu",
 "plugins": {
 "updater": {
@@ -460,7 +460,7 @@ export const languageEnglish = {
 loadDataFromAccount: "Load Data from Account",
 saveCurrentDataToAccount: "Save Current Data to Account",
 chatAssumed: "",
-proxyAPIKey: "Proxy Key/Password",
+proxyAPIKey: "Key/Password",
 proxyRequestModel: "Request Model",
 officialWiki: "Official Wiki",
 officialWikiDesc: "Official Wiki for RisuAI. feel free to see.",
@@ -807,8 +807,8 @@ export const languageEnglish = {
 predictedOutput: "Predicted Output",
 systemContentReplacement: "System Content Replacement",
 systemRoleReplacement: "System Role Replacement",
-seperateParameters: "Seperate Parameters",
-seperateParametersEnabled: "Enable Seperate Parameters",
+seperateParameters: "Separate Parameters",
+seperateParametersEnabled: "Enable Separate Parameters",
 summarizationPrompt: "Summarization Prompt",
 translatorPrompt: "Translation Prompt",
 translateBeforeHTMLFormatting: "Translate Before HTML Formatting",
@@ -822,4 +822,5 @@ export const languageEnglish = {
 googleCloudTokenization: "Google Cloud Tokenization",
 presetChain: "Preset Chain",
 legacyMediaFindings: "Legacy Media Findings",
+staticsDisclaimer: "The statistics are based on the data from after July 2024. the data may not be accurate.",
 }
@@ -2,7 +2,7 @@
 
 import Suggestion from './Suggestion.svelte';
 import AdvancedChatEditor from './AdvancedChatEditor.svelte';
-import { CameraIcon, DatabaseIcon, DicesIcon, GlobeIcon, ImagePlusIcon, LanguagesIcon, Laugh, MenuIcon, MicOffIcon, PackageIcon, Plus, RefreshCcwIcon, ReplyIcon, Send, StepForwardIcon } from "lucide-svelte";
+import { CameraIcon, DatabaseIcon, DicesIcon, GlobeIcon, ImagePlusIcon, LanguagesIcon, Laugh, MenuIcon, MicOffIcon, PackageIcon, Plus, RefreshCcwIcon, ReplyIcon, Send, StepForwardIcon, XIcon } from "lucide-svelte";
 import { selectedCharID, PlaygroundStore, createSimpleCharacter } from "../../ts/stores.svelte";
 import Chat from "./Chat.svelte";
 import { type Message, type character, type groupChat } from "../../ts/storage/database.svelte";
@@ -25,7 +25,7 @@
 import { PreUnreroll, Prereroll } from 'src/ts/process/prereroll';
 import { processMultiCommand } from 'src/ts/process/command';
 import { postChatFile } from 'src/ts/process/files/multisend';
-import { getInlayImage } from 'src/ts/process/files/image';
+import { getInlayAsset } from 'src/ts/process/files/inlays';
 import PlaygroundMenu from '../Playground/PlaygroundMenu.svelte';
 import { ConnectionOpenStore } from 'src/ts/sync/multiuser';
 
@@ -71,7 +71,7 @@
 
 if(fileInput.length > 0){
 for(const file of fileInput){
-messageInput += `{{inlay::${file}}}`
+messageInput += `{{inlayed::${file}}}`
 }
 fileInput = []
 }
@@ -546,8 +546,31 @@
 {#if fileInput.length > 0}
 <div class="flex items-center ml-4 flex-wrap p-2 m-2 border-darkborderc border rounded-md">
 {#each fileInput as file, i}
-{#await getInlayImage(file) then inlayImage}
-<img src={inlayImage.data} alt="Inlay" class="max-w-24 max-h-24">
+{#await getInlayAsset(file) then inlayAsset}
+<div class="relative">
+{#if inlayAsset.type === 'image'}
+<img src={inlayAsset.data} alt="Inlay" class="max-w-48 max-h-48 border border-darkborderc">
+{:else if inlayAsset.type === 'video'}
+<video controls class="max-w-48 max-h-48 border border-darkborderc">
+<source src={inlayAsset.data} type="video/mp4" />
+<track kind="captions" />
+Your browser does not support the video tag.
+</video>
+{:else if inlayAsset.type === 'audio'}
+<audio controls class="max-w-48 max-h-24 border border-darkborderc">
+<source src={inlayAsset.data} type="audio/mpeg" />
+Your browser does not support the audio tag.
+</audio>
+{:else}
+<div class="max-w-24 max-h-24">{file}</div>
+{/if}
+<button class="absolute -right-1 -top-1 p-1 bg-darkbg text-textcolor rounded-md transition-colors hover:text-draculared focus:text-draculared" onclick={() => {
+fileInput.splice(i, 1)
+updateInputSizeAll()
+}}>
+<XIcon size={18} />
+</button>
+</div>
 {/await}
 {/each}
 </div>
@@ -741,7 +764,7 @@
 
 <div class="flex items-center cursor-pointer hover:text-green-500 transition-colors" onclick={async () => {
 const res = await postChatFile(messageInput)
-if(res?.type === 'image'){
+if(res?.type === 'asset'){
 fileInput.push(res.data)
 updateInputSizeAll()
 }
@@ -1,7 +1,7 @@
 <script lang="ts">
 import Check from "src/lib/UI/GUI/CheckInput.svelte";
 import { language } from "src/lang";
+import Button from "src/lib/UI/GUI/Button.svelte";
 import { DBState } from 'src/ts/stores.svelte';
 import { alertMd } from "src/ts/alert";
 import { getRequestLog, isTauri } from "src/ts/globalApi.svelte";
@@ -165,21 +165,23 @@
 <Check bind:check={DBState.db.usePlainFetch} name={language.forcePlainFetch}> <Help key="forcePlainFetch" unrecommended/></Check>
 </div>
 {/if}
-<button
+<Button
+className="mt-4"
 onclick={async () => {
 alertMd(getRequestLog())
 }}
-class="drop-shadow-lg p-3 border-darkborderc border-solid mt-6 flex justify-center items-center ml-2 mr-2 border-1 hover:bg-selected text-sm">
+>
 {language.ShowLog}
-</button>
+</Button>
 {#if Capacitor.isNativePlatform()}
-<button
+<Button
+className="mt-4"
 onclick={async () => {
 estaStorage = await capStorageInvestigation()
 }}
-class="drop-shadow-lg p-3 border-darkborderc border-solid mt-6 flex justify-center items-center ml-2 mr-2 border-1 hover:bg-selected text-sm">
+>
 Investigate Storage
-</button>
+</Button>
 
 {#if estaStorage.length > 0}
 <div class="mt-4 flex flex-col w-full p-2">
@@ -192,12 +194,17 @@
 </div>
 {/if}
 {/if}
-{#if DBState.db.tpo}
-<button
-onclick={async () => {
-installPython()
-}}
-class="drop-shadow-lg p-3 border-darkbutton border-solid mt-6 flex justify-center items-center ml-2 mr-2 border-1 hover:bg-selected text-sm">
-Test Python
-</button>
-{/if}
+<Button
+className="mt-4"
+onclick={async () => {
+let mdTable = "| Type | Value |\n| --- | --- |\n"
+const s = DBState.db.statics
+for (const key in s) {
+mdTable += `| ${key} | ${s[key]} |\n`
+}
+mdTable += `\n\n<small>${language.staticsDisclaimer}</small>`
+alertMd(mdTable)
+}}
+>
+Show Statistics
+</Button>
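
Aside: a minimal sketch of the table building used by the new Show Statistics button above; the shape of DBState.db.statics is assumed, only the string assembly is shown.

// Sketch only: builds the markdown table the same way the onclick handler above does.
function buildStatsTable(statics: Record<string, string | number>, disclaimer: string): string {
    let mdTable = "| Type | Value |\n| --- | --- |\n"
    for (const key in statics) {
        mdTable += `| ${key} | ${statics[key]} |\n`
    }
    return mdTable + `\n\n<small>${disclaimer}</small>`
}
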
@@ -31,7 +31,7 @@
 search: '',
 page: 0,
 nsfw: false,
-sort: ''
+sort: 'recommended'
 }) then charas}
 {#if charas.length > 0}
 {@html hubAdditionalHTML}
@@ -12,7 +12,7 @@
 let charas:hubType[] = $state([])
 
 let page = $state(0)
-let sort = $state('')
+let sort = $state('recommended')
 
 let search = $state('')
 let menuOpen = $state(false)
@@ -27,6 +27,16 @@
 })
 }
 
+function changeSort(type:string) {
+if(sort === type){
+sort = 'recommended'
+}else{
+sort = type
+}
+page = 0
+return getHub()
+}
+
 getHub()
 
 
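
Aside: a rough sketch of the toggle semantics changeSort introduces; re-selecting the active sort falls back to 'recommended' and the page index resets before refetching (getHub is stubbed out here).

// Sketch only: same toggle logic as changeSort above, with the refetch stubbed.
let exampleSort = 'recommended'
let examplePage = 0

function exampleChangeSort(type: string, getHubStub: () => Promise<unknown>) {
    exampleSort = exampleSort === type ? 'recommended' : type
    examplePage = 0
    return getHubStub()
}
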
@@ -36,6 +46,9 @@
 <input bind:value={search} class="peer focus:border-textcolor transition-colors outline-none text-textcolor p-2 min-w-0 border border-r-0 bg-transparent rounded-md rounded-r-none input-text text-xl flex-grow ml-4 border-darkborderc resize-none overflow-y-hidden overflow-x-hidden max-w-full">
 <button
 onclick={() => {
+if(sort === 'random' || sort === 'recommended'){
+sort = ''
+}
 page = 0
 getHub()
 }}
@@ -81,6 +94,7 @@
 getHub()
 }}>
 {
+sort === 'recommended' ? language.recommended :
 sort === '' ? language.recent :
 sort === 'trending' ? language.trending :
 sort === 'downloads' ? language.downloads :
@@ -99,26 +113,22 @@
 </button>
 <div class="ml-2 mr-2 h-full border-r border-r-selected"></div>
 <button class="bg-darkbg p-2 rounded-lg ml-2 flex justify-center items-center hover:bg-selected transition-shadow" class:ring={sort === ''} onclick={() => {
-sort = ''
-getHub()
+changeSort('')
 }}>
 {language.recent}
 </button>
 <button class="bg-darkbg p-2 rounded-lg ml-2 flex justify-center items-center hover:bg-selected transition-shadow" class:ring={sort === 'trending'} onclick={() => {
-sort = 'trending'
-getHub()
+changeSort('trending')
 }}>
 {language.trending}
 </button>
 <button class="bg-darkbg p-2 rounded-lg ml-2 flex justify-center items-center hover:bg-selected transition-shadow" class:ring={sort === 'downloads'} onclick={() => {
-sort = 'downloads'
-getHub()
+changeSort('downloads')
 }}>
 {language.downloads}
 </button>
 <button class="bg-darkbg p-2 rounded-lg ml-2 flex justify-center items-center hover:bg-selected transition-shadow min-w-0 max-w-full" class:ring={sort === 'random'} onclick={() => {
-sort = 'random'
-getHub()
+changeSort('random')
 }}>
 {language.random}
 </button>
@@ -132,7 +142,7 @@
 {/each}
 {/key}
 </div>
-{#if sort !== 'random'}
+{#if sort !== 'random' && sort !== 'recommended'}
 <div class="w-full flex justify-center">
 <div class="flex">
 <button class="bg-darkbg h-14 w-14 min-w-14 rounded-lg flex justify-center items-center hover:ring transition-shadow" onclick={() => {
@@ -248,7 +248,15 @@ html, body{
 @apply w-full flex justify-center
 }
 
-.x-risu-risu-inlay-image > img{
+.x-risu-risu-inlay-image img{
+@apply rounded-lg focus:outline-none max-w-80 w-full
+}
+
+.x-risu-risu-inlay-image video{
+@apply rounded-lg focus:outline-none max-w-80 w-full
+}
+
+.x-risu-risu-inlay-image audio{
 @apply rounded-lg focus:outline-none max-w-80 w-full
 }
 
@@ -9,7 +9,7 @@ import { AppendableBuffer, BlankWriter, checkCharOrder, downloadFile, isNodeServ
 import { SettingsMenuIndex, ShowRealmFrameStore, selectedCharID, settingsOpen } from "./stores.svelte"
 import { convertImage, hasher } from "./parser.svelte"
 import { CCardLib, type CharacterCardV3, type LorebookEntry } from '@risuai/ccardlib'
-import { reencodeImage } from "./process/files/image"
+import { reencodeImage } from "./process/files/inlays"
 import { PngChunk } from "./pngChunk"
 import type { OnnxModelFiles } from "./process/transformers"
 import { CharXReader, CharXWriter } from "./process/processzip"
@@ -277,7 +277,7 @@ export async function importCharacterProcess(f:{
 if(parsed.spec !== 'chara_card_v2' && parsed.spec !== 'chara_card_v3'){
 const charaData:OldTavernChar = JSON.parse(Buffer.from(readedChara, 'base64').toString('utf-8'))
 console.log(charaData)
-const imgp = await saveAsset(await reencodeImage(img))
+const imgp = await saveAsset(img)
 db.characters.push(convertOffSpecCards(charaData, imgp))
 setDatabaseLite(db)
 alertNormal(language.importedCharacter)
@@ -633,7 +633,7 @@ async function importCharacterCardSpec(card:CharacterCardV2Risu|CharacterCardV3,
 
 const data = card.data
 console.log(card)
-let im = img ? await saveAsset(await reencodeImage(img)) : undefined
+let im = img ? await saveAsset(img) : undefined
 let db = getDatabase()
 
 const risuext = safeStructuredClone(data.extensions.risuai)
@@ -1140,7 +1140,7 @@ export async function exportCharacterCard(char:character, type:'png'|'json'|'cha
 const spec:'v2'|'v3' = arg.spec ?? 'v2' //backward compatibility
 try{
 char.image = ''
-img = await reencodeImage(img)
+img = type === 'png' ? (await reencodeImage(img)) : img
 const localWriter = arg.writer ?? (new LocalWriter())
 if(!arg.writer && type !== 'json'){
 const nameExt = {
@@ -2,14 +2,11 @@ import { get, writable } from "svelte/store";
 import { saveImage, setDatabase, type character, type Chat, defaultSdDataFunc, type loreBook, getDatabase, getCharacterByIndex, setCharacterByIndex } from "./storage/database.svelte";
 import { alertAddCharacter, alertConfirm, alertError, alertNormal, alertSelect, alertStore, alertWait } from "./alert";
 import { language } from "../lang";
-import { decode as decodeMsgpack } from "msgpackr";
 import { checkNullish, findCharacterbyId, getUserName, selectMultipleFile, selectSingleFile, sleep } from "./util";
 import { v4 as uuidv4 } from 'uuid';
 import { MobileGUIStack, OpenRealmStore, selectedCharID } from "./stores.svelte";
 import { checkCharOrder, downloadFile, getFileSrc } from "./globalApi.svelte";
-import { reencodeImage } from "./process/files/image";
 import { updateInlayScreen } from "./process/inlayScreen";
-import { PngChunk } from "./pngChunk";
 import { parseMarkdownSafe } from "./parser.svelte";
 import { translateHTML } from "./translator/translator";
 import { doingChat } from "./process/index.svelte";
@@ -84,7 +81,7 @@ export async function selectCharImg(charIndex:number) {
 }
 const img = selected.data
 let db = getDatabase()
-const imgp = await saveImage(await reencodeImage(img))
+const imgp = await saveImage(img)
 dumpCharImage(charIndex)
 db.characters[charIndex].image = imgp
 setDatabase(db)
@@ -668,56 +665,6 @@ function dataURLtoBuffer(string:string){
 return Buffer.from(data, 'base64');
 }
 
-export async function addDefaultCharacters() {
-const imgs = [fetch('/sample/rika.png'),fetch('/sample/yuzu.png')]
-
-alertStore.set({
-type: 'wait',
-msg: `Loading Sample bots...`
-})
-
-for(const img of imgs){
-const imgBuffer = await (await img).arrayBuffer()
-const readed = PngChunk.read(Buffer.from(imgBuffer), ["risuai"])?.risuai
-await sleep(10)
-const va = decodeMsgpack(Buffer.from(readed,'base64')) as any
-if(va.type !== 101){
-alertError(language.errors.noData)
-return
-}
-let char:character = va.data
-let db = getDatabase()
-if(char.emotionImages && char.emotionImages.length > 0){
-for(let i=0;i<char.emotionImages.length;i++){
-await sleep(10)
-const imgp = await saveImage(char.emotionImages[i][1] as any)
-char.emotionImages[i][1] = imgp
-}
-}
-char.chats = [{
-message: [],
-note: '',
-name: 'Chat 1',
-localLore: []
-}]
-
-if(checkNullish(char.sdData)){
-char.sdData = defaultSdDataFunc()
-}
-
-char.chatPage = 0
-char.image = await saveImage(await reencodeImage(Buffer.from(imgBuffer)))
-char.chaId = uuidv4()
-db.characters.push(characterFormatUpdate(char))
-setDatabase(db)
-}
-
-alertStore.set({
-type: 'none',
-msg: ''
-})
-}
-
 export async function removeChar(index:number,name:string, type:'normal'|'permanent'|'permanentForce' = 'normal'){
 const db = getDatabase()
 if(type !== 'permanentForce'){
@@ -13,7 +13,8 @@ export enum LLMFlags{
 hasStreaming,
 requiresAlternateRole,
 mustStartWithUserInput,
-poolSupported
+poolSupported,
+hasVideoInput
 }
 
 export enum LLMProvider{
@@ -708,7 +709,7 @@ export const LLMModels: LLMModel[] = [
 format: LLMFormat.Mistral,
 flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
 recommended: true,
-parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
+parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
 tokenizer: LLMTokenizer.Mistral
 },
 {
@@ -719,7 +720,7 @@ export const LLMModels: LLMModel[] = [
 format: LLMFormat.Mistral,
 flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
 recommended: true,
-parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
+parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
 tokenizer: LLMTokenizer.Mistral
 },
 {
@@ -729,7 +730,7 @@ export const LLMModels: LLMModel[] = [
 provider: LLMProvider.Mistral,
 format: LLMFormat.Mistral,
 flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
-parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
+parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
 tokenizer: LLMTokenizer.Mistral
 },
 {
@@ -739,7 +740,7 @@ export const LLMModels: LLMModel[] = [
 provider: LLMProvider.Mistral,
 format: LLMFormat.Mistral,
 flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
-parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
+parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
 tokenizer: LLMTokenizer.Mistral
 },
 {
@@ -749,7 +750,7 @@ export const LLMModels: LLMModel[] = [
 provider: LLMProvider.Mistral,
 format: LLMFormat.Mistral,
 flags: [LLMFlags.hasFirstSystemPrompt, LLMFlags.mustStartWithUserInput, LLMFlags.requiresAlternateRole],
-parameters: ['temperature', 'presence_penalty', 'frequency_penalty'],
+parameters: ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'],
 recommended: true,
 tokenizer: LLMTokenizer.Mistral
 },
@@ -767,7 +768,7 @@ export const LLMModels: LLMModel[] = [
 id: 'gemini-exp-1121',
 provider: LLMProvider.GoogleCloud,
 format: LLMFormat.GoogleCloud,
-flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported],
+flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming],
 parameters: ['temperature', 'top_k', 'top_p'],
 tokenizer: LLMTokenizer.GoogleCloud,
 },
@@ -776,17 +777,26 @@ export const LLMModels: LLMModel[] = [
 id: 'gemini-exp-1206',
 provider: LLMProvider.GoogleCloud,
 format: LLMFormat.GoogleCloud,
-flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported],
-recommended: true,
+flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming],
 parameters: ['temperature', 'top_k', 'top_p'],
 tokenizer: LLMTokenizer.GoogleCloud
 },
+{
+name: "Gemini Flash 2.0 Exp",
+id: 'gemini-2.0-flash-exp',
+provider: LLMProvider.GoogleCloud,
+format: LLMFormat.GoogleCloud,
+flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasAudioInput, LLMFlags.hasVideoInput, LLMFlags.hasStreaming],
+parameters: ['temperature', 'top_k', 'top_p', 'presence_penalty', 'frequency_penalty'],
+tokenizer: LLMTokenizer.GoogleCloud,
+recommended: true
+},
 {
 name: "Gemini Pro 1.5",
 id: 'gemini-1.5-pro-latest',
 provider: LLMProvider.GoogleCloud,
 format: LLMFormat.GoogleCloud,
-flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt],
+flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.hasStreaming],
 recommended: true,
 parameters: ['temperature', 'top_k', 'top_p'],
 tokenizer: LLMTokenizer.GoogleCloud
@@ -796,7 +806,7 @@ export const LLMModels: LLMModel[] = [
 id: 'gemini-1.5-flash',
 provider: LLMProvider.GoogleCloud,
 format: LLMFormat.GoogleCloud,
-flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt],
+flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming],
 recommended: true,
 parameters: ['temperature', 'top_k', 'top_p'],
 tokenizer: LLMTokenizer.GoogleCloud
@@ -836,7 +846,7 @@ export const LLMModels: LLMModel[] = [
 id: 'gemini-exp-1114',
 provider: LLMProvider.GoogleCloud,
 format: LLMFormat.GoogleCloud,
-flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt],
+flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming],
 parameters: ['temperature', 'top_k', 'top_p'],
 tokenizer: LLMTokenizer.GoogleCloud
 },
@@ -845,7 +855,7 @@ export const LLMModels: LLMModel[] = [
 id: 'gemini-1.5-pro-002',
 provider: LLMProvider.GoogleCloud,
 format: LLMFormat.GoogleCloud,
-flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt],
+flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming],
 parameters: ['temperature', 'top_k', 'top_p'],
 tokenizer: LLMTokenizer.GoogleCloud
 },
@@ -854,7 +864,7 @@ export const LLMModels: LLMModel[] = [
 id: 'gemini-1.5-flash-002',
 provider: LLMProvider.GoogleCloud,
 format: LLMFormat.GoogleCloud,
-flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt],
+flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming],
 parameters: ['temperature', 'top_k', 'top_p'],
 tokenizer: LLMTokenizer.GoogleCloud
 },
@@ -863,7 +873,7 @@ export const LLMModels: LLMModel[] = [
 id: 'gemini-pro',
 provider: LLMProvider.GoogleCloud,
 format: LLMFormat.GoogleCloud,
-flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt],
+flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming],
 parameters: ['temperature', 'top_k', 'top_p'],
 tokenizer: LLMTokenizer.GoogleCloud
 },
@@ -872,7 +882,7 @@ export const LLMModels: LLMModel[] = [
 id: 'gemini-pro-vision',
 provider: LLMProvider.GoogleCloud,
 format: LLMFormat.GoogleCloud,
-flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt],
+flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming],
 parameters: ['temperature', 'top_k', 'top_p'],
 tokenizer: LLMTokenizer.GoogleCloud
 },
@@ -881,7 +891,7 @@ export const LLMModels: LLMModel[] = [
 id: 'gemini-ultra',
 provider: LLMProvider.GoogleCloud,
 format: LLMFormat.GoogleCloud,
-flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt],
+flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming],
 parameters: ['temperature', 'top_k', 'top_p'],
 tokenizer: LLMTokenizer.GoogleCloud
 },
@@ -890,7 +900,7 @@ export const LLMModels: LLMModel[] = [
 id: 'gemini-ultra-vision',
 provider: LLMProvider.GoogleCloud,
 format: LLMFormat.GoogleCloud,
-flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt],
+flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming],
 parameters: ['temperature', 'top_k', 'top_p'],
 tokenizer: LLMTokenizer.GoogleCloud
 },
@@ -9,7 +9,7 @@ import css, { type CssAtRuleAST } from '@adobe/css-tools'
 import { SizeStore, selectedCharID } from './stores.svelte';
 import { calcString } from './process/infunctions';
 import { findCharacterbyId, getPersonaPrompt, getUserIcon, getUserName, parseKeyValue, sfc32, sleep, uuidtoNumber } from './util';
-import { getInlayImage } from './process/files/image';
+import { getInlayAsset } from './process/files/inlays';
 import { getModuleAssets, getModuleLorebooks } from './process/modules';
 import type { OpenAIChat } from './process/index.svelte';
 import hljs from 'highlight.js/lib/core'
@@ -375,41 +375,51 @@ async function parseAdditionalAssets(data:string, char:simpleCharacterArgument|c
 }
 
 function getClosestMatch(name:string, assetPaths:{[key:string]:{path:string, ext?:string}}){
 
 if(Object.keys(assetPaths).length === 0){
 return null
 }
 
+//Levenshtein distance, new with 1d array
 const dest = (a:string, b:string) => {
-let d:Int16Array[] = []
-for(let i=0;i<a.length+1;i++){
-d.push(new Int16Array(b.length+1))
+const h = a.length + 1
+const w = b.length + 1
+let d = new Int16Array(h * w)
+for(let i=0;i<h;i++){
+d[i * w] = i
 }
-for(let i=0;i<=a.length;i++){
-d[i][0] = i
+for(let i=0;i<w;i++){
+d[i] = i
 }
-for(let i=0;i<=b.length;i++){
-d[0][i] = i
-}
-for(let i=1; i<=a.length; i++){
-for(let j=1;j<=b.length;j++){
-d[i][j] = Math.min(
-d[i-1][j-1] + (a.charAt(i-1)===b.charAt(j-1) ? 0 : 1),
-d[i-1][j]+1, d[i][j-1]+1
+for(let i=1; i<h; i++){
+for(let j=1;j<w;j++){
+d[i * w + j] = Math.min(
+d[(i-1) * w + j-1] + (a.charAt(i-1)===b.charAt(j-1) ? 0 : 1),
+d[(i-1) * w + j]+1, d[i * w + j-1]+1
 )
 }
 }
-return d[a.length][b.length];
+return d[h * w - 1]
+}
+
+function trimmer(str:string){
+const ext = ['webp', 'png', 'jpg', 'jpeg', 'gif', 'mp4', 'webm', 'avi', 'm4p', 'm4v', 'mp3', 'wav', 'ogg']
+for(const e of ext){
+if(str.endsWith('.' + e)){
+str = str.substring(0, str.length - e.length - 1)
+}
+}
+
+
+return str.trim().replace(/[_ -.]/g, '')
 }
 
 let closest = ''
 let closestDist = 999999
+const trimmedName = trimmer(name)
 for(const key in assetPaths){
-const dist = dest(name.trim().replace(/[_ ]/g, ''), key.trim().replace(/[_ ]/g, ''))
+const dist = dest(trimmedName, trimmer(key))
 if(dist < closestDist){
 closest = key
 closestDist = dist
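
Aside: the rewritten distance helper flattens the (a.length+1) x (b.length+1) matrix into one Int16Array, indexing cell (i, j) as i * w + j. A self-contained sketch with two quick checks:

// Sketch only: same 1-D flattening as the hunk above, runnable on its own.
function levenshtein(a: string, b: string): number {
    const h = a.length + 1
    const w = b.length + 1
    const d = new Int16Array(h * w)
    for (let i = 0; i < h; i++) d[i * w] = i          // first column: delete i chars
    for (let j = 0; j < w; j++) d[j] = j              // first row: insert j chars
    for (let i = 1; i < h; i++) {
        for (let j = 1; j < w; j++) {
            d[i * w + j] = Math.min(
                d[(i - 1) * w + (j - 1)] + (a.charAt(i - 1) === b.charAt(j - 1) ? 0 : 1),
                d[(i - 1) * w + j] + 1,
                d[i * w + (j - 1)] + 1
            )
        }
    }
    return d[h * w - 1]
}

console.log(levenshtein("kitten", "sitting")) // 3
console.log(levenshtein("", "abc"))           // 3
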
@@ -418,15 +428,27 @@ function getClosestMatch(name:string, assetPaths:{[key:string]:{path:string, ext
 return assetPaths[closest]
 }
 
-async function parseInlayImages(data:string){
-const inlayMatch = data.match(/{{inlay::(.+?)}}/g)
+async function parseInlayAssets(data:string){
+const inlayMatch = data.match(/{{(inlay|inlayed)::(.+?)}}/g)
 if(inlayMatch){
 for(const inlay of inlayMatch){
-const id = inlay.substring(9, inlay.length - 2)
-const img = await getInlayImage(id)
-if(img){
-data = data.replace(inlay, `<img src="${img.data}"/>`)
+const inlayType = inlay.startsWith('{{inlayed') ? 'inlayed' : 'inlay'
+const id = inlay.substring(inlay.indexOf('::') + 2, inlay.length - 2)
+const asset = await getInlayAsset(id)
+let prefix = inlayType === 'inlayed' ? `<div class="risu-inlay-image">` : ''
+let postfix = inlayType === 'inlayed' ? `</div>\n\n` : ''
+switch(asset?.type){
+case 'image':
+data = data.replace(inlay, `${prefix}<img src="${asset.data}"/>${postfix}`)
+break
+case 'video':
+data = data.replace(inlay, `${prefix}<video controls><source src="${asset.data}" type="video/mp4"></video>${postfix}`)
+break
+case 'audio':
+data = data.replace(inlay, `${prefix}<audio controls><source src="${asset.data}" type="audio/mpeg"></audio>${postfix}`)
+break
 }
+
 }
 }
 return data
@@ -463,7 +485,7 @@ export async function ParseMarkdown(
 if(firstParsed !== data && char && char.type !== 'group'){
 data = await parseAdditionalAssets(data, char, additionalAssetMode, 'post')
 }
-data = await parseInlayImages(data ?? '')
+data = await parseInlayAssets(data ?? '')
 
 data = encodeStyle(data)
 if(mode === 'normal'){
@@ -4,7 +4,7 @@ import { getUserName, selectSingleFile, sleep } from "./util"
 import { alertError, alertNormal, alertStore } from "./alert"
 import { downloadFile, readImage } from "./globalApi.svelte"
 import { language } from "src/lang"
-import { reencodeImage } from "./process/files/image"
+import { reencodeImage } from "./process/files/inlays"
 import { PngChunk } from "./pngChunk"
 import { v4 } from "uuid"
 
@@ -2,25 +2,73 @@ import localforage from "localforage";
 import { v4 } from "uuid";
 import { getDatabase } from "../../storage/database.svelte";
 import { checkImageType } from "../../parser.svelte";
+import { getModelInfo, LLMFlags } from "src/ts/model/modellist";
+
+const inlayImageExts = [
+'jpg', 'jpeg', 'png', 'gif', 'webp', 'avif'
+]
+
+const inlayAudioExts = [
+'wav', 'mp3', 'ogg', 'flac'
+]
+
+const inlayVideoExts = [
+'webm', 'mp4', 'mkv'
+]
+
 const inlayStorage = localforage.createInstance({
 name: 'inlay',
 storeName: 'inlay'
 })
 
-export async function postInlayImage(img:{
+export async function postInlayAsset(img:{
 name:string,
 data:Uint8Array
 }){
 
 const extention = img.name.split('.').at(-1)
 const imgObj = new Image()
-imgObj.src = URL.createObjectURL(new Blob([img.data], {type: `image/${extention}`}))
 
-return await writeInlayImage(imgObj, {
-name: img.name,
-ext: extention
-})
+if(inlayImageExts.includes(extention)){
+imgObj.src = URL.createObjectURL(new Blob([img.data], {type: `image/${extention}`}))
+return await writeInlayImage(imgObj, {
+name: img.name,
+ext: extention
+})
+}
+
+if(inlayAudioExts.includes(extention)){
+const b64 = Buffer.from(img.data).toString('base64')
+const dataURI = `data:audio/${extention};base64,${b64}`
+const imgid = v4()
+
+await inlayStorage.setItem(imgid, {
+name: img.name,
+data: dataURI,
+ext: extention,
+type: 'audio'
+})
+
+return `${imgid}`
+}
+
+if(inlayVideoExts.includes(extention)){
+const b64 = Buffer.from(img.data).toString('base64')
+const dataURI = `data:video/${extention};base64,${b64}`
+const imgid = v4()
+
+await inlayStorage.setItem(imgid, {
+name: img.name,
+data: dataURI,
+ext: extention,
+type: 'video'
+})
+
+return `${imgid}`
+}
+
+return null
 }
 
 export async function writeInlayImage(imgObj:HTMLImageElement, arg:{name?:string, ext?:string} = {}) {
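
Aside: postInlayAsset now routes by extension — images keep the canvas re-encode path, while audio and video are stored as typed base64 data URIs. A condensed sketch of just the classification step (extension lists copied from the hunk above):

// Sketch only: classify an uploaded file name the same way postInlayAsset does.
const imageExts = ['jpg', 'jpeg', 'png', 'gif', 'webp', 'avif']
const audioExts = ['wav', 'mp3', 'ogg', 'flac']
const videoExts = ['webm', 'mp4', 'mkv']

function classifyInlay(fileName: string): 'image' | 'audio' | 'video' | null {
    const ext = fileName.split('.').at(-1) ?? ''
    if (imageExts.includes(ext)) return 'image'
    if (audioExts.includes(ext)) return 'audio'
    if (videoExts.includes(ext)) return 'video'
    return null // unsupported extensions: postInlayAsset returns null for these
}
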
@@ -60,21 +108,23 @@ export async function writeInlayImage(imgObj:HTMLImageElement, arg:{name?:string
 await inlayStorage.setItem(imgid, {
 name: arg.name ?? imgid,
 data: dataURI,
-ext: arg.ext ?? 'png',
+ext: 'png',
 height: drawHeight,
-width: drawWidth
+width: drawWidth,
+type: 'image'
 })
 
 return `${imgid}`
 }
 
-export async function getInlayImage(id: string){
+export async function getInlayAsset(id: string){
 const img:{
 name: string,
 data: string
 ext: string
 height: number
 width: number
+type: 'image'|'video'|'audio'
 } = await inlayStorage.getItem(id)
 if(img === null){
 return null
@@ -84,19 +134,7 @@ export async function getInlayImage(id: string){
 
 export function supportsInlayImage(){
 const db = getDatabase()
-return db.aiModel.startsWith('gptv') || db.aiModel === 'gemini-pro-vision' || db.aiModel.startsWith('gemini-exp') || db.aiModel.startsWith('claude-3') || db.aiModel.startsWith('gpt4_turbo') || db.aiModel.startsWith('gpt5') || db.aiModel.startsWith('gpt4o') ||
-(db.aiModel === 'reverse_proxy' && (
-db.proxyRequestModel?.startsWith('gptv') || db.proxyRequestModel === 'gemini-pro-vision' || db.proxyRequestModel?.startsWith('claude-3') || db.proxyRequestModel.startsWith('gpt4_turbo') ||
-db.proxyRequestModel?.startsWith('gpt5') || db.proxyRequestModel?.startsWith('gpt4o') ||
-db.proxyRequestModel === 'custom' && (
-db.customProxyRequestModel?.startsWith('gptv') ||
-db.customProxyRequestModel === 'gemini-pro-vision' ||
-db.customProxyRequestModel?.startsWith('claude-3') ||
-db.customProxyRequestModel.startsWith('gpt-4-turbo') ||
-db.customProxyRequestModel?.startsWith('gpt5') ||
-db.customProxyRequestModel?.startsWith('gpt4o')
-)
-))
+return getModelInfo(db.aiModel).flags.includes(LLMFlags.hasImageInput)
 }
 
 export async function reencodeImage(img:Uint8Array){
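
Aside: the capability check now reads the model's flag list instead of prefix-matching model ids, and the same pattern generalizes to the new audio/video flags. A hedged sketch (import paths assumed from the hunks above, not verified):

// Sketch only: flag-based capability check; paths and signatures are assumed.
import { getModelInfo, LLMFlags } from "src/ts/model/modellist"
import { getDatabase } from "src/ts/storage/database.svelte"

export function modelHasFlag(flag: LLMFlags): boolean {
    return getModelInfo(getDatabase().aiModel).flags.includes(flag)
}

// usage: modelHasFlag(LLMFlags.hasImageInput), modelHasFlag(LLMFlags.hasVideoInput)
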
@@ -5,7 +5,7 @@ import { doingChat, sendChat } from '../index.svelte';
 import { downloadFile, isTauri } from 'src/ts/globalApi.svelte';
 import { HypaProcesser } from '../memory/hypamemory';
 import { BufferToText as BufferToText, selectSingleFile, sleep } from 'src/ts/util';
-import { postInlayImage } from './image';
+import { postInlayAsset } from './inlays';
 
 type sendFileArg = {
 file:string
@@ -178,11 +178,11 @@ async function sendXMLFile(arg:sendFileArg) {
 return Buffer.from(`<File>\n${message}\n</File>\n`).toString('base64')
 }
 
-type postFileResult = postFileResultImage | postFileResultVoid | postFileResultText
+type postFileResult = postFileResultAsset | postFileResultVoid | postFileResultText
 
-type postFileResultImage = {
+type postFileResultAsset = {
 data: string,
-type: 'image',
+type: 'asset',
 }
 
 type postFileResultVoid = {
@@ -201,6 +201,22 @@ export async function postChatFile(query:string):Promise<postFileResult>{
 'jpeg',
 'png',
 'webp',
+'gif',
+'avif',
+
+//audio format
+'wav',
+'mp3',
+'ogg',
+'flac',
+
+//video format
+'mp4',
+'webm',
+'mpeg',
+'avi',
+
+//other format
 'po',
 // 'pdf',
 'txt'
@@ -243,14 +259,33 @@ export async function postChatFile(query:string):Promise<postFileResult>{
 name: file.name
 }
 }
+
+//image format
 case 'jpg':
 case 'jpeg':
 case 'png':
-case 'webp':{
-const postData = await postInlayImage(file)
+case 'webp':
+case 'gif':
+case 'avif':
+
+//audio format
+case 'wav':
+case 'mp3':
+case 'ogg':
+case 'flac':
+
+//video format
+case 'mp4':
+case 'webm':
+case 'mpeg':
+case 'avi':{
+const postData = await postInlayAsset(file)
+if(!postData){
+return null
+}
 return {
 data: postData,
-type: 'image'
+type: 'asset'
 }
 }
 case 'txt':{
@@ -18,7 +18,7 @@ import { groupOrder } from "./group";
 import { runTrigger } from "./triggers";
 import { HypaProcesser } from "./memory/hypamemory";
 import { additionalInformations } from "./embedding/addinfo";
-import { getInlayImage, supportsInlayImage } from "./files/image";
+import { getInlayAsset, supportsInlayImage } from "./files/inlays";
 import { getGenerationModelString } from "./models/modelString";
 import { connectionOpen, peerRevertChat, peerSafeCheck, peerSync } from "../sync/multiuser";
 import { runInlayScreen } from "./inlayScreen";
@@ -29,6 +29,7 @@ import { hanuraiMemory } from "./memory/hanuraiMemory";
 import { hypaMemoryV2 } from "./memory/hypav2";
 import { runLuaEditTrigger } from "./lua";
 import { parseChatML } from "../parser.svelte";
+import { getModelInfo, LLMFlags } from "../model/modellist";
 
 export interface OpenAIChat{
 role: 'system'|'user'|'assistant'|'function'
@@ -41,7 +42,7 @@
 }
 
 export interface MultiModal{
-type:'image'|'video'
+type:'image'|'video'|'audio'
 base64:string,
 height?:number,
 width?:number
@@ -687,10 +688,10 @@ export async function sendChat(chatProcessIndex = -1,arg:{
 }
 let inlays:string[] = []
 if(msg.role === 'char'){
-formatedChat = formatedChat.replace(/{{inlay::(.+?)}}/g, '')
+formatedChat = formatedChat.replace(/{{(inlay|inlayed)::(.+?)}}/g, '')
 }
 else{
-const inlayMatch = formatedChat.match(/{{inlay::(.+?)}}/g)
+const inlayMatch = formatedChat.match(/{{(inlay|inlayed)::(.+?)}}/g)
 if(inlayMatch){
 for(const inlay of inlayMatch){
 inlays.push(inlay)
@@ -699,12 +700,13 @@ export async function sendChat(chatProcessIndex = -1,arg:{
 }
 
 let multimodal:MultiModal[] = []
+const modelinfo = getModelInfo(DBState.db.aiModel)
 if(inlays.length > 0){
 for(const inlay of inlays){
-const inlayName = inlay.replace('{{inlay::', '').replace('}}', '')
-const inlayData = await getInlayImage(inlayName)
-if(inlayData){
-if(supportsInlayImage()){
+const inlayName = inlay.replace('{{inlayed::', '').replace('{{inlay::', '').replace('}}', '')
+const inlayData = await getInlayAsset(inlayName)
+if(inlayData?.type === 'image'){
+if(modelinfo.flags.includes(LLMFlags.hasImageInput)){
 multimodal.push({
 type: 'image',
 base64: inlayData.data,
@@ -717,6 +719,14 @@ export async function sendChat(chatProcessIndex = -1,arg:{
 formatedChat += `[${captionResult[0].generated_text}]`
 }
 }
+if(inlayData?.type === 'video' || inlayData?.type === 'audio'){
+if(multimodal.length === 0){
+multimodal.push({
+type: inlayData.type,
+base64: inlayData.data
+})
+}
+}
 formatedChat = formatedChat.replace(inlay, '')
 }
 }
@@ -1133,7 +1143,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{
 pointer++
 }
 formated = formated.filter((v) => {
-return v.content !== ''
+return v.content !== '' || (v.multimodals && v.multimodals.length > 0)
 })
 }
 
@@ -1,4 +1,4 @@
|
|||||||
import { writeInlayImage } from "./files/image";
|
import { writeInlayImage } from "./files/inlays";
|
||||||
import type { character } from "../storage/database.svelte";
|
import type { character } from "../storage/database.svelte";
|
||||||
import { generateAIImage } from "./stableDiff";
|
import { generateAIImage } from "./stableDiff";
|
||||||
|
|
||||||
@@ -6,7 +6,7 @@ import { ReloadGUIPointer, selectedCharID } from "../stores.svelte";
 import { alertError, alertInput, alertNormal } from "../alert";
 import { HypaProcesser } from "./memory/hypamemory";
 import { generateAIImage } from "./stableDiff";
-import { writeInlayImage } from "./files/image";
+import { writeInlayImage } from "./files/inlays";
 import type { OpenAIChat } from "./index.svelte";
 import { requestChatData } from "./request";
 import { v4 } from "uuid";
@@ -11,7 +11,7 @@ import { risuChatParser } from "../parser.svelte";
 import { SignatureV4 } from "@smithy/signature-v4";
 import { HttpRequest } from "@smithy/protocol-http";
 import { Sha256 } from "@aws-crypto/sha256-js";
-import { supportsInlayImage } from "./files/image";
+import { supportsInlayImage } from "./files/inlays";
 import { Capacitor } from "@capacitor/core";
 import { getFreeOpenRouterModel } from "../model/openrouter";
 import { runTransformers } from "./transformers";
@@ -95,7 +95,9 @@ type ParameterMap = {
    [key in Parameter]?: string;
};

-function applyParameters(data: { [key: string]: any }, parameters: Parameter[], rename: ParameterMap, ModelMode:ModelModeExtended): { [key: string]: any } {
+function applyParameters(data: { [key: string]: any }, parameters: Parameter[], rename: ParameterMap, ModelMode:ModelModeExtended, arg:{
+    ignoreTopKIfZero?:boolean
+} = {}): { [key: string]: any } {
    const db = getDatabase()
    if(db.seperateParametersEnabled && ModelMode !== 'model'){
        if(ModelMode === 'submodel'){
@@ -103,6 +105,10 @@ function applyParameters(data: { [key: string]: any }, parameters: Parameter[],
        }

        for(const parameter of parameters){
+            if(parameter === 'top_k' && arg.ignoreTopKIfZero && db.seperateParameters[ModelMode][parameter] === 0){
+                continue
+            }
+
            let value = db.seperateParameters[ModelMode][parameter]

            if(value === -1000 || value === undefined){
@@ -117,6 +123,9 @@ function applyParameters(data: { [key: string]: any }, parameters: Parameter[],

    for(const parameter of parameters){
        let value = 0
+        if(parameter === 'top_k' && arg.ignoreTopKIfZero && db.top_k === 0){
+            continue
+        }
        switch(parameter){
            case 'temperature':{
                value = db.temperature === -1000 ? -1000 : (db.temperature / 100)
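A trimmed sketch of the new ignoreTopKIfZero opt-out, with a flat settings object standing in for the database; the only point illustrated is that top_k is left out of the request body when the option is set and the stored value is 0.

type Parameter = 'temperature' | 'top_p' | 'top_k'

interface ApplyOptions { ignoreTopKIfZero?: boolean }

// Hypothetical stand-in for the stored settings.
const settings: Record<Parameter, number> = { temperature: 80, top_p: 1, top_k: 0 }

function applyParametersSketch(
    body: Record<string, unknown>,
    parameters: Parameter[],
    arg: ApplyOptions = {},
): Record<string, unknown> {
    for (const parameter of parameters) {
        if (parameter === 'top_k' && arg.ignoreTopKIfZero && settings.top_k === 0) {
            continue // leave top_k out of the request entirely
        }
        body[parameter] = settings[parameter]
    }
    return body
}

// top_k is omitted here because it is 0 and the flag is set.
applyParametersSketch({}, ['temperature', 'top_p', 'top_k'], { ignoreTopKIfZero: true })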
@@ -209,8 +218,13 @@ function reformater(formated:OpenAIChat[],modelInfo:LLMModel){

    if(!modelInfo.flags.includes(LLMFlags.hasFullSystemPrompt)){
        if(modelInfo.flags.includes(LLMFlags.hasFirstSystemPrompt)){
-            if(formated[0].role === 'system'){
-                systemPrompt = formated[0]
+            while(formated[0].role === 'system'){
+                if(systemPrompt){
+                    systemPrompt.content += '\n\n' + formated[0].content
+                }
+                else{
+                    systemPrompt = formated[0]
+                }
                formated = formated.slice(1)
            }
        }
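What the switch from if to while appears to do, sketched standalone: every leading system message is folded into one system prompt rather than only the first being lifted out. ChatMessage here is a trimmed stand-in for OpenAIChat, and the length guard is an added safety assumption, not code from the diff.

interface ChatMessage { role: 'system' | 'user' | 'assistant', content: string }

function liftLeadingSystemPrompts(messages: ChatMessage[]): { systemPrompt: ChatMessage | null, rest: ChatMessage[] } {
    let systemPrompt: ChatMessage | null = null
    let rest = messages
    while (rest.length > 0 && rest[0].role === 'system') {
        if (systemPrompt) {
            // fold additional leading system messages into the first one
            systemPrompt.content += '\n\n' + rest[0].content
        } else {
            // copy so the caller's array is not mutated
            systemPrompt = { ...rest[0] }
        }
        rest = rest.slice(1)
    }
    return { systemPrompt, rest }
}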
@@ -391,7 +405,7 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise<requestDat

    if(db.newOAIHandle){
        formatedChat = formatedChat.filter(m => {
-            return m.content !== ''
+            return m.content !== '' || (m.multimodals && m.multimodals.length > 0)
        })
    }

@@ -520,10 +534,9 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise<requestDat
        body: applyParameters({
            model: requestModel,
            messages: reformatedChat,
-            top_p: db.top_p,
            safe_prompt: false,
            max_tokens: arg.maxTokens,
-        }, ['temperature', 'presence_penalty', 'frequency_penalty'], {}, arg.mode ),
+        }, ['temperature', 'presence_penalty', 'frequency_penalty', 'top_p'], {}, arg.mode ),
        headers: {
            "Authorization": "Bearer " + db.mistralKey,
        },
@@ -1407,7 +1420,11 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise
        });

        for (const modal of chat.multimodals) {
-            if (modal.type === "image") {
+            if (
+                (modal.type === "image" && arg.modelInfo.flags.includes(LLMFlags.hasImageInput)) ||
+                (modal.type === "audio" && arg.modelInfo.flags.includes(LLMFlags.hasAudioInput)) ||
+                (modal.type === "video" && arg.modelInfo.flags.includes(LLMFlags.hasVideoInput))
+            ) {
                const dataurl = modal.base64;
                const base64 = dataurl.split(",")[1];
                const mediaType = dataurl.split(";")[0].split(":")[1];
@@ -1482,14 +1499,24 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise
        },
    ]

+    let para:Parameter[] = ['temperature', 'top_p', 'top_k', 'presence_penalty', 'frequency_penalty']
+
+    para = para.filter((v) => {
+        return arg.modelInfo.parameters.includes(v)
+    })
+
    const body = {
        contents: reformatedChat,
        generation_config: applyParameters({
            "maxOutputTokens": maxTokens,
-        }, ['temperature', 'top_p'], {
-            'top_p': "topP"
-        }, arg.mode),
+        }, para, {
+            'top_p': "topP",
+            'top_k': "topK",
+            'presence_penalty': "presencePenalty",
+            'frequency_penalty': "frequencyPenalty"
+        }, arg.mode, {
+            ignoreTopKIfZero: true
+        }),
        safetySettings: uncensoredCatagory,
        systemInstruction: {
            parts: [
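A sketch of the filter-then-rename flow feeding generation_config, with placeholder supported-parameter lists and stored values; the camelCase targets mirror the rename map added above, and everything else here is an illustrative assumption.

type Parameter = 'temperature' | 'top_p' | 'top_k' | 'presence_penalty' | 'frequency_penalty'

const renameForGemini: Partial<Record<Parameter, string>> = {
    top_p: 'topP',
    top_k: 'topK',
    presence_penalty: 'presencePenalty',
    frequency_penalty: 'frequencyPenalty',
}

// Hypothetical stand-ins for the model's supported parameters and stored values.
const supportedByModel: Parameter[] = ['temperature', 'top_p', 'top_k']
const storedValues: Record<Parameter, number> = {
    temperature: 0.8, top_p: 0.95, top_k: 40, presence_penalty: 0, frequency_penalty: 0,
}

function buildGenerationConfig(requested: Parameter[]): Record<string, number> {
    const config: Record<string, number> = {}
    // drop parameters the model does not accept, then rename to the API's casing
    for (const p of requested.filter((v) => supportedByModel.includes(v))) {
        config[renameForGemini[p] ?? p] = storedValues[p]
    }
    return config
}

// -> { temperature: 0.8, topP: 0.95, topK: 40 }
buildGenerationConfig(['temperature', 'top_p', 'top_k', 'presence_penalty', 'frequency_penalty'])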
@@ -1582,9 +1609,65 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise
    else if(arg.modelInfo.format === LLMFormat.VertexAIGemini){
        url =`https://${REGION}-aiplatform.googleapis.com/v1/projects/${PROJECT_ID}/locations/us-central1/publishers/google/models/${arg.modelInfo.internalID}:streamGenerateContent`
    }
+    else if(arg.modelInfo.format === LLMFormat.GoogleCloud && arg.useStreaming){
+        url = `https://generativelanguage.googleapis.com/v1beta/models/${arg.modelInfo.internalID}:streamGenerateContent?key=${db.google.accessToken}`
+    }
    else{
        url = `https://generativelanguage.googleapis.com/v1beta/models/${arg.modelInfo.internalID}:generateContent?key=${db.google.accessToken}`
    }

+
+    if(arg.modelInfo.format === LLMFormat.GoogleCloud && arg.useStreaming){
+        headers['Content-Type'] = 'application/json'
+        const f = await fetchNative(url, {
+            headers: headers,
+            body: JSON.stringify(body),
+            method: 'POST',
+            chatId: arg.chatId,
+        })
+
+        if(f.status !== 200){
+            return {
+                type: 'fail',
+                result: await textifyReadableStream(f.body)
+            }
+        }
+
+        let fullResult:string = ''
+
+        const stream = new TransformStream<Uint8Array, StreamResponseChunk>( {
+            async transform(chunk, control) {
+                fullResult += new TextDecoder().decode(chunk)
+                try {
+                    let reformatted = fullResult
+                    if(reformatted.endsWith(',')){
+                        reformatted = fullResult.slice(0, -1) + ']'
+                    }
+                    if(!reformatted.endsWith(']')){
+                        reformatted = fullResult + ']'
+                    }
+
+                    const data = JSON.parse(reformatted)
+
+                    let r = ''
+                    for(const d of data){
+                        r += d.candidates[0].content.parts[0].text
+                    }
+                    control.enqueue({
+                        '0': r
+                    })
+                } catch (error) {
+                    console.log(error)
+                }
+            }
+        },)
+
+        return {
+            type: 'streaming',
+            result: f.body.pipeThrough(stream)
+        }
+    }
+
    const res = await globalFetch(url, {
        headers: headers,
        body: body,
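The incremental parsing that the new streaming branch relies on, reduced to a standalone sketch: streamGenerateContent returns one JSON array that arrives piecewise, so each chunk is appended to a buffer, the buffer is patched into parseable JSON (trailing comma or missing closing bracket), and the concatenated candidate text is emitted. The response shape below is the minimal subset the loop reads, and emitting a plain string instead of RisuAI's StreamResponseChunk is a simplification.

interface GeminiStreamChunk {
    candidates: { content: { parts: { text: string }[] } }[]
}

function createGeminiTextStream(): TransformStream<Uint8Array, string> {
    let fullResult = ''
    const decoder = new TextDecoder()
    return new TransformStream<Uint8Array, string>({
        transform(chunk, controller) {
            fullResult += decoder.decode(chunk, { stream: true })
            // The endpoint streams a single JSON array; close it provisionally so the
            // partial buffer parses at chunk boundaries that allow it.
            let patched = fullResult
            if (patched.endsWith(',')) patched = patched.slice(0, -1) + ']'
            if (!patched.endsWith(']')) patched = patched + ']'
            try {
                const data = JSON.parse(patched) as GeminiStreamChunk[]
                let text = ''
                for (const d of data) {
                    text += d.candidates[0]?.content.parts[0]?.text ?? ''
                }
                controller.enqueue(text)
            } catch {
                // Incomplete JSON mid-chunk; wait for more data.
            }
        },
    })
}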
@@ -11,7 +11,7 @@ import type { OpenAIChat } from "./index.svelte";
 import { HypaProcesser } from "./memory/hypamemory";
 import { requestChatData } from "./request";
 import { generateAIImage } from "./stableDiff";
-import { writeInlayImage } from "./files/image";
+import { writeInlayImage } from "./files/inlays";
 import { runLua } from "./lua";

@@ -12,7 +12,7 @@ import { defaultColorScheme, type ColorScheme } from '../gui/colorscheme';
 import type { PromptItem, PromptSettings } from '../process/prompt';
 import type { OobaChatCompletionRequestParams } from '../model/ooba';

-export let appVer = "143.0.1"
+export let appVer = "143.5.0"
 export let webAppSubVer = ''

@@ -456,7 +456,7 @@ export function setDatabase(data:Database){
    data.vertexClientEmail ??= ''
    data.vertexPrivateKey ??= ''
    data.seperateParametersEnabled ??= false
-    data.seperateParameters = {
+    data.seperateParameters ??= {
        memory: {},
        emotion: {},
        translate: {},
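The change from = to ??= means stored per-mode parameter overrides survive a reload instead of being reset on every setDatabase call; a minimal illustration with a hypothetical Settings shape:

interface Settings { seperateParameters?: { memory: object, emotion: object, translate: object } }

function migrate(data: Settings): Settings {
    // ??= only fills the default when the field is missing,
    // so values the user already saved are kept.
    data.seperateParameters ??= { memory: {}, emotion: {}, translate: {} }
    return data
}

migrate({ seperateParameters: { memory: { temperature: 50 }, emotion: {}, translate: {} } })
// -> the saved memory.temperature is preserved instead of being reset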
@@ -860,6 +860,7 @@ export interface Database{
    googleClaudeTokenizing: boolean
    presetChain: string
    legacyMediaFindings?:boolean
+    geminiStream?:boolean
}

interface SeparateParameters{
@@ -2,7 +2,7 @@ import type { Tiktoken } from "@dqbd/tiktoken";
 import type { Tokenizer } from "@mlc-ai/web-tokenizers";
 import { type groupChat, type character, type Chat, getCurrentCharacter, getDatabase } from "./storage/database.svelte";
 import type { MultiModal, OpenAIChat } from "./process/index.svelte";
-import { supportsInlayImage } from "./process/files/image";
+import { supportsInlayImage } from "./process/files/inlays";
 import { risuChatParser } from "./parser.svelte";
 import { tokenizeGGUFModel } from "./process/models/local";
 import { globalFetch } from "./globalApi.svelte";
@@ -1 +1 @@
-{"version":"143.0.1"}
+{"version":"143.5.0"}