Merge branch 'kwaroran:main' into main

HyperBlaze authored 2024-12-20 23:36:41 -08:00, committed by GitHub
39 changed files with 1655 additions and 315 deletions

View File

@@ -5,12 +5,13 @@ import { language } from "../lang";
import { checkNullish, findCharacterbyId, getUserName, selectMultipleFile, selectSingleFile, sleep } from "./util";
import { v4 as uuidv4 } from 'uuid';
import { MobileGUIStack, OpenRealmStore, selectedCharID } from "./stores.svelte";
import { checkCharOrder, downloadFile, getFileSrc } from "./globalApi.svelte";
import { AppendableBuffer, checkCharOrder, downloadFile, getFileSrc } from "./globalApi.svelte";
import { updateInlayScreen } from "./process/inlayScreen";
import { parseMarkdownSafe } from "./parser.svelte";
import { checkImageType, parseMarkdownSafe } from "./parser.svelte";
import { translateHTML } from "./translator/translator";
import { doingChat } from "./process/index.svelte";
import { importCharacter } from "./characterCards";
import { PngChunk } from "./pngChunk";
export function createNewCharacter() {
let db = getDatabase()
@@ -81,6 +82,41 @@ export async function selectCharImg(charIndex:number) {
}
const img = selected.data
let db = getDatabase()
const type = checkImageType(img)
console.log(type)
try {
if(type === 'PNG' && db.characters[charIndex].type === 'character'){
const gen = PngChunk.readGenerator(img)
const allowedChunk = [
'parameters', 'Comment', 'Title', 'Description', 'Author', 'Software', 'Source', 'Disclaimer', 'Warning', 'Copyright',
]
for await (const chunk of gen){
if(chunk instanceof AppendableBuffer){
continue
}
if(!chunk){
continue
}
if(chunk.value.length > 20_000){
continue
}
if(allowedChunk.includes(chunk.key)){
console.log(chunk.key, chunk.value)
db.characters[charIndex].extentions ??= {}
db.characters[charIndex].extentions.pngExif ??= {}
db.characters[charIndex].extentions.pngExif[chunk.key] = chunk.value
}
}
console.log(db.characters[charIndex].extentions)
}
} catch (error) {
console.error(error)
}
const imgp = await saveImage(img)
dumpCharImage(charIndex)
db.characters[charIndex].image = imgp
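
For reference, a minimal sketch of reading the stored metadata back out after import; the chunk names are just the ones whitelisted above, and the lookup itself is hypothetical:

// hedged sketch: the allowed tEXt chunks end up keyed under extentions.pngExif
const imported = getDatabase().characters[charIndex]
const pngExif = imported?.extentions?.pngExif ?? {}
// e.g. pngExif['Software'] or pngExif['parameters'], if the source PNG carried them
console.log(Object.keys(pngExif))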

View File

@@ -1809,24 +1809,86 @@ const pipeFetchLog = (fetchLogIndex: number, readableStream: ReadableStream<Uint
* @throws {Error} - Throws an error if the request is aborted or if there is an error in the response.
*/
export async function fetchNative(url:string, arg:{
body:string,
body:string|Uint8Array|ArrayBuffer,
headers?:{[key:string]:string},
method?:"POST",
method?:"POST"|"GET"|"PUT"|"DELETE",
signal?:AbortSignal,
useRisuTk?:boolean,
chatId?:string
}):Promise<{ body: ReadableStream<Uint8Array>; headers: Headers; status: number }> {
}):Promise<{
body: ReadableStream<Uint8Array>;
headers: Headers;
status: number;
json: () => Promise<any>;
text: () => Promise<string>;
arrayBuffer: () => Promise<ArrayBuffer>;
}> {
const jsonizer = (body:ReadableStream<Uint8Array>) => {
return async () => {
const text = await textifyReadableStream(body)
return JSON.parse(text)
}
}
const textizer = (body:ReadableStream<Uint8Array>) => {
return async () => {
const text = await textifyReadableStream(body)
return text
}
}
const arrayBufferizer = (body:ReadableStream<Uint8Array>) => {
return async () => {
const chunks:Uint8Array[] = []
const reader = body.getReader()
while(true){
const {done, value} = await reader.read()
if(done){
break
}
chunks.push(value)
}
const totalLength = chunks.reduce((acc, chunk) => acc + chunk.length, 0)
const arrayBuffer = new Uint8Array(totalLength)
let offset = 0
for(const chunk of chunks){
arrayBuffer.set(chunk, offset)
offset += chunk.length
}
return arrayBuffer.buffer
}
}
arg.method = arg.method ?? 'POST'
let headers = arg.headers ?? {}
let realBody:Uint8Array
if(arg.method === 'GET' || arg.method === 'DELETE'){
realBody = new Uint8Array(0)
}
if(typeof arg.body === 'string'){
realBody = new TextEncoder().encode(arg.body)
}
else if(arg.body instanceof Uint8Array){
realBody = arg.body
}
else if(arg.body instanceof ArrayBuffer){
realBody = new Uint8Array(arg.body)
}
else{
throw new Error('Invalid body type')
}
const db = getDatabase()
let throughProxy = (!isTauri) && (!isNodeServer) && (!db.usePlainFetch)
let fetchLogIndex = addFetchLog({
body: arg.body,
body: new TextDecoder().decode(realBody),
headers: arg.headers,
response: 'Streamed Fetch',
success: true,
url: url,
resType: 'stream',
chatId: arg.chatId
chatId: arg.chatId,
})
if(isTauri){
fetchIndex++
@@ -1849,7 +1911,8 @@ export async function fetchNative(url:string, arg:{
id: fetchId,
url: url,
headers: JSON.stringify(headers),
body: arg.body,
body: Buffer.from(realBody).toString('base64'),
method: arg.method
}).then((res) => {
try {
const parsedRes = JSON.parse(res as string)
@@ -1868,7 +1931,7 @@ export async function fetchNative(url:string, arg:{
id: fetchId,
url: url,
headers: headers,
body: Buffer.from(arg.body).toString('base64'),
body: Buffer.from(realBody).toString('base64'),
}).then((res) => {
if(!res.success){
error = res.error
@@ -1918,14 +1981,17 @@ export async function fetchNative(url:string, arg:{
return {
body: readableStream,
headers: new Headers(resHeaders),
status: status
status: status,
json: jsonizer(readableStream),
text: textizer(readableStream),
arrayBuffer: arrayBufferizer(readableStream)
}
}
else if(throughProxy){
const r = await fetch(hubURL + `/proxy2`, {
body: arg.body,
body: realBody,
headers: arg.useRisuTk ? {
"risu-header": encodeURIComponent(JSON.stringify(headers)),
"risu-url": encodeURIComponent(url),
@@ -1936,22 +2002,25 @@ export async function fetchNative(url:string, arg:{
"risu-url": encodeURIComponent(url),
"Content-Type": "application/json"
},
method: "POST",
method: arg.method,
signal: arg.signal
})
return {
body: pipeFetchLog(fetchLogIndex, r.body),
headers: r.headers,
status: r.status
status: r.status,
json: jsonizer(r.body),
text: textizer(r.body),
arrayBuffer: arrayBufferizer(r.body)
}
}
else{
return await fetch(url, {
body: arg.body,
body: realBody,
headers: headers,
method: arg.method,
signal: arg.signal
signal: arg.signal,
})
}
}
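
A minimal usage sketch of the widened signature and the new convenience readers; the URL and payload are hypothetical. Note that json, text, and arrayBuffer all drain the same underlying ReadableStream, so at most one of them (or body) should be consumed per response:

// hedged sketch: calling fetchNative with the new method/body options
const res = await fetchNative('https://example.com/api/chat', {
    body: JSON.stringify({ hello: 'world' }),
    headers: { 'Content-Type': 'application/json' },
    method: 'POST'
})
if(res.status === 200){
    const data = await res.json() // or res.text() / res.arrayBuffer(), but only one
}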

View File

@@ -14,7 +14,10 @@ export enum LLMFlags{
requiresAlternateRole,
mustStartWithUserInput,
poolSupported,
hasVideoInput
hasVideoInput,
OAICompletionTokens,
DeveloperRole,
geminiThinking
}
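
The three new flags are consumed later in this same commit; a condensed sketch of their effect, per the request diffs further down:

// OAICompletionTokens -> body.max_completion_tokens is sent instead of body.max_tokens
// DeveloperRole       -> messages with role 'system' are re-labeled role 'developer'
// geminiThinking      -> assistant thoughts are emitted as a separate leading Gemini part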
export enum LLMProvider{
@@ -409,7 +412,7 @@ export const LLMModels: LLMModel[] = [
flags: [
LLMFlags.hasImageInput,
LLMFlags.hasFullSystemPrompt,
LLMFlags.hasStreaming
LLMFlags.hasStreaming,
],
parameters: OpenAIParameters,
tokenizer: LLMTokenizer.tiktokenO200Base
@@ -421,8 +424,8 @@ export const LLMModels: LLMModel[] = [
provider: LLMProvider.OpenAI,
format: LLMFormat.OpenAICompatible,
flags: [
LLMFlags.hasFullSystemPrompt,
LLMFlags.hasStreaming
LLMFlags.hasStreaming,
LLMFlags.OAICompletionTokens
],
parameters: OpenAIParameters,
tokenizer: LLMTokenizer.tiktokenO200Base
@@ -434,8 +437,24 @@ export const LLMModels: LLMModel[] = [
provider: LLMProvider.OpenAI,
format: LLMFormat.OpenAICompatible,
flags: [
LLMFlags.hasStreaming,
LLMFlags.OAICompletionTokens
],
parameters: OpenAIParameters,
tokenizer: LLMTokenizer.tiktokenO200Base
},
{
id: 'o1',
internalID: 'o1',
name: 'o1',
provider: LLMProvider.OpenAI,
format: LLMFormat.OpenAICompatible,
flags: [
LLMFlags.hasStreaming,
LLMFlags.OAICompletionTokens,
LLMFlags.hasFullSystemPrompt,
LLMFlags.hasImageInput,
LLMFlags.DeveloperRole
],
parameters: OpenAIParameters,
tokenizer: LLMTokenizer.tiktokenO200Base
@@ -759,7 +778,7 @@ export const LLMModels: LLMModel[] = [
id: 'gemini-1.5-pro-exp-0827',
provider: LLMProvider.GoogleCloud,
format: LLMFormat.GoogleCloud,
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt],
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.requiresAlternateRole],
parameters: ['temperature', 'top_k', 'top_p'],
tokenizer: LLMTokenizer.GoogleCloud
},
@@ -768,7 +787,7 @@ export const LLMModels: LLMModel[] = [
id: 'gemini-exp-1121',
provider: LLMProvider.GoogleCloud,
format: LLMFormat.GoogleCloud,
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming],
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
parameters: ['temperature', 'top_k', 'top_p'],
tokenizer: LLMTokenizer.GoogleCloud,
},
@@ -777,7 +796,7 @@ export const LLMModels: LLMModel[] = [
id: 'gemini-exp-1206',
provider: LLMProvider.GoogleCloud,
format: LLMFormat.GoogleCloud,
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming],
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
parameters: ['temperature', 'top_k', 'top_p'],
tokenizer: LLMTokenizer.GoogleCloud
},
@@ -786,7 +805,17 @@ export const LLMModels: LLMModel[] = [
id: 'gemini-2.0-flash-exp',
provider: LLMProvider.GoogleCloud,
format: LLMFormat.GoogleCloud,
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasAudioInput, LLMFlags.hasVideoInput, LLMFlags.hasStreaming],
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasAudioInput, LLMFlags.hasVideoInput, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
parameters: ['temperature', 'top_k', 'top_p', 'presence_penalty', 'frequency_penalty'],
tokenizer: LLMTokenizer.GoogleCloud,
recommended: true
},
{
name: "Gemini Flash 2.0 Thinking 1219",
id: 'gemini-2.0-flash-thinking-exp-1219',
provider: LLMProvider.GoogleCloud,
format: LLMFormat.GoogleCloud,
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasAudioInput, LLMFlags.hasVideoInput, LLMFlags.hasStreaming, LLMFlags.geminiThinking, LLMFlags.requiresAlternateRole],
parameters: ['temperature', 'top_k', 'top_p', 'presence_penalty', 'frequency_penalty'],
tokenizer: LLMTokenizer.GoogleCloud,
recommended: true
@@ -796,7 +825,7 @@ export const LLMModels: LLMModel[] = [
id: 'gemini-1.5-pro-latest',
provider: LLMProvider.GoogleCloud,
format: LLMFormat.GoogleCloud,
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.hasStreaming],
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
recommended: true,
parameters: ['temperature', 'top_k', 'top_p'],
tokenizer: LLMTokenizer.GoogleCloud
@@ -806,7 +835,7 @@ export const LLMModels: LLMModel[] = [
id: 'gemini-1.5-flash',
provider: LLMProvider.GoogleCloud,
format: LLMFormat.GoogleCloud,
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming],
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
recommended: true,
parameters: ['temperature', 'top_k', 'top_p'],
tokenizer: LLMTokenizer.GoogleCloud
@@ -817,7 +846,7 @@ export const LLMModels: LLMModel[] = [
internalID: 'gemini-exp-1121',
provider: LLMProvider.VertexAI,
format: LLMFormat.VertexAIGemini,
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt],
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.requiresAlternateRole],
parameters: ['temperature', 'top_k', 'top_p'],
tokenizer: LLMTokenizer.Gemma
},
@@ -827,7 +856,7 @@ export const LLMModels: LLMModel[] = [
internalID: 'gemini-1.5-pro-latest',
provider: LLMProvider.VertexAI,
format: LLMFormat.VertexAIGemini,
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt],
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.requiresAlternateRole],
parameters: ['temperature', 'top_k', 'top_p'],
tokenizer: LLMTokenizer.Gemma
},
@@ -837,7 +866,7 @@ export const LLMModels: LLMModel[] = [
internalID: 'gemini-1.5-flash',
provider: LLMProvider.VertexAI,
format: LLMFormat.VertexAIGemini,
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt],
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.requiresAlternateRole],
parameters: ['temperature', 'top_k', 'top_p'],
tokenizer: LLMTokenizer.Gemma
},
@@ -846,7 +875,7 @@ export const LLMModels: LLMModel[] = [
id: 'gemini-exp-1114',
provider: LLMProvider.GoogleCloud,
format: LLMFormat.GoogleCloud,
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming],
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
parameters: ['temperature', 'top_k', 'top_p'],
tokenizer: LLMTokenizer.GoogleCloud
},
@@ -855,7 +884,7 @@ export const LLMModels: LLMModel[] = [
id: 'gemini-1.5-pro-002',
provider: LLMProvider.GoogleCloud,
format: LLMFormat.GoogleCloud,
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming],
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
parameters: ['temperature', 'top_k', 'top_p'],
tokenizer: LLMTokenizer.GoogleCloud
},
@@ -864,7 +893,7 @@ export const LLMModels: LLMModel[] = [
id: 'gemini-1.5-flash-002',
provider: LLMProvider.GoogleCloud,
format: LLMFormat.GoogleCloud,
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming],
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
parameters: ['temperature', 'top_k', 'top_p'],
tokenizer: LLMTokenizer.GoogleCloud
},
@@ -873,7 +902,7 @@ export const LLMModels: LLMModel[] = [
id: 'gemini-pro',
provider: LLMProvider.GoogleCloud,
format: LLMFormat.GoogleCloud,
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming],
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
parameters: ['temperature', 'top_k', 'top_p'],
tokenizer: LLMTokenizer.GoogleCloud
},
@@ -882,7 +911,7 @@ export const LLMModels: LLMModel[] = [
id: 'gemini-pro-vision',
provider: LLMProvider.GoogleCloud,
format: LLMFormat.GoogleCloud,
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming],
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
parameters: ['temperature', 'top_k', 'top_p'],
tokenizer: LLMTokenizer.GoogleCloud
},
@@ -891,7 +920,7 @@ export const LLMModels: LLMModel[] = [
id: 'gemini-ultra',
provider: LLMProvider.GoogleCloud,
format: LLMFormat.GoogleCloud,
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming],
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
parameters: ['temperature', 'top_k', 'top_p'],
tokenizer: LLMTokenizer.GoogleCloud
},
@@ -900,7 +929,7 @@ export const LLMModels: LLMModel[] = [
id: 'gemini-ultra-vision',
provider: LLMProvider.GoogleCloud,
format: LLMFormat.GoogleCloud,
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming],
flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.hasStreaming, LLMFlags.requiresAlternateRole],
parameters: ['temperature', 'top_k', 'top_p'],
tokenizer: LLMTokenizer.GoogleCloud
},

View File

@@ -8,6 +8,7 @@ function nodeObserve(node:HTMLElement){
const triggerName = node.getAttribute('risu-trigger');
const btnEvent = node.getAttribute('risu-btn');
const observerAdded = node.getAttribute('risu-observer');
const hlLang = node.getAttribute('x-hl-lang');
if(observerAdded){
return
@@ -45,13 +46,65 @@ function nodeObserve(node:HTMLElement){
node.setAttribute('risu-observer', 'true');
return
}
if(hlLang){
node.addEventListener('contextmenu', (e)=>{
e.preventDefault();
const menu = document.createElement('div');
menu.setAttribute('class', 'fixed z-50 min-w-[160px] py-2 bg-gray-800 rounded-lg border border-gray-700')
const copyOption = document.createElement('div');
copyOption.textContent = 'Copy';
copyOption.setAttribute('class', 'px-4 py-2 text-sm text-gray-300 hover:bg-gray-700 cursor-pointer')
copyOption.addEventListener('click', ()=>{
navigator.clipboard.writeText(node.getAttribute('x-hl-text'));
menu.remove();
})
const downloadOption = document.createElement('div');
downloadOption.textContent = 'Download';
downloadOption.setAttribute('class', 'px-4 py-2 text-sm text-gray-300 hover:bg-gray-700 cursor-pointer')
downloadOption.addEventListener('click', ()=>{
const a = document.createElement('a');
a.href = URL.createObjectURL(new Blob([node.getAttribute('x-hl-text')], {type: 'text/plain'}));
a.download = 'code.' + hlLang;
a.click();
menu.remove();
})
menu.appendChild(copyOption);
menu.appendChild(downloadOption);
menu.style.left = e.clientX + 'px';
menu.style.top = e.clientY + 'px';
document.body.appendChild(menu);
document.addEventListener('click', ()=>{
menu.remove();
}, {once: true})
})
}
}
export async function startObserveDom(){
//For codeblocks we are using a MutationObserver since they don't appear reliably
const observer = new MutationObserver((mutations) => {
mutations.forEach((mutation) => {
mutation.addedNodes.forEach((node) => {
if(node instanceof HTMLElement){
nodeObserve(node);
}
})
})
})
//We are using a while loop instead of MutationObserver because MutationObserver is expensive for just a few elements
while(true){
document.querySelectorAll('[risu-trigger]').forEach(nodeObserve);
document.querySelectorAll('[risu-btn]').forEach(nodeObserve);
document.querySelectorAll('[x-hl-lang]').forEach(nodeObserve);
await sleep(100);
}
}

View File

@@ -14,6 +14,7 @@ import { getModuleAssets, getModuleLorebooks } from './process/modules';
import type { OpenAIChat } from './process/index.svelte';
import hljs from 'highlight.js/lib/core'
import 'highlight.js/styles/atom-one-dark.min.css'
import { language } from 'src/lang';
const markdownItOptions = {
html: true,
@@ -117,18 +118,30 @@ async function renderHighlightableMarkdown(data:string) {
//import language if not already loaded
//we do not refactor this into a function because we want Vite to import only the languages that are needed
let languageModule:any = null
let shotLang = ''
switch(lang){
case 'js':
case 'javascript':{
lang = 'javascript'
shotLang = 'js'
if(!hljs.getLanguage('javascript')){
languageModule = await import('highlight.js/lib/languages/javascript')
}
break
}
case 'txt':
case 'vtt':{
shotLang = lang
lang = 'plaintext'
if(!hljs.getLanguage('plaintext')){
languageModule = await import('highlight.js/lib/languages/plaintext')
}
break
}
case 'py':
case 'python':{
lang = 'python'
shotLang = 'py'
if(!hljs.getLanguage('python')){
languageModule = await import('highlight.js/lib/languages/python')
}
@@ -136,6 +149,7 @@ async function renderHighlightableMarkdown(data:string) {
}
case 'css':{
lang = 'css'
shotLang = 'css'
if(!hljs.getLanguage('css')){
languageModule = await import('highlight.js/lib/languages/css')
}
@@ -144,6 +158,7 @@ async function renderHighlightableMarkdown(data:string) {
case 'xml':
case 'html':{
lang = 'xml'
shotLang = 'xml'
if(!hljs.getLanguage('xml')){
languageModule = await import('highlight.js/lib/languages/xml')
}
@@ -151,6 +166,7 @@ async function renderHighlightableMarkdown(data:string) {
}
case 'lua':{
lang = 'lua'
shotLang = 'lua'
if(!hljs.getLanguage('lua')){
languageModule = await import('highlight.js/lib/languages/lua')
}
@@ -158,6 +174,7 @@ async function renderHighlightableMarkdown(data:string) {
}
case 'dart':{
lang = 'dart'
shotLang = 'dart'
if(!hljs.getLanguage('dart')){
languageModule = await import('highlight.js/lib/languages/dart')
}
@@ -165,6 +182,7 @@ async function renderHighlightableMarkdown(data:string) {
}
case 'java':{
lang = 'java'
shotLang = 'java'
if(!hljs.getLanguage('java')){
languageModule = await import('highlight.js/lib/languages/java')
}
@@ -172,6 +190,7 @@ async function renderHighlightableMarkdown(data:string) {
}
case 'rust':{
lang = 'rust'
shotLang = 'rs'
if(!hljs.getLanguage('rust')){
languageModule = await import('highlight.js/lib/languages/rust')
}
@@ -180,6 +199,7 @@ async function renderHighlightableMarkdown(data:string) {
case 'c':
case 'cpp':{
lang = 'cpp'
shotLang = 'cpp'
if(!hljs.getLanguage('cpp')){
languageModule = await import('highlight.js/lib/languages/cpp')
}
@@ -188,6 +208,7 @@ async function renderHighlightableMarkdown(data:string) {
case 'csharp':
case 'cs':{
lang = 'csharp'
shotLang = 'cs'
if(!hljs.getLanguage('csharp')){
languageModule = await import('highlight.js/lib/languages/csharp')
}
@@ -196,6 +217,7 @@ async function renderHighlightableMarkdown(data:string) {
case 'ts':
case 'typescript':{
lang = 'typescript'
shotLang = 'ts'
if(!hljs.getLanguage('typescript')){
languageModule = await import('highlight.js/lib/languages/typescript')
}
@@ -203,6 +225,7 @@ async function renderHighlightableMarkdown(data:string) {
}
case 'json':{
lang = 'json'
shotLang = 'json'
if(!hljs.getLanguage('json')){
languageModule = await import('highlight.js/lib/languages/json')
}
@@ -210,6 +233,7 @@ async function renderHighlightableMarkdown(data:string) {
}
case 'yaml':{
lang = 'yaml'
shotLang = 'yml'
if(!hljs.getLanguage('yaml')){
languageModule = await import('highlight.js/lib/languages/yaml')
}
@@ -217,6 +241,7 @@ async function renderHighlightableMarkdown(data:string) {
}
case 'shell':{
lang = 'shell'
shotLang = 'sh'
if(!hljs.getLanguage('shell')){
languageModule = await import('highlight.js/lib/languages/shell')
}
@@ -224,6 +249,7 @@ async function renderHighlightableMarkdown(data:string) {
}
case 'bash':{
lang = 'bash'
shotLang = 'sh'
if(!hljs.getLanguage('bash')){
languageModule = await import('highlight.js/lib/languages/bash')
}
@@ -231,6 +257,7 @@ async function renderHighlightableMarkdown(data:string) {
}
default:{
lang = 'none'
shotLang = 'none'
}
}
if(languageModule){
@@ -244,7 +271,9 @@ async function renderHighlightableMarkdown(data:string) {
language: lang,
ignoreIllegals: true
}).value
rendered = rendered.replace(placeholder, `<pre class="hljs"><code>${highlighted}</code></pre>`)
rendered = rendered.replace(placeholder, `<pre class="hljs" x-hl-lang="${shotLang}" x-hl-text="${
Buffer.from(code).toString('hex')
}"><code>${highlighted}</code></pre>`)
}
} catch (error) {
@@ -425,6 +454,9 @@ function getClosestMatch(name:string, assetPaths:{[key:string]:{path:string, ext
closestDist = dist
}
}
if(closestDist > DBState.db.assetMaxDifference){
return null
}
return assetPaths[closest]
}
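
In effect, getClosestMatch now rejects matches whose distance exceeds assetMaxDifference (defaulted to 4 in the database diff below) instead of always returning the nearest name; a worked sketch with hypothetical asset names:

// 'backgroud' vs stored 'background' -> dist 1, still matched
// 'night_bg'  vs stored 'castle'     -> dist above 4, now returns null rather than a bad guess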
@@ -464,6 +496,11 @@ export interface simpleCharacterArgument{
triggerscript?: triggerscript[]
}
function parseThoughts(data:string){
return data.replace(/<Thoughts>(.+)<\/Thoughts>/gms, (full, txt) => {
return `<details><summary>${language.cot}</summary>${txt}</details>`
})
}
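
A before/after sketch of parseThoughts; the thought text is hypothetical and language.cot is the localized summary label:

// hedged sketch of the rewrite performed above
const input = 'Hi!\n<Thoughts>I should answer briefly.</Thoughts>'
// parseThoughts(input) yields:
// 'Hi!\n<details><summary>{language.cot}</summary>I should answer briefly.</details>'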
export async function ParseMarkdown(
data:string,
@@ -475,25 +512,31 @@ export async function ParseMarkdown(
let firstParsed = ''
const additionalAssetMode = (mode === 'back') ? 'back' : 'normal'
let char = (typeof(charArg) === 'string') ? (findCharacterbyId(charArg)) : (charArg)
if(char && char.type !== 'group'){
data = await parseAdditionalAssets(data, char, additionalAssetMode, 'pre')
firstParsed = data
}
if(char){
data = (await processScriptFull(char, data, 'editdisplay', chatID, cbsConditions)).data
}
if(firstParsed !== data && char && char.type !== 'group'){
data = await parseAdditionalAssets(data, char, additionalAssetMode, 'post')
}
data = await parseInlayAssets(data ?? '')
data = parseThoughts(data)
data = encodeStyle(data)
if(mode === 'normal'){
data = await renderHighlightableMarkdown(data)
}
return decodeStyle(DOMPurify.sanitize(data, {
ADD_TAGS: ["iframe", "style", "risu-style", "x-em"],
ADD_ATTR: ["allow", "allowfullscreen", "frameborder", "scrolling", "risu-btn", 'risu-trigger', 'risu-mark'],
ADD_TAGS: ["iframe", "style", "risu-style", "x-em",],
ADD_ATTR: ["allow", "allowfullscreen", "frameborder", "scrolling", "risu-btn", 'risu-trigger', 'risu-mark', 'x-hl-lang', 'x-hl-text'],
}))
}
@@ -1941,6 +1984,7 @@ export function risuChatParser(da:string, arg:{
callStack: arg.callStack,
}
da = da.replace(/\<(user|char|bot)\>/gi, '{{$1}}')
const isPureMode = () => {
return pureModeNest.size > 0
@@ -1963,15 +2007,6 @@ export function risuChatParser(da:string, arg:{
stackType[nested.length] = 1
break
}
case '<':{
if(stackType[nested.length] === 1){
nested[0] += da[pointer]
break
}
nested.unshift('')
stackType[nested.length] = 2
break
}
case '#':{
//legacy if statement, deprecated
if(da[pointer + 1] !== '}' || nested.length === 1 || stackType[nested.length] !== 1){
@@ -2101,79 +2136,6 @@ export function risuChatParser(da:string, arg:{
}
break
}
case '>':{
if(stackType[nested.length] === 1){
nested[0] += da[pointer]
break
}
if(nested.length === 1 || stackType[nested.length] !== 2){
break
}
const dat = nested.shift()
if(isPureMode() && pureModeType() !== 'pureSyntax' && pureModeType() !== ''){
nested[0] += `<${dat}>`
break
}
switch(dat){
case 'Comment':{
if(arg.runVar){
break
}
if(!commentMode){
thinkingMode = false
commentMode = true
commentLatest = nested.map((f) => f)
if(commentLatest[0].endsWith('\n')){
commentLatest[0] = commentLatest[0].substring(0, commentLatest[0].length - 1)
}
commentV = new Uint8Array(stackType)
}
break
}
case '/Comment':{
if(commentMode){
nested = commentLatest
stackType = commentV
commentMode = false
}
break
}
case 'Thoughts':{
if(!visualize){
nested[0] += `<${dat}>`
break
}
if(!commentMode){
thinkingMode = true
commentMode = true
commentLatest = nested.map((f) => f)
if(commentLatest[0].endsWith('\n')){
commentLatest[0] = commentLatest[0].substring(0, commentLatest[0].length - 1)
}
commentV = new Uint8Array(stackType)
}
break
}
case '/Thoughts':{
if(!visualize){
nested[0] += `<${dat}>`
break
}
if(commentMode){
nested = commentLatest
stackType = commentV
commentMode = false
}
break
}
default:{
const mc = isPureMode() ? null : smMatcher(dat, matcherObj)
nested[0] += mc ?? `<${dat}>`
break
}
}
break
}
default:{
nested[0] += da[pointer]
break

View File

@@ -1,21 +1,16 @@
import { get, writable } from "svelte/store";
import { language } from "../../lang";
import { alertError } from "../alert";
import { getDatabase, setDatabaseLite } from "../storage/database.svelte";
import { getCurrentCharacter, getDatabase, setDatabaseLite } from "../storage/database.svelte";
import { checkNullish, selectSingleFile, sleep } from "../util";
import type { OpenAIChat } from "../process/index.svelte";
import { globalFetch } from "../globalApi.svelte";
import { fetchNative, globalFetch } from "../globalApi.svelte";
import { selectedCharID } from "../stores.svelte";
import { addAdditionalCharaJS } from "./embedscript";
import type { ScriptMode } from "../process/scripts";
export const customProviderStore = writable([] as string[])
interface PluginRequest{
url: string
header?:{[key:string]:string}
body: any,
res: string
}
interface ProviderPlugin{
name:string
@@ -23,6 +18,7 @@ interface ProviderPlugin{
script:string
arguments:{[key:string]:'int'|'string'|string[]}
realArg:{[key:string]:number|string}
version?:1|2
}
export type RisuPlugin = ProviderPlugin
@@ -37,6 +33,7 @@ export async function importPlugin(){
const jsFile = Buffer.from(f.data).toString('utf-8').replace(/^\uFEFF/gm, "");
const splitedJs = jsFile.split('\n')
let name = ''
let version:1|2 = 1
let displayName:string = undefined
let arg:{[key:string]:'int'|'string'|string[]} = {}
let realArg:{[key:string]:number|string} = {}
@@ -49,15 +46,32 @@ export async function importPlugin(){
}
name = provied.trim()
}
if(line.startsWith('//@name')){
const provied = line.slice(7)
if(provied === ''){
alertError('plugin name must be longer than "", did you put it correctly?')
return
}
version = 2
name = provied.trim()
}
if(line.startsWith('//@risu-display-name')){
const provied = line.slice('//@risu-display-name'.length + 1)
if(provied === ''){
alertError('plugin display name must be longer than "", did you put it correctly?')
return
}
name = provied.trim()
displayName = provied.trim()
}
if(line.startsWith('//@risu-arg')){
if(line.startsWith('//@display-name')){
const provied = line.slice('//@display-name'.length + 1)
if(provied === ''){
alertError('plugin display name must be longer than "", did you put it correctly?')
return
}
displayName = provied.trim()
}
if(line.startsWith('//@risu-arg') || line.startsWith('//@arg')){
const provied = line.trim().split(' ')
if(provied.length < 3){
alertError('plugin argument is incorrect, did you put space in argument name?')
@@ -90,7 +104,8 @@ export async function importPlugin(){
script: jsFile,
realArg: realArg,
arguments: arg,
displayName: displayName
displayName: displayName,
version: version
}
db.plugins ??= []
@@ -124,11 +139,18 @@ let pluginTranslator = false
export async function loadPlugins() {
let db = getDatabase()
if(pluginWorker){
pluginWorker.terminate()
pluginWorker = null
}
if(db.plugins.length > 0){
const plugins = safeStructuredClone(db.plugins).filter((a:RisuPlugin) => a.version === 1)
const pluginV2 = safeStructuredClone(db.plugins).filter((a:RisuPlugin) => a.version === 2)
await loadV2Plugin(pluginV2)
if(plugins.length > 0){
const da = await fetch("/pluginApi.js")
const pluginApiString = await da.text()
@@ -267,6 +289,140 @@ export async function loadPlugins() {
}
}
type PluginV2ProviderArgument = {
prompt_chat: OpenAIChat[],
frequency_penalty: number
min_p: number
presence_penalty: number
repetition_penalty: number
top_k: number
top_p: number
temperature: number
mode: string
}
type EditFunction = (content:string) => string|null|undefined|Promise<string|null|undefined>
type ReplacerFunction = (content:OpenAIChat[], type:string) => OpenAIChat[]|Promise<OpenAIChat[]>
export const pluginV2 = {
providers: new Map<string, (arg:PluginV2ProviderArgument) => Promise<{success:boolean,content:string}> >(),
editdisplay: new Set<EditFunction>(),
editoutput: new Set<EditFunction>(),
editprocess: new Set<EditFunction>(),
editinput: new Set<EditFunction>(),
replacerbeforeRequest: new Set<ReplacerFunction>(),
replacerafterRequest: new Set<(content:string, type:string) => string|Promise<string>>(),
unload: new Set<() => void|Promise<void>>(),
loaded: false
}
export async function loadV2Plugin(plugins:RisuPlugin[]){
if(pluginV2.loaded){
for(const unload of pluginV2.unload){
await unload()
}
pluginV2.providers.clear()
pluginV2.editdisplay.clear()
pluginV2.editoutput.clear()
pluginV2.editprocess.clear()
pluginV2.editinput.clear()
}
pluginV2.loaded = true
globalThis.__pluginApis__ = {
risuFetch: globalFetch,
nativeFetch: fetchNative,
getArg: (arg:string) => {
const [name, realArg] = arg.split('::')
for(const plug of plugins){
if(plug.name === name){
return plug.realArg[realArg]
}
}
},
getChar: () => {
return getCurrentCharacter()
},
setChar: (char:any) => {
const db = getDatabase()
const charid = get(selectedCharID)
db.characters[charid] = char
setDatabaseLite(db)
},
addProvider: (name:string, func:(arg:PluginV2ProviderArgument) => Promise<{success:boolean,content:string}>) => {
let provs = get(customProviderStore)
provs.push(name)
pluginV2.providers.set(name, func)
customProviderStore.set(provs)
},
addRisuScriptHandler: (name:ScriptMode, func:EditFunction) => {
if(pluginV2['edit' + name]){
pluginV2['edit' + name].add(func)
}
else{
throw (`script handler named ${name} not found`)
}
},
removeRisuScriptHandler: (name:ScriptMode, func:EditFunction) => {
if(pluginV2['edit' + name]){
pluginV2['edit' + name].delete(func)
}
else{
throw (`script handler named ${name} not found`)
}
},
addRisuReplacer: (name:string, func:ReplacerFunction) => {
if(pluginV2['replacer' + name]){
pluginV2['replacer' + name].add(func)
}
else{
throw (`replacer handler named ${name} not found`)
}
},
removeRisuReplacer: (name:string, func:ReplacerFunction) => {
if(pluginV2['replacer' + name]){
pluginV2['replacer' + name].delete(func)
}
else{
throw (`replacer handler named ${name} not found`)
}
},
onUnload: (func:() => void|Promise<void>) => {
pluginV2.unload.add(func)
}
}
for(const plugin of plugins){
const data = plugin.script
const realScript = `(async () => {
const risuFetch = globalThis.__pluginApis__.risuFetch
const nativeFetch = globalThis.__pluginApis__.nativeFetch
const getArg = globalThis.__pluginApis__.getArg
const printLog = globalThis.__pluginApis__.printLog
const getChar = globalThis.__pluginApis__.getChar
const setChar = globalThis.__pluginApis__.setChar
const addProvider = globalThis.__pluginApis__.addProvider
const addRisuEventHandler = globalThis.__pluginApis__.addRisuEventHandler
const onUnload = globalThis.__pluginApis__.onUnload
${data}
})();`
try {
eval(realScript)
} catch (error) {
console.error(error)
}
console.log('Loaded V2 Plugin', plugin.name)
}
}
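
To make the new format concrete, a minimal hypothetical V2 plugin; the name, argument, and endpoint are invented, and it only touches APIs visible in this diff (getArg, addProvider, onUnload, and the nativeFetch alias for fetchNative):

//@name exampleprovider
//@display-name Example Provider
//@arg apikey string

addProvider('Example Provider', async (arg) => {
    // arg carries prompt_chat plus the sampler values from PluginV2ProviderArgument
    const res = await nativeFetch('https://example.invalid/v1/chat', {
        body: JSON.stringify({ messages: arg.prompt_chat, temperature: arg.temperature }),
        headers: { 'Authorization': 'Bearer ' + getArg('exampleprovider::apikey') },
        method: 'POST'
    })
    const data = await res.json()
    return { success: true, content: data?.content ?? '' }
})
onUnload(() => {
    // release any timers or listeners the plugin registered
})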
export async function translatorPlugin(text:string, from:string, to:string) {
if(!pluginTranslator){
return false

View File

@@ -30,6 +30,7 @@ import { hypaMemoryV2 } from "./memory/hypav2";
import { runLuaEditTrigger } from "./lua";
import { parseChatML } from "../parser.svelte";
import { getModelInfo, LLMFlags } from "../model/modellist";
import { pluginV2 } from "../plugins/plugins";
export interface OpenAIChat{
role: 'system'|'user'|'assistant'|'function'
@@ -39,6 +40,7 @@ export interface OpenAIChat{
removable?:boolean
attr?:string[]
multimodals?: MultiModal[]
thoughts?: string[]
}
export interface MultiModal{
@@ -752,19 +754,19 @@ export async function sendChat(chatProcessIndex = -1,arg:{
break
}
}
if(usingPromptTemplate && DBState.db.promptSettings.maxThoughtTagDepth !== -1){
const depth = ms.length - index
if(depth >= DBState.db.promptSettings.maxThoughtTagDepth){
formatedChat = formatedChat.replace(/<Thoughts>(.+?)<\/Thoughts>/gm, '')
}
}
let thoughts:string[] = []
formatedChat = formatedChat.replace(/<Thoughts>(.+?)<\/Thoughts>/gm, (match, p1) => {
thoughts.push(p1)
return ''
})
const chat:OpenAIChat = {
role: role,
content: formatedChat,
memo: msg.chatId,
attr: attr,
multimodals: multimodal
multimodals: multimodal,
thoughts: thoughts
}
if(chat.multimodals.length === 0){
delete chat.multimodals

View File

@@ -1,6 +1,6 @@
import type { MultiModal, OpenAIChat, OpenAIChatFull } from "./index.svelte";
import { getCurrentCharacter, getDatabase, setDatabase, type character } from "../storage/database.svelte";
import { pluginProcess } from "../plugins/plugins";
import { pluginProcess, pluginV2 } from "../plugins/plugins";
import { language } from "../../lang";
import { stringlizeAINChat, getStopStrings, unstringlizeAIN, unstringlizeChat } from "./stringlize";
import { addFetchLog, fetchNative, globalFetch, isNodeServer, isTauri, textifyReadableStream } from "../globalApi.svelte";
@@ -105,11 +105,46 @@ function applyParameters(data: { [key: string]: any }, parameters: Parameter[],
}
for(const parameter of parameters){
let value = 0
if(parameter === 'top_k' && arg.ignoreTopKIfZero && db.seperateParameters[ModelMode][parameter] === 0){
continue
}
let value = db.seperateParameters[ModelMode][parameter]
switch(parameter){
case 'temperature':{
value = db.seperateParameters[ModelMode].temperature === -1000 ? -1000 : (db.seperateParameters[ModelMode].temperature / 100)
break
}
case 'top_k':{
value = db.seperateParameters[ModelMode].top_k
break
}
case 'repetition_penalty':{
value = db.seperateParameters[ModelMode].repetition_penalty
break
}
case 'min_p':{
value = db.seperateParameters[ModelMode].min_p
break
}
case 'top_a':{
value = db.seperateParameters[ModelMode].top_a
break
}
case 'top_p':{
value = db.seperateParameters[ModelMode].top_p
break
}
case 'frequency_penalty':{
value = db.seperateParameters[ModelMode].frequency_penalty === -1000 ? -1000 : (db.seperateParameters[ModelMode].frequency_penalty / 100)
break
}
case 'presence_penalty':{
value = db.seperateParameters[ModelMode].presence_penalty === -1000 ? -1000 : (db.seperateParameters[ModelMode].presence_penalty / 100)
break
}
}
if(value === -1000 || value === undefined){
continue
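
For clarity, a condensed sketch of the value conventions applyParameters now follows; the stored numbers are hypothetical:

// temperature / frequency_penalty / presence_penalty are stored as integer percentages:
//   80    -> sent as 0.8 (divided by 100)
//   -1000 -> sentinel for "unset": the parameter is skipped entirely
// top_k, top_p, min_p, top_a and repetition_penalty pass through unscaled,
// and top_k is also skipped when ignoreTopKIfZero is set and the stored value is 0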
@@ -174,7 +209,22 @@ export async function requestChatData(arg:requestDataArgument, model:ModelModeEx
const db = getDatabase()
let trys = 0
while(true){
if(pluginV2.replacerbeforeRequest.size > 0){
for(const replacer of pluginV2.replacerbeforeRequest){
arg.formated = await replacer(arg.formated, model)
}
}
const da = await requestChatDataMain(arg, model, abortSignal)
if(da.type === 'success' && pluginV2.replacerafterRequest.size > 0){
for(const replacer of pluginV2.replacerafterRequest){
da.result = await replacer(da.result, model)
}
}
if(da.type !== 'fail' || da.noRetry){
return da
}
@@ -202,7 +252,7 @@ interface OpenAIImageContents {
type OpenAIContents = OpenAITextContents|OpenAIImageContents
export interface OpenAIChatExtra {
role: 'system'|'user'|'assistant'|'function'
role: 'system'|'user'|'assistant'|'function'|'developer'
content: string|OpenAIContents[]
memo?:string
name?:string
@@ -247,7 +297,23 @@ function reformater(formated:OpenAIChat[],modelInfo:LLMModel){
}
if(newFormated[newFormated.length-1].role === m.role){
newFormated[newFormated.length-1].content += '\n' + m.content
if(m.multimodals){
if(!newFormated[newFormated.length-1].multimodals){
newFormated[newFormated.length-1].multimodals = []
}
newFormated[newFormated.length-1].multimodals.push(...m.multimodals)
}
if(m.thoughts){
if(!newFormated[newFormated.length-1].thoughts){
newFormated[newFormated.length-1].thoughts = []
}
newFormated[newFormated.length-1].thoughts.push(...m.thoughts)
}
continue
}
else{
@@ -409,15 +475,6 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise<requestDat
})
}
if(aiModel.startsWith('gpt4o1')){
for(let i=0;i<formatedChat.length;i++){
if(formatedChat[i].role === 'system'){
formatedChat[i].content = `<system>${formatedChat[i].content}</system>`
formatedChat[i].role = 'user'
}
}
}
for(let i=0;i<arg.biasString.length;i++){
const bia = arg.biasString[i]
if(bia[0].startsWith('[[') && bia[0].endsWith(']]')){
@@ -476,6 +533,15 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise<requestDat
openrouterRequestModel = await getFreeOpenRouterModel()
}
if(arg.modelInfo.flags.includes(LLMFlags.DeveloperRole)){
formatedChat = formatedChat.map((v) => {
if(v.role === 'system'){
v.role = 'developer'
}
return v
})
}
console.log(formatedChat)
if(arg.modelInfo.format === LLMFormat.Mistral){
requestModel = aiModel
@@ -500,12 +566,12 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise<requestDat
}
else{
const prevChat = reformatedChat[reformatedChat.length-1]
if(prevChat.role === chat.role){
if(prevChat?.role === chat.role){
reformatedChat[reformatedChat.length-1].content += '\n' + chat.content
continue
}
else if(chat.role === 'system'){
if(prevChat.role === 'user'){
if(prevChat?.role === 'user'){
reformatedChat[reformatedChat.length-1].content += '\nSystem:' + chat.content
}
else{
@@ -617,7 +683,7 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise<requestDat
})
if(aiModel.startsWith('gpt4o1')){
if(aiModel.startsWith('gpt4o1') || arg.modelInfo.flags.includes(LLMFlags.OAICompletionTokens)){
body.max_completion_tokens = body.max_tokens
delete body.max_tokens
}
@@ -1328,7 +1394,15 @@ async function requestPlugin(arg:RequestDataArgumentExtended):Promise<requestDat
const db = getDatabase()
const maxTokens = arg.maxTokens
const bias = arg.biasString
const d = await pluginProcess({
const v2Function = pluginV2.providers.get(db.currentPluginProvider)
const d = v2Function ? (await v2Function(applyParameters({
prompt_chat: formated,
mode: arg.mode,
bias: []
}, [
'frequency_penalty','min_p','presence_penalty','repetition_penalty','top_k','top_p','temperature'
], {}, arg.mode) as any)) : await pluginProcess({
bias: bias,
prompt_chat: formated,
temperature: (db.temperature / 100),
@@ -1336,6 +1410,7 @@ async function requestPlugin(arg:RequestDataArgumentExtended):Promise<requestDat
presence_penalty: (db.PresensePenalty / 100),
frequency_penalty: (db.frequencyPenalty / 100)
})
if(!d){
return {
type: 'fail',
@@ -1387,14 +1462,51 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise
for(let i=0;i<formated.length;i++){
const chat = formated[i]
if(i === 0){
if(chat.role === 'user' || chat.role === 'assistant'){
reformatedChat.push({
role: chat.role === 'user' ? 'USER' : 'MODEL',
parts: [{
text: chat.content
}]
})
const prevChat = reformatedChat[reformatedChat.length-1]
const qRole =
chat.role === 'user' ? 'USER' :
chat.role === 'assistant' ? 'MODEL' :
chat.role
if (chat.multimodals && chat.multimodals.length > 0 && chat.role === "user") {
let geminiParts: GeminiPart[] = [];
geminiParts.push({
text: chat.content,
});
for (const modal of chat.multimodals) {
if (
(modal.type === "image" && arg.modelInfo.flags.includes(LLMFlags.hasImageInput)) ||
(modal.type === "audio" && arg.modelInfo.flags.includes(LLMFlags.hasAudioInput)) ||
(modal.type === "video" && arg.modelInfo.flags.includes(LLMFlags.hasVideoInput))
) {
const dataurl = modal.base64;
const base64 = dataurl.split(",")[1];
const mediaType = dataurl.split(";")[0].split(":")[1];
geminiParts.push({
inlineData: {
mimeType: mediaType,
data: base64,
}
});
}
}
reformatedChat.push({
role: "USER",
parts: geminiParts,
});
} else if (prevChat?.role === qRole) {
reformatedChat[reformatedChat.length-1].parts[
reformatedChat[reformatedChat.length-1].parts.length-1
].text += '\n' + chat.content
continue
}
else if(chat.role === 'system'){
if(prevChat?.role === 'USER'){
reformatedChat[reformatedChat.length-1].parts[0].text += '\nsystem:' + chat.content
}
else{
reformatedChat.push({
@@ -1405,78 +1517,32 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise
})
}
}
else if(chat.role === 'assistant' && arg.modelInfo.flags.includes(LLMFlags.geminiThinking)){
reformatedChat.push({
role: 'MODEL',
parts: [chat.thoughts?.length > 0 ? {
text: chat.thoughts.join('\n\n')
} : null, {
text: chat.content
}]
})
}
else if(chat.role === 'assistant' || chat.role === 'user'){
reformatedChat.push({
role: chat.role === 'user' ? 'USER' : 'MODEL',
parts: [{
text: chat.content
}]
})
}
else{
const prevChat = reformatedChat[reformatedChat.length-1]
const qRole =
chat.role === 'user' ? 'USER' :
chat.role === 'assistant' ? 'MODEL' :
chat.role
if (chat.multimodals && chat.multimodals.length > 0 && chat.role === "user") {
let geminiParts: GeminiPart[] = [];
geminiParts.push({
text: chat.content,
});
for (const modal of chat.multimodals) {
if (
(modal.type === "image" && arg.modelInfo.flags.includes(LLMFlags.hasImageInput)) ||
(modal.type === "audio" && arg.modelInfo.flags.includes(LLMFlags.hasAudioInput)) ||
(modal.type === "video" && arg.modelInfo.flags.includes(LLMFlags.hasVideoInput))
) {
const dataurl = modal.base64;
const base64 = dataurl.split(",")[1];
const mediaType = dataurl.split(";")[0].split(":")[1];
geminiParts.push({
inlineData: {
mimeType: mediaType,
data: base64,
}
});
}
}
reformatedChat.push({
role: "USER",
parts: geminiParts,
});
} else if (prevChat.role === qRole) {
reformatedChat[reformatedChat.length-1].parts[0].text += '\n' + chat.content
continue
}
else if(chat.role === 'system'){
if(prevChat.role === 'USER'){
reformatedChat[reformatedChat.length-1].parts[0].text += '\nsystem:' + chat.content
}
else{
reformatedChat.push({
role: "USER",
parts: [{
text: chat.role + ':' + chat.content
}]
})
}
}
else if(chat.role === 'assistant' || chat.role === 'user'){
reformatedChat.push({
role: chat.role === 'user' ? 'USER' : 'MODEL',
parts: [{
text: chat.content
}]
})
}
else{
reformatedChat.push({
role: "USER",
parts: [{
text: chat.role + ':' + chat.content
}]
})
}
reformatedChat.push({
role: "USER",
parts: [{
text: chat.role + ':' + chat.content
}]
})
}
}
@@ -1649,12 +1715,25 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise
const data = JSON.parse(reformatted)
let r = ''
let rDatas:string[] = ['']
for(const d of data){
r += d.candidates[0].content.parts[0].text
const parts = d.candidates[0].content?.parts
for(let i=0;i<parts.length;i++){
const part = parts[i]
if(i > 0){
rDatas.push('')
}
rDatas[rDatas.length-1] += part.text
}
}
if(rDatas.length > 1){
const thought = rDatas.splice(rDatas.length-2, 1)[0]
rDatas[rDatas.length-1] = `<Thoughts>${thought}</Thoughts>\n\n${rDatas.join('\n\n')}`
}
control.enqueue({
'0': r
'0': rDatas[rDatas.length-1],
})
} catch (error) {
console.log(error)
@@ -1682,13 +1761,22 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise
}
}
let fullRes = ''
let rDatas:string[] = ['']
const processDataItem = (data:any) => {
if(data?.candidates?.[0]?.content?.parts?.[0]?.text){
fullRes += data.candidates[0].content.parts[0].text
const parts = data?.candidates?.[0]?.content?.parts
if(parts){
for(let i=0;i<parts.length;i++){
const part = parts[i]
if(i > 0){
rDatas.push('')
}
rDatas[rDatas.length-1] += part.text
}
}
else if(data?.errors){
if(data?.errors){
return {
type: 'fail',
result: `${JSON.stringify(data.errors)}`
@@ -1711,9 +1799,15 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise
processDataItem(res.data)
}
if(rDatas.length > 1){
const thought = rDatas.splice(rDatas.length-2, 1)[0]
rDatas[rDatas.length-1] = `<Thoughts>${thought}</Thoughts>\n\n${rDatas.join('\n\n')}`
}
return {
type: 'success',
result: fullRes
result: rDatas[rDatas.length-1]
}
}
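
A worked sketch of the part folding above: with a geminiThinking model the candidate's parts arrive as [thought, answer], and the splice wraps the thought for the parser (part text hypothetical):

// parts  = ['Let me reread the scene first.', 'She opens the door.']
// rDatas = ['Let me reread the scene first.', 'She opens the door.'] after the loop
// the second-to-last entry is spliced out as the thought, producing:
// '<Thoughts>Let me reread the scene first.</Thoughts>\n\nShe opens the door.'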

View File

@@ -10,6 +10,7 @@ import { runCharacterJS } from "../plugins/embedscript";
import { getModuleAssets, getModuleRegexScripts } from "./modules";
import { HypaProcesser } from "./memory/hypamemory";
import { runLuaEditTrigger } from "./lua";
import { pluginV2 } from "../plugins/plugins";
const dreg = /{{data}}/g
const randomness = /\|\|\|/g
@@ -109,6 +110,15 @@ export async function processScriptFull(char:character|groupChat|simpleCharacter
data,
})
data = await runLuaEditTrigger(char, mode, data)
if(pluginV2[mode].size > 0){
for(const plugin of pluginV2[mode]){
const res = await plugin(data)
if(res !== null && res !== undefined){
data = res
}
}
}
if(scripts.length === 0){
cacheScript(scripts, originalData, data, mode)
return {data, emoChanged}

View File

@@ -134,7 +134,6 @@ export const runVITS = async (text: string, modelData:string|OnnxModelFiles = 'X
});
}
export const registerOnnxModel = async ():Promise<OnnxModelFiles> => {
const id = v4().replace(/-/g, '')

View File

@@ -12,7 +12,7 @@ import { defaultColorScheme, type ColorScheme } from '../gui/colorscheme';
import type { PromptItem, PromptSettings } from '../process/prompt';
import type { OobaChatCompletionRequestParams } from '../model/ooba';
export let appVer = "143.5.0"
export let appVer = "144.1.0"
export let webAppSubVer = ''
@@ -464,14 +464,12 @@ export function setDatabase(data:Database){
}
data.customFlags ??= []
data.enableCustomFlags ??= false
data.assetMaxDifference ??= 4
changeLanguage(data.language)
setDatabaseLite(data)
}
export function setDatabaseLite(data:Database){
if(import.meta.env.DEV){
console.trace('setDatabaseLite executed')
}
DBState.db = data
}
@@ -861,6 +859,9 @@ export interface Database{
presetChain: string
legacyMediaFindings?:boolean
geminiStream?:boolean
assetMaxDifference:number
menuSideBar:boolean
pluginV2: RisuPlugin[]
}
interface SeparateParameters{

View File

@@ -317,8 +317,15 @@ export async function translateHTML(html: string, reverse:boolean, charArg:simpl
return
}
// node.textContent = await translate(node.textContent || '', reverse);
let translated = await translate(node.textContent || "", reverse);
const translateChunks = (node.textContent || '').split(/\n\n+/g);
let translatedChunksPromises: Promise<string>[] = [];
for (const chunk of translateChunks) {
const translatedPromise = translate(chunk, reverse);
translatedChunksPromises.push(translatedPromise);
}
const translatedChunks = await Promise.all(translatedChunksPromises);
let translated = translatedChunks.join("\n\n");
if (!reprocessDisplayScript) {
node.textContent = translated;
return;
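
The change above trades one large translation request for parallel per-paragraph requests; a condensed sketch of the same pattern, assuming translate's signature as used here:

// hedged sketch: split on blank lines, translate concurrently, rejoin
const chunks = (node.textContent || '').split(/\n\n+/g)
const translated = (await Promise.all(chunks.map(c => translate(c, reverse)))).join('\n\n')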