This commit is contained in:
Kwaroran
2025-03-22 19:09:47 +09:00
41 changed files with 4715 additions and 175 deletions

131
src/ts/defaulthotkeys.ts Normal file
View File

@@ -0,0 +1,131 @@
/**
 * A single keyboard shortcut binding.
 * The modifier flags (`ctrl`/`shift`/`alt`) are treated as false when
 * omitted; `action` is the identifier the hotkey dispatcher switches on.
 */
export interface Hotkey{
    key: string
    ctrl?: boolean
    shift?: boolean
    alt?: boolean
    action: string
}

/** Shorthand for a Ctrl+Alt+<key> chord binding. */
const ctrlAlt = (key: string, action: string): Hotkey => ({ key, ctrl: true, alt: true, action })

/** Shorthand for a Ctrl+<key> chord binding. */
const ctrl = (key: string, action: string): Hotkey => ({ key, ctrl: true, action })

/** Default hotkey set used when the user has no saved bindings. */
export const defaultHotkeys: Hotkey[] = [
    // Message-level actions share the Ctrl+Alt chord.
    ctrlAlt('r', 'reroll'),
    ctrlAlt('f', 'unreroll'),
    ctrlAlt('t', 'translate'),
    ctrlAlt('d', 'remove'),
    ctrlAlt('e', 'edit'),
    ctrlAlt('c', 'copy'),
    ctrlAlt('Enter', 'send'),
    // App-level navigation uses plain Ctrl chords.
    ctrl('s', 'settings'),
    ctrl('h', 'home'),
    ctrl('p', 'presets'),
    ctrl('e', 'persona'),
    ctrl('m', 'modelSelect'),
    ctrl('.', 'toggleCSS'),
    // Actions from here down are not yet implemented in the dispatcher.
    ctrl('[', 'prevChar'),
    ctrl(']', 'nextChar'),
    ctrl('`', 'quickMenu'),
    ctrl('q', 'quickSettings'),
    ctrl('v', 'toggleVoice'),
    ctrl('l', 'toggleLog'),
    ctrl('u', 'previewRequest'),
    ctrl('w', 'webcam'),
    // Bare Space focuses the input box (only fires outside text fields).
    { key: ' ', action: 'focusInput' },
]

View File

@@ -1,62 +1,198 @@
import { get } from "svelte/store"
import { alertSelect, alertToast, doingAlert } from "./alert"
import { alertMd, alertSelect, alertToast, alertWait, doingAlert } from "./alert"
import { changeToPreset as changeToPreset2, getDatabase } from "./storage/database.svelte"
import { alertStore, MobileGUIStack, MobileSideBar, openPersonaList, openPresetList, SafeModeStore, selectedCharID, settingsOpen } from "./stores.svelte"
import { alertStore, MobileGUIStack, MobileSideBar, openPersonaList, openPresetList, OpenRealmStore, PlaygroundStore, QuickSettings, SafeModeStore, selectedCharID, settingsOpen } from "./stores.svelte"
import { language } from "src/lang"
import { updateTextThemeAndCSS } from "./gui/colorscheme"
import { defaultHotkeys } from "./defaulthotkeys"
import { doingChat, previewBody, sendChat } from "./process/index.svelte"
import { getRequestLog } from "./globalApi.svelte"
export function initHotkey(){
document.addEventListener('keydown', (ev) => {
if(ev.ctrlKey){
if(
!ev.ctrlKey &&
!ev.altKey &&
!ev.shiftKey &&
['INPUT', 'TEXTAREA'].includes(document.activeElement.tagName)
){
return
}
if(ev.altKey){
switch(ev.key){
case "r":{
ev.preventDefault()
clickQuery('.button-icon-reroll')
const database = getDatabase()
const hotKeys = database?.hotkeys ?? defaultHotkeys
let hotkeyRan = false
for(const hotkey of hotKeys){
let hotKeyRanThisTime = true
hotkey.ctrl = hotkey.ctrl ?? false
hotkey.alt = hotkey.alt ?? false
hotkey.shift = hotkey.shift ?? false
if(hotkey.key === ev.key){
console.log(`Hotkey: "${hotkey.key}" ${hotkey.ctrl} ${hotkey.alt} ${hotkey.shift}`)
console.log(`Event: "${ev.key}" ${ev.ctrlKey} ${ev.altKey} ${ev.shiftKey}`)
}
if(hotkey.ctrl !== ev.ctrlKey){
continue
}
if(hotkey.alt !== ev.altKey){
continue
}
if(hotkey.shift !== ev.shiftKey){
continue
}
if(hotkey.key !== ev.key){
continue
}
if(!hotkey.ctrl && !hotkey.alt && !hotkey.shift){
if(['INPUT', 'TEXTAREA'].includes(document.activeElement.tagName)){
continue
}
}
switch(hotkey.action){
case 'reroll':{
clickQuery('.button-icon-reroll')
break
}
case 'unreroll':{
clickQuery('.button-icon-unreroll')
break
}
case 'translate':{
clickQuery('.button-icon-translate')
break
}
case 'remove':{
clickQuery('.button-icon-remove')
break
}
case 'edit':{
clickQuery('.button-icon-edit')
setTimeout(() => {
focusQuery('.message-edit-area')
}, 100)
break
}
case 'copy':{
clickQuery('.button-icon-copy')
break
}
case 'focusInput':{
focusQuery('.text-input-area')
break
}
case 'send':{
clickQuery('.button-icon-send')
break
}
case 'settings':{
settingsOpen.set(!get(settingsOpen))
break
}
case 'home':{
selectedCharID.set(-1)
break
}
case 'presets':{
openPresetList.set(!get(openPresetList))
break
}
case 'persona':{
openPersonaList.set(!get(openPersonaList))
break
}
case 'toggleCSS':{
SafeModeStore.set(!get(SafeModeStore))
updateTextThemeAndCSS()
break
}
case 'prevChar':{
const sorted = database.characters.map((v, i) => {
return {name: v.name, i}
}).sort((a, b) => a.name.localeCompare(b.name))
const currentIndex = sorted.findIndex(v => v.i === get(selectedCharID))
if(currentIndex === 0){
return
}
case "f":{
ev.preventDefault()
clickQuery('.button-icon-unreroll')
if(currentIndex >= sorted.length - 1){
return
}
case "t":{
ev.preventDefault()
clickQuery('.button-icon-translate')
selectedCharID.set(sorted[currentIndex - 1].i)
PlaygroundStore.set(0)
OpenRealmStore.set(false)
break
}
case 'nextChar':{
const sorted = database.characters.map((v, i) => {
return {name: v.name, i}
}).sort((a, b) => a.name.localeCompare(b.name))
const currentIndex = sorted.findIndex(v => v.i === get(selectedCharID))
if(currentIndex === 0){
return
}
case "d":{
ev.preventDefault()
clickQuery('.button-icon-remove')
if(currentIndex >= sorted.length - 1){
return
}
case 'e':{
ev.preventDefault()
clickQuery('.button-icon-edit')
setTimeout(() => {
focusQuery('.message-edit-area')
}, 100)
return
}
case 'c':{
ev.preventDefault()
clickQuery('.button-icon-copy')
return
}
case 'i':{
ev.preventDefault()
focusQuery('.text-input-area')
return
}
case 'Enter':{
ev.preventDefault()
clickQuery('.button-icon-send')
return
selectedCharID.set(sorted[currentIndex + 1].i)
PlaygroundStore.set(0)
OpenRealmStore.set(false)
break
}
case 'quickMenu':{
quickMenu()
break
}
case 'previewRequest':{
if(get(doingChat) && get(selectedCharID) !== -1){
return false
}
alertWait("Loading...")
sendChat(-1, {
previewPrompt: true
})
let md = ''
md += '### Prompt\n'
md += '```json\n' + JSON.stringify(JSON.parse(previewBody), null, 2).replaceAll('```', '\\`\\`\\`') + '\n```\n'
doingChat.set(false)
alertMd(md)
break
}
case 'toggleLog':{
alertMd(getRequestLog())
break
}
case 'quickSettings':{
QuickSettings.open = !QuickSettings.open
QuickSettings.index = 0
break
}
default:{
hotKeyRanThisTime = false
}
}
if(hotKeyRanThisTime){
hotkeyRan = true
break
}
}
if(hotkeyRan){
ev.preventDefault()
ev.stopPropagation()
return
}
if(ev.ctrlKey){
switch (ev.key){
case "1":{
changeToPreset(0)
@@ -112,37 +248,6 @@ export function initHotkey(){
ev.stopPropagation()
break
}
case 's':{
settingsOpen.set(!get(settingsOpen))
ev.preventDefault()
ev.stopPropagation()
break
}
case 'h':{
selectedCharID.set(-1)
ev.preventDefault()
ev.stopPropagation()
break
}
case 'p':{
openPresetList.set(!get(openPresetList))
ev.preventDefault()
ev.stopPropagation()
break
}
case 'e':{
openPersonaList.set(!get(openPersonaList))
ev.preventDefault()
ev.stopPropagation()
break
}
case '.':{
SafeModeStore.set(!get(SafeModeStore))
updateTextThemeAndCSS()
ev.preventDefault()
ev.stopPropagation()
break
}
}
}
if(ev.key === 'Escape'){
@@ -179,18 +284,7 @@ export function initHotkey(){
if(doingAlert()){
return
}
const selStr = await alertSelect([
language.presets,
language.persona,
language.cancel
])
const sel = parseInt(selStr)
if(sel === 0){
openPresetList.set(!get(openPresetList))
}
if(sel === 1){
openPersonaList.set(!get(openPersonaList))
}
quickMenu()
}
if(touchs === 1){
touchStartTime = Date.now()
@@ -201,6 +295,21 @@ export function initHotkey(){
})
}
/**
 * Shows the quick-action select dialog (presets / persona / cancel) and
 * toggles the chosen panel. Selecting cancel (or dismissing) does nothing.
 */
async function quickMenu(){
    const choice = parseInt(await alertSelect([
        language.presets,
        language.persona,
        language.cancel
    ]))
    switch(choice){
        case 0:
            openPresetList.set(!get(openPresetList))
            break
        case 1:
            openPersonaList.set(!get(openPersonaList))
            break
    }
}
function clickQuery(query:string){
let ele = document.querySelector(query) as HTMLElement
console.log(ele)

View File

@@ -62,7 +62,8 @@ export enum LLMFormat{
Kobold,
Ollama,
Horde,
AWSBedrockClaude
AWSBedrockClaude,
OpenAIResponseAPI
}
export enum LLMTokenizer{
@@ -1391,6 +1392,21 @@ for(let model of LLMModels){
model.fullName ??= model.provider !== LLMProvider.AsIs ? `${ProviderNames.get(model.provider) ?? ''} ${model.name}`.trim() : model.name
}
// For every OpenAI model using the chat-completions format, register a
// sibling entry that targets the Responses API instead, with a derived id
// ("<id>-response-api"), a "(Response API)" name suffix, and prefill support.
// Note: this pushes into LLMModels while iterating it. The loop condition
// re-reads .length each pass, but appended entries carry format
// OpenAIResponseAPI, so they fail the inner check and the loop terminates.
for(let i=0; i<LLMModels.length; i++){
if(LLMModels[i].provider === LLMProvider.OpenAI && LLMModels[i].format === LLMFormat.OpenAICompatible){
LLMModels.push({
...LLMModels[i],
format: LLMFormat.OpenAIResponseAPI,
flags: [...LLMModels[i].flags, LLMFlags.hasPrefill],
id: `${LLMModels[i].id}-response-api`,
name: `${LLMModels[i].name} (Response API)`,
fullName: `${LLMModels[i].fullName ?? LLMModels[i].name} (Response API)`,
recommended: false
})
}
}
export function getModelInfo(id: string): LLMModel{
const db = getDatabase()

View File

@@ -120,7 +120,7 @@ export async function loadPlugins() {
}
type PluginV2ProviderArgument = {
prompt_chat: OpenAIChat[],
prompt_chat: OpenAIChat[]
frequency_penalty: number
min_p: number
presence_penalty: number
@@ -129,6 +129,7 @@ type PluginV2ProviderArgument = {
top_p: number
temperature: number
mode: string
max_tokens: number
}
type PluginV2ProviderOptions = {
@@ -140,7 +141,7 @@ type EditFunction = (content:string) => string|null|undefined|Promise<string|nul
type ReplacerFunction = (content:OpenAIChat[], type:string) => OpenAIChat[]|Promise<OpenAIChat[]>
export const pluginV2 = {
providers: new Map<string, (arg:PluginV2ProviderArgument) => Promise<{success:boolean,content:string|ReadableStream<string>}> >(),
providers: new Map<string, (arg:PluginV2ProviderArgument, abortSignal?: AbortSignal) => Promise<{success:boolean,content:string|ReadableStream<string>}> >(),
providerOptions: new Map<string, PluginV2ProviderOptions>(),
editdisplay: new Set<EditFunction>(),
editoutput: new Set<EditFunction>(),
@@ -189,7 +190,7 @@ export async function loadV2Plugin(plugins:RisuPlugin[]){
db.characters[charid] = char
setDatabaseLite(db)
},
addProvider: (name:string, func:(arg:PluginV2ProviderArgument) => Promise<{success:boolean,content:string}>, options?:PluginV2ProviderOptions) => {
addProvider: (name:string, func:(arg:PluginV2ProviderArgument, abortSignal?:AbortSignal) => Promise<{success:boolean,content:string}>, options?:PluginV2ProviderOptions) => {
let provs = get(customProviderStore)
provs.push(name)
pluginV2.providers.set(name, func)

View File

@@ -1,5 +1,5 @@
import { get, writable } from "svelte/store";
import { type character, type MessageGenerationInfo, type Chat, changeToPreset } from "../storage/database.svelte";
import { type character, type MessageGenerationInfo, type Chat, changeToPreset, setCurrentChat } from "../storage/database.svelte";
import { DBState } from '../stores.svelte';
import { CharEmotion, selectedCharID } from "../stores.svelte";
import { ChatTokenizer, tokenize, tokenizeNum } from "../tokenizer";
@@ -63,6 +63,7 @@ export const doingChat = writable(false)
export const chatProcessStage = writable(0)
export const abortChat = writable(false)
export let previewFormated:OpenAIChat[] = []
export let previewBody:string = ''
export async function sendChat(chatProcessIndex = -1,arg:{
chatAdditonalTokens?:number,
@@ -70,6 +71,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{
continue?:boolean,
usedContinueTokens?:number,
preview?:boolean
previewPrompt?:boolean
} = {}):Promise<boolean> {
chatProcessStage.set(0)
@@ -694,6 +696,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{
const triggerResult = await runTrigger(currentChar, 'start', {chat: currentChat})
if(triggerResult){
currentChat = triggerResult.chat
setCurrentChat(currentChat)
ms = currentChat.message
currentTokens += triggerResult.tokens
if(triggerResult.stopSending){
@@ -1303,9 +1306,15 @@ export async function sendChat(chatProcessIndex = -1,arg:{
bias: {},
continue: arg.continue,
chatId: generationId,
imageResponse: DBState.db.outputImageModal
imageResponse: DBState.db.outputImageModal,
previewBody: arg.previewPrompt
}, 'model', abortSignal)
if(arg.previewPrompt && req.type === 'success'){
previewBody = req.result
return true
}
let result = ''
let emoChanged = false
let resendChat = false

View File

@@ -17,6 +17,7 @@ function toRPN(expression:string) {
'≤': {precedence: 1, associativity: 'Left'},
'≥': {precedence: 1, associativity: 'Left'},
'=': {precedence: 1, associativity: 'Left'},
'≠': {precedence: 1, associativity: 'Left'},
'!': {precedence: 5, associativity: 'Right'},
};
const operatorsKeys = Object.keys(operators);
@@ -27,7 +28,11 @@ function toRPN(expression:string) {
let lastToken = ''
for(let i = 0; i < expression.length; i++) {
if(operatorsKeys.includes(expression[i])) {
const char = expression[i]
if (char === '-' && (i === 0 || operatorsKeys.includes(expression[i - 1]) || expression[i - 1] === '(')) {
lastToken += char
}
else if (operatorsKeys.includes(char)) {
if(lastToken !== '') {
expression2.push(lastToken)
}
@@ -35,10 +40,10 @@ function toRPN(expression:string) {
expression2.push('0')
}
lastToken = ''
expression2.push(expression[i])
expression2.push(char)
}
else{
lastToken += expression[i]
lastToken += char
}
}
@@ -94,6 +99,7 @@ function calculateRPN(expression:string) {
case '≤': stack.push(a <= b ? 1 : 0); break;
case '≥': stack.push(a >= b ? 1 : 0); break;
case '=': stack.push(a === b ? 1 : 0); break;
case '≠': stack.push(a !== b ? 1 : 0); break;
case '!': stack.push(b ? 0 : 1); break;
}
}
@@ -121,7 +127,14 @@ function executeRPNCalculation(text:string) {
return "0"
}
return parsed.toString()
}).replace(/&&/g, '&').replace(/\|\|/g, '|').replace(/<=/g, '≤').replace(/>=/g, '≥').replace(/==/g, '=').replace(/null/gi, '0')
})
.replace(/&&/g, '&')
.replace(/\|\|/g, '|')
.replace(/<=/g, '≤')
.replace(/>=/g, '≥')
.replace(/==/g, '=')
.replace(/!=/g, '≠')
.replace(/null/gi, '0')
const expression = toRPN(text);
const evaluated = calculateRPN(expression);
return evaluated

View File

@@ -23,6 +23,8 @@ interface LuaEngineState {
engine: LuaEngine;
mutex: Mutex;
chat: Chat;
setVar: (key:string, value:string) => void,
getVar: (key:string) => string
}
let LuaEngines = new Map<string, LuaEngineState>()
@@ -55,12 +57,16 @@ export async function runLua(code:string, arg:{
code,
engine: await luaFactory.createEngine({injectObjects: true}),
mutex: new Mutex(),
chat
chat,
setVar,
getVar
}
LuaEngines.set(mode, luaEngineState)
wasEmpty = true
} else {
luaEngineState.chat = chat
luaEngineState.setVar = setVar
luaEngineState.getVar = getVar
}
return await luaEngineState.mutex.runExclusive(async () => {
if (wasEmpty || code !== luaEngineState.code) {
@@ -72,13 +78,13 @@ export async function runLua(code:string, arg:{
if(!LuaSafeIds.has(id) && !LuaEditDisplayIds.has(id)){
return
}
setVar(key, value)
luaEngineState.setVar(key, value)
})
luaEngine.global.set('getChatVar', (id:string,key:string) => {
if(!LuaSafeIds.has(id) && !LuaEditDisplayIds.has(id)){
return
}
return getVar(key)
return luaEngineState.getVar(key)
})
luaEngine.global.set('stopChat', (id:string) => {
if(!LuaSafeIds.has(id)){

View File

@@ -133,9 +133,11 @@ export class HypaProcesser{
}
async addText(texts:string[]) {
const db = getDatabase()
const suffix = (this.model === 'custom' && db.hypaCustomSettings.model) ? `-${db.hypaCustomSettings.model}` : ""
for(let i=0;i<texts.length;i++){
const itm:memoryVector = await this.forage.getItem(texts[i] + '|' + this.model)
const itm:memoryVector = await this.forage.getItem(texts[i] + '|' + this.model + suffix)
if(itm){
itm.alreadySaved = true
this.vectors.push(itm)
@@ -164,7 +166,7 @@ export class HypaProcesser{
for(let i=0;i<memoryVectors.length;i++){
const vec = memoryVectors[i]
if(!vec.alreadySaved){
await this.forage.setItem(texts[i] + '|' + this.model, vec)
await this.forage.setItem(texts[i] + '|' + this.model + suffix, vec)
}
}

View File

@@ -22,6 +22,7 @@ import { extractJSON, getGeneralJSONSchema, getOpenAIJSONSchema } from "./templa
import { getModelInfo, LLMFlags, LLMFormat, type LLMModel } from "../model/modellist";
import { runTrigger } from "./triggers";
import { registerClaudeObserver } from "../observer.svelte";
import { v4 } from "uuid";
@@ -43,6 +44,7 @@ interface requestDataArgument{
schema?:string
extractJson?:string
imageResponse?:boolean
previewBody?:boolean
}
interface RequestDataArgumentExtended extends requestDataArgument{
@@ -485,6 +487,13 @@ export async function requestChatDataMain(arg:requestDataArgument, model:ModelMo
targ.customURL = db.forceReplaceUrl
}
if(db.seperateModelsForAxModels){
if(db.seperateModels[model]){
targ.aiModel = db.seperateModels[model]
targ.modelInfo = getModelInfo(targ.aiModel)
}
}
const format = targ.modelInfo.format
targ.formated = reformater(targ.formated, targ.modelInfo)
@@ -521,6 +530,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:ModelMo
return requestHorde(targ)
case LLMFormat.WebLLM:
return requestWebLLM(targ)
case LLMFormat.OpenAIResponseAPI:
return requestOpenAIResponseAPI(targ)
}
return {
@@ -726,8 +737,8 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise<requestDat
}
}
}
const res = await globalFetch(arg.customURL ?? "https://api.mistral.ai/v1/chat/completions", {
const targs = {
body: applyParameters({
model: requestModel,
messages: reformatedChat,
@@ -739,7 +750,20 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise<requestDat
},
abortSignal: arg.abortSignal,
chatId: arg.chatId
})
} as const
if(arg.previewBody){
return {
type: 'success',
result: JSON.stringify({
url: "https://api.mistral.ai/v1/chat/completions",
body: targs.body,
headers: targs.headers
})
}
}
const res = await globalFetch(arg.customURL ?? "https://api.mistral.ai/v1/chat/completions", targs)
const dat = res.data as any
if(res.ok){
@@ -959,6 +983,17 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise<requestDat
}
}
}
if(arg.previewBody){
return {
type: 'success',
result: JSON.stringify({
url: replacerURL,
body: body,
headers: headers
})
}
}
const da = await fetchNative(replacerURL, {
body: JSON.stringify(body),
method: "POST",
@@ -1140,6 +1175,17 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise<requestDat
}
}
if(arg.previewBody){
return {
type: 'success',
result: JSON.stringify({
url: replacerURL,
body: body,
headers: headers
})
}
}
const res = await globalFetch(replacerURL, {
body: body,
headers: headers,
@@ -1278,6 +1324,15 @@ async function requestOpenAILegacyInstruct(arg:RequestDataArgumentExtended):Prom
//return `\n\n${author}: ${m.content.trim()}`;
}).join("") + `\n## Response\n`;
if(arg.previewBody){
return {
type: 'success',
result: JSON.stringify({
error: "This model is not supported in preview mode"
})
}
}
const response = await globalFetch(arg.customURL ?? "https://api.openai.com/v1/completions", {
body: {
model: "gpt-3.5-turbo-instruct",
@@ -1310,6 +1365,165 @@ async function requestOpenAILegacyInstruct(arg:RequestDataArgumentExtended):Prom
}
// Input-side message item for the OpenAI Responses API (/v1/responses).
// Content parts may be plain text, an image, or an attached file.
interface OAIResponseInputItem {
content:({
type: 'input_text',
text: string
}|{
// Image resolution hint forwarded to the model.
detail: 'high'|'low'|'auto'
type: 'input_image',
image_url: string
}|{
type: 'input_file',
file_data: string
filename?: string
})[]
role:'user'|'system'|'developer'
}
// Output-side (assistant) message item for the OpenAI Responses API.
// NOTE(review): OpenAI's reference documents the message status literal as
// 'completed', not 'complete' — confirm the value used here against the API.
interface OAIResponseOutputItem {
content:({
type: 'output_text',
text: string,
annotations: []
})[]
type: 'message',
status: 'in_progress'|'complete'|'incomplete'
role:'assistant'
}
// Union of both message directions used when building the request "input".
type OAIResponseItem = OAIResponseInputItem|OAIResponseOutputItem
/**
 * Sends a chat request through the OpenAI Responses API (/v1/responses).
 * Converts the internally formatted chat into Responses "input" items
 * (text / image / file parts), applies sampling parameters, and returns
 * the first output_text of the first 'message' output item.
 * @param arg - extended request arguments (formatted chat, model info, chatId, …)
 * @returns success with the generated text, or fail with the error payload
 */
async function requestOpenAIResponseAPI(arg:RequestDataArgumentExtended):Promise<requestDataResponse>{
    const formated = arg.formated
    const db = getDatabase()
    const aiModel = arg.aiModel
    const maxTokens = arg.maxTokens
    const items:OAIResponseItem[] = []

    for(let i=0;i<formated.length;i++){
        const content = formated[i]
        switch(content.role){
            case 'function':
                // Function-call messages have no mapping here; skip them.
                break
            case 'assistant':{
                // NOTE(review): OpenAI documents this status as 'completed' —
                // confirm 'complete' is accepted by the API.
                const item:OAIResponseOutputItem = {
                    content: [],
                    role: content.role,
                    status: 'complete',
                    type: 'message',
                }
                item.content.push({
                    type: 'output_text',
                    text: content.content,
                    annotations: []
                })
                items.push(item)
                break
            }
            case 'user':
            case 'system':{
                const item:OAIResponseInputItem = {
                    content: [],
                    role: content.role
                }
                item.content.push({
                    type: 'input_text',
                    text: content.content
                })
                // Attach any multimodal payloads (images as input_image,
                // everything else as input_file).
                content.multimodals ??= []
                for(const multimodal of content.multimodals){
                    if(multimodal.type === 'image'){
                        item.content.push({
                            type: 'input_image',
                            detail: 'auto',
                            image_url: multimodal.base64
                        })
                    }
                    else{
                        item.content.push({
                            type: 'input_file',
                            file_data: multimodal.base64,
                        })
                    }
                }
                items.push(item)
                break
            }
        }
    }

    // Fix: guard the tail access — the original indexed items[items.length-1]
    // unconditionally, which throws on an empty item list. A trailing
    // assistant message is marked incomplete so the model continues it.
    const last = items[items.length - 1]
    if(last && last.role === 'assistant'){
        (last as OAIResponseOutputItem).status = 'incomplete'
    }

    const body = applyParameters({
        model: arg.modelInfo.internalID ?? aiModel,
        input: items,
        max_output_tokens: maxTokens,
        tools: [],
        store: false
    }, ['temperature', 'top_p'], {}, arg.mode)

    // Fix: populate tools BEFORE the preview early-return so the previewed
    // body matches the request actually sent (the original pushed after it).
    // NOTE(review): the Responses API expects tool entries as objects
    // (e.g. {type:'web_search_preview'}), not bare strings — confirm.
    if(db.modelTools.includes('search')){
        body.tools.push('web_search_preview')
    }

    if(arg.previewBody){
        return {
            type: 'success',
            result: JSON.stringify({
                url: "https://api.openai.com/v1/responses",
                body: body,
                headers: {
                    "Authorization": "Bearer " + db.openAIKey,
                    "Content-Type": "application/json"
                }
            })
        }
    }

    const response = await globalFetch("https://api.openai.com/v1/responses", {
        body: body,
        headers: {
            "Content-Type": "application/json",
            "Authorization": "Bearer " + db.openAIKey,
        },
        chatId: arg.chatId
    });

    if(!response.ok){
        return {
            type: 'fail',
            result: (language.errors.httpError + `${JSON.stringify(response.data)}`)
        }
    }

    // Pull the first output_text part out of the first 'message' output item.
    const text:string = (response.data.output?.find((m:OAIResponseOutputItem) => m.type === 'message') as OAIResponseOutputItem)?.content?.find(m => m.type === 'output_text')?.text
    if(!text){
        return {
            type: 'fail',
            result: JSON.stringify(response.data)
        }
    }
    return {
        type: 'success',
        result: text
    }
}
async function requestNovelAI(arg:RequestDataArgumentExtended):Promise<requestDataResponse>{
const formated = arg.formated
const db = getDatabase()
@@ -1324,6 +1538,15 @@ async function requestNovelAI(arg:RequestDataArgumentExtended):Promise<requestDa
sequence: number[], bias: number, ensure_sequence_finish: false, generate_once: true
}[] = []
if(arg.previewBody){
return {
type: 'success',
result: JSON.stringify({
error: "This model is not supported in preview mode"
})
}
}
for(let i=0;i<biasString.length;i++){
const bia = biasString[i]
const tokens = await tokenizeNum(bia[0])
@@ -1426,6 +1649,7 @@ async function requestOobaLegacy(arg:RequestDataArgumentExtended):Promise<reques
return risuChatParser(v.replace(/\\n/g, "\n"))
})
}
bodyTemplate = {
'max_new_tokens': db.maxResponse,
'do_sample': db.ooba.do_sample,
@@ -1454,6 +1678,17 @@ async function requestOobaLegacy(arg:RequestDataArgumentExtended):Promise<reques
'X-API-KEY': db.mancerHeader
}
if(arg.previewBody){
return {
type: 'success',
result: JSON.stringify({
url: blockingUrl,
body: bodyTemplate,
headers: headers
})
}
}
if(useStreaming){
const oobaboogaSocket = new WebSocket(streamUrl);
const statusCode = await new Promise((resolve) => {
@@ -1572,6 +1807,17 @@ async function requestOoba(arg:RequestDataArgumentExtended):Promise<requestDataR
}
}
if(arg.previewBody){
return {
type: 'success',
result: JSON.stringify({
url: urlStr,
body: bodyTemplate,
headers: {}
})
}
}
const response = await globalFetch(urlStr, {
body: bodyTemplate,
chatId: arg.chatId
@@ -1598,6 +1844,15 @@ async function requestPlugin(arg:RequestDataArgumentExtended):Promise<requestDat
const maxTokens = arg.maxTokens
const bias = arg.biasString
const v2Function = pluginV2.providers.get(db.currentPluginProvider)
if(arg.previewBody){
return {
type: 'success',
result: JSON.stringify({
error: "Plugin is not supported in preview mode"
})
}
}
const d = v2Function ? (await v2Function(applyParameters({
prompt_chat: formated,
@@ -1606,7 +1861,7 @@ async function requestPlugin(arg:RequestDataArgumentExtended):Promise<requestDat
max_tokens: maxTokens,
}, [
'frequency_penalty','min_p','presence_penalty','repetition_penalty','top_k','top_p','temperature'
], {}, arg.mode) as any)) : await pluginProcess({
], {}, arg.mode) as any, arg.abortSignal)) : await pluginProcess({
bias: bias,
prompt_chat: formated,
temperature: (db.temperature / 100),
@@ -2037,6 +2292,17 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise
if(arg.modelInfo.format === LLMFormat.GoogleCloud && arg.useStreaming){
headers['Content-Type'] = 'application/json'
if(arg.previewBody){
return {
type: 'success',
result: JSON.stringify({
url: url,
body: body,
headers: headers
})
}
}
const f = await fetchNative(url, {
headers: headers,
body: JSON.stringify(body),
@@ -2127,6 +2393,17 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise
}
}
if(arg.previewBody){
return {
type: 'success',
result: JSON.stringify({
url: url,
body: body,
headers: headers
})
}
}
const res = await globalFetch(url, {
headers: headers,
body: body,
@@ -2240,6 +2517,17 @@ async function requestKobold(arg:RequestDataArgumentExtended):Promise<requestDat
], {
'repetition_penalty': 'rep_pen'
}, arg.mode) as KoboldGenerationInputSchema
if(arg.previewBody){
return {
type: 'success',
result: JSON.stringify({
url: url.toString(),
body: body,
headers: {}
})
}
}
const da = await globalFetch(url.toString(), {
method: "POST",
@@ -2306,6 +2594,18 @@ async function requestNovelList(arg:RequestDataArgumentExtended):Promise<request
logit_bias: (logit_bias.length > 0) ? logit_bias.join("<<|>>") : undefined,
logit_bias_values: (logit_bias_values.length > 0) ? logit_bias_values.join("|") : undefined,
};
if(arg.previewBody){
return {
type: 'success',
result: JSON.stringify({
url: api_server_url + '/api',
body: send_body,
headers: headers
})
}
}
const response = await globalFetch(arg.customURL ?? api_server_url + '/api', {
method: 'POST',
headers: headers,
@@ -2339,6 +2639,15 @@ async function requestOllama(arg:RequestDataArgumentExtended):Promise<requestDat
const formated = arg.formated
const db = getDatabase()
if(arg.previewBody){
return {
type: 'success',
result: JSON.stringify({
error: "Preview body is not supported for Ollama"
})
}
}
const ollama = new Ollama({host: db.ollamaURL})
const response = await ollama.chat({
@@ -2453,6 +2762,20 @@ async function requestCohere(arg:RequestDataArgumentExtended):Promise<requestDat
console.log(body)
if(arg.previewBody){
return {
type: 'success',
result: JSON.stringify({
url: arg.customURL ?? 'https://api.cohere.com/v1/chat',
body: body,
headers: {
"Authorization": "Bearer " + db.cohereAPIKey,
"Content-Type": "application/json"
}
})
}
}
const res = await globalFetch(arg.customURL ?? 'https://api.cohere.com/v1/chat', {
method: "POST",
headers: {
@@ -2548,7 +2871,7 @@ async function requestClaude(arg:RequestDataArgumentExtended):Promise<requestDat
if(claudeChat.length > 0 && claudeChat[claudeChat.length-1].role === chat.role){
let content = claudeChat[claudeChat.length-1].content
if(multimodals && multimodals.length > 0 && !Array.isArray(content)){
content = [{
content = [{
type: 'text',
text: content
}]
@@ -2725,6 +3048,9 @@ async function requestClaude(arg:RequestDataArgumentExtended):Promise<requestDat
else if(body?.thinking?.budget_tokens && body?.thinking?.budget_tokens > 0){
body.thinking.type = 'enabled'
}
else if(body?.thinking?.budget_tokens === null){
delete body.thinking
}
if(systemPrompt === ''){
delete body.system
@@ -2783,6 +3109,18 @@ async function requestClaude(arg:RequestDataArgumentExtended):Promise<requestDat
const signed = await signer.sign(rq);
if(arg.previewBody){
return {
type: 'success',
result: JSON.stringify({
url: url,
body: params,
headers: signed.headers
})
}
}
const res = await globalFetch(url, {
method: "POST",
body: params,
@@ -2873,6 +3211,18 @@ async function requestClaude(arg:RequestDataArgumentExtended):Promise<requestDat
headers['anthropic-dangerous-direct-browser-access'] = 'true'
}
if(arg.previewBody){
return {
type: 'success',
result: JSON.stringify({
url: replacerURL,
body: body,
headers: headers
})
}
}
if(db.claudeRetrivalCaching){
registerClaudeObserver({
@@ -3100,6 +3450,15 @@ async function requestHorde(arg:RequestDataArgumentExtended):Promise<requestData
const currentChar = getCurrentCharacter()
const abortSignal = arg.abortSignal
if(arg.previewBody){
return {
type: 'success',
result: JSON.stringify({
error: "Preview body is not supported for Horde"
})
}
}
const prompt = applyChatTemplate(formated)
const realModel = aiModel.split(":::")[1]
@@ -3198,6 +3557,15 @@ async function requestWebLLM(arg:RequestDataArgumentExtended):Promise<requestDat
const temperature = arg.temperature
const realModel = aiModel.split(":::")[1]
const prompt = applyChatTemplate(formated)
if(arg.previewBody){
return {
type: 'success',
result: JSON.stringify({
error: "Preview body is not supported for WebLLM"
})
}
}
const v = await runTransformers(prompt, realModel, {
temperature: temperature,
max_new_tokens: maxTokens,

View File

@@ -12,7 +12,7 @@ import { defaultColorScheme, type ColorScheme } from '../gui/colorscheme';
import type { PromptItem, PromptSettings } from '../process/prompt';
import type { OobaChatCompletionRequestParams } from '../model/ooba';
export let appVer = "154.0.0"
export let appVer = "156.0.0"
export let webAppSubVer = ''
@@ -335,6 +335,7 @@ export function setDatabase(data:Database){
data.mancerHeader ??= ''
data.emotionProcesser ??= 'submodel'
data.translatorType ??= 'google'
data.htmlTranslation ??= false
data.deeplOptions ??= {
key:'',
freeApi: false
@@ -494,6 +495,9 @@ export function setDatabase(data:Database){
key: data.hypaCustomSettings?.key ?? "",
model: data.hypaCustomSettings?.model ?? "",
}
data.doNotChangeSeperateModels ??= false
data.modelTools ??= []
data.hotkeys ??= structuredClone(defaultHotkeys)
changeLanguage(data.language)
setDatabaseLite(data)
}
@@ -735,8 +739,9 @@ export interface Database{
mancerHeader:string
emotionProcesser:'submodel'|'embedding',
showMenuChatList?:boolean,
translatorType:'google'|'deepl'|'none'|'llm'|'deeplX',
translatorType:'google'|'deepl'|'none'|'llm'|'deeplX'|'bergamot',
translatorInputLanguage?:string
htmlTranslation?:boolean,
NAIadventure?:boolean,
NAIappendName?:boolean,
deeplOptions:{
@@ -930,7 +935,16 @@ export interface Database{
claudeRetrivalCaching: boolean
outputImageModal: boolean
playMessageOnTranslateEnd:boolean
seperateModelsForAxModels:boolean
seperateModels:{
memory: string
emotion: string
translate: string
otherAx: string
}
doNotChangeSeperateModels:boolean
modelTools: string[]
hotkeys:Hotkey[]
}
interface SeparateParameters{
@@ -1114,6 +1128,7 @@ export interface character{
hideChatIcon?:boolean
lastInteraction?:number
translatorNote?:string
doNotChangeSeperateModels?:boolean
}
@@ -1265,6 +1280,14 @@ export interface botPreset{
reasonEffort?:number
thinkingTokens?:number
outputImageModal?:boolean
seperateModelsForAxModels?:boolean
seperateModels?:{
memory: string
emotion: string
translate: string
otherAx: string
}
modelTools?:string[]
}
@@ -1581,7 +1604,10 @@ export function saveCurrentPreset(){
image: pres?.[db.botPresetsId]?.image ?? '',
reasonEffort: db.reasoningEffort ?? 0,
thinkingTokens: db.thinkingTokens ?? null,
outputImageModal: db.outputImageModal ?? false
outputImageModal: db.outputImageModal ?? false,
seperateModelsForAxModels: db.doNotChangeSeperateModels ? false : db.seperateModelsForAxModels ?? false,
seperateModels: db.doNotChangeSeperateModels ? null : safeStructuredClone(db.seperateModels),
modelTools: safeStructuredClone(db.modelTools),
}
db.botPresets = pres
setDatabase(db)
@@ -1694,6 +1720,17 @@ export function setPreset(db:Database, newPres: botPreset){
db.reasoningEffort = newPres.reasonEffort ?? 0
db.thinkingTokens = newPres.thinkingTokens ?? null
db.outputImageModal = newPres.outputImageModal ?? false
if(!db.doNotChangeSeperateModels){
db.seperateModelsForAxModels = newPres.seperateModelsForAxModels ?? false
db.seperateModels = safeStructuredClone(newPres.seperateModels) ?? {
memory: '',
emotion: '',
translate: '',
otherAx: ''
}
}
db.modelTools = safeStructuredClone(newPres.modelTools ?? [])
return db
}
@@ -1708,6 +1745,7 @@ import { LLMFlags, LLMFormat } from '../model/modellist';
import type { Parameter } from '../process/request';
import type { HypaModel } from '../process/memory/hypamemory';
import type { SerializableHypaV3Data } from '../process/memory/hypav3';
import { defaultHotkeys, type Hotkey } from '../defaulthotkeys';
export async function downloadPreset(id:number, type:'json'|'risupreset'|'return' = 'json'){
saveCurrentPreset()

View File

@@ -98,6 +98,11 @@ export const LoadingStatusState = $state({
text: '',
})
export const QuickSettings = $state({
open: false,
index: 0
})
export const disableHighlight = writable(true)
ReloadGUIPointer.subscribe(() => {

View File

@@ -0,0 +1,145 @@
import { LatencyOptimisedTranslator, TranslatorBacking } from "@browsermt/bergamot-translator";
import { gunzipSync } from 'fflate';
// Cache for downloaded translation model files
// IndexedDB-backed cache of translation model files, keyed by URL and
// validated against a checksum string on read.
class CacheDB {
    private readonly dbName: string;
    private readonly storeName: string = "cache";

    constructor(dbName: string = "cache") {
        this.dbName = dbName;
    }

    // Opens (and lazily creates) the backing database. Callers are
    // responsible for closing the returned connection when done.
    private async getDB(): Promise<IDBDatabase> {
        return new Promise((resolve, reject) => {
            const request = indexedDB.open(this.dbName, 1);
            request.onupgradeneeded = (event: IDBVersionChangeEvent) => {
                const db = (event.target as IDBOpenDBRequest).result;
                if (!db.objectStoreNames.contains(this.storeName)) {
                    db.createObjectStore(this.storeName, { keyPath: "url" });
                }
            };
            request.onsuccess = () => resolve(request.result);
            request.onerror = () => reject(request.error);
        });
    }

    /**
     * Returns the cached buffer for `url` when present AND its stored
     * checksum matches; resolves to null on a cache miss or stale entry.
     */
    async load(url: string, checksum: string): Promise<ArrayBuffer | null> {
        const db = await this.getDB();
        return new Promise((resolve, reject) => {
            const transaction = db.transaction(this.storeName, "readonly");
            const store = transaction.objectStore(this.storeName);
            const request = store.get(url);
            request.onsuccess = () => {
                db.close(); // close per-call connection — previously leaked
                const result = request.result;
                if (result && result.checksum === checksum) {
                    resolve(result.buffer);
                } else {
                    resolve(null);
                }
            };
            request.onerror = () => {
                db.close();
                reject(request.error);
            };
        });
    }

    /** Stores (or overwrites) the buffer for `url` with its checksum. */
    async save(url: string, checksum: string, buffer: ArrayBuffer): Promise<void> {
        const db = await this.getDB();
        return new Promise((resolve, reject) => {
            const transaction = db.transaction(this.storeName, "readwrite");
            const store = transaction.objectStore(this.storeName);
            const request = store.put({ url, checksum, buffer });
            request.onsuccess = () => {
                db.close();
                resolve();
            };
            request.onerror = () => {
                db.close();
                reject(request.error);
            };
        });
    }

    /** Removes every cached entry from the store. */
    async clear(): Promise<void> {
        const db = await this.getDB();
        return new Promise((resolve, reject) => {
            const transaction = db.transaction(this.storeName, "readwrite");
            const store = transaction.objectStore(this.storeName);
            const request = store.clear();
            request.onsuccess = () => {
                db.close();
                resolve();
            };
            request.onerror = () => {
                db.close();
                reject(request.error);
            };
        });
    }
}
// Mozilla Firefox Translations Models
// Translator backing that serves Mozilla's Firefox translation models,
// layering an IndexedDB cache (CacheDB) over network downloads.
class FirefoxBacking extends TranslatorBacking {
    private cache: CacheDB;
    downloadTimeout: number;

    constructor(options?) {
        const registryUrl = 'https://raw.githubusercontent.com/mozilla/firefox-translations-models/refs/heads/main/registry.json';
        options = options || {};
        options.registryUrl = options.registryUrl || registryUrl;
        super(options);
        this.cache = new CacheDB("firefox-translations-models");
    }

    // Rewrites each registry entry's file names into absolute, gzipped
    // download URLs. (Spelling "Registery" follows the base-class method
    // being overridden — see the super call.)
    async loadModelRegistery() {
        const modelUrl = 'https://media.githubusercontent.com/media/mozilla/firefox-translations-models/refs/heads/main/models';
        const registry = await super.loadModelRegistery();
        for (const entry of registry) {
            for(const name in entry.files) {
                const file = entry.files[name];
                file.name = `${modelUrl}/${file.modelType}/${entry.from}${entry.to}/${file.name}.gz`;
            }
        }
        return registry;
    }

    /**
     * Fetches a model file, preferring the cache. Network payloads are
     * gzip-compressed; the decompressed buffer is what gets cached.
     */
    async fetch(url, checksum, extra) {
        const cacheBuffer = await this.cache.load(url, checksum);
        if (cacheBuffer) { return cacheBuffer; }
        const res = await fetch(url, {
            credentials: 'omit',
        });
        if (!res.ok) {
            // Without this check an HTTP error body would be fed to the
            // gzip decompressor (or worse, cached as a model).
            throw new Error(`Failed to download translation model ${url}: HTTP ${res.status}`);
        }
        // Decompress GZip
        const buffer = await res.arrayBuffer();
        const decomp = await decompressGZip(buffer);
        await this.cache.save(url, checksum, decomp);
        return decomp;
    }
}
/**
 * Decompresses a gzip payload into an ArrayBuffer. Prefers the native
 * Compression Streams API; falls back to fflate's gunzipSync on engines
 * that lack DecompressionStream.
 */
async function decompressGZip(buffer: ArrayBuffer) {
    if (typeof DecompressionStream === "undefined") {
        // Synchronous fallback for older engines.
        return gunzipSync(new Uint8Array(buffer)).buffer;
    }
    const gunzip = new DecompressionStream('gzip');
    const inflated = new Response(buffer).body.pipeThrough(gunzip);
    return await new Response(inflated).arrayBuffer();
}
// Shared translator instance (created on first use) and the tail of the
// serialized translation queue.
let translator = null;
let translateTask = null;

/**
 * Translates `text` with the in-browser Bergamot translator.
 * Calls are serialized: each waits for the previous task so the single
 * translator instance is never used concurrently.
 *
 * @param html pass true when `text` is HTML markup to preserve tags
 */
export async function bergamotTranslate(text:string, from:string, to:string, html:boolean|null) {
    translator ??= new LatencyOptimisedTranslator({}, new FirefoxBacking())
    const result = await (translateTask = translate());
    return result.target.text;

    // Wait for previous tasks...
    async function translate() {
        // Swallow a previous task's failure: re-awaiting a rejected
        // translateTask would rethrow the stale error and permanently
        // break every subsequent translation.
        try {
            await translateTask;
        } catch {
            // previous failure already surfaced to its own caller
        }
        return translator.translate({
            from: from, to: to,
            text: text, html: html,
        });
    }
}
// Clear Cache
/** Deletes every cached Firefox translation model from IndexedDB. */
export async function clearCache() {
    const modelCache = new CacheDB("firefox-translations-models");
    await modelCache.clear();
}

View File

@@ -18,6 +18,8 @@ let cache={
trans: ['']
}
let bergamotTranslate: (text: string, from: string, to: string, html?: boolean) => Promise<string>|null = null
export const LLMCacheStorage = localforage.createInstance({
name: "LLMTranslateCache"
})
@@ -165,6 +167,14 @@ async function translateMain(text:string, arg:{from:string, to:string, host:stri
return f.data.data;
}
if(db.translatorType == "bergamot") {
if(!bergamotTranslate){
const bergamotTranslator = await import('./bergamotTranslator')
bergamotTranslate = bergamotTranslator.bergamotTranslate
}
return bergamotTranslate(text, arg.from, arg.to, false);
}
if(db.useExperimentalGoogleTranslator){
const hqAvailable = isTauri || isNodeServer || userScriptFetch
@@ -274,6 +284,17 @@ export async function translateHTML(html: string, reverse:boolean, charArg:simpl
return r
}
if(db.translatorType == "bergamot" && db.htmlTranslation) {
const from = db.aiModel.startsWith('novellist') ? 'ja' : 'en'
const to = db.translator || 'en'
if(!bergamotTranslate){
const bergamotTranslator = await import('./bergamotTranslator')
bergamotTranslate = bergamotTranslator.bergamotTranslate
}
return bergamotTranslate(html, from, to, true)
}
const dom = new DOMParser().parseFromString(html, 'text/html');
console.log(html)