Merge branch 'main' into local_lore2

kwaroran authored on 2025-03-05 05:38:11 +09:00 · committed by GitHub
23 changed files with 1022 additions and 313 deletions

View File

@@ -1371,6 +1371,7 @@ export async function exportCharacterCard(char:character, type:'png'|'json'|'cha
}
card.data.assets[i].uri = 'embeded://' + path
const imageType = checkImageType(rData)
const metaPath = `x_meta/${name}.json`
if(imageType === 'PNG' && writer instanceof CharXWriter){
const metadatas:Record<string,string> = {}
const gen = PngChunk.readGenerator(rData)
@@ -1380,10 +1381,20 @@ export async function exportCharacterCard(char:character, type:'png'|'json'|'cha
}
metadatas[chunk.key] = chunk.value
}
console.log(metadatas)
if(Object.keys(metadatas).length > 0){
const metaPath = `x_meta/${name}.json`
await writer.write(metaPath, Buffer.from(JSON.stringify(metadatas, null, 4)), 6)
}
else{
await writer.write(metaPath, Buffer.from(JSON.stringify({
'type': imageType
}), 'utf-8'), 6)
}
}
else{
await writer.write(metaPath, Buffer.from(JSON.stringify({
'type': imageType
}), 'utf-8'), 6)
}
await writer.write(path, Buffer.from(await convertImage(rData)))
}
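
Note (editor's sketch, not part of the diff): with this change, each embedded asset written through this export path gets an x_meta entry: PNG tEXt chunk key/value pairs when the source image had them, otherwise a minimal { type } record. A reader-side sketch, where readEntry is a hypothetical zip-entry accessor:

type XMeta = Record<string, string> | { type: string }

async function readAssetMeta(readEntry: (p: string) => Promise<Uint8Array>, name: string): Promise<XMeta | null> {
    try {
        const raw = await readEntry(`x_meta/${name}.json`)
        return JSON.parse(new TextDecoder().decode(raw)) as XMeta
    } catch {
        return null // cards exported before this change carry no x_meta entry
    }
}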

View File

@@ -32,7 +32,9 @@ export function createNewGroup(){
note: '',
name: 'Chat 1',
localLore: []
}], chatPage: 0,
}],
chatFolders: [],
chatPage: 0,
viewScreen: 'none',
globalLore: [],
characters: [],
@@ -402,6 +404,11 @@ export async function importChat(){
return
}
if(db.characters[selectedID].chatFolders
.filter(folder => folder.id === newChat.folderId).length === 0) {
newChat.folderId = null
}
db.characters[selectedID].chats.unshift(newChat)
setDatabase(db)
alertNormal(language.successImport)
@@ -585,6 +592,7 @@ export function createBlankChar():character{
name: 'Chat 1',
localLore: []
}],
chatFolders: [],
chatPage: 0,
emotionImages: [],
bias: [],
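
Note (editor's sketch, not part of the diff): the importChat change above enforces one invariant: an imported chat's folderId must reference an existing folder, or it is nulled out. The same check as a standalone helper (ChatFolder itself is declared later in this commit, in the database file):

function normalizeFolderId(chat: { folderId?: string | null }, folders: { id: string }[]): void {
    if (chat.folderId && !folders.some(f => f.id === chat.folderId)) {
        chat.folderId = null // orphaned reference; the chat falls back to the root list
    }
}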

View File

@@ -756,6 +756,7 @@ interface GlobalFetchResult {
ok: boolean;
data: any;
headers: { [key: string]: string };
status: number;
}
/**
@@ -806,13 +807,13 @@ export async function globalFetch(url: string, arg: GlobalFetchArgs = {}): Promi
const method = arg.method ?? "POST";
db.requestmet = "normal";
if (arg.abortSignal?.aborted) { return { ok: false, data: 'aborted', headers: {} }; }
if (arg.abortSignal?.aborted) { return { ok: false, data: 'aborted', headers: {}, status: 400 }; }
const urlHost = new URL(url).hostname
const forcePlainFetch = ((knownHostes.includes(urlHost) && !isTauri) || db.usePlainFetch || arg.plainFetchForce) && !arg.plainFetchDeforce
if (knownHostes.includes(urlHost) && !isTauri && !isNodeServer) {
return { ok: false, headers: {}, data: 'You are trying local request on web version. This is not allowed due to browser security policy. Use the desktop version instead, or use a tunneling service like ngrok and set the CORS to allow all.' };
return { ok: false, headers: {}, status:400, data: 'You are trying local request on web version. This is not allowed due to browser security policy. Use the desktop version instead, or use a tunneling service like ngrok and set the CORS to allow all.' };
}
if (forcePlainFetch) {
@@ -832,7 +833,7 @@ export async function globalFetch(url: string, arg: GlobalFetchArgs = {}): Promi
} catch (error) {
console.error(error);
return { ok: false, data: `${error}`, headers: {} };
return { ok: false, data: `${error}`, headers: {}, status: 400 };
}
}
@@ -887,9 +888,9 @@ async function fetchWithPlainFetch(url: string, arg: GlobalFetchArgs): Promise<G
const data = arg.rawResponse ? new Uint8Array(await response.arrayBuffer()) : await response.json();
const ok = response.ok && response.status >= 200 && response.status < 300;
addFetchLogInGlobalFetch(data, ok, url, arg);
return { ok, data, headers: Object.fromEntries(response.headers) };
return { ok, data, headers: Object.fromEntries(response.headers), status: response.status };
} catch (error) {
return { ok: false, data: `${error}`, headers: {} };
return { ok: false, data: `${error}`, headers: {}, status: 400 };
}
}
@@ -907,9 +908,9 @@ async function fetchWithUSFetch(url: string, arg: GlobalFetchArgs): Promise<Glob
const data = arg.rawResponse ? new Uint8Array(await response.arrayBuffer()) : await response.json();
const ok = response.ok && response.status >= 200 && response.status < 300;
addFetchLogInGlobalFetch(data, ok, url, arg);
return { ok, data, headers: Object.fromEntries(response.headers) };
return { ok, data, headers: Object.fromEntries(response.headers), status: response.status };
} catch (error) {
return { ok: false, data: `${error}`, headers: {} };
return { ok: false, data: `${error}`, headers: {}, status: 400 };
}
}
@@ -927,7 +928,7 @@ async function fetchWithTauri(url: string, arg: GlobalFetchArgs): Promise<Global
const data = arg.rawResponse ? new Uint8Array(await response.arrayBuffer()) : await response.json();
const ok = response.status >= 200 && response.status < 300;
addFetchLogInGlobalFetch(data, ok, url, arg);
return { ok, data, headers: Object.fromEntries(response.headers) };
return { ok, data, headers: Object.fromEntries(response.headers), status: response.status };
} catch (error) {
}
@@ -946,6 +947,7 @@ async function fetchWithCapacitor(url: string, arg: GlobalFetchArgs): Promise<Gl
ok: true,
data: rawResponse ? new Uint8Array(res.data as ArrayBuffer) : res.data,
headers: res.headers,
status: res.status
};
}
@@ -975,21 +977,21 @@ async function fetchWithProxy(url: string, arg: GlobalFetchArgs): Promise<Global
if (arg.rawResponse) {
const data = new Uint8Array(await response.arrayBuffer());
addFetchLogInGlobalFetch("Uint8Array Response", isSuccess, url, arg);
return { ok: isSuccess, data, headers: Object.fromEntries(response.headers) };
return { ok: isSuccess, data, headers: Object.fromEntries(response.headers), status: response.status };
}
const text = await response.text();
try {
const data = JSON.parse(text);
addFetchLogInGlobalFetch(data, isSuccess, url, arg);
return { ok: isSuccess, data, headers: Object.fromEntries(response.headers) };
return { ok: isSuccess, data, headers: Object.fromEntries(response.headers), status: response.status };
} catch (error) {
const errorMsg = text.startsWith('<!DOCTYPE') ? "Responded HTML. Is your URL, API key, and password correct?" : text;
addFetchLogInGlobalFetch(text, false, url, arg);
return { ok: false, data: errorMsg, headers: Object.fromEntries(response.headers) };
return { ok: false, data: errorMsg, headers: Object.fromEntries(response.headers), status: response.status };
}
} catch (error) {
return { ok: false, data: `${error}`, headers: {} };
return { ok: false, data: `${error}`, headers: {}, status: 400 };
}
}
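
Note (editor's sketch, not part of the diff): every GlobalFetchResult now carries an HTTP status, with 400 used as a synthetic code on all error paths. Callers can use it to separate transient failures from hard ones; the URL below is illustrative:

const res = await globalFetch('https://example.com/api', { body: {} })
if (!res.ok && (res.status === 429 || res.status >= 500)) {
    // likely transient (rate limit or server error); retrying with backoff is reasonable
}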

View File

@@ -22,7 +22,8 @@ export enum LLMFlags{
deepSeekPrefix,
deepSeekThinkingInput,
deepSeekThinkingOutput,
noCivilIntegrity
noCivilIntegrity,
claudeThinking,
}
export enum LLMProvider{
@@ -522,6 +523,23 @@ export const LLMModels: LLMModel[] = [
parameters: ClaudeParameters,
tokenizer: LLMTokenizer.Claude
},
{
name: "Claude 3.7 Sonnet",
id: 'claude-3-7-sonnet-latest',
shortName: "3.7 Sonnet",
provider: LLMProvider.Anthropic,
format: LLMFormat.Anthropic,
flags: [
LLMFlags.hasPrefill,
LLMFlags.hasImageInput,
LLMFlags.hasFirstSystemPrompt,
LLMFlags.hasStreaming,
LLMFlags.claudeThinking
],
recommended: true,
parameters: [...ClaudeParameters, 'thinking_tokens'],
tokenizer: LLMTokenizer.Claude
},
{
name: "Claude 3.5 Haiku",
id: 'claude-3-5-haiku-latest',
@@ -598,6 +616,22 @@ export const LLMModels: LLMModel[] = [
parameters: ClaudeParameters,
tokenizer: LLMTokenizer.Claude
},
{
name: "Claude 3.7 Sonnet (20250219)",
id: 'claude-3-7-sonnet-20250219',
shortName: "3.7 Sonnet",
provider: LLMProvider.Anthropic,
format: LLMFormat.Anthropic,
flags: [
LLMFlags.hasPrefill,
LLMFlags.hasImageInput,
LLMFlags.hasFirstSystemPrompt,
LLMFlags.hasStreaming,
LLMFlags.claudeThinking
],
parameters: [...ClaudeParameters, 'thinking_tokens'],
tokenizer: LLMTokenizer.Claude
},
{
name: 'Claude 3 Opus (20240229)',
id: 'claude-3-opus-20240229',
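
Note (editor's sketch, not part of the diff): the new claudeThinking flag is the hook downstream code can use to gate the thinking_tokens parameter, for example:

const thinkingModels = LLMModels.filter(m => m.flags.includes(LLMFlags.claudeThinking))
// with this diff, that matches exactly the two Claude 3.7 Sonnet entries above;
// a settings UI would surface a thinking-tokens control only for these models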

View File

@@ -37,14 +37,14 @@ export class HypaProcesser{
name: "hypaVector"
})
this.vectors = []
const db = getDatabase()
if(model === 'auto'){
const db = getDatabase()
this.model = db.hypaModel || 'MiniLM'
}
else{
this.model = model
}
this.customEmbeddingUrl = customEmbeddingUrl
this.customEmbeddingUrl = customEmbeddingUrl || db.hypaCustomSettings.url
}
async embedDocuments(texts: string[]): Promise<VectorArray[]> {
@@ -77,12 +77,17 @@ export class HypaProcesser{
}
const {customEmbeddingUrl} = this
const replaceUrl = customEmbeddingUrl.endsWith('/embeddings')?customEmbeddingUrl:appendLastPath(customEmbeddingUrl,'embeddings')
gf = await globalFetch(replaceUrl.toString(), {
body:{
"input": input
},
})
const db = getDatabase()
const fetchArgs = {
...(db.hypaCustomSettings.key ? {headers: {"Authorization": "Bearer " + db.hypaCustomSettings.key}} : {}),
body: {
"input": input,
...(db.hypaCustomSettings.model ? {"model": db.hypaCustomSettings.model} : {})
}
};
gf = await globalFetch(replaceUrl.toString(), fetchArgs)
}
if(this.model === 'ada' || this.model === 'openai3small' || this.model === 'openai3large'){
const db = getDatabase()
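
Note (editor's sketch, not part of the diff): the effect of the hypaCustomSettings wiring above, built in isolation with illustrative values (the real ones come from db.hypaCustomSettings):

const settings = { url: 'https://api.example.com/v1', key: 'sk-example', model: 'text-embedding-3-small' }
const endpoint = settings.url.endsWith('/embeddings') ? settings.url : settings.url + '/embeddings'
const fetchArgs = {
    ...(settings.key ? { headers: { Authorization: 'Bearer ' + settings.key } } : {}),
    body: { input: ['chunk one', 'chunk two'], ...(settings.model ? { model: settings.model } : {}) },
} // passed as the second argument to globalFetch(endpoint, fetchArgs)
// an empty key or model simply drops the header or field, leaving the endpoint's defaults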

View File

@@ -268,9 +268,7 @@ function getModuleByIds(ids:string[]){
modules.push(module)
}
}
if(db.moduleIntergration){
modules = deduplicateModuleById(modules)
}
modules = deduplicateModuleById(modules)
return modules
}
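
Note (editor's sketch, not part of the diff): module deduplication by id is now unconditional instead of being gated on db.moduleIntergration. The body of deduplicateModuleById is not shown in this diff; a typical first-occurrence-wins version looks like:

function dedupeById<T extends { id: string }>(items: T[]): T[] {
    const seen = new Set<string>()
    return items.filter(m => !seen.has(m.id) && (seen.add(m.id), true))
}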

View File

@@ -15,7 +15,6 @@ import { supportsInlayImage } from "./files/inlays";
import { Capacitor } from "@capacitor/core";
import { getFreeOpenRouterModel } from "../model/openrouter";
import { runTransformers } from "./transformers";
import {createParser} from 'eventsource-parser'
import {Ollama} from 'ollama/dist/browser.mjs'
import { applyChatTemplate } from "./templates/chatTemplate";
import { OobaParams } from "./prompt";
@@ -59,7 +58,8 @@ type requestDataResponse = {
noRetry?: boolean,
special?: {
emotion?: string
}
},
failByServerError?: boolean
}|{
type: "streaming",
result: ReadableStream<StreamResponseChunk>,
@@ -92,12 +92,28 @@ interface OaiFunctions {
}
export type Parameter = 'temperature'|'top_k'|'repetition_penalty'|'min_p'|'top_a'|'top_p'|'frequency_penalty'|'presence_penalty'|'reasoning_effort'
export type Parameter = 'temperature'|'top_k'|'repetition_penalty'|'min_p'|'top_a'|'top_p'|'frequency_penalty'|'presence_penalty'|'reasoning_effort'|'thinking_tokens'
export type ModelModeExtended = 'model'|'submodel'|'memory'|'emotion'|'otherAx'|'translate'
type ParameterMap = {
[key in Parameter]?: string;
};
function setObjectValue<T>(obj: T, key: string, value: any): T {
const splitKey = key.split('.');
if(splitKey.length > 1){
const firstKey = splitKey.shift()
if(!obj[firstKey]){
obj[firstKey] = {};
}
obj[firstKey] = setObjectValue(obj[firstKey], splitKey.join('.'), value);
return obj;
}
obj[key] = value;
return obj;
}
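
Note (editor's sketch, not part of the diff): setObjectValue creates intermediate objects for dotted keys, which is what lets a ParameterMap rename target a nested field. With the 'thinking_tokens' to 'thinking.budget_tokens' rename used by requestClaude further down:

const body = setObjectValue<any>({ model: 'example-model' }, 'thinking.budget_tokens', 2048)
// -> { model: 'example-model', thinking: { budget_tokens: 2048 } }
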
function applyParameters(data: { [key: string]: any }, parameters: Parameter[], rename: ParameterMap, ModelMode:ModelModeExtended, arg:{
ignoreTopKIfZero?:boolean
} = {}): { [key: string]: any } {
@@ -157,6 +173,10 @@ function applyParameters(data: { [key: string]: any }, parameters: Parameter[],
value = db.seperateParameters[ModelMode].top_p
break
}
case 'thinking_tokens':{
value = db.seperateParameters[ModelMode].thinking_tokens
break
}
case 'frequency_penalty':{
value = db.seperateParameters[ModelMode].frequency_penalty === -1000 ? -1000 : (db.seperateParameters[ModelMode].frequency_penalty / 100)
break
@@ -175,7 +195,7 @@ function applyParameters(data: { [key: string]: any }, parameters: Parameter[],
continue
}
data[rename[parameter] ?? parameter] = value
data = setObjectValue(data, rename[parameter] ?? parameter, value)
}
return data
}
@@ -223,13 +243,17 @@ function applyParameters(data: { [key: string]: any }, parameters: Parameter[],
value = db.PresensePenalty === -1000 ? -1000 : (db.PresensePenalty / 100)
break
}
case 'thinking_tokens':{
value = db.thinkingTokens
break
}
}
if(value === -1000){
continue
}
data[rename[parameter] ?? parameter] = value
data = setObjectValue(data, rename[parameter] ?? parameter, value)
}
return data
}
@@ -247,7 +271,7 @@ export async function requestChatData(arg:requestDataArgument, model:ModelModeEx
try{
const currentChar = getCurrentCharacter()
if(currentChar.type !== 'group'){
if(currentChar?.type !== 'group'){
const perf = performance.now()
const d = await runTrigger(currentChar, 'request', {
chat: getCurrentChat(),
@@ -305,6 +329,13 @@ export async function requestChatData(arg:requestDataArgument, model:ModelModeEx
if(da.type !== 'fail' || da.noRetry){
return da
}
if(da.failByServerError){
await sleep(1000)
if(db.antiServerOverloads){
trys -= 0.5 // reduce trys by 0.5, so that it will retry twice as much
}
}
trys += 1
if(trys > db.requestRetrys){
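
Note (editor's sketch, not part of the diff): since the loop still runs trys += 1 afterwards, a server-error failure with db.antiServerOverloads enabled advances the counter by a net 0.5, doubling the retry budget for overloads only (each attempt preceded by the one-second sleep). The arithmetic in isolation:

function attemptsBeforeGivingUp(requestRetrys: number, serverError: boolean): number {
    let trys = 0
    let attempts = 0
    while (true) {
        attempts++
        trys += serverError ? 0.5 : 1 // net change per failed attempt, mirroring the loop above
        if (trys > requestRetrys) return attempts
    }
}
// attemptsBeforeGivingUp(3, false) === 4; attemptsBeforeGivingUp(3, true) === 7
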
@@ -1710,17 +1741,6 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise
})
}
}
else if(chat.role === 'assistant' && arg.modelInfo.flags.includes(LLMFlags.geminiThinking)){
reformatedChat.push({
role: 'MODEL',
parts: [chat.thoughts?.length > 0 ? {
text: chat.thoughts.join('\n\n')
} : null, {
text: chat.content
}]
})
}
else if(chat.role === 'assistant' || chat.role === 'user'){
reformatedChat.push({
role: chat.role === 'user' ? 'USER' : 'MODEL',
@@ -1896,6 +1916,101 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise
url = `https://generativelanguage.googleapis.com/v1beta/models/${arg.modelInfo.internalID}:generateContent?key=${db.google.accessToken}`
}
const fallBackGemini = async (originalError:string):Promise<requestDataResponse> => {
if(!db.antiServerOverloads){
return {
type: 'fail',
result: originalError,
failByServerError: true
}
}
if(arg?.abortSignal?.aborted){
return {
type: 'fail',
result: originalError,
failByServerError: true
}
}
if(arg.modelInfo.format === LLMFormat.VertexAIGemini){
return {
type: 'fail',
result: originalError,
failByServerError: true
}
}
try {
const OAIMessages:OpenAIChat[] = body.contents.map((v) => {
return {
role: v.role === 'USER' ? 'user' : 'assistant',
content: v.parts.map((v) => {
return v.text ?? ''
}).join('\n')
}
})
if(body?.systemInstruction?.parts?.[0]?.text){
OAIMessages.unshift({
role: 'system',
content: body.systemInstruction.parts[0].text
})
}
await sleep(2000)
const res = await fetch('https://generativelanguage.googleapis.com/v1beta/openai/chat/completions', {
body: JSON.stringify({
model: arg.modelInfo.internalID,
messages: OAIMessages,
max_tokens: maxTokens,
temperature: body.generation_config?.temperature,
top_p: body.generation_config?.topP,
presence_penalty: body.generation_config?.presencePenalty,
frequency_penalty: body.generation_config?.frequencyPenalty,
}),
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${db.google.accessToken}`
},
signal: arg.abortSignal
})
if(!res.ok){
return {
type: 'fail',
result: originalError,
failByServerError: true
}
}
if(arg?.abortSignal?.aborted){
return {
type: 'fail',
result: originalError
}
}
const d = await res.json()
if(d?.choices?.[0]?.message?.content){
return {
type: 'success',
result: d.choices[0].message.content
}
}
else{
return {
type: 'fail',
result: originalError,
failByServerError: true
}
}
} catch (error) {
return {
type: 'fail',
result: originalError,
failByServerError: true
}
}
}
if(arg.modelInfo.format === LLMFormat.GoogleCloud && arg.useStreaming){
headers['Content-Type'] = 'application/json'
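
Note (editor's sketch, not part of the diff): fallBackGemini above retries a RESOURCE_EXHAUSTED request through Google's OpenAI-compatible endpoint. The contents-to-messages flattening it performs, in isolation (multi-part turns are joined with newlines, and systemInstruction becomes a leading system message):

const toOAIMessage = (c: { role: string; parts: { text?: string }[] }) => ({
    role: c.role === 'USER' ? 'user' : 'assistant',
    content: c.parts.map(p => p.text ?? '').join('\n'),
})
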
@@ -1907,9 +2022,13 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise
})
if(f.status !== 200){
const text = await textifyReadableStream(f.body)
if(text.includes('RESOURCE_EXHAUSTED')){
return fallBackGemini(text)
}
return {
type: 'fail',
result: await textifyReadableStream(f.body)
result: text
}
}
@@ -1974,8 +2093,13 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise
chatId: arg.chatId,
abortSignal: arg.abortSignal,
})
if(!res.ok){
const text = JSON.stringify(res.data)
if(text.includes('RESOURCE_EXHAUSTED')){
return fallBackGemini(text)
}
return {
type: 'fail',
result: `${JSON.stringify(res.data)}`
@@ -2541,14 +2665,23 @@ async function requestClaude(arg:RequestDataArgumentExtended):Promise<requestDat
})
}
console.log(arg.modelInfo.parameters)
let body = applyParameters({
model: arg.modelInfo.internalID,
messages: finalChat,
system: systemPrompt.trim(),
max_tokens: maxTokens,
stream: useStreaming ?? false
}, ['temperature', 'top_k', 'top_p'], {}, arg.mode)
}, arg.modelInfo.parameters, {
'thinking_tokens': 'thinking.budget_tokens'
}, arg.mode)
if(body?.thinking?.budget_tokens === 0){
delete body.thinking
}
else if(body?.thinking?.budget_tokens && body?.thinking?.budget_tokens > 0){
body.thinking.type = 'enabled'
}
if(systemPrompt === ''){
delete body.system
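
Note (editor's sketch, not part of the diff): after the rename and the post-processing above, a positive thinking budget yields the shape Anthropic's extended-thinking API expects, for example:

const exampleBody = {
    model: 'claude-3-7-sonnet-latest',
    messages: [], // finalChat in the real call
    system: '...',
    max_tokens: 16384,
    stream: false,
    thinking: { budget_tokens: 2048, type: 'enabled' }, // a budget of 0 deletes the block instead
}
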
@@ -2645,8 +2778,18 @@ async function requestClaude(arg:RequestDataArgumentExtended):Promise<requestDat
"accept": "application/json",
}
let betas:string[] = []
if(db.claudeCachingExperimental){
headers['anthropic-beta'] = 'prompt-caching-2024-07-31'
betas.push('prompt-caching-2024-07-31')
}
if(body.max_tokens > 8192){
betas.push('output-128k-2025-02-19')
}
if(betas.length > 0){
headers['anthropic-beta'] = betas.join(',')
}
if(db.usePlainFetch){
@@ -2668,113 +2811,116 @@ async function requestClaude(arg:RequestDataArgumentExtended):Promise<requestDat
result: await textifyReadableStream(res.body)
}
}
let rerequesting = false
let breakError = ''
let thinking = false
const stream = new ReadableStream<StreamResponseChunk>({
async start(controller){
let text = ''
let reader = res.body.getReader()
let parserData = ''
const decoder = new TextDecoder()
const parser = createParser(async (e) => {
const parseEvent = (async (e:string) => {
try {
if(e.type === 'event'){
switch(e.event){
case 'content_block_delta': {
if(e.data){
text += JSON.parse(e.data).delta?.text
controller.enqueue({
"0": text
})
}
break
const parsedData = JSON.parse(e)
if(parsedData?.type === 'content_block_delta'){
if(parsedData?.delta?.type === 'text' || parsedData.delta?.type === 'text_delta'){
if(thinking){
text += "</Thoughts>\n\n"
thinking = false
}
case 'error': {
if(e.data){
const errormsg:string = JSON.parse(e.data).error?.message
if(errormsg && errormsg.toLocaleLowerCase().includes('overload') && db.antiClaudeOverload){
console.log('Overload detected, retrying...')
reader.cancel()
rerequesting = true
await sleep(2000)
body.max_tokens -= await tokenize(text)
if(body.max_tokens < 0){
body.max_tokens = 0
}
if(body.messages.at(-1)?.role !== 'assistant'){
body.messages.push({
role: 'assistant',
content: [{
type: 'text',
text: ''
}]
})
}
let block = body.messages[body.messages.length-1].content
if(typeof block === 'string'){
body.messages[body.messages.length-1].content += text
}
else if(block[0].type === 'text'){
block[0].text += text
}
const res = await fetchNative(replacerURL, {
body: JSON.stringify(body),
headers: {
"Content-Type": "application/json",
"x-api-key": apiKey,
"anthropic-version": "2023-06-01",
"accept": "application/json",
},
method: "POST",
chatId: arg.chatId
})
if(res.status !== 200){
breakError = 'Error: ' + await textifyReadableStream(res.body)
break
}
reader = res.body.getReader()
rerequesting = false
break
}
text += "Error:" + JSON.parse(e.data).error?.message
if(arg.extractJson && (db.jsonSchemaEnabled || arg.schema)){
controller.enqueue({
"0": extractJSON(text, db.jsonSchema)
})
}
else{
controller.enqueue({
"0": text
})
}
}
break
text += parsedData.delta?.text ?? ''
}
if(parsedData?.delta?.type === 'thinking' || parsedData.delta?.type === 'thinking_delta'){
if(!thinking){
text += "<Thoughts>\n"
thinking = true
}
text += parsedData.delta?.thinking ?? ''
}
if(parsedData?.delta?.type === 'redacted_thinking'){
if(!thinking){
text += "<Thoughts>\n"
thinking = true
}
text += '\n{{redacted_thinking}}\n'
}
}
} catch (error) {}
if(parsedData?.type === 'error'){
const errormsg:string = parsedData?.error?.message
if(errormsg && errormsg.toLocaleLowerCase().includes('overload') && db.antiServerOverloads){
// console.log('Overload detected, retrying...')
controller.enqueue({
"0": "Overload detected, retrying..."
})
return 'overload'
}
text += "Error:" + parsedData?.error?.message
}
}
catch (error) {
}
})
let breakWhile = false
let i = 0;
let prevText = ''
while(true){
if(rerequesting){
if(breakError){
controller.enqueue({
"0": breakError
})
try {
if(arg?.abortSignal?.aborted || breakWhile){
break
}
await sleep(1000)
continue
}
try {
const {done, value} = await reader.read()
if(done){
if(rerequesting){
continue
}
break
}
parser.feed(decoder.decode(value))
parserData += (decoder.decode(value))
let parts = parserData.split('\n')
for(;i<parts.length-1;i++){
prevText = text
if(parts?.[i]?.startsWith('data: ')){
const d = await parseEvent(parts[i].slice(6))
if(d === 'overload'){
parserData = ''
prevText = ''
text = ''
reader.cancel()
const res = await fetchNative(replacerURL, {
body: JSON.stringify(body),
headers: headers,
method: "POST",
chatId: arg.chatId
})
if(res.status !== 200){
controller.enqueue({
"0": await textifyReadableStream(res.body)
})
breakWhile = true
break
}
reader = res.body.getReader()
break
}
}
}
i--;
text = prevText
controller.enqueue({
"0": text
})
} catch (error) {
await sleep(1)
}
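
Note (editor's sketch, not part of the diff): the rewrite replaces eventsource-parser with a hand-rolled loop that splits the buffered stream on newlines and consumes only complete "data: " lines, keeping the trailing partial line in parserData for the next read. The framing alone, simplified and without that partial-line buffering:

function* sseDataPayloads(chunk: string): Generator<string> {
    for (const line of chunk.split('\n')) {
        if (line.startsWith('data: ')) yield line.slice(6) // JSON payload handed to parseEvent
    }
}
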
@@ -2799,24 +2945,55 @@ async function requestClaude(arg:RequestDataArgumentExtended):Promise<requestDat
})
if(!res.ok){
const stringlified = JSON.stringify(res.data)
return {
type: 'fail',
result: JSON.stringify(res.data)
result: stringlified,
failByServerError: stringlified?.toLocaleLowerCase()?.includes('overload')
}
}
if(res.data.error){
const stringlified = JSON.stringify(res.data.error)
return {
type: 'fail',
result: JSON.stringify(res.data.error)
result: stringlified,
failByServerError: stringlified?.toLocaleLowerCase()?.includes('overload')
}
}
const resText = res?.data?.content?.[0]?.text
if(!resText){
const contents = res?.data?.content
if(!contents || contents.length === 0){
return {
type: 'fail',
result: JSON.stringify(res.data)
}
}
let resText = ''
let thinking = false
for(const content of contents){
if(content.type === 'text'){
if(thinking){
resText += "</Thoughts>\n\n"
thinking = false
}
resText += content.text
}
if(content.type === 'thinking'){
if(!thinking){
resText += "<Thoughts>\n"
thinking = true
}
resText += content.thinking ?? ''
}
if(content.type === 'redacted_thinking'){
if(!thinking){
resText += "<Thoughts>\n"
thinking = true
}
resText += '\n{{redacted_thinking}}\n'
}
}
if(arg.extractJson && db.jsonSchemaEnabled){
return {
type: 'success',
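
Note (editor's sketch, not part of the diff): the loop above folds Anthropic's mixed content blocks into one string, wrapping thinking output in <Thoughts> tags (left unclosed if the response ends mid-thought, matching the code). Standalone:

type ClaudeBlock =
    | { type: 'text'; text: string }
    | { type: 'thinking'; thinking?: string }
    | { type: 'redacted_thinking' }

function foldBlocks(contents: ClaudeBlock[]): string {
    let out = ''
    let thinking = false
    for (const c of contents) {
        if (c.type === 'text') {
            if (thinking) { out += '</Thoughts>\n\n'; thinking = false }
            out += c.text
        } else {
            if (!thinking) { out += '<Thoughts>\n'; thinking = true }
            out += c.type === 'thinking' ? (c.thinking ?? '') : '\n{{redacted_thinking}}\n'
        }
    }
    return out
}
// foldBlocks([{ type: 'thinking', thinking: 'consider X' }, { type: 'text', text: 'Answer.' }])
// -> "<Thoughts>\nconsider X</Thoughts>\n\nAnswer."
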
@@ -2829,6 +3006,7 @@ async function requestClaude(arg:RequestDataArgumentExtended):Promise<requestDat
}
}
async function requestHorde(arg:RequestDataArgumentExtended):Promise<requestDataResponse> {
const formated = arg.formated
const db = getDatabase()

View File

@@ -12,7 +12,7 @@ import { defaultColorScheme, type ColorScheme } from '../gui/colorscheme';
import type { PromptItem, PromptSettings } from '../process/prompt';
import type { OobaChatCompletionRequestParams } from '../model/ooba';
export let appVer = "150.2.0"
export let appVer = "150.4.1"
export let webAppSubVer = ''
@@ -484,6 +484,16 @@ export function setDatabase(data:Database){
doNotSummarizeUserMessage: data.hypaV3Settings?.doNotSummarizeUserMessage ?? false
}
data.returnCSSError ??= true
data.useExperimentalGoogleTranslator ??= false
if(data.antiClaudeOverload){ //migration
data.antiClaudeOverload = false
data.antiServerOverloads = true
}
data.hypaCustomSettings = {
url: data.hypaCustomSettings?.url ?? "",
key: data.hypaCustomSettings?.key ?? "",
model: data.hypaCustomSettings?.model ?? "",
}
changeLanguage(data.language)
setDatabaseLite(data)
}
@@ -897,7 +907,7 @@ export interface Database{
preserveOrphanedMemory: boolean
processRegexScript: boolean
doNotSummarizeUserMessage: boolean
},
}
OaiCompAPIKeys: {[key:string]:string}
inlayErrorResponse:boolean
reasoningEffort:number
@@ -905,6 +915,14 @@ export interface Database{
showTranslationLoading: boolean
showDeprecatedTriggerV1:boolean
returnCSSError:boolean
useExperimentalGoogleTranslator:boolean
thinkingTokens: number
antiServerOverloads: boolean
hypaCustomSettings: {
url: string,
key: string,
model: string,
}
}
interface SeparateParameters{
@@ -917,6 +935,7 @@ interface SeparateParameters{
frequency_penalty?:number
presence_penalty?:number
reasoning_effort?:number
thinking_tokens?:number
}
export interface customscript{
@@ -961,6 +980,7 @@ export interface character{
desc:string
notes:string
chats:Chat[]
chatFolders: ChatFolder[]
chatPage: number
viewScreen: 'emotion'|'none'|'imggen'|'vn',
bias: [string, number][]
@@ -1099,6 +1119,7 @@ export interface groupChat{
image?:string
firstMessage:string
chats:Chat[]
chatFolders: ChatFolder[]
chatPage: number
name:string
viewScreen: 'single'|'multiple'|'none'|'emp',
@@ -1231,6 +1252,7 @@ export interface botPreset{
image?:string
regex?:customscript[]
reasonEffort?:number
thinkingTokens?:number
}
@@ -1305,6 +1327,14 @@ export interface Chat{
bindedPersona?:string
fmIndex?:number
hypaV3Data?:SerializableHypaV3Data
folderId?:string
}
export interface ChatFolder{
id:string
name?:string
color?:string
folded:boolean
}
export interface Message{
@@ -1537,6 +1567,7 @@ export function saveCurrentPreset(){
regex: db.presetRegex,
image: pres?.[db.botPresetsId]?.image ?? '',
reasonEffort: db.reasoningEffort ?? 0,
thinkingTokens: db.thinkingTokens ?? null,
}
db.botPresets = pres
setDatabase(db)
@@ -1647,6 +1678,7 @@ export function setPreset(db:Database, newPres: botPreset){
db.enableCustomFlags = newPres.enableCustomFlags ?? false
db.presetRegex = newPres.regex ?? []
db.reasoningEffort = newPres.reasonEffort ?? 0
db.thinkingTokens = newPres.thinkingTokens ?? null
return db
}
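
Note (editor's sketch, not part of the diff): the minimum a UI needs to use the new ChatFolder shape, given a character char; the id scheme here is an assumption, since the diff does not show how folder ids are generated:

const folder: ChatFolder = { id: crypto.randomUUID(), name: 'Archive', folded: false }
char.chatFolders.push(folder)
char.chats[0].folderId = folder.id // chats without a folderId stay at the root level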

View File

@@ -313,7 +313,9 @@ export class ChatTokenizer {
this.chatAdditionalTokens = chatAdditionalTokens
this.useName = useName
}
async tokenizeChat(data:OpenAIChat) {
async tokenizeChat(data:OpenAIChat, args:{
countThoughts?:boolean,
} = {}) {
let encoded = (await encode(data.content)).length + this.chatAdditionalTokens
if(data.name && this.useName ==='name'){
encoded += (await encode(data.name)).length + 1
@@ -323,7 +325,7 @@ export class ChatTokenizer {
encoded += await this.tokenizeMultiModal(multimodal)
}
}
if(data.thoughts && data.thoughts.length > 0){
if(data.thoughts && data.thoughts.length > 0 && args.countThoughts){
for(const thought of data.thoughts){
encoded += (await encode(thought)).length + 1
}
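
Note (editor's sketch, not part of the diff): thought tokens are now excluded from chat token counts unless explicitly requested; assuming a ChatTokenizer instance tokenizer and an OpenAIChat chat:

const base = await tokenizer.tokenizeChat(chat) // thoughts no longer counted by default
const full = await tokenizer.tokenizeChat(chat, { countThoughts: true }) // the previous behavior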

View File

@@ -1,7 +1,7 @@
import { get } from "svelte/store"
import { translatorPlugin } from "../plugins/plugins"
import { getDatabase, type character, type customscript, type groupChat } from "../storage/database.svelte"
import { globalFetch, isTauri } from "../globalApi.svelte"
import { globalFetch, isNodeServer, isTauri } from "../globalApi.svelte"
import { alertError } from "../alert"
import { requestChatData } from "../process/request"
import { doingChat, type OpenAIChat } from "../process/index.svelte"
@@ -41,7 +41,7 @@ export async function translate(text:string, reverse:boolean) {
return runTranslator(text, reverse, db.translator,db.aiModel.startsWith('novellist') ? 'ja' : 'en')
}
export async function runTranslator(text:string, reverse:boolean, from:string,target:string) {
export async function runTranslator(text:string, reverse:boolean, from:string,target:string, exarg?:{translatorNote?:string}) {
const arg = {
from: reverse ? from : target,
@@ -50,6 +50,7 @@ export async function runTranslator(text:string, reverse:boolean, from:string,ta
host: 'translate.googleapis.com',
translatorNote: exarg?.translatorNote
}
const texts = text.split('\n')
let chunks:[string,boolean][] = [['', true]]
@@ -104,11 +105,11 @@ export async function runTranslator(text:string, reverse:boolean, from:string,ta
}
async function translateMain(text:string, arg:{from:string, to:string, host:string}){
async function translateMain(text:string, arg:{from:string, to:string, host:string, translatorNote?:string}){
let db = getDatabase()
if(db.translatorType === 'llm'){
const tr = arg.to || 'en'
return translateLLM(text, {to: tr, from: arg.from})
return translateLLM(text, {to: tr, from: arg.from, translatorNote: arg.translatorNote})
}
if(db.translatorType === 'deepl'){
const body = {
@@ -163,6 +164,31 @@ async function translateMain(text:string, arg:{from:string, to:string, host:stri
return f.data.data;
}
if(db.useExperimentalGoogleTranslator){
const hqAvailable = isTauri || isNodeServer || userScriptFetch
if(hqAvailable){
try {
const ua = navigator.userAgent
const d = await globalFetch(`https://translate.google.com/m?tl=${arg.to}&sl=${arg.from}&q=${encodeURIComponent(text)}`, {
headers: {
"User-Agent": ua,
"Accept": "*/*",
},
method: "GET",
})
const parser = new DOMParser()
const dom = parser.parseFromString(d.data, 'text/html')
const result = dom.querySelector('.result-container')?.textContent?.trim()
if(result){
return result
}
} catch (error) {
}
}
}
const url = `https://${arg.host}/translate_a/single?client=gtx&dt=t&sl=${db.translatorInputLanguage}&tl=${arg.to}&q=` + encodeURIComponent(text)
@@ -448,7 +474,7 @@ function needSuperChunkedTranslate(){
return getDatabase().translatorType === 'deeplX'
}
async function translateLLM(text:string, arg:{to:string, from:string, regenerate?:boolean}):Promise<string>{
async function translateLLM(text:string, arg:{to:string, from:string, regenerate?:boolean,translatorNote?:string}):Promise<string>{
if(!arg.regenerate){
const cacheMatch = await LLMCacheStorage.getItem(text)
if(cacheMatch){
@@ -465,12 +491,17 @@ async function translateLLM(text:string, arg:{to:string, from:string, regenerate
const db = getDatabase()
const charIndex = get(selectedCharID)
const currentChar = db.characters[charIndex]
let translatorNote
if (currentChar?.type === "character") {
let translatorNote = ""
console.log(arg.translatorNote)
if(arg.translatorNote){
translatorNote = arg.translatorNote
}
else if (currentChar?.type === "character") {
translatorNote = currentChar.translatorNote ?? ""
} else {
translatorNote = ""
}
console.log(translatorNote)
let formated:OpenAIChat[] = []
let prompt = db.translatorPrompt || `You are a translator. translate the following html or text into {{slot}}. do not output anything other than the translation.`
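
Note (editor's sketch, not part of the diff): a translatorNote can now be supplied per call, and when the LLM translator is active it takes precedence over the character's own note:

const out = await runTranslator('こんにちは', false, 'ja', 'en', { translatorNote: 'Keep honorifics untranslated.' })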

View File

@@ -1015,4 +1015,7 @@ export const sortableOptions = {
delay: 300, // time in milliseconds to define when the sorting should start
delayOnTouchOnly: true,
filter: '.no-sort',
onMove: (event) => {
return event.related.className.indexOf('no-sort') === -1
}
} as const
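
Note (editor's sketch, not part of the diff): per SortableJS's onMove contract, returning false cancels the move, and event.related is the element the dragged item would land next to; so .no-sort elements can neither be dragged (the filter option) nor have items dropped around them (this onMove guard). A stricter class check would be:

const onMove = (event: { related: HTMLElement }) =>
    !event.related.classList.contains('no-sort') // indexOf would also match e.g. "no-sortable"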