diff --git a/package.json b/package.json
index 32109138..99f759a0 100644
--- a/package.json
+++ b/package.json
@@ -69,7 +69,6 @@
"mnemonist": "^0.40.3",
"mobile-drag-drop": "3.0.0-rc.0",
"msgpackr": "1.10.1",
- "node-fetch": "2",
"node-html-parser": "^6.1.12",
"ollama": "^0.5.0",
"pdfjs-dist": "^4.0.379",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index c460c2c9..f247c718 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -158,15 +158,15 @@ importers:
ml-distance:
specifier: ^4.0.1
version: 4.0.1
+ mnemonist:
+ specifier: ^0.40.3
+ version: 0.40.3
mobile-drag-drop:
specifier: 3.0.0-rc.0
version: 3.0.0-rc.0
msgpackr:
specifier: 1.10.1
version: 1.10.1
- node-fetch:
- specifier: '2'
- version: 2.7.0
node-html-parser:
specifier: ^6.1.12
version: 6.1.12
@@ -2756,6 +2756,9 @@ packages:
ml-tree-similarity@1.0.0:
resolution: {integrity: sha512-XJUyYqjSuUQkNQHMscr6tcjldsOoAekxADTplt40QKfwW6nd++1wHWV9AArl0Zvw/TIHgNaZZNvr8QGvE8wLRg==}
+ mnemonist@0.40.3:
+ resolution: {integrity: sha512-Vjyr90sJ23CKKH/qPAgUKicw/v6pRoamxIEDFOF8uSgFME7DqPRpHgRTejWVjkdGg5dXj0/NyxZHZ9bcjH+2uQ==}
+
mobile-drag-drop@3.0.0-rc.0:
resolution: {integrity: sha512-f8wIDTbBYLBW/+5sei1cqUE+StyDpf/LP+FRZELlVX6tmOOmELk84r3wh1z3woxCB9G5octhF06K5COvFjGgqg==}
@@ -2900,6 +2903,9 @@ packages:
object-inspect@1.13.1:
resolution: {integrity: sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==}
+ obliterator@2.0.5:
+ resolution: {integrity: sha512-42CPE9AhahZRsMNslczq0ctAEtqk8Eka26QofnqC346BZdHDySk3LWka23LI7ULIw11NmltpiLagIq8gBozxTw==}
+
ollama@0.5.0:
resolution: {integrity: sha512-CRtRzsho210EGdK52GrUMohA2pU+7NbgEaBG3DcYeRmvQthDO7E2LHOkLlUUeaYUlNmEd8icbjC02ug9meSYnw==}
@@ -6505,6 +6511,10 @@ snapshots:
binary-search: 1.3.6
num-sort: 2.1.0
+ mnemonist@0.40.3:
+ dependencies:
+ obliterator: 2.0.5
+
mobile-drag-drop@3.0.0-rc.0: {}
modify-values@1.0.1: {}
@@ -6665,6 +6675,8 @@ snapshots:
object-inspect@1.13.1: {}
+ obliterator@2.0.5: {}
+
ollama@0.5.0:
dependencies:
whatwg-fetch: 3.6.20
diff --git a/server/node/server.cjs b/server/node/server.cjs
index b2871d9d..0828d9f7 100644
--- a/server/node/server.cjs
+++ b/server/node/server.cjs
@@ -11,8 +11,7 @@ app.use(express.raw({ type: 'application/octet-stream', limit: '50mb' }));
const {pipeline} = require('stream/promises')
const https = require('https');
const sslPath = path.join(process.cwd(), 'server/node/ssl/certificate');
-const EXTERNAL_HUB_URL = 'https://sv.risuai.xyz';
-const fetch = require('node-fetch');
+const hubURL = 'https://sv.risuai.xyz';
let password = ''
@@ -31,12 +30,17 @@ function isHex(str) {
}
app.get('/', async (req, res, next) => {
- console.log("[Server] Connected")
+
+ const clientIP = req.headers['x-forwarded-for'] || req.ip || req.socket.remoteAddress || 'Unknown IP';
+ const timestamp = new Date().toISOString();
+ console.log(`[Server] ${timestamp} | Connection from: ${clientIP}`);
+
try {
const mainIndex = await fs.readFile(path.join(process.cwd(), 'dist', 'index.html'))
const root = htmlparser.parse(mainIndex)
const head = root.querySelector('head')
head.innerHTML = `` + head.innerHTML
+
res.send(root.toString())
} catch (error) {
console.log(error)
@@ -139,116 +143,69 @@ const reverseProxyFunc_get = async (req, res, next) => {
}
}
-// Risu Realm Proxy
-async function hubProxyHandler(req, res, next) {
+async function hubProxyFunc(req, res) {
+
try {
- // Extract request path and query parameters
const pathAndQuery = req.originalUrl.replace(/^\/hub-proxy/, '');
- const externalURL = EXTERNAL_HUB_URL + pathAndQuery;
-
- console.log(`[Hub Proxy] Forwarding ${req.method} request to: ${externalURL}`);
-
- // Prepare headers to send to the realm server (including Accept-Encoding modification)
+ const externalURL = hubURL + pathAndQuery;
+
const headersToSend = { ...req.headers };
- delete headersToSend['host'];
- delete headersToSend['connection'];
- headersToSend['accept-encoding'] = 'gzip, deflate'; // Exclude zstd, etc.
- if (!headersToSend['x-forwarded-for']) {
- headersToSend['x-forwarded-for'] = req.ip;
- }
-
- // Execute the fetch request to the realm server
+ delete headersToSend.host;
+ delete headersToSend.connection;
+
const response = await fetch(externalURL, {
method: req.method,
headers: headersToSend,
- body: (req.method !== 'GET' && req.method !== 'HEAD') ? req.body : undefined,
+ body: req.method !== 'GET' && req.method !== 'HEAD' ? req : undefined,
+ redirect: 'manual',
+ duplex: 'half'
});
-
- console.log(`[Hub Proxy] Received status ${response.status} from external server`);
-
- // Handle the realm server response
- // Clean up response headers and extract Content-Type
- const responseHeaders = {};
- // Check the Content-Type of the realm server response (use default if missing)
- let contentType = response.headers.get('content-type') || 'application/octet-stream';
-
- response.headers.forEach((value, key) => {
- const lowerKey = key.toLowerCase();
- // List of headers not to be forwarded to the client
- const excludedHeaders = [
- 'transfer-encoding', 'connection', 'content-encoding',
- 'access-control-allow-origin', 'access-control-allow-methods',
- 'access-control-allow-headers', 'content-security-policy',
- 'content-security-policy-report-only', 'clear-site-data',
- 'strict-transport-security', 'expect-ct',
- 'cf-ray', 'cf-cache-status', 'report-to', 'nel', 'server', 'server-timing', 'alt-svc'
- ];
- if (!excludedHeaders.includes(lowerKey)) {
- responseHeaders[key] = value;
- }
- });
-
- // Set the status code and cleaned headers for the client
- res.status(response.status).set(responseHeaders);
-
- // Determine body processing method based on Content-Type
- try {
- if (contentType.startsWith('application/json')) {
- // JSON response: read as text and send
- const bodyText = await response.text();
- console.log(`[Hub Proxy] Processing JSON response (size: ${bodyText.length})`);
- res.setHeader('Content-Type', contentType); // Set the final Content-Type
- res.send(bodyText);
-
- } else if (contentType.startsWith('image/')) {
- // Image response: read as buffer and send
- const bodyBuffer = await response.buffer(); // Assuming 'fetch' response object has a .buffer() method or similar
- console.log(`[Hub Proxy] Processing Image response (type: ${contentType}, size: ${bodyBuffer.length} bytes)`);
- res.setHeader('Content-Type', contentType); // Set the final Content-Type
- res.send(bodyBuffer);
-
- } else {
- // Other responses (HTML, other text, unknown binary, etc.): read as buffer and send safely
- const bodyBuffer = await response.buffer(); // Assuming 'fetch' response object has a .buffer() method or similar
- console.log(`[Hub Proxy] Processing Other response as buffer (type: ${contentType}, size: ${bodyBuffer.length} bytes)`);
- // Use original Content-Type if available, otherwise use octet-stream (already handled by default assignment)
- res.setHeader('Content-Type', contentType);
- res.send(bodyBuffer);
- }
- } catch (bodyError) {
- // If an error occurs while reading/processing the response body
- console.error("[Hub Proxy] Error reading/processing response body:", bodyError);
- if (!res.headersSent) {
- res.status(500).send({ error: 'Failed to process response body from hub server.' });
- } else {
- console.error("[Hub Proxy] Headers already sent, cannot send body error to client.");
- res.end();
- }
- return; // End the handler
+
+ for (const [key, value] of response.headers.entries()) {
+ res.setHeader(key, value);
}
+ res.status(response.status);
+
+ if (response.status >= 300 && response.status < 400) {
+            // Redirect handling (needed for paths like '/redirect/docs/lua')
+ const redirectUrl = response.headers.get('location');
+ if (redirectUrl) {
+
+ if (redirectUrl.startsWith('http')) {
+
+ if (redirectUrl.startsWith(hubURL)) {
+ const newPath = redirectUrl.replace(hubURL, '/hub-proxy');
+ res.setHeader('location', newPath);
+ }
+
+ } else if (redirectUrl.startsWith('/')) {
+
+ res.setHeader('location', `/hub-proxy${redirectUrl}`);
+ }
+ }
+ return res.end();
+ }
+
+ await pipeline(response.body, res);
+
} catch (error) {
- // Fetch request itself failed or other exceptions
- console.error("[Hub Proxy] Request failed:", error);
+ console.error("[Hub Proxy] Error:", error);
if (!res.headersSent) {
- res.status(502).send({ error: 'Proxy failed to connect to or get response from the hub server.' });
+ res.status(502).send({ error: 'Proxy request failed: ' + error.message });
} else {
- console.error("[Hub Proxy] Headers already sent, cannot send connection error to client.");
res.end();
}
}
}
-app.get('/hub-proxy/*', hubProxyHandler);
-app.post('/hub-proxy/*', hubProxyHandler);
-app.put('/hub-proxy/*', hubProxyHandler);
-
app.get('/proxy', reverseProxyFunc_get);
app.get('/proxy2', reverseProxyFunc_get);
+app.get('/hub-proxy/*', hubProxyFunc);
app.post('/proxy', reverseProxyFunc);
app.post('/proxy2', reverseProxyFunc);
-
+app.post('/hub-proxy/*', hubProxyFunc);
app.get('/api/password', async(req, res)=> {
if(password === ''){
@@ -408,9 +365,6 @@ async function getHttpsOptions() {
const keyPath = path.join(sslPath, 'server.key');
const certPath = path.join(sslPath, 'server.crt');
- console.log(keyPath)
- console.log(certPath)
-
try {
await fs.access(keyPath);
diff --git a/src/lang/cn.ts b/src/lang/cn.ts
index 4a4116be..efdf13de 100644
--- a/src/lang/cn.ts
+++ b/src/lang/cn.ts
@@ -816,6 +816,7 @@ export const languageChinese = {
"nextSummarizationLabel": "HypaV3 将总结 [{0}]",
"nextSummarizationNoMessagesFoundLabel": "警告:未找到消息",
"nextSummarizationLoadingError": "加载下一个总结目标时出错:{0}",
+ "summarizationConditionLabel": "提示:当输入标记超过最大上下文大小时,HypaV3 将开始进行摘要处理。",
"emptySelectedFirstMessageLabel": "警告:所选的第一条消息为空"
},
}
diff --git a/src/lang/de.ts b/src/lang/de.ts
index c3ac6507..744b67a9 100644
--- a/src/lang/de.ts
+++ b/src/lang/de.ts
@@ -480,6 +480,7 @@ export const languageGerman = {
"nextSummarizationLabel": "HypaV3 wird [{0}] zusammenfassen",
"nextSummarizationNoMessagesFoundLabel": "WARNUNG: Keine Nachrichten gefunden",
"nextSummarizationLoadingError": "Fehler beim Laden des nächsten Zusammenfassungsziels: {0}",
+ "summarizationConditionLabel": "Hinweis: HypaV3 beginnt mit der Zusammenfassung, wenn die Eingabe-Tokens die maximale Kontextgröße überschreiten.",
"emptySelectedFirstMessageLabel": "WARNUNG: Ausgewählte erste Nachricht ist leer"
},
}
diff --git a/src/lang/en.ts b/src/lang/en.ts
index ead4740f..d20ba4be 100644
--- a/src/lang/en.ts
+++ b/src/lang/en.ts
@@ -1052,6 +1052,7 @@ export const languageEnglish = {
nextSummarizationLabel: "HypaV3 will summarize [{0}]",
nextSummarizationNoMessagesFoundLabel: "WARN: No messages found",
nextSummarizationLoadingError: "Error loading next summarization target: {0}",
+ summarizationConditionLabel: "Tip: HypaV3 begins summarization when input tokens exceed the maximum context size.",
emptySelectedFirstMessageLabel: "WARN: Selected first message is empty",
},
bulkEnabling: "Lorebook Bulk Enabling",
@@ -1116,5 +1117,8 @@ export const languageEnglish = {
doNotChangeFallbackModels: "Do Not Change Fallback Models on Preset Change",
customModels: "Custom Models",
igpPrompt: "IGP Prompt",
- useTokenizerCaching: "Tokenizer Caching"
+ useTokenizerCaching: "Tokenizer Caching",
+ hypaMemoryV2Modal: "Hypa V2 Modal",
+ hypaMemoryV3Modal: "Hypa V3 Modal",
+ showMenuHypaMemoryModal: "Show Menu Hypa Modal",
}
diff --git a/src/lang/es.ts b/src/lang/es.ts
index af4168b1..369cb839 100644
--- a/src/lang/es.ts
+++ b/src/lang/es.ts
@@ -725,6 +725,7 @@ export const languageSpanish = {
"nextSummarizationLabel": "HypaV3 resumirá [{0}]",
"nextSummarizationNoMessagesFoundLabel": "ADVERTENCIA: No se encontraron mensajes",
"nextSummarizationLoadingError": "Error al cargar el siguiente objetivo de resumen: {0}",
+ "summarizationConditionLabel": "Consejo: HypaV3 comienza a resumir cuando los tokens de entrada superan el tamaño máximo de contexto.",
"emptySelectedFirstMessageLabel": "ADVERTENCIA: El primer mensaje seleccionado está vacío"
},
}
diff --git a/src/lang/ko.ts b/src/lang/ko.ts
index 863ec86c..97c7fbf0 100644
--- a/src/lang/ko.ts
+++ b/src/lang/ko.ts
@@ -971,7 +971,8 @@ export const languageKorean = {
"nextSummarizationLabel": "HypaV3가 [{0}]를 요약할 예정입니다",
"nextSummarizationNoMessagesFoundLabel": "경고: 메시지를 찾을 수 없습니다",
"nextSummarizationLoadingError": "다음 요약 대상을 불러오는 동안 오류 발생: {0}",
- "emptySelectedFirstMessageLabel": "경고: 선택된 첫 메시지가 비어있습니다"
+ "summarizationConditionLabel": "팁: HypaV3는 입력 토큰이 최대 컨텍스트 크기를 넘으면 요약을 시작합니다.",
+ "emptySelectedFirstMessageLabel": "경고: 선택된 첫 메시지가 비어있습니다",
},
"bulkEnabling": "한번에 로어북 활성화 버튼",
"showTranslationLoading": "번역 로딩 보이기",
@@ -984,4 +985,7 @@ export const languageKorean = {
"childLoreDesc": "이것은 캐릭터 로어의 복사본이며, 삭제하거나 원본 로어에서 직접 비활성화하기 전에는 '언제나 활성화' 상태로 유지됩니다.",
"cachePoint": "캐시 포인트",
"all": "모두",
+ "hypaMemoryV2Modal": "하이파 V2 모달",
+ "hypaMemoryV3Modal": "하이파 V3 모달",
+ "showMenuHypaMemoryModal": "메뉴에서 하이파 모달 보이기",
}
diff --git a/src/lang/vi.ts b/src/lang/vi.ts
index 4cbe52b2..da26f795 100644
--- a/src/lang/vi.ts
+++ b/src/lang/vi.ts
@@ -454,6 +454,7 @@ export const LanguageVietnamese = {
"nextSummarizationLabel": "HypaV3 sẽ tóm tắt [{0}]",
"nextSummarizationNoMessagesFoundLabel": "CẢNH BÁO: Không tìm thấy tin nhắn",
"nextSummarizationLoadingError": "Lỗi khi tải mục tiêu tóm tắt tiếp theo: {0}",
+ "summarizationConditionLabel": "Mẹo: HypaV3 bắt đầu tóm tắt khi số lượng token đầu vào vượt quá kích thước ngữ cảnh tối đa.",
"emptySelectedFirstMessageLabel": "CẢNH BÁO: Tin nhắn đầu tiên được chọn trống"
},
}
diff --git a/src/lang/zh-Hant.ts b/src/lang/zh-Hant.ts
index 481587af..300ba5b9 100644
--- a/src/lang/zh-Hant.ts
+++ b/src/lang/zh-Hant.ts
@@ -849,6 +849,7 @@ export const languageChineseTraditional = {
"nextSummarizationLabel": "HypaV3 將摘要 [{0}]",
"nextSummarizationNoMessagesFoundLabel": "警告:找不到訊息",
"nextSummarizationLoadingError": "載入下一個摘要目標時出錯:{0}",
+ "summarizationConditionLabel": "提示:當輸入標記超過最大上下文大小時,HypaV3 將開始進行摘要處理。",
"emptySelectedFirstMessageLabel": "警告:選定的第一條訊息為空"
},
}
diff --git a/src/lib/ChatScreens/DefaultChatScreen.svelte b/src/lib/ChatScreens/DefaultChatScreen.svelte
index 9fe92edd..a90c2132 100644
--- a/src/lib/ChatScreens/DefaultChatScreen.svelte
+++ b/src/lib/ChatScreens/DefaultChatScreen.svelte
@@ -2,7 +2,7 @@
import Suggestion from './Suggestion.svelte';
import AdvancedChatEditor from './AdvancedChatEditor.svelte';
- import { CameraIcon, DatabaseIcon, DicesIcon, GlobeIcon, ImagePlusIcon, LanguagesIcon, Laugh, MenuIcon, MicOffIcon, PackageIcon, Plus, RefreshCcwIcon, ReplyIcon, Send, StepForwardIcon, XIcon } from "lucide-svelte";
+ import { CameraIcon, DatabaseIcon, DicesIcon, GlobeIcon, ImagePlusIcon, LanguagesIcon, Laugh, MenuIcon, MicOffIcon, PackageIcon, Plus, RefreshCcwIcon, ReplyIcon, Send, StepForwardIcon, XIcon, BrainIcon } from "lucide-svelte";
import { selectedCharID, PlaygroundStore, createSimpleCharacter } from "../../ts/stores.svelte";
import Chat from "./Chat.svelte";
import { type Message, type character, type groupChat } from "../../ts/storage/database.svelte";
@@ -12,7 +12,7 @@
import { findCharacterbyId, getUserIconProtrait, messageForm, sleep } from "../../ts/util";
import { language } from "../../lang";
import { isExpTranslator, translate } from "../../ts/translator/translator";
- import { alertError, alertNormal, alertWait } from "../../ts/alert";
+ import { alertError, alertNormal, alertWait, showHypaV2Alert, showHypaV3Alert } from "../../ts/alert";
import sendSound from '../../etc/send.mp3'
import { processScript } from "src/ts/process/scripts";
import CreatorQuote from "./CreatorQuote.svelte";
@@ -826,6 +826,30 @@
{language.chatList}
{/if}
+
+ {#if DBState.db.showMenuHypaMemoryModal}
+ {#if DBState.db.supaModelType !== 'none' && (DBState.db.hypav2 || DBState.db.hypaV3)}
+
{
+ if (DBState.db.hypav2) {
+ DBState.db.characters[$selectedCharID].chats[DBState.db.characters[$selectedCharID].chatPage].hypaV2Data ??= {
+ lastMainChunkID: 0,
+ mainChunks: [],
+ chunks: [],
+ }
+ showHypaV2Alert();
+ } else if (DBState.db.hypaV3) {
+ showHypaV3Alert();
+ }
+
+ openMenu = false
+ }}>
+
+
+ {DBState.db.hypav2 ? language.hypaMemoryV2Modal : language.hypaMemoryV3Modal}
+
+
+ {/if}
+ {/if}
{#if DBState.db.translator !== ''}
{
diff --git a/src/lib/Others/AlertComp.svelte b/src/lib/Others/AlertComp.svelte
index 16273635..7801aee6 100644
--- a/src/lib/Others/AlertComp.svelte
+++ b/src/lib/Others/AlertComp.svelte
@@ -72,7 +72,7 @@
{
- if(e.origin.startsWith("https://sv.risuai.xyz") || e.origin.startsWith("http://127.0.0.1")){
+ if(e.origin.startsWith("https://sv.risuai.xyz") || e.origin.startsWith("http://127.0.0.1") || e.origin === window.location.origin){
if(e.data.msg.data.vaild && $alertStore.type === 'login'){
$alertStore = {
type: 'none',
diff --git a/src/lib/Others/HypaV3Modal.svelte b/src/lib/Others/HypaV3Modal.svelte
index 51451563..bd1bebb5 100644
--- a/src/lib/Others/HypaV3Modal.svelte
+++ b/src/lib/Others/HypaV3Modal.svelte
@@ -91,6 +91,15 @@
let showImportantOnly = $state(false);
$effect.pre(() => {
+ untrack(() => {
+ DBState.db.characters[$selectedCharID].chats[
+ DBState.db.characters[$selectedCharID].chatPage
+ ].hypaV3Data ??= {
+ summaries: [],
+ lastSelectedSummaries: [],
+ };
+ });
+
summaryUIStates = hypaV3DataState.summaries.map((summary) => ({
originalRef: null,
isTranslating: false,
@@ -1359,14 +1368,18 @@
{/await}
-
- {#if !getFirstMessage()}
-
+
+
+ {language.hypaV3Modal.summarizationConditionLabel}
+
+
+
+ {#if !getFirstMessage()}
{language.hypaV3Modal.emptySelectedFirstMessageLabel}
-
- {/if}
+ {/if}
+
diff --git a/src/lib/Setting/Pages/AccessibilitySettings.svelte b/src/lib/Setting/Pages/AccessibilitySettings.svelte
index da9dc756..c5ba653a 100644
--- a/src/lib/Setting/Pages/AccessibilitySettings.svelte
+++ b/src/lib/Setting/Pages/AccessibilitySettings.svelte
@@ -43,6 +43,10 @@
+
+
+
+
diff --git a/src/lib/Setting/Pages/Module/ModuleMenu.svelte b/src/lib/Setting/Pages/Module/ModuleMenu.svelte
index 82e51fce..d06d16ac 100644
--- a/src/lib/Setting/Pages/Module/ModuleMenu.svelte
+++ b/src/lib/Setting/Pages/Module/ModuleMenu.svelte
@@ -2,6 +2,9 @@
import { language } from "src/lang";
import TextInput from "src/lib/UI/GUI/TextInput.svelte";
import LoreBookData from "src/lib/SideBars/LoreBook/LoreBookData.svelte";
+ import type { loreBook } from "src/ts/storage/database.svelte";
+ import LoreBookList from "src/lib/SideBars/LoreBook/LoreBookList.svelte";
+ import { type CCLorebook, convertExternalLorebook } from "src/ts/process/lorebook.svelte";
import type { RisuModule } from "src/ts/process/modules";
import { DownloadIcon, FolderUpIcon, PlusIcon, TrashIcon } from "lucide-svelte";
import RegexList from "src/lib/SideBars/Scripts/RegexList.svelte";
@@ -9,7 +12,8 @@
import Check from "src/lib/UI/GUI/CheckInput.svelte";
import Help from "src/lib/Others/Help.svelte";
import TextAreaInput from "src/lib/UI/GUI/TextAreaInput.svelte";
- import { getFileSrc, openURL, saveAsset } from "src/ts/globalApi.svelte";
+ import { getFileSrc, openURL, saveAsset, downloadFile } from "src/ts/globalApi.svelte";
+ import { alertNormal, alertError } from "src/ts/alert";
import { exportRegex, importRegex } from "src/ts/process/scripts";
import { selectMultipleFile } from "src/ts/util";
@@ -57,6 +61,48 @@
}
}
+ async function exportLoreBook(){
+ try {
+ const lore = currentModule.lorebook
+ const stringl = Buffer.from(JSON.stringify({
+ type: 'risu',
+ ver: 1,
+ data: lore
+ }), 'utf-8')
+
+ await downloadFile(`lorebook_export.json`, stringl)
+
+ alertNormal(language.successExport)
+ } catch (error) {
+ alertError(`${error}`)
+ }
+ }
+
+ async function importLoreBook(){
+ let lore = currentModule.lorebook
+ const lorebook = (await selectMultipleFile(['json', 'lorebook']))
+ if(!lorebook){
+ return
+ }
+ try {
+ for(const f of lorebook){
+ const importedlore = JSON.parse(Buffer.from(f.data).toString('utf-8'))
+ if(importedlore.type === 'risu' && importedlore.data){
+ const datas:loreBook[] = importedlore.data
+ for(const data of datas){
+ lore.push(data)
+ }
+ }
+ else if(importedlore.entries){
+ const entries:{[key:string]:CCLorebook} = importedlore.entries
+ lore.push(...convertExternalLorebook(entries))
+ }
+ }
+ } catch (error) {
+ alertError(`${error}`)
+ }
+ }
+
function addRegex(){
if(Array.isArray(currentModule.regex)){
currentModule.regex.push({
@@ -150,17 +196,18 @@
{/if}
{#if submenu === 1 && (Array.isArray(currentModule.lorebook))}
-
- {#each currentModule.lorebook as lore, i}
-
{
- currentModule.lorebook.splice(i, 1)
- currentModule.lorebook = currentModule.lorebook
- }}/>
- {/each}
+
+
+
+
+
-
{/if}
{#if submenu === 2 && (Array.isArray(currentModule.regex))}
diff --git a/src/lib/Setting/Pages/OtherBotSettings.svelte b/src/lib/Setting/Pages/OtherBotSettings.svelte
index 13b8e9bd..474eb59b 100644
--- a/src/lib/Setting/Pages/OtherBotSettings.svelte
+++ b/src/lib/Setting/Pages/OtherBotSettings.svelte
@@ -26,8 +26,10 @@
width: 512,
height: 512,
sampler: 'k_euler',
+ noise_schedule: 'native',
steps: 100,
scale: 1,
+ cfg_rescale: 0,
sm: false,
sm_dyn: false,
strength: 0.5,
@@ -245,10 +247,21 @@
{/if}
+ Noise Schedule
+
+ Choose...
+ native
+ karras
+ exponential
+ polyexponential
+
+
steps
CFG scale
+ CFG rescale
+
{#if !DBState.db.NAII2I || DBState.db.NAIImgConfig.sampler !== 'ddim_v3'}
diff --git a/src/lib/Setting/Pages/PersonaSettings.svelte b/src/lib/Setting/Pages/PersonaSettings.svelte
index f6824319..6e2e70c6 100644
--- a/src/lib/Setting/Pages/PersonaSettings.svelte
+++ b/src/lib/Setting/Pages/PersonaSettings.svelte
@@ -8,16 +8,70 @@
import { alertConfirm, alertSelect } from "src/ts/alert";
import { getCharImage } from "src/ts/characters";
import { changeUserPersona, exportUserPersona, importUserPersona, saveUserPersona, selectUserImg } from "src/ts/persona";
+ import Sortable from 'sortablejs/modular/sortable.core.esm.js';
+ import { onDestroy, onMount } from "svelte";
+ import { sleep, sortableOptions } from "src/ts/util";
import { setDatabase } from "src/ts/storage/database.svelte";
import { DBState } from 'src/ts/stores.svelte';
import { get } from "svelte/store";
+ import { v4 } from "uuid"
+ let stb: Sortable = null
+ let ele: HTMLDivElement = $state()
+ let sorted = $state(0)
+ let selectedId:string = null
+ const createStb = () => {
+ stb = Sortable.create(ele, {
+ onStart: async () => {
+ DBState.db.personas[DBState.db.selectedPersona].id ??= v4()
+ selectedId = DBState.db.personas[DBState.db.selectedPersona].id
+ saveUserPersona()
+ },
+ onEnd: async () => {
+ let idx:number[] = []
+ ele.querySelectorAll('[data-risu-idx]').forEach((e, i) => {
+ idx.push(parseInt(e.getAttribute('data-risu-idx')))
+ })
+ let newValue:{
+ personaPrompt:string
+ name:string
+ icon:string
+ largePortrait?:boolean
+ id?:string
+ }[] = []
+ idx.forEach((i) => {
+ newValue.push(DBState.db.personas[i])
+ })
+ DBState.db.personas = newValue
+ const selectedPersona = DBState.db.personas.findIndex((e) => e.id === selectedId)
+ changeUserPersona(selectedPersona !== -1 ? selectedPersona : 0, 'noSave')
+ try {
+ stb.destroy()
+ } catch (error) {}
+ sorted += 1
+ await sleep(1)
+ createStb()
+ },
+ ...sortableOptions
+ })
+ }
+
+ onMount(createStb)
+
+ onDestroy(() => {
+ if(stb){
+ try {
+ stb.destroy()
+ } catch (error) {}
+ }
+ })
{language.persona}
-
+{#key sorted}
+
{#each DBState.db.personas as persona, i}
-
+{/key}
diff --git a/src/lib/Setting/Pages/UserSettings.svelte b/src/lib/Setting/Pages/UserSettings.svelte
index 95a4a6c5..2cdcb012 100644
--- a/src/lib/Setting/Pages/UserSettings.svelte
+++ b/src/lib/Setting/Pages/UserSettings.svelte
@@ -19,7 +19,7 @@
{
- if(e.origin.startsWith("https://sv.risuai.xyz") || e.origin.startsWith("http://127.0.0.1")){
+ if(e.origin.startsWith("https://sv.risuai.xyz") || e.origin.startsWith("http://127.0.0.1") || e.origin === window.location.origin){
if(e.data.msg.type === 'drive'){
await loadRisuAccountData()
DBState.db.account.data.refresh_token = e.data.msg.data.refresh_token
diff --git a/src/lib/SideBars/CharConfig.svelte b/src/lib/SideBars/CharConfig.svelte
index 7af6beca..c4c90a6e 100644
--- a/src/lib/SideBars/CharConfig.svelte
+++ b/src/lib/SideBars/CharConfig.svelte
@@ -1101,20 +1101,16 @@
}}
className="mt-4"
>
- {language.HypaMemory} V2 Data
+ {language.hypaMemoryV2Modal}
{:else if DBState.db.supaModelType !== 'none' && DBState.db.hypaV3}
{
- DBState.db.characters[$selectedCharID].chats[DBState.db.characters[$selectedCharID].chatPage].hypaV3Data ??= {
- summaries: [],
- lastSelectedSummaries: [],
- }
showHypaV3Alert()
}}
className="mt-4"
>
- {language.HypaMemory} V3 Data
+ {language.hypaMemoryV3Modal}
{:else if DBState.db.characters[$selectedCharID].chats[DBState.db.characters[$selectedCharID].chatPage].supaMemoryData && DBState.db.characters[$selectedCharID].chats[DBState.db.characters[$selectedCharID].chatPage].supaMemoryData.length > 4 || DBState.db.characters[$selectedCharID].supaMemory}
{language.SuperMemory}
diff --git a/src/lib/SideBars/LoreBook/LoreBookList.svelte b/src/lib/SideBars/LoreBook/LoreBookList.svelte
index 89b1f455..213eec65 100644
--- a/src/lib/SideBars/LoreBook/LoreBookList.svelte
+++ b/src/lib/SideBars/LoreBook/LoreBookList.svelte
@@ -11,9 +11,10 @@
globalMode?: boolean;
submenu?: number;
lorePlus?: boolean;
+ externalLoreBooks?: loreBook[];
}
- let { globalMode = false, submenu = 0, lorePlus = false }: Props = $props();
+ let { globalMode = false, submenu = 0, lorePlus = false, externalLoreBooks = null }: Props = $props();
let stb: Sortable = null
let ele: HTMLDivElement = $state()
let sorted = $state(0)
@@ -31,6 +32,13 @@
})
DBState.db.loreBook[DBState.db.loreBookPage].data = newLore
}
+ else if(externalLoreBooks){
+ let newLore:loreBook[] = []
+ idx.forEach((i) => {
+ newLore.push(externalLoreBooks[i])
+ })
+ externalLoreBooks = newLore
+ }
else if(submenu === 1){
let newLore:loreBook[] = []
idx.forEach((i) => {
@@ -97,6 +105,18 @@
}} onOpen={onOpen} onClose={onClose}/>
{/each}
{/if}
+ {:else if externalLoreBooks}
+ {#if externalLoreBooks.length === 0}
+ No Lorebook
+ {:else}
+ {#each externalLoreBooks as book, i}
+ {
+ let lore = externalLoreBooks
+ lore.splice(i, 1)
+ externalLoreBooks = lore
+ }} onOpen={onOpen} onClose={onClose}/>
+ {/each}
+ {/if}
{:else if submenu === 0}
{#if DBState.db.characters[$selectedCharID].globalLore.length === 0}
No Lorebook
diff --git a/src/ts/process/lorebook.svelte.ts b/src/ts/process/lorebook.svelte.ts
index 5e578be7..d428c6e1 100644
--- a/src/ts/process/lorebook.svelte.ts
+++ b/src/ts/process/lorebook.svelte.ts
@@ -55,7 +55,8 @@ export async function loadLoreBookV3Prompt(){
const recursiveScanning = char.loreSettings?.recursiveScanning ?? true
let recursivePrompt:{
prompt: string,
- source: string
+ source: string,
+ data: string
}[] = []
let matchLog:{
prompt: string,
@@ -75,23 +76,27 @@ export async function loadLoreBookV3Prompt(){
let mList:{
source:string
prompt:string
+ data:string
}[] = sliced.map((msg, i) => {
if(msg.role === 'user'){
return {
source: `message ${i} by user`,
- prompt: `\x01{{${DBState.db.username}}}:` + msg.data + '\x01'
+ prompt: `\x01{{${DBState.db.username}}}:` + msg.data + '\x01',
+ data: msg.data
}
}
else{
return {
source: `message ${i} by char`,
- prompt: `\x01{{${msg.name ?? (msg.saying ? findCharacterbyId(msg.saying)?.name : null) ?? char.name}}}:` + msg.data + '\x01'
+ prompt: `\x01{{${msg.name ?? (msg.saying ? findCharacterbyId(msg.saying)?.name : null) ?? char.name}}}:` + msg.data + '\x01',
+ data: msg.data
}
}
}).concat(recursivePrompt.map((msg) => {
return {
source: 'lorebook ' + msg.source,
- prompt: msg.prompt
+ prompt: msg.prompt,
+ data: msg.data
}
}))
@@ -106,7 +111,7 @@ export async function loadLoreBookV3Prompt(){
arg.keys[0] = regexString.replace('/'+regexFlag,'')
try {
const regex = new RegExp(arg.keys[0],regexFlag)
- const d = regex.test(mText.prompt)
+ const d = regex.test(mText.data)
if(d){
matchLog.push({
prompt: mText.prompt,
@@ -127,7 +132,8 @@ export async function loadLoreBookV3Prompt(){
mList = mList.map((m) => {
return {
source: m.source,
- prompt: m.prompt.toLocaleLowerCase().replace(/\{\{\/\/(.+?)\}\}/g,'').replace(/\{\{comment:(.+?)\}\}/g,'')
+ prompt: m.prompt.toLocaleLowerCase().replace(/\{\{\/\/(.+?)\}\}/g,'').replace(/\{\{comment:(.+?)\}\}/g,''),
+ data: m.data.toLocaleLowerCase().replace(/\{\{\/\/(.+?)\}\}/g,'').replace(/\{\{comment:(.+?)\}\}/g,'')
}
})
@@ -135,7 +141,7 @@ export async function loadLoreBookV3Prompt(){
let allModeMatched = true
for(const m of mList){
- let mText = m.prompt
+ let mText = m.data
if(arg.fullWordMatching){
const splited = mText.split(' ')
for(const key of arg.keys){
@@ -510,7 +516,7 @@ export async function importLoreBook(mode:'global'|'local'|'sglobal'){
}
}
-interface CCLorebook{
+export interface CCLorebook{
key:string[]
comment:string
content:string
diff --git a/src/ts/process/stableDiff.ts b/src/ts/process/stableDiff.ts
index fb24033a..74e56dd2 100644
--- a/src/ts/process/stableDiff.ts
+++ b/src/ts/process/stableDiff.ts
@@ -132,7 +132,7 @@ export async function generateAIImage(genPrompt:string, currentChar:character, n
"parameters": {
"params_version": 3,
"add_original_image": true,
- "cfg_rescale": 0,
+ "cfg_rescale": db.NAIImgConfig.cfg_rescale,
"controlnet_strength": 1,
"dynamic_thresholding": false,
"n_samples": 1,
@@ -145,7 +145,7 @@ export async function generateAIImage(genPrompt:string, currentChar:character, n
"sm": false,
"sm_dyn": false,
"noise": db.NAIImgConfig.noise,
- "noise_schedule": "native",
+ "noise_schedule": db.NAIImgConfig.noise_schedule,
"strength": db.NAIImgConfig.strength,
"ucPreset": 3,
"uncond_scale": 1,
@@ -435,7 +435,7 @@ export async function generateAIImage(genPrompt:string, currentChar:character, n
}
await new Promise(r => setTimeout(r, 1000))
} // Check history until the generation is complete.
- const genImgInfo = Object.values(item.outputs).flatMap((output: any) => output.images)[0];
+ const genImgInfo = Object.values(item.outputs).flatMap((output: any) => output.images || [])[0];
const imgResponse = await fetchNative(createUrl('/view', {
filename: genImgInfo.filename,
diff --git a/src/ts/storage/database.svelte.ts b/src/ts/storage/database.svelte.ts
index 1c0f6aad..7a5fb478 100644
--- a/src/ts/storage/database.svelte.ts
+++ b/src/ts/storage/database.svelte.ts
@@ -255,8 +255,10 @@ export function setDatabase(data:Database){
width:512,
height:768,
sampler:"k_dpmpp_sde",
+ noise_schedule:"native",
steps:28,
scale:5,
+ cfg_rescale: 0,
sm:true,
sm_dyn:false,
noise:0.0,
@@ -1023,6 +1025,7 @@ export interface Database{
}[]
igpPrompt:string
useTokenizerCaching:boolean
+ showMenuHypaMemoryModal:boolean
}
interface SeparateParameters{
@@ -1408,8 +1411,10 @@ export interface NAIImgConfig{
width:number,
height:number,
sampler:string,
+ noise_schedule:string,
steps:number,
scale:number,
+ cfg_rescale:number,
sm:boolean,
sm_dyn:boolean,
noise:number,
diff --git a/src/ts/tokenizer.ts b/src/ts/tokenizer.ts
index e71528a4..25fb442d 100644
--- a/src/ts/tokenizer.ts
+++ b/src/ts/tokenizer.ts
@@ -6,9 +6,27 @@ import { supportsInlayImage } from "./process/files/inlays";
import { risuChatParser } from "./parser.svelte";
import { tokenizeGGUFModel } from "./process/models/local";
import { globalFetch } from "./globalApi.svelte";
-import { getModelInfo, LLMTokenizer } from "./model/modellist";
+import { getModelInfo, LLMTokenizer, type LLMModel } from "./model/modellist";
import { pluginV2 } from "./plugins/plugins";
import type { GemmaTokenizer } from "@huggingface/transformers";
+import { LRUMap } from 'mnemonist';
+
+const MAX_CACHE_SIZE = 1500;
+
+const encodeCache = new LRUMap(MAX_CACHE_SIZE);
+
+function getHash(
+ data: string,
+ aiModel: string,
+ customTokenizer: string,
+ currentPluginProvider: string,
+ googleClaudeTokenizing: boolean,
+ modelInfo: LLMModel,
+ pluginTokenizer: string
+): string {
+ const combined = `${data}::${aiModel}::${customTokenizer}::${currentPluginProvider}::${googleClaudeTokenizing ? '1' : '0'}::${modelInfo.tokenizer}::${pluginTokenizer}`;
+ return combined;
+}
export const tokenizerList = [
@@ -25,100 +43,114 @@ export const tokenizerList = [
] as const
export async function encode(data:string):Promise<(number[]|Uint32Array|Int32Array)>{
- let db = getDatabase()
+ const db = getDatabase();
+ const modelInfo = getModelInfo(db.aiModel);
+ const pluginTokenizer = pluginV2.providerOptions.get(db.currentPluginProvider)?.tokenizer ?? "none";
+
+ let cacheKey = ''
+ if(db.useTokenizerCaching){
+ cacheKey = getHash(
+ data,
+ db.aiModel,
+ db.customTokenizer,
+ db.currentPluginProvider,
+ db.googleClaudeTokenizing,
+ modelInfo,
+ pluginTokenizer
+ );
+ const cachedResult = encodeCache.get(cacheKey);
+ if (cachedResult !== undefined) {
+ return cachedResult;
+ }
+ }
+
+ let result: number[] | Uint32Array | Int32Array | undefined;
+
if(db.aiModel === 'openrouter' || db.aiModel === 'reverse_proxy'){
switch(db.customTokenizer){
case 'mistral':
- return await tokenizeWebTokenizers(data, 'mistral')
+ result = await tokenizeWebTokenizers(data, 'mistral'); break;
case 'llama':
- return await tokenizeWebTokenizers(data, 'llama')
+ result = await tokenizeWebTokenizers(data, 'llama'); break;
case 'novelai':
- return await tokenizeWebTokenizers(data, 'novelai')
+ result = await tokenizeWebTokenizers(data, 'novelai'); break;
case 'claude':
- return await tokenizeWebTokenizers(data, 'claude')
+ result = await tokenizeWebTokenizers(data, 'claude'); break;
case 'novellist':
- return await tokenizeWebTokenizers(data, 'novellist')
+ result = await tokenizeWebTokenizers(data, 'novellist'); break;
case 'llama3':
- return await tokenizeWebTokenizers(data, 'llama')
+ result = await tokenizeWebTokenizers(data, 'llama'); break;
case 'gemma':
- return await gemmaTokenize(data)
+ result = await gemmaTokenize(data); break;
case 'cohere':
- return await tokenizeWebTokenizers(data, 'cohere')
+ result = await tokenizeWebTokenizers(data, 'cohere'); break;
case 'deepseek':
- return await tokenizeWebTokenizers(data, 'DeepSeek')
+ result = await tokenizeWebTokenizers(data, 'DeepSeek'); break;
default:
- return await tikJS(data, 'o200k_base')
+ result = await tikJS(data, 'o200k_base'); break;
}
- }
-
- const modelInfo = getModelInfo(db.aiModel)
-
- if(db.aiModel === 'custom' && pluginV2.providerOptions.get(db.currentPluginProvider)?.tokenizer){
- const tokenizer = pluginV2.providerOptions.get(db.currentPluginProvider)?.tokenizer
- switch(tokenizer){
+ } else if (db.aiModel === 'custom' && pluginTokenizer !== "none") {
+ switch(pluginTokenizer){
case 'mistral':
- return await tokenizeWebTokenizers(data, 'mistral')
+ result = await tokenizeWebTokenizers(data, 'mistral'); break;
case 'llama':
- return await tokenizeWebTokenizers(data, 'llama')
+ result = await tokenizeWebTokenizers(data, 'llama'); break;
case 'novelai':
- return await tokenizeWebTokenizers(data, 'novelai')
+ result = await tokenizeWebTokenizers(data, 'novelai'); break;
case 'claude':
- return await tokenizeWebTokenizers(data, 'claude')
+ result = await tokenizeWebTokenizers(data, 'claude'); break;
case 'novellist':
- return await tokenizeWebTokenizers(data, 'novellist')
+ result = await tokenizeWebTokenizers(data, 'novellist'); break;
case 'llama3':
- return await tokenizeWebTokenizers(data, 'llama')
+ result = await tokenizeWebTokenizers(data, 'llama'); break;
case 'gemma':
- return await gemmaTokenize(data)
+ result = await gemmaTokenize(data); break;
case 'cohere':
- return await tokenizeWebTokenizers(data, 'cohere')
+ result = await tokenizeWebTokenizers(data, 'cohere'); break;
case 'o200k_base':
- return await tikJS(data, 'o200k_base')
+ result = await tikJS(data, 'o200k_base'); break;
case 'cl100k_base':
- return await tikJS(data, 'cl100k_base')
+ result = await tikJS(data, 'cl100k_base'); break;
case 'custom':
- return await pluginV2.providerOptions.get(db.currentPluginProvider)?.tokenizerFunc?.(data) ?? [0]
+ result = await pluginV2.providerOptions.get(db.currentPluginProvider)?.tokenizerFunc?.(data) ?? [0]; break;
default:
- return await tikJS(data, 'o200k_base')
+ result = await tikJS(data, 'o200k_base'); break;
+ }
+ }
+
+ // Fallback
+ if (result === undefined) {
+ if(modelInfo.tokenizer === LLMTokenizer.NovelList){
+ result = await tokenizeWebTokenizers(data, 'novellist');
+ } else if(modelInfo.tokenizer === LLMTokenizer.Claude){
+ result = await tokenizeWebTokenizers(data, 'claude');
+ } else if(modelInfo.tokenizer === LLMTokenizer.NovelAI){
+ result = await tokenizeWebTokenizers(data, 'novelai');
+ } else if(modelInfo.tokenizer === LLMTokenizer.Mistral){
+ result = await tokenizeWebTokenizers(data, 'mistral');
+ } else if(modelInfo.tokenizer === LLMTokenizer.Llama){
+ result = await tokenizeWebTokenizers(data, 'llama');
+ } else if(modelInfo.tokenizer === LLMTokenizer.Local){
+ result = await tokenizeGGUFModel(data);
+ } else if(modelInfo.tokenizer === LLMTokenizer.tiktokenO200Base){
+ result = await tikJS(data, 'o200k_base');
+ } else if(modelInfo.tokenizer === LLMTokenizer.GoogleCloud && db.googleClaudeTokenizing){
+ result = await tokenizeGoogleCloud(data);
+ } else if(modelInfo.tokenizer === LLMTokenizer.Gemma || modelInfo.tokenizer === LLMTokenizer.GoogleCloud){
+ result = await gemmaTokenize(data);
+ } else if(modelInfo.tokenizer === LLMTokenizer.DeepSeek){
+ result = await tokenizeWebTokenizers(data, 'DeepSeek');
+ } else if(modelInfo.tokenizer === LLMTokenizer.Cohere){
+ result = await tokenizeWebTokenizers(data, 'cohere');
+ } else {
+ result = await tikJS(data);
}
}
-
- if(modelInfo.tokenizer === LLMTokenizer.NovelList){
- const nv= await tokenizeWebTokenizers(data, 'novellist')
- return nv
- }
- if(modelInfo.tokenizer === LLMTokenizer.Claude){
- return await tokenizeWebTokenizers(data, 'claude')
- }
- if(modelInfo.tokenizer === LLMTokenizer.NovelAI){
- return await tokenizeWebTokenizers(data, 'novelai')
- }
- if(modelInfo.tokenizer === LLMTokenizer.Mistral){
- return await tokenizeWebTokenizers(data, 'mistral')
- }
- if(modelInfo.tokenizer === LLMTokenizer.Llama){
- return await tokenizeWebTokenizers(data, 'llama')
- }
- if(modelInfo.tokenizer === LLMTokenizer.Local){
- return await tokenizeGGUFModel(data)
- }
- if(modelInfo.tokenizer === LLMTokenizer.tiktokenO200Base){
- return await tikJS(data, 'o200k_base')
- }
- if(modelInfo.tokenizer === LLMTokenizer.GoogleCloud && db.googleClaudeTokenizing){
- return await tokenizeGoogleCloud(data)
- }
- if(modelInfo.tokenizer === LLMTokenizer.Gemma || modelInfo.tokenizer === LLMTokenizer.GoogleCloud){
- return await gemmaTokenize(data)
- }
- if(modelInfo.tokenizer === LLMTokenizer.DeepSeek){
- return await tokenizeWebTokenizers(data, 'DeepSeek')
- }
- if(modelInfo.tokenizer === LLMTokenizer.Cohere){
- return await tokenizeWebTokenizers(data, 'cohere')
+ if(db.useTokenizerCaching){
+ encodeCache.set(cacheKey, result);
}
- return await tikJS(data)
+ return result;
}
type tokenizerType = 'novellist'|'claude'|'novelai'|'llama'|'mistral'|'llama3'|'gemma'|'cohere'|'googleCloud'|'DeepSeek'
@@ -177,6 +209,7 @@ async function gemmaTokenize(text:string) {
async function tikJS(text:string, model='cl100k_base') {
if(!tikParser || lastTikModel !== model){
+ tikParser?.free()
if(model === 'cl100k_base'){
const {Tiktoken} = await import('@dqbd/tiktoken')
const cl100k_base = await import("@dqbd/tiktoken/encoders/cl100k_base.json");