Optimize tauri write binary file (#540)

It's a known issue that IPC call in Tauri V1 is slow
(https://github.com/tauri-apps/tauri/issues/4197) Specifically,
writeBinaryFile can take several seconds if the file is around 10
MB because of an internal serialization bottleneck in Tauri. This is
really bad as we use writeBinaryFile every second to save the database.

This PR creates a separate HTTP server that handles only the bottleneck
part: writeBinaryFile for database.bin and assets.
This commit is contained in:
kwaroran
2024-07-02 05:57:55 +09:00
committed by GitHub
4 changed files with 131 additions and 12 deletions

View File

@@ -1,4 +1,5 @@
import { writeBinaryFile,BaseDirectory, readBinaryFile, exists, createDir, readDir, removeFile } from "@tauri-apps/api/fs"
import { changeFullscreen, checkNullish, findCharacterbyId, sleep } from "../util"
import { convertFileSrc, invoke } from "@tauri-apps/api/tauri"
import { v4 as uuidv4, v4 } from 'uuid';
@@ -55,6 +56,26 @@ interface fetchLog{
let fetchLog:fetchLog[] = []
/**
 * Writes binary data to `appPath` via the local sidecar HTTP server instead
 * of Tauri's IPC `writeBinaryFile`, which has a serialization bottleneck for
 * multi-MB payloads (https://github.com/tauri-apps/tauri/issues/4197).
 *
 * @param appPath - Destination path, relative to the app data directory.
 * @param data - Raw bytes to write.
 * @throws Error if the server responds with a non-2xx status.
 */
async function writeBinaryFileFast(appPath: string, data: Uint8Array) {
    // The secret and port lookups are independent IPC calls; run them in
    // parallel instead of awaiting them one after the other.
    const [secret, port] = await Promise.all([
        invoke('get_http_secret') as Promise<string>,
        invoke('get_http_port') as Promise<number>,
    ]);
    const apiUrl = `http://127.0.0.1:${port}/?path=${encodeURIComponent(appPath)}`;
    const response = await fetch(apiUrl, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/octet-stream',
            // Shared secret so only this app can talk to the local server.
            'x-tauri-secret': secret
        },
        body: new Blob([data])
    });
    if (!response.ok) {
        throw new Error(`HTTP error! status: ${response.status}`);
    }
}
export async function downloadFile(name:string, dat:Uint8Array|ArrayBuffer|string) {
if(typeof(dat) === 'string'){
dat = Buffer.from(dat, 'utf-8')
@@ -233,7 +254,7 @@ export async function saveAsset(data:Uint8Array, customId:string = '', fileName:
fileExtension = fileName.split('.').pop()
}
if(isTauri){
await writeBinaryFile(`assets/${id}.${fileExtension}`, data ,{dir: BaseDirectory.AppData})
await writeBinaryFileFast(`assets/${id}.${fileExtension}`, data);
return `assets/${id}.${fileExtension}`
}
else{
@@ -299,8 +320,8 @@ export async function saveDb(){
db.saveTime = Math.floor(Date.now() / 1000)
const dbData = encodeRisuSave(db)
if(isTauri){
await writeBinaryFile('database/database.bin', dbData, {dir: BaseDirectory.AppData})
await writeBinaryFile(`database/dbbackup-${(Date.now()/100).toFixed()}.bin`, dbData, {dir: BaseDirectory.AppData})
await writeBinaryFileFast('database/database.bin', dbData);
await writeBinaryFileFast(`database/dbbackup-${(Date.now()/100).toFixed()}.bin`, dbData);
}
else{
if(!forageStorage.isAccount){
@@ -393,9 +414,7 @@ export async function loadData() {
await createDir('assets', {dir: BaseDirectory.AppData})
}
if(!await exists('database/database.bin', {dir: BaseDirectory.AppData})){
await writeBinaryFile('database/database.bin',
encodeRisuSave({})
,{dir: BaseDirectory.AppData})
await writeBinaryFileFast('database/database.bin', encodeRisuSave({}));
}
try {
setDatabase(
@@ -1586,4 +1605,4 @@ export class BlankWriter{
async end(){
//do nothing, just to make compatible with other writer
}
}
}

View File

@@ -16,7 +16,7 @@ const magicCompressedHeader = new Uint8Array([0, 82, 73, 83, 85, 83, 65, 86, 69,
export function encodeRisuSave(data:any, compression:'noCompression'|'compression' = 'noCompression'){
let encoded:Uint8Array = packr.encode(data)
if(isTauri || compression === 'compression'){
if(compression === 'compression'){
encoded = fflate.compressSync(encoded)
const result = new Uint8Array(encoded.length + magicCompressedHeader.length);
result.set(magicCompressedHeader, 0)