[feat] new pngchunk encoder/decoder

kwaroran
2024-01-03 04:13:19 +09:00
parent 239d819383
commit a8e50dcd77
10 changed files with 196 additions and 24 deletions

View File

@@ -32,6 +32,7 @@
"buffer": "^6.0.3",
"core-js": "^3.33.3",
"cors": "^2.8.5",
"crc": "^4.3.2",
"dompurify": "^3.0.6",
"exifr": "^7.1.3",
"express": "^4.18.2",
@@ -63,8 +64,7 @@
"tippy.js": "^6.3.7",
"uuid": "^9.0.1",
"wasmoon": "^1.15.1",
"web-streams-polyfill": "^3.2.1",
"yuso": "^0.1.3"
"web-streams-polyfill": "^3.2.1"
},
"devDependencies": {
"@capacitor/assets": "^3.0.4",

pnpm-lock.yaml (generated, 22 lines changed)
View File

@@ -59,6 +59,9 @@ dependencies:
cors:
specifier: ^2.8.5
version: 2.8.5
crc:
specifier: ^4.3.2
version: 4.3.2(buffer@6.0.3)
dompurify:
specifier: ^3.0.6
version: 3.0.6
@@ -155,9 +158,6 @@ dependencies:
web-streams-polyfill:
specifier: ^3.2.1
version: 3.2.1
yuso:
specifier: ^0.1.3
version: 0.1.3
devDependencies:
'@capacitor/assets':
@@ -2243,6 +2243,18 @@ packages:
engines: {node: '>=0.8'}
dev: false
/crc@4.3.2(buffer@6.0.3):
resolution: {integrity: sha512-uGDHf4KLLh2zsHa8D8hIQ1H/HtFQhyHrc0uhHBcoKGol/Xnb+MPYfUMw7cvON6ze/GUESTudKayDcJC5HnJv1A==}
engines: {node: '>=12'}
peerDependencies:
buffer: '>=6.0.3'
peerDependenciesMeta:
buffer:
optional: true
dependencies:
buffer: 6.0.3
dev: false
/create-require@1.1.1:
resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==}
dev: true
@@ -5901,7 +5913,3 @@ packages:
resolution: {integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==}
engines: {node: '>=6'}
dev: true
/yuso@0.1.3:
resolution: {integrity: sha512-CvJIuUBQ+SX9H8GT6K0sDL8Hs/SeLISBsJhfIjgNwzA0fudfNXNSq2RYsFp9lTW/fhsgJweGkleuYfdQyRUtDA==}
dev: false

View File

@@ -16,9 +16,9 @@
let gridOpen = false
DataBase.subscribe(db => {
// if(db.didFirstSetup !== didFirstSetup){
// didFirstSetup = db.didFirstSetup || false
// }
if(db.didFirstSetup !== didFirstSetup){
didFirstSetup = db.didFirstSetup || false
}
})
</script>

View File

@@ -9,8 +9,9 @@ import { checkCharOrder, downloadFile, readImage, saveAsset } from "./storage/gl
import { cloneDeep } from "lodash"
import { selectedCharID } from "./stores"
import { convertImage } from "./parser"
import * as yuso from 'yuso'
import { reencodeImage } from "./image"
import { PngChunk } from "./pngChunk"
export const hubURL = "https://sv.risuai.xyz"
@@ -60,7 +61,11 @@ async function importCharacterProcess(f:{
await sleep(10)
const img = f.data
const readed = yuso.decode(img, 'chara')
const readed = PngChunk.read(img, ['chara'])?.['chara']
if(!readed){
alertError(language.errors.noData)
return
}
{
const charaData:CharacterCardV2 = JSON.parse(Buffer.from(readed, 'base64').toString('utf-8'))
if(await importSpecv2(charaData, img)){
@@ -504,13 +509,16 @@ export async function exportSpecV2(char:character, type:'png'|'json' = 'png') {
await sleep(10)
img = await reencodeImage(img)
img = yuso.encode(img, "chara",Buffer.from(JSON.stringify(card)).toString('base64'))
alertStore.set({
type: 'wait',
msg: 'Loading... (Writing)'
})
img = (await PngChunk.write(img, {
"chara":Buffer.from(JSON.stringify(card)).toString('base64')
})) as Uint8Array
char.image = ''
await sleep(10)
await downloadFile(`${char.name.replace(/[<>:"/\\|?*\.\,]/g, "")}_export.png`, img)
@@ -519,6 +527,7 @@ export async function exportSpecV2(char:character, type:'png'|'json' = 'png') {
}
catch(e){
console.error(e, e.stack)
alertError(`${e}`)
}
}

View File

@@ -7,9 +7,9 @@ import { checkNullish, findCharacterbyId, selectMultipleFile, selectSingleFile,
import { v4 as uuidv4 } from 'uuid';
import { selectedCharID } from "./stores";
import { checkCharOrder, downloadFile, getFileSrc } from "./storage/globalApi";
import * as yuso from 'yuso'
import { reencodeImage } from "./image";
import { updateInlayScreen } from "./process/inlayScreen";
import { PngChunk } from "./pngChunk";
export function createNewCharacter() {
let db = get(DataBase)
@@ -473,7 +473,7 @@ export async function addDefaultCharacters() {
for(const img of imgs){
const imgBuffer = await (await img).arrayBuffer()
const readed = yuso.decode(Buffer.from(imgBuffer), "risuai")
const readed = PngChunk.read(Buffer.from(imgBuffer), ["risuai"])?.risuai
await sleep(10)
const va = decodeMsgpack(Buffer.from(readed,'base64')) as any
if(va.type !== 101){

View File

@@ -50,7 +50,7 @@ export async function SaveLocalBackup(){
if(!key || !key.endsWith('.png')){
continue
}
await writer.write(key, await readBinaryFile(asset.path))
await writer.writeBackup(key, await readBinaryFile(asset.path))
}
}
else{
@@ -63,7 +63,7 @@ export async function SaveLocalBackup(){
if(!key || !key.endsWith('.png')){
continue
}
await writer.write(key, await forageStorage.getItem(key))
await writer.writeBackup(key, await forageStorage.getItem(key))
if(forageStorage.isAccount){
await sleep(1000)
}
@@ -74,7 +74,7 @@ export async function SaveLocalBackup(){
alertWait(`Saving local Backup... (Saving database)`)
await writer.write('database.risudat', dbData)
await writer.writeBackup('database.risudat', dbData)
alertNormal('Success')

View File

@@ -98,6 +98,7 @@ export function supportsInlayImage(){
}
export async function reencodeImage(img:Uint8Array){
const canvas = document.createElement('canvas')
const imgObj = new Image()
imgObj.src = URL.createObjectURL(new Blob([img], {type: `image/png`}))

View File

@@ -2,11 +2,11 @@ import { get } from "svelte/store"
import { DataBase, saveImage, setDatabase } from "./storage/database"
import { selectSingleFile, sleep } from "./util"
import { alertError, alertNormal, alertStore } from "./alert"
import * as yuso from 'yuso'
import { downloadFile, readImage } from "./storage/globalApi"
import { language } from "src/lang"
import { cloneDeep } from "lodash"
import { reencodeImage } from "./image"
import { PngChunk } from "./pngChunk"
export async function selectUserImg() {
const selected = await selectSingleFile(['png'])
@@ -81,7 +81,9 @@ export async function exportUserPersona(){
await sleep(10)
img = yuso.encode(await reencodeImage(img), "persona",Buffer.from(JSON.stringify(card)).toString('base64'))
img = (await PngChunk.write(await reencodeImage(img), {
"persona":Buffer.from(JSON.stringify(card)).toString('base64')
})) as Uint8Array
alertStore.set({
type: 'wait',
@@ -98,7 +100,12 @@ export async function importUserPersona(){
try {
const v = await selectSingleFile(['png'])
const data:PersonaCard = JSON.parse(Buffer.from(yuso.decode(v.data, "persona"), 'base64').toString('utf-8'))
const decoded = PngChunk.read(v.data, ['persona'])?.persona
if(!decoded){
alertError(language.errors.noData)
return
}
const data:PersonaCard = JSON.parse(Buffer.from(decoded, 'base64').toString('utf-8'))
if(data.name && data.personaPrompt){
let db = get(DataBase)
db.personas.push({

src/ts/pngChunk.ts (new file, 144 lines added)
View File

@@ -0,0 +1,144 @@
import { Buffer } from 'buffer';
import crc32 from 'crc/crc32';
import type { LocalWriter } from './storage/globalApi';
export const PngChunk = {
    // Read selected tEXt chunks from a PNG. Returns a map of keyword -> text
    // for the keywords listed in chunkName. Optionally verifies each chunk's CRC-32.
    read: (data:Uint8Array, chunkName:string[], arg:{checkCrc?:boolean} = {}) => {
        let pos = 8 // skip the 8-byte PNG signature
        let chunks:{[key:string]:string} = {}
        while(pos < data.length){
            // each chunk is: 4-byte big-endian length, 4-byte type, data, 4-byte CRC
            const len = data[pos] * 0x1000000 + data[pos+1] * 0x10000 + data[pos+2] * 0x100 + data[pos+3]
            const type = data.slice(pos+4,pos+8)
            const typeString = new TextDecoder().decode(type)
            console.log(typeString, len)
            if(arg.checkCrc){
                // the CRC covers the type and data fields, not the length
                const crc = data[pos+8+len] * 0x1000000 + data[pos+9+len] * 0x10000 + data[pos+10+len] * 0x100 + data[pos+11+len]
                const crcCheck = crc32(data.slice(pos+4,pos+8+len))
                if(crc !== crcCheck){
                    throw new Error('crc check failed')
                }
            }
            if(typeString === 'IEND'){
                break
            }
            if(typeString === 'tEXt'){
                // tEXt payload layout: keyword, NUL separator, text
                const chunkData = data.slice(pos+8,pos+8+len)
                let key=''
                let value=''
                for(let i=0;i<80;i++){ // PNG keywords are 1-79 bytes long
                    if(chunkData[i] === 0){
                        key = new TextDecoder().decode(chunkData.slice(0,i))
                        value = new TextDecoder().decode(chunkData.slice(i+1)) // skip the NUL separator
                        break
                    }
                }
                if(chunkName.includes(key)){
                    chunks[key] = value
                }
            }
            pos += 12 + len // 4 (length) + 4 (type) + len (data) + 4 (CRC)
        }
        return chunks
    },
    // Return a copy of the PNG with all tEXt chunks removed
    trim: (data:Uint8Array) => {
        let pos = 8
        let newData:Uint8Array[] = [data.slice(0,8)] // keep the PNG signature
        while(pos < data.length){
            const len = data[pos] * 0x1000000 + data[pos+1] * 0x10000 + data[pos+2] * 0x100 + data[pos+3]
            const type = data.slice(pos+4,pos+8)
            const typeString = new TextDecoder().decode(type)
            if(typeString === 'IEND'){
                break // IEND and anything after it is appended below
            }
            if(typeString === 'tEXt'){
                pos += 12 + len // drop tEXt chunks
            }
            else{
                newData.push(data.slice(pos,pos+12+len))
                pos += 12 + len
            }
        }
        newData.push(data.slice(pos)) // IEND chunk and any trailing bytes
        return Buffer.concat(newData)
    },
    // Rewrite a PNG, replacing its tEXt chunks with the given keyword -> text map.
    // Existing tEXt chunks are dropped; all other chunks are copied through.
    // If options.writer is set, the output is streamed to it (and the writer is
    // closed); otherwise the whole new PNG is returned as a Buffer.
    write: async (data:Uint8Array, chunks:{[key:string]:string}, options:{writer?:LocalWriter} = {}):Promise<void | Buffer> => {
        let pos = 8
        let newData:Uint8Array[] = []
        async function pushData(data:Uint8Array){
            if(options.writer){
                await options.writer.write(data)
            }
            else{
                newData.push(data)
            }
        }
        await pushData(data.slice(0,8)) // PNG signature
        while(pos < data.length){
            const len = data[pos] * 0x1000000 + data[pos+1] * 0x10000 + data[pos+2] * 0x100 + data[pos+3]
            const type = data.slice(pos+4,pos+8)
            const typeString = new TextDecoder().decode(type)
            if(typeString === 'IEND'){
                break // IEND is re-created below, after the new tEXt chunks
            }
            if(typeString === 'tEXt'){
                pos += 12 + len // drop existing tEXt chunks
            }
            else{
                await pushData(data.slice(pos,pos+12+len))
                pos += 12 + len
            }
        }
        for(const key in chunks){
            const keyData = new TextEncoder().encode(key)
            const value = Buffer.from(chunks[key])
            const lenNum = value.byteLength + keyData.byteLength + 1 // keyword + NUL + text
            // PNG lengths are big-endian, so build the 4 bytes by hand
            // (a Uint32Array would use the platform's little-endian byte order)
            const length = new Uint8Array([
                lenNum / 0x1000000 % 0x100,
                lenNum / 0x10000 % 0x100,
                lenNum / 0x100 % 0x100,
                lenNum % 0x100
            ])
            const type = new TextEncoder().encode('tEXt')
            await pushData(length)
            await pushData(type)
            await pushData(keyData)
            await pushData(new Uint8Array([0])) // NUL separator between keyword and text
            await pushData(value)
            // CRC-32 over the chunk type and data, stored big-endian
            const crc = crc32(Buffer.concat([type,keyData,new Uint8Array([0]),value]))
            await pushData(new Uint8Array([
                crc / 0x1000000 % 0x100,
                crc / 0x10000 % 0x100,
                crc / 0x100 % 0x100,
                crc % 0x100
            ]))
        }
        //create IEND chunk
        {
            const length = new Uint8Array((new Uint32Array([0])).buffer) // four zero bytes: IEND has no data
            const type = new TextEncoder().encode('IEND')
            await pushData(length)
            await pushData(type)
            const crc = crc32(type)
            await pushData(new Uint8Array([
                crc / 0x1000000 % 0x100,
                crc / 0x10000 % 0x100,
                crc / 0x100 % 0x100,
                crc % 0x100
            ]))
        }
        if(options.writer){
            await options.writer.close()
        }
        else{
            return Buffer.concat(newData)
        }
    },
}
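
For orientation, a minimal usage sketch (not part of this commit) of the round trip the changed call sites perform; `img` stands for any PNG already loaded as a Uint8Array, and the `chara` keyword and payload mirror the character-export path:

import { Buffer } from 'buffer'
import { PngChunk } from './pngChunk'

async function roundTrip(img: Uint8Array): Promise<boolean> {
    const payload = Buffer.from(JSON.stringify({ name: 'example' })).toString('base64')
    // without a writer, write() resolves to a Buffer holding the rewritten PNG
    const withChunk = (await PngChunk.write(img, { chara: payload })) as Uint8Array
    // read() scans tEXt chunks and returns only the requested keywords
    const readBack = PngChunk.read(withChunk, ['chara'])?.['chara']
    return readBack === payload
}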

View File

@@ -1195,7 +1195,7 @@ export class LocalWriter{
this.writer = writableStream.getWriter()
return true
}
async write(name:string,data: Uint8Array){
async writeBackup(name:string,data: Uint8Array){
const encodedName = new TextEncoder().encode(getBasename(name))
const nameLength = new Uint32Array([encodedName.byteLength])
await this.writer.write(new Uint8Array(nameLength.buffer))
@@ -1204,6 +1204,9 @@ export class LocalWriter{
await this.writer.write(new Uint8Array(dataLength.buffer))
await this.writer.write(data)
}
async write(data:Uint8Array) {
await this.writer.write(data)
}
async close(){
await this.writer.close()
}
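
The rename above separates the two write paths: writeBackup keeps the length-prefixed name/data framing used by SaveLocalBackup, while the new raw write(data) is what PngChunk.write's options.writer path calls to push each PNG piece to the underlying stream. A small sketch (assumptions: `writer` is an already-initialized LocalWriter and `cardBase64` is the base64-encoded card JSON):

// streamed export sketch: no full Buffer is built; PngChunk.write pushes each
// piece through writer.write(...) and closes the writer when it is done
async function streamCardToDisk(img: Uint8Array, cardBase64: string, writer: LocalWriter) {
    await PngChunk.write(img, { chara: cardBase64 }, { writer })
}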