[feat] added inlay emotion/imggen & reworked imggen
@@ -9,6 +9,7 @@ import { selectedCharID } from "./stores";
import { checkCharOrder, downloadFile, getFileSrc, readImage } from "./storage/globalApi";
import * as yuso from 'yuso'
import { reencodeImage } from "./image";
import { updateInlayScreen } from "./process/inlayScreen";

export function createNewCharacter() {
let db = get(DataBase)
@@ -300,6 +301,9 @@ export function characterFormatUpdate(index:number|character){
depth: 0,
prompt: ''
}
if(!cha.newGenData){
cha = updateInlayScreen(cha)
}
}
else{
if((!cha.characterTalks) || cha.characterTalks.length !== cha.characters.length){

@@ -24,12 +24,21 @@ export async function postInlayImage(){

const extention = img.name.split('.').at(-1)

//draw in canvas to convert to png
const imgObj = new Image()
imgObj.src = URL.createObjectURL(new Blob([img.data], {type: `image/${extention}`}))

return await writeInlayImage(imgObj, {
name: img.name,
ext: extention
})
}

export async function writeInlayImage(imgObj:HTMLImageElement, arg:{name?:string, ext?:string} = {}) {

let drawHeight = 0
let drawWidth = 0
const canvas = document.createElement('canvas')
const ctx = canvas.getContext('2d')
const imgObj = new Image()
let drawHeight, drawWidth = 0
imgObj.src = URL.createObjectURL(new Blob([img.data], {type: `image/${extention}`}))
await new Promise((resolve) => {
imgObj.onload = () => {
drawHeight = imgObj.height
@@ -59,9 +68,9 @@ export async function postInlayImage(){
const imgid = v4()

await inlayStorage.setItem(imgid, {
name: img.name,
name: arg.name ?? imgid,
data: dataURI,
ext: extention,
ext: arg.ext ?? 'png',
height: drawHeight,
width: drawWidth
})

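A minimal usage sketch of the split above, assuming postInlayImage keeps the file handling while the extracted writeInlayImage owns the canvas re-encode and the inlayStorage write, and that writeInlayImage resolves to a marker for the stored image (its return value is not shown in this hunk). The helper name below is hypothetical.

// Hypothetical helper: decode raw bytes into an HTMLImageElement and hand it to writeInlayImage.
async function bytesToInlay(data: Uint8Array, name: string, ext: string): Promise<string> {
    const imgObj = new Image()
    imgObj.src = URL.createObjectURL(new Blob([data], { type: `image/${ext}` }))
    await new Promise<void>((resolve) => { imgObj.onload = () => resolve() }) // wait for decode, as in the hunk above
    return await writeInlayImage(imgObj, { name, ext }) // stores the PNG + metadata under a fresh id (assumed marker return)
}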
@@ -65,7 +65,7 @@ DOMPurify.addHook("uponSanitizeAttribute", (node, data) => {
})


const assetRegex = /{{(raw|img|video|audio|bg)::(.+?)}}/g
const assetRegex = /{{(raw|img|video|audio|bg|emotion)::(.+?)}}/g

async function parseAdditionalAssets(data:string, char:simpleCharacterArgument|character, mode:'normal'|'back', mode2:'unset'|'pre'|'post' = 'unset'){
const db = get(DataBase)
@@ -74,13 +74,28 @@ async function parseAdditionalAssets(data:string, char:simpleCharacterArgument|c
if(char.additionalAssets){

let assetPaths:{[key:string]:string} = {}
let emoPaths:{[key:string]:string} = {}

for(const asset of char.additionalAssets){
const assetPath = await getFileSrc(asset[1])
assetPaths[asset[0].toLocaleLowerCase()] = assetPath
}
if(char.emotionImages){
for(const emo of char.emotionImages){
const emoPath = await getFileSrc(emo[1])
emoPaths[emo[0].toLocaleLowerCase()] = emoPath
}
}
data = data.replaceAll(assetRegex, (full:string, type:string, name:string) => {
name = name.toLocaleLowerCase()
if(type === 'emotion'){
console.log(emoPaths, name)
const path = emoPaths[name]
if(!path){
return ''
}
return `<img src="${path}" alt="${path}" style="${assetWidthString} "/>`
}
const path = assetPaths[name]
if(!path){
return ''
@@ -129,6 +144,7 @@ export interface simpleCharacterArgument{
customscript: customscript[]
chaId: string,
virtualscript?: string
emotionImages?: [string, string][]
}


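What the widened assetRegex now accepts, as a small self-contained sketch: {{emotion::<name>}} is resolved against emoPaths (built from char.emotionImages) rather than assetPaths, with names lower-cased on both sides. The example path is illustrative only.

const emoPaths: { [key: string]: string } = { happy: 'asset://emotions/happy.png' } // illustrative value
const parsed = 'She smiles. {{emotion::Happy}}'.replaceAll(
    /{{(raw|img|video|audio|bg|emotion)::(.+?)}}/g,
    (full: string, type: string, name: string) => type === 'emotion' ? `<img src="${emoPaths[name.toLocaleLowerCase()] ?? ''}"/>` : full
)
// parsed === 'She smiles. <img src="asset://emotions/happy.png"/>'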
@@ -22,6 +22,7 @@ import { cipherChat, decipherChat } from "./cipherChat";
import { getInlayImage, supportsInlayImage } from "../image";
import { getGenerationModelString } from "./models/modelString";
import { sendPeerChar } from "../sync/multiuser";
import { runInlayScreen } from "./inlayScreen";

export interface OpenAIChat{
role: 'system'|'user'|'assistant'|'function'
@@ -299,6 +300,21 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
content: risuChatParser(db.personaPrompt, {chara: currentChar})
})
}

if(currentChar.inlayViewScreen){
if(currentChar.viewScreen === 'emotion'){
unformated.postEverything.push({
role: 'system',
content: currentChar.newGenData.emotionInstructions.replaceAll('{{slot}}', currentChar.emotionImages.map((v) => v[0]).join(', '))
})
}
if(currentChar.viewScreen === 'imggen'){
unformated.postEverything.push({
role: 'system',
content: currentChar.newGenData.instructions
})
}
}

if(lorepmt.special_act){
unformated.postEverything.push({
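For concreteness, what the new postEverything injection resolves to in emotion mode (values assumed): emotionInstructions comes from updateInlayScreen in the new inlayScreen module further below, and {{slot}} is filled with the character's emotion image names.

// Assuming emotionImages = [['happy', ...], ['sad', ...], ['agree', ...]], the pushed system
// message ends with a command list such as: 'List of commands: happy, sad, agree'
const emotionInstructionsTemplate = 'List of commands: {{slot}}' // stand-in for currentChar.newGenData.emotionInstructions
const injected = emotionInstructionsTemplate.replaceAll('{{slot}}', ['happy', 'sad', 'agree'].join(', '))
// injected === 'List of commands: happy, sad, agree'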
@@ -498,10 +514,15 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
}
let inlays:string[] = []
if(db.inlayImage){
const inlayMatch = formedChat.match(/{{inlay::(.+?)}}/g)
if(inlayMatch){
for(const inlay of inlayMatch){
inlays.push(inlay)
if(msg.role === 'char'){
formedChat = formedChat.replace(/{{inlay::(.+?)}}/g, '')
}
else{
const inlayMatch = formedChat.match(/{{inlay::(.+?)}}/g)
if(inlayMatch){
for(const inlay of inlayMatch){
inlays.push(inlay)
}
}
}
}
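For reference, the {{inlay::id}} marker format this hunk works with, shown in isolation; whether markers are stripped or collected differs by message role in the code above and is not reproduced here.

const sample = 'Here is the photo {{inlay::3f2a}} and another {{inlay::9bc1}}'
const inlayIds = [...sample.matchAll(/{{inlay::(.+?)}}/g)].map((m) => m[1]) // ['3f2a', '9bc1']
const withoutMarkers = sample.replace(/{{inlay::(.+?)}}/g, '') // text with inline-image markers removed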
@@ -653,7 +674,6 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
})
}


function pushPrompts(cha:OpenAIChat[]){
for(const chat of cha){
if(!chat.content){
@@ -913,9 +933,17 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n

currentChat = db.characters[selectedChar].chats[selectedChat]
const triggerResult = await runTrigger(currentChar, 'output', {chat:currentChat})
console.log(triggerResult)
if(triggerResult && triggerResult.chat){
db.characters[selectedChar].chats[selectedChat] = triggerResult.chat
currentChat = triggerResult.chat
}
const inlayr = runInlayScreen(currentChar, currentChat.message[msgIndex].data)
currentChat.message[msgIndex].data = inlayr.text
db.characters[selectedChar].chats[selectedChat] = currentChat
setDatabase(db)
if(inlayr.promise){
const t = await inlayr.promise
currentChat.message[msgIndex].data = t
db.characters[selectedChar].chats[selectedChat] = currentChat
setDatabase(db)
}
await sayTTS(currentChar, result)
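The pattern used here and in the hunks below, sketched on its own: runInlayScreen returns text that can be shown immediately (emotion commands rewritten, image commands replaced by a placeholder) plus an optional promise that resolves to the final text once generation finishes, so the caller writes to the database twice. Names other than runInlayScreen are illustrative.

// assumes: import { runInlayScreen } from "./inlayScreen" and the character type from "../storage/database"
async function applyInlayToMessage(char: character, msg: { data: string }, save: () => void) {
    const r = runInlayScreen(char, msg.data)
    msg.data = r.text // placeholder text, e.g. '[Generating...]'
    save()
    if (r.promise) {
        msg.data = await r.promise // final text with generated inlay markers
        save()
    }
}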
@@ -938,6 +966,8 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
result2 = await processScriptFull(nowChatroom, reformatContent(beforeChat.data + mess), 'editoutput', msgIndex)
}
result = result2.data
const inlayResult = runInlayScreen(currentChar, result)
result = inlayResult.text
emoChanged = result2.emoChanged
if(i === 0 && arg.continue){
db.characters[selectedChar].chats[selectedChat].message[msgIndex] = {
@@ -949,6 +979,10 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
model: generationModel,
generationId: generationId,
}
}
if(inlayResult.promise){
const p = await inlayResult.promise
db.characters[selectedChar].chats[selectedChat].message[msgIndex].data = p
}
}
else{
@@ -962,6 +996,11 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
generationId: generationId,
}
})
const ind = db.characters[selectedChar].chats[selectedChat].message.length - 1
if(inlayResult.promise){
const p = await inlayResult.promise
db.characters[selectedChar].chats[selectedChat].message[ind].data = p
}
}
db.characters[selectedChar].reloadKeys += 1
await sayTTS(currentChar, result)
@@ -1005,154 +1044,144 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
}
}

if(currentChar.viewScreen === 'emotion' && (!emoChanged) && (abortSignal.aborted === false)){
if(!currentChar.inlayViewScreen){
if(currentChar.viewScreen === 'emotion' && (!emoChanged) && (abortSignal.aborted === false)){

let currentEmotion = currentChar.emotionImages
let emotionList = currentEmotion.map((a) => {
return a[0]
})
let charemotions = get(CharEmotion)

let tempEmotion = charemotions[currentChar.chaId]
if(!tempEmotion){
tempEmotion = []
}
if(tempEmotion.length > 4){
tempEmotion.splice(0, 1)
}

if(db.emotionProcesser === 'embedding'){
const hypaProcesser = new HypaProcesser('MiniLM')
await hypaProcesser.addText(emotionList.map((v) => 'emotion:' + v))
let searched = (await hypaProcesser.similaritySearchScored(result)).map((v) => {
v[0] = v[0].replace("emotion:",'')
return v
})

//give penalties
for(let i =0;i<tempEmotion.length;i++){
const emo = tempEmotion[i]
//give penalty index
const index = searched.findIndex((v) => {
return v[0] === emo[0]
})

const modifier = ((5 - ((tempEmotion.length - (i + 1))))) / 200

if(index !== -1){
searched[index][1] -= modifier
}
}

//make a sorted array by score
const emoresult = searched.sort((a,b) => {
return b[1] - a[1]
}).map((v) => {
return v[0]
})

console.log(searched)

for(const emo of currentEmotion){
if(emo[0] === emoresult[0]){
const emos:[string, string,number] = [emo[0], emo[1], Date.now()]
tempEmotion.push(emos)
charemotions[currentChar.chaId] = tempEmotion
CharEmotion.set(charemotions)
break
}
}


return true
}

function shuffleArray(array:string[]) {
for (let i = array.length - 1; i > 0; i--) {
const j = Math.floor(Math.random() * (i + 1));
[array[i], array[j]] = [array[j], array[i]];
}
return array
}

let emobias:{[key:number]:number} = {}

for(const emo of emotionList){
const tokens = await tokenizeNum(emo)
for(const token of tokens){
emobias[token] = 10
}
}

for(let i =0;i<tempEmotion.length;i++){
const emo = tempEmotion[i]

const tokens = await tokenizeNum(emo[0])
const modifier = 20 - ((tempEmotion.length - (i + 1)) * (20/4))

for(const token of tokens){
emobias[token] -= modifier
if(emobias[token] < -100){
emobias[token] = -100
}
}
}

const promptbody:OpenAIChat[] = [
{
role:'system',
content: `${db.emotionPrompt2 || "From the list below, choose a word that best represents a character's outfit description, action, or emotion in their dialogue. Prioritize selecting words related to outfit first, then action, and lastly emotion. Print out the chosen word."}\n\n list: ${shuffleArray(emotionList).join(', ')} \noutput only one word.`
},
{
role: 'user',
content: `"Good morning, Master! Is there anything I can do for you today?"`
},
{
role: 'assistant',
content: 'happy'
},
{
role: 'user',
content: result
},
]

const rq = await requestChatData({
formated: promptbody,
bias: emobias,
currentChar: currentChar,
temperature: 0.4,
maxTokens: 30,
}, 'submodel', abortSignal)

if(rq.type === 'fail' || rq.type === 'streaming' || rq.type === 'multiline'){
if(abortSignal.aborted){
return true
}
alertError(`${rq.result}`)
return true
}

else{
emotionList = currentEmotion.map((a) => {
let currentEmotion = currentChar.emotionImages
let emotionList = currentEmotion.map((a) => {
return a[0]
})
try {
const emotion:string = rq.result.replace(/ |\n/g,'').trim().toLocaleLowerCase()
let emotionSelected = false
let charemotions = get(CharEmotion)

let tempEmotion = charemotions[currentChar.chaId]
if(!tempEmotion){
tempEmotion = []
}
if(tempEmotion.length > 4){
tempEmotion.splice(0, 1)
}

if(db.emotionProcesser === 'embedding'){
const hypaProcesser = new HypaProcesser('MiniLM')
await hypaProcesser.addText(emotionList.map((v) => 'emotion:' + v))
let searched = (await hypaProcesser.similaritySearchScored(result)).map((v) => {
v[0] = v[0].replace("emotion:",'')
return v
})

//give penalties
for(let i =0;i<tempEmotion.length;i++){
const emo = tempEmotion[i]
//give penalty index
const index = searched.findIndex((v) => {
return v[0] === emo[0]
})

const modifier = ((5 - ((tempEmotion.length - (i + 1))))) / 200

if(index !== -1){
searched[index][1] -= modifier
}
}

//make a sorted array by score
const emoresult = searched.sort((a,b) => {
return b[1] - a[1]
}).map((v) => {
return v[0]
})

console.log(searched)

for(const emo of currentEmotion){
if(emo[0] === emotion){
if(emo[0] === emoresult[0]){
const emos:[string, string,number] = [emo[0], emo[1], Date.now()]
tempEmotion.push(emos)
charemotions[currentChar.chaId] = tempEmotion
CharEmotion.set(charemotions)
emotionSelected = true
break
}
}
if(!emotionSelected){


return true

}

function shuffleArray(array:string[]) {
for (let i = array.length - 1; i > 0; i--) {
const j = Math.floor(Math.random() * (i + 1));
[array[i], array[j]] = [array[j], array[i]];
}
return array
}

let emobias:{[key:number]:number} = {}

for(const emo of emotionList){
const tokens = await tokenizeNum(emo)
for(const token of tokens){
emobias[token] = 10
}
}

for(let i =0;i<tempEmotion.length;i++){
const emo = tempEmotion[i]

const tokens = await tokenizeNum(emo[0])
const modifier = 20 - ((tempEmotion.length - (i + 1)) * (20/4))

for(const token of tokens){
emobias[token] -= modifier
if(emobias[token] < -100){
emobias[token] = -100
}
}
}

const promptbody:OpenAIChat[] = [
{
role:'system',
content: `${db.emotionPrompt2 || "From the list below, choose a word that best represents a character's outfit description, action, or emotion in their dialogue. Prioritize selecting words related to outfit first, then action, and lastly emotion. Print out the chosen word."}\n\n list: ${shuffleArray(emotionList).join(', ')} \noutput only one word.`
},
{
role: 'user',
content: `"Good morning, Master! Is there anything I can do for you today?"`
},
{
role: 'assistant',
content: 'happy'
},
{
role: 'user',
content: result
},
]

const rq = await requestChatData({
formated: promptbody,
bias: emobias,
currentChar: currentChar,
temperature: 0.4,
maxTokens: 30,
}, 'submodel', abortSignal)

if(rq.type === 'fail' || rq.type === 'streaming' || rq.type === 'multiline'){
if(abortSignal.aborted){
return true
}
alertError(`${rq.result}`)
return true
}
else{
emotionList = currentEmotion.map((a) => {
return a[0]
})
try {
const emotion:string = rq.result.replace(/ |\n/g,'').trim().toLocaleLowerCase()
let emotionSelected = false
for(const emo of currentEmotion){
if(emotion.includes(emo[0])){
if(emo[0] === emotion){
const emos:[string, string,number] = [emo[0], emo[1], Date.now()]
tempEmotion.push(emos)
charemotions[currentChar.chaId] = tempEmotion
@@ -1161,47 +1190,55 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
break
}
}
if(!emotionSelected){
for(const emo of currentEmotion){
if(emotion.includes(emo[0])){
const emos:[string, string,number] = [emo[0], emo[1], Date.now()]
tempEmotion.push(emos)
charemotions[currentChar.chaId] = tempEmotion
CharEmotion.set(charemotions)
emotionSelected = true
break
}
}
}
if(!emotionSelected && emotionList.includes('neutral')){
const emo = currentEmotion[emotionList.indexOf('neutral')]
const emos:[string, string,number] = [emo[0], emo[1], Date.now()]
tempEmotion.push(emos)
charemotions[currentChar.chaId] = tempEmotion
CharEmotion.set(charemotions)
emotionSelected = true
}
} catch (error) {
alertError(language.errors.httpError + `${error}`)
return true
}
if(!emotionSelected && emotionList.includes('neutral')){
const emo = currentEmotion[emotionList.indexOf('neutral')]
const emos:[string, string,number] = [emo[0], emo[1], Date.now()]
tempEmotion.push(emos)
charemotions[currentChar.chaId] = tempEmotion
CharEmotion.set(charemotions)
emotionSelected = true
}

return true


}
else if(currentChar.viewScreen === 'imggen'){
if(chatProcessIndex !== -1){
alertError("Stable diffusion in group chat is not supported")
}

const msgs = db.characters[selectedChar].chats[selectedChat].message
let msgStr = ''
for(let i = (msgs.length - 1);i>=0;i--){
if(msgs[i].role === 'char'){
msgStr = `character: ${msgs[i].data.replace(/\n/, ' ')} \n` + msgStr
}
else{
msgStr = `user: ${msgs[i].data.replace(/\n/, ' ')} \n` + msgStr
break
}
} catch (error) {
alertError(language.errors.httpError + `${error}`)
return true
}
}

return true


}
else if(currentChar.viewScreen === 'imggen'){
if(chatProcessIndex !== -1){
alertError("Stable diffusion in group chat is not supported")
}

const msgs = db.characters[selectedChar].chats[selectedChat].message
let msgStr = ''
for(let i = (msgs.length - 1);i>=0;i--){
if(msgs[i].role === 'char'){
msgStr = `character: ${msgs[i].data.replace(/\n/, ' ')} \n` + msgStr
}
else{
msgStr = `user: ${msgs[i].data.replace(/\n/, ' ')} \n` + msgStr
break
}
}


const ch = await stableDiff(currentChar, msgStr)
if(ch){
db.characters[selectedChar].chats[selectedChat].sdData = ch
setDatabase(db)
await stableDiff(currentChar, msgStr)
}
}

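Net effect of the block above, as read from the diff: the legacy viewScreen post-processing (submodel or embedding emotion selection, or stableDiff for imggen) now only runs when inlayViewScreen is off; with inlay mode on, the instructions injected earlier make the model emit <Emotion="..."> or <ImgGen="..."> inline, which runInlayScreen has already resolved. A condensed sketch, not the literal control flow:

if (!currentChar.inlayViewScreen) {
    // classic path: classify the reply into an emotion image, or run stableDiff on the last messages
} else {
    // inlay path: the reply already carries <Emotion="..."> / <ImgGen="..."> commands,
    // handled earlier by runInlayScreen(currentChar, result)
}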
src/ts/process/inlayScreen.ts (new file, 92 lines)
@@ -0,0 +1,92 @@
import { writeInlayImage } from "../image";
import type { character } from "../storage/database";
import { generateAIImage } from "./stableDiff";

export function runInlayScreen(char:character, data:string):{text:string, promise?:Promise<string>} {
if(char.inlayViewScreen){
if(char.viewScreen === 'emotion'){
return {text: data.replace(/<Emotion="(.+?)">/g, '{{emotion::$1}}')}
}
if(char.viewScreen === 'imggen'){
return {
text: data.replace(/<ImgGen="(.+?)">/g,'[Generating...]'),
promise : (async () => {
const promises:Promise<string|false>[] = [];
const neg = char.newGenData.negative
data.replace(/<ImgGen="(.+?)">/g, (match, p1) => {
const prompt = char.newGenData.prompt.replaceAll('{{slot}}', p1)
promises.push((async () => {
const v = await generateAIImage(prompt, char, neg, 'inlay')
if(!v){
return ''
}
const imgHTML = new Image()
imgHTML.src = v
const inlay = await writeInlayImage(imgHTML)
return inlay
})())
return match
})
const d = await Promise.all(promises)
return data.replace(/<ImgGen="(.+?)">/g, () => {
const result = d.shift()
if(result === false){
return ''
}
return result
})
})()
}
}

}

return {text: data}
}

export function updateInlayScreen(char:character):character {
switch(char.viewScreen){
case 'emotion':
if(char.inlayViewScreen){
char.newGenData = {
prompt: '',
negative: '',
instructions: '',
emotionInstructions: `You must always output the character's emotional image as a command at the end of a conversation. The command must be selected from a given list, and it's better to have variety than to repeat images used in previous chats. Use one image, depending on the character's emotion. See the list below. Form: <Emotion="<image command>"> Example: <Emotion="Agree"> List of commands: {{slot}}`,
}
return char
}
char.newGenData = {
prompt: '',
negative: '',
instructions: '',
emotionInstructions: `You must always output the character's emotional image as a command. The command must be selected from a given list, only output the command, depending on the character's emotion. List of commands: {{slot}}`
}
return char
case 'imggen':
if(char.inlayViewScreen){
char.newGenData = {
prompt: 'best quality, {{slot}}',
negative: 'worse quality',
instructions: 'You must always output the character\'s image as a keyword-formatted prompts that can be used in stable diffusion at the end of a conversation. Use one image, depending on character, place, situation, etc. keyword should be long enough. Form: <ImgGen="<keyword-formatted prompt>">',
emotionInstructions: ''
}
return char
}
char.newGenData = {
prompt: 'best quality, {{slot}}',
negative: 'worse quality',
instructions: 'You must always output the character\'s image as a keyword-formatted prompts that can be used in stable diffusion. only output the that prompt, depending on character, place, situation, etc. keyword should be long enough.',
emotionInstructions: ''
}
return char
default:
char.newGenData = {
prompt: '',
negative: '',
instructions: '',
emotionInstructions: ''
}
return char
}
}
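A hedged usage sketch of the new module: updateInlayScreen seeds char.newGenData with defaults for the selected viewScreen, and runInlayScreen post-processes a reply (emotion commands become {{emotion::...}} markers immediately; image commands are resolved asynchronously). The import paths and sample reply are assumptions.

import { runInlayScreen, updateInlayScreen } from "./process/inlayScreen" // path assumed
import type { character } from "./storage/database" // path assumed

async function handleReply(char: character, reply: string): Promise<string> {
    const prepared = updateInlayScreen(char) // fills newGenData for the current viewScreen
    const r = runInlayScreen(prepared, reply)
    // emotion mode: '<Emotion="happy">' in the reply becomes '{{emotion::happy}}' in r.text
    if (r.promise) {
        return await r.promise // imggen mode: '[Generating...]' placeholders replaced by inlay markers
    }
    return r.text
}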
@@ -7,7 +7,6 @@ import { CharEmotion } from "../stores"
import type { OpenAIChat } from "."
import { processZip } from "./processzip"
export async function stableDiff(currentChar:character,prompt:string){
const mainPrompt = "assistant is a chat analyzer.\nuser will input a data of situation with key and values before chat, and a chat of a user and character.\nView the status of the chat and change the data.\nif data's key starts with $, it must change it every time.\nif data value is none, it must change it."
let db = get(DataBase)

if(db.sdProvider === ''){
@@ -15,53 +14,14 @@ export async function stableDiff(currentChar:character,prompt:string){
return false
}

let proompt = 'Data:'

let currentSd:[string,string][] = []

const sdData = currentChar.chats[currentChar.chatPage].sdData
if(sdData){
const das = sdData.split('\n')
for(const data of das){
const splited = data.split(':::')
currentSd.push([splited[0].trim(), splited[1].trim()])
}
}
else{
currentSd = JSON.parse(JSON.stringify(currentChar.sdData))
}

for(const d of currentSd){
let val = d[1].trim()
if(val === ''){
val = 'none'
}

if(!d[0].startsWith('|') || d[0] === 'negative' || d[0] === 'always'){
proompt += `\n${d[0].trim()}: ${val}`
}
}

proompt += `\n\nChat:\n${prompt}`
const proompt = `Chat:\n${prompt}`

const promptbody:OpenAIChat[] = [
{

role:'system',
content: mainPrompt
},
{
role: 'user',
content: `Data:\ncharacter's appearance: red hair, cute, black eyes\ncurrent situation: none\n$character's pose: none\n$character's emotion: none\n\nChat:\nuser: *eats breakfeast* \n I'm ready.\ncharacter: Lemon waits patiently outside your room while you get ready. Once you are dressed and have finished your breakfast, she escorts you to the door.\n"Have a good day at school, Master. Don't forget to study hard and make the most of your time there," Lemon reminds you with a smile as she sees you off.`
},
{
role: 'assistant',
content: "character's appearance: red hair, cute, black eyes\ncurrent situation: waking up in the morning\n$character's pose: standing\n$character's emotion: apologetic"
},
{

role:'system',
content: mainPrompt
content: currentChar.newGenData.instructions
},
{
role: 'user',
@@ -69,7 +29,6 @@ export async function stableDiff(currentChar:character,prompt:string){
},
]

console.log(proompt)
const rq = await requestChatData({
formated: promptbody,
currentChar: currentChar,
@@ -83,38 +42,20 @@ export async function stableDiff(currentChar:character,prompt:string){
alertError(`${rq.result}`)
return false
}
else{
const res = rq.result
const das = res.split('\n')
for(const data of das){
const splited = data.split(':')
if(splited.length === 2){
for(let i=0;i<currentSd.length;i++){
if(currentSd[i][0].trim() === splited[0]){
currentSd[i][1] = splited[1].trim()
}
}
}
}
}

let returnSdData = currentSd.map((val) => {
return val.join(':::')
}).join('\n')
const r = rq.result


const genPrompt = currentChar.newGenData.prompt.replaceAll('{{slot}}', r)
const neg = currentChar.newGenData.negative

return await generateAIImage(genPrompt, currentChar, neg, '')
}

export async function generateAIImage(genPrompt:string, currentChar:character, neg:string, returnSdData:string){
const db = get(DataBase)
if(db.sdProvider === 'webui'){

let prompts:string[] = []
let neg = ''
for(let i=0;i<currentSd.length;i++){
if(currentSd[i][0] !== 'negative'){
prompts.push(currentSd[i][1])
}
else{
neg = currentSd[i][1]
}
}


const uri = new URL(db.webUiUrl)
uri.pathname = '/sdapi/v1/txt2img'
@@ -126,7 +67,7 @@ export async function stableDiff(currentChar:character,prompt:string){
"seed": -1,
"steps": db.sdSteps,
"cfg_scale": db.sdCFG,
"prompt": prompts.join(','),
"prompt": genPrompt,
"negative_prompt": neg,
"sampler_name": db.sdConfig.sampler_name,
"enable_hr": db.sdConfig.enable_hr,
@@ -139,6 +80,16 @@ export async function stableDiff(currentChar:character,prompt:string){
}
})

if(returnSdData === 'inlay'){
if(da.ok){
return `data:image/png;base64,${da.data.images[0]}`
}
else{
alertError(JSON.stringify(da.data))
return ''
}
}

if(da.ok){
let charemotions = get(CharEmotion)
const img = `data:image/png;base64,${da.data.images[0]}`
@@ -162,19 +113,6 @@ export async function stableDiff(currentChar:character,prompt:string){
}
if(db.sdProvider === 'novelai'){

let prompts:string[] = []
let neg = ''
for(let i=0;i<currentSd.length;i++){
if(currentSd[i][0] !== 'negative'){
prompts.push(currentSd[i][1])
}
else{
neg = currentSd[i][1]
}
}



let reqlist= {}

if(db.NAII2I){
@@ -192,7 +130,7 @@ export async function stableDiff(currentChar:character,prompt:string){
reqlist = {
body: {
"action": "img2img",
"input": prompts.join(','),
"input": genPrompt,
"model": db.NAIImgModel,
"parameters": {
"seed": seed,
@@ -226,7 +164,7 @@ export async function stableDiff(currentChar:character,prompt:string){
}else{
reqlist = {
body: {
"input": prompts.join(','),
"input": genPrompt,
"model": db.NAIImgModel,
"parameters": {
"width": db.NAIImgConfig.width,
@@ -249,6 +187,17 @@ export async function stableDiff(currentChar:character,prompt:string){
try {
const da = await globalFetch(db.NAIImgUrl, reqlist)

if(returnSdData === 'inlay'){
if(da.ok){
const img = await processZip(da.data);
return img
}
else{
alertError(Buffer.from(da.data).toString())
return ''
}
}

if(da.ok){
let charemotions = get(CharEmotion)
const img = await processZip(da.data);

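The extracted generateAIImage keeps the existing provider plumbing but gains an 'inlay' mode: when the last argument is 'inlay' it returns the raw image (a data URL for WebUI, the unzipped NovelAI result) instead of pushing it into CharEmotion. A small sketch of the inlay call path, mirroring runInlayScreen above; the wrapper name is hypothetical.

// Assumes generateAIImage(prompt, char, negative, mode) as defined above and writeInlayImage from ../image.
async function generateInlayImage(char: character, keywords: string): Promise<string> {
    const prompt = char.newGenData.prompt.replaceAll('{{slot}}', keywords)
    const dataUrl = await generateAIImage(prompt, char, char.newGenData.negative, 'inlay')
    if (!dataUrl) {
        return ''
    }
    const el = new Image()
    el.src = dataUrl
    return await writeInlayImage(el) // store as an inlay and return its marker (assumed)
}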
@@ -588,6 +588,12 @@ export interface character{
globalLore: loreBook[]
chaId: string
sdData: [string, string][]
newGenData?: {
prompt: string,
negative: string,
instructions: string,
emotionInstructions: string,
}
customscript: customscript[]
triggerscript: triggerscript[]
utilityBot: boolean
@@ -641,6 +647,7 @@ export interface character{
extentions?:{[key:string]:any}
largePortrait?:boolean
lorePlus?:boolean
inlayViewScreen?:boolean
}


@@ -49,6 +49,7 @@ function createSimpleCharacter(char:character|groupChat){
chaId: char.chaId,
additionalAssets: char.additionalAssets,
virtualscript: char.virtualscript,
emotionImages: char.emotionImages,
}

return simpleChar
@@ -71,7 +72,9 @@ function updateCurrentCharacter(){
if(isEqual(gotCharacter, currentChar)){
return
}
ShowVN.set(currentChar?.viewScreen === 'vn')
if((currentChar?.viewScreen === 'vn') !== get(ShowVN)){
ShowVN.set(currentChar?.viewScreen === 'vn')
}

console.log("Character updated")
CurrentCharacter.set(cloneDeep(currentChar))