Add chat process indication mark

Author: kwaroran
Date: 2024-04-11 04:43:56 +09:00
Parent: 746d226723
Commit: 3752b4e550
2 changed files with 33 additions and 3 deletions
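The commit adds a chatProcessStage store to the chat process module and updates it as sendChat moves through its phases; the chat screen component appends a matching chat-process-stage-N class to the loading spinner so its color reflects the current phase. The hunks below are reproduced without +/- markers: where a single line was modified, the pre-change version is printed directly above its replacement (the import line and the spinner div in the first file). As a reading aid, here is a sketch, not part of the commit, of what each stage value appears to mean, inferred from where the chatProcessStage.set() calls sit; the labels are editorial guesses.

// Editorial sketch (TypeScript): inferred meaning of each chatProcessStage value.
// The numbers come from the diff below; the labels are descriptive guesses, not from the source.
const stageLabels: Record<number, string> = {
    0: "chat send started",                // set at the top of sendChat()
    1: "building the prompt",              // set before the prompt parts are assembled, and restored after memory processing
    2: "running supaMemory / hypaMemory",  // set before the supaMemory() call
    3: "waiting on the model request",     // set before requestChatData()
    4: "post-processing the response",     // set before sendPeerChar() and the req.special handling
}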


@@ -6,7 +6,7 @@
import Chat from "./Chat.svelte";
import { DataBase, type Message, type character, type groupChat } from "../../ts/storage/database";
import { getCharImage } from "../../ts/characters";
import { doingChat, sendChat } from "../../ts/process/index";
import { chatProcessStage, doingChat, sendChat } from "../../ts/process/index";
import { findCharacterbyId, messageForm, sleep } from "../../ts/util";
import { language } from "../../lang";
import { isExpTranslator, translate } from "../../ts/translator/translator";
@@ -439,7 +439,7 @@
{#if $doingChat || doingChatInputTranslate}
<div
class="mr-2 bg-selected flex justify-center items-center text-gray-100 w-12 h-12 rounded-md hover:bg-green-500 transition-colors" on:click={abortChat}>
<div class="loadmove" class:autoload={autoMode}>
<div class="loadmove chat-process-stage-{$chatProcessStage}" class:autoload={autoMode}>
</div>
</div>
{:else}
@@ -731,6 +731,26 @@
border-left: 0.4rem solid var(--risu-theme-borderc);
}
.chat-process-stage-1{
border-top: 0.4rem solid #60a5fa;
border-left: 0.4rem solid #60a5fa;
}
.chat-process-stage-2{
border-top: 0.4rem solid #db2777;
border-left: 0.4rem solid #db2777;
}
.chat-process-stage-3{
border-top: 0.4rem solid #34d399;
border-left: 0.4rem solid #34d399;
}
.chat-process-stage-4{
border-top: 0.4rem solid #8b5cf6;
border-left: 0.4rem solid #8b5cf6;
}
.autoload{
border-top: 0.4rem solid #10b981;
border-left: 0.4rem solid #10b981;
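The rules above give the spinner a distinct border color per stage, while stage 0 keeps the default var(--risu-theme-borderc) border and the existing autoload modifier still overrides with #10b981. The second changed file is the chat process module itself (imported by the component above as ../../ts/process/index), where the store is declared and the stage transitions happen. Purely for quick reference, the stage-to-color assignments restated from the CSS (hex values copied from above; the color names are editorial):

// Stage-to-color mapping as added in the CSS above (names are editorial, not in the source).
const stageColors: Record<number, string> = {
    1: "#60a5fa", // light blue
    2: "#db2777", // pink
    3: "#34d399", // green
    4: "#8b5cf6", // violet
}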


@@ -52,11 +52,12 @@ export interface OpenAIChatFull extends OpenAIChat{
}
export const doingChat = writable(false)
export const chatProcessStage = writable(0)
export const abortChat = writable(false)
export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:number,signal?:AbortSignal,continue?:boolean} = {}):Promise<boolean> {
chatProcessStage.set(0)
const abortSignal = arg.signal ?? (new AbortController()).signal
let isAborted = false
@@ -185,6 +186,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
}
chatProcessStage.set(1)
let unformated = {
'main':([] as OpenAIChat[]),
'jailbreak':([] as OpenAIChat[]),
@@ -641,6 +643,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
}
if(nowChatroom.supaMemory && db.supaMemoryType !== 'none'){
chatProcessStage.set(2)
const sp = await supaMemory(chats, currentTokens, maxContextTokens, currentChat, nowChatroom, tokenizer, {
asHyper: db.supaMemoryType !== 'subModel' && db.hypaMemory
})
@@ -651,7 +654,11 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
chats = sp.chats
currentTokens = sp.currentTokens
currentChat.supaMemoryData = sp.memory ?? currentChat.supaMemoryData
db.characters[selectedChar].chats[selectedChat].supaMemoryData = currentChat.supaMemoryData
console.log(currentChat.supaMemoryData)
DataBase.set(db)
currentChat.lastMemory = sp.lastId ?? currentChat.lastMemory
chatProcessStage.set(1)
}
else{
while(currentTokens > maxContextTokens){
@@ -961,6 +968,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
outputTokens: outputTokens,
maxContext: maxContextTokens,
}
chatProcessStage.set(3)
const req = await requestChatData({
formated: formated,
biasString: biases,
@@ -1120,6 +1128,8 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
}
}
chatProcessStage.set(4)
sendPeerChar()
if(req.special){
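Within this commit the only reader of the new store is the Svelte template in the first file, which picks it up through the $chatProcessStage auto-subscription. Any other code could observe the stage with the standard svelte/store API; a minimal sketch (the import path and the logging are illustrative, not part of the commit):

import { get } from "svelte/store"
// Illustrative import path; adjust relative to the calling module.
import { chatProcessStage } from "../../ts/process/index"

// One-off read of the current stage value.
console.log("current chat process stage:", get(chatProcessStage))

// Continuous observation; subscribe() returns an unsubscribe function.
const unsubscribe = chatProcessStage.subscribe((stage) => {
    console.log("chat process stage changed to", stage)
})
// Call when the observer is torn down.
unsubscribe()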