Add generation info viewer

This commit is contained in:
kwaroran
2024-03-19 14:50:48 +09:00
parent eddd7afa3c
commit 31856a9c48
8 changed files with 185 additions and 64 deletions

View File

@@ -28,6 +28,8 @@ export const languageEnglish = {
networkFetch: "This happens when the network is unstable or the server is down.",
networkFetchWeb: "This can be a CORS error. This only happens when using the web version due to limitations of the browser. Try using the desktop local version, or another version of RisuAI.",
networkFetchPlain: "This can be a plain fetch error. try disabling force plain fetch option in settings.",
requestLogRemoved: "This request log is removed.",
requestLogRemovedDesc: "This request log is removed when the client is refreshed or reloaded.",
},
showHelp: "Show Help",
help:{
@@ -541,4 +543,7 @@ export const languageEnglish = {
enable: "Enable",
postFile: "Post File",
requestInfoInsideChat: "Show Request Info Inside Chat",
inputTokens: "Input Tokens",
outputTokens: "Output Tokens",
tokenWarning: "Token calculation can be inaccurate. It is recommended to use it as a reference.",
}

View File

@@ -2,7 +2,7 @@
import { ArrowLeft, ArrowRight, PencilIcon, LanguagesIcon, RefreshCcwIcon, TrashIcon, CopyIcon, Volume2Icon, BotIcon } from "lucide-svelte";
import { ParseMarkdown, type simpleCharacterArgument } from "../../ts/parser";
import AutoresizeArea from "../UI/GUI/TextAreaResizable.svelte";
import { alertConfirm, alertError } from "../../ts/alert";
import { alertConfirm, alertError, alertRequestData } from "../../ts/alert";
import { language } from "../../lang";
import { DataBase, type MessageGenerationInfo } from "../../ts/storage/database";
import { CurrentCharacter, CurrentChat } from "../../ts/stores";
@@ -205,6 +205,9 @@
<div>
<button class="text-sm p-1 text-textcolor2 border-darkborderc float-end mr-2 my-2
hover:ring-borderc hover:ring rounded-md hover:text-textcolor transition-all flex justify-center items-center"
on:click={() => {
alertRequestData(MessageGenerationInfo)
}}
>
<BotIcon size={20} />
<span class="ml-1">

View File

@@ -1,6 +1,5 @@
<script>
import { onMount } from 'svelte';
import { alertStore } from "../../ts/alert";
<script lang="ts">
import { alertStore, alertGenerationInfoStore } from "../../ts/alert";
import { DataBase } from '../../ts/storage/database';
import { getCharImage } from '../../ts/characters';
import { ParseMarkdown } from '../../ts/parser';
@@ -15,11 +14,13 @@
import { CCLicenseData } from "src/ts/creation/license";
import OptionInput from "../UI/GUI/OptionInput.svelte";
import { language } from 'src/lang';
import { getFetchData } from 'src/ts/storage/globalApi';
let btn
let input = ''
let cardExportType = ''
let cardExportPassword = ''
let cardLicense = ''
let generationInfoMenuIndex = 0
$: (() => {
if(btn){
btn.focus()
@@ -29,6 +30,14 @@
}
})()
// Pretty-prints a JSON string with 2-space indentation.
// Returns the raw input unchanged when it is not valid JSON,
// so malformed request/response bodies are still displayed.
const beautifyJSON = (data:string) =>{
    let parsed: unknown
    try {
        parsed = JSON.parse(data)
    } catch (error) {
        return data
    }
    return JSON.stringify(parsed, null, 2)
}
</script>
<svelte:window on:message={async (e) => {
@@ -66,7 +75,7 @@
<div class="text-textcolor">You should accept RisuRealm's <a class="text-green-600 hover:text-green-500 transition-colors duration-200 cursor-pointer" on:click={() => {
openURL('https://sv.risuai.xyz/hub/tos')
}}>Terms of Service</a> to continue</div>
{:else if $alertStore.type !== 'select'}
{:else if $alertStore.type !== 'select' && $alertStore.type !== 'requestdata'}
<span class="text-gray-300">{$alertStore.msg}</span>
{#if $alertStore.submsg}
<span class="text-gray-500 text-sm">{$alertStore.submsg}</span>
@@ -162,6 +171,60 @@
{/if}
{/each}
</div>
{:else if $alertStore.type === 'requestdata'}
<div class="flex flex-wrap gap-2">
<Button selected={generationInfoMenuIndex === 0} size="sm" on:click={() => {generationInfoMenuIndex = 0}}>
Tokens
</Button>
<Button selected={generationInfoMenuIndex === 2} size="sm" on:click={() => {generationInfoMenuIndex = 2}}>
Log
</Button>
<button class="ml-auto" on:click={() => {
alertStore.set({
type: 'none',
msg: ''
})
}}>✖</button>
</div>
{#if generationInfoMenuIndex === 0}
<div class="mt-4 flex justify-center w-full">
<div class="w-32 h-32 border-darkborderc border-4 rounded-lg" style:background={
`linear-gradient(0deg,
rgb(59,130,246) 0%,
rgb(59,130,246) ${($alertGenerationInfoStore.inputTokens / $alertGenerationInfoStore.maxContext) * 100}%,
rgb(34 197 94) ${($alertGenerationInfoStore.inputTokens / $alertGenerationInfoStore.maxContext) * 100}%,
rgb(34 197 94) ${(($alertGenerationInfoStore.outputTokens + $alertGenerationInfoStore.inputTokens) / $alertGenerationInfoStore.maxContext) * 100}%,
rgb(156 163 175) ${(($alertGenerationInfoStore.outputTokens + $alertGenerationInfoStore.inputTokens) / $alertGenerationInfoStore.maxContext) * 100}%,
rgb(156 163 175) 100%)`
}>
</div>
</div>
<div class="grid grid-cols-2 gap-y-2 gap-x-4 mt-4">
<span class="text-blue-500">{language.inputTokens}</span>
<span class="text-blue-500 justify-self-end">{$alertGenerationInfoStore.inputTokens ?? '?'} {language.tokens}</span>
<span class="text-green-500">{language.outputTokens}</span>
<span class="text-green-500 justify-self-end">{$alertGenerationInfoStore.outputTokens ?? '?'} {language.tokens}</span>
<span class="text-gray-400">{language.maxContextSize}</span>
<span class="text-gray-400 justify-self-end">{$alertGenerationInfoStore.maxContext ?? '?'} {language.tokens}</span>
</div>
<span class="text-textcolor2 text-sm">{language.tokenWarning}</span>
{/if}
{#if generationInfoMenuIndex === 2}
{#await getFetchData($alertStore.msg) then data}
{#if !data}
<span class="text-gray-300 text-lg mt-2">{language.errors.requestLogRemoved}</span>
<span class="text-gray-500">{language.errors.requestLogRemovedDesc}</span>
{:else}
<h1 class="text-2xl font-bold my-4">URL</h1>
<code class="text-gray-300 border border-darkborderc p-2 rounded-md whitespace-pre-wrap">{data.url}</code>
<h1 class="text-2xl font-bold my-4">Request Body</h1>
<code class="text-gray-300 border border-darkborderc p-2 rounded-md whitespace-pre-wrap">{beautifyJSON(data.body)}</code>
<h1 class="text-2xl font-bold my-4">Response</h1>
<code class="text-gray-300 border border-darkborderc p-2 rounded-md whitespace-pre-wrap">{beautifyJSON(data.response)}</code>
{/if}
{/await}
{/if}
{/if}
</div>
</div>

View File

@@ -3,10 +3,10 @@ import { sleep } from "./util"
import { language } from "../lang"
import { isNodeServer, isTauri } from "./storage/globalApi"
import { Capacitor } from "@capacitor/core"
import { DataBase } from "./storage/database"
import { DataBase, type MessageGenerationInfo } from "./storage/database"
interface alertData{
type: 'error'| 'normal'|'none'|'ask'|'wait'|'selectChar'|'input'|'toast'|'wait2'|'markdown'|'select'|'login'|'tos'|'cardexport'
type: 'error'| 'normal'|'none'|'ask'|'wait'|'selectChar'|'input'|'toast'|'wait2'|'markdown'|'select'|'login'|'tos'|'cardexport'|'requestdata'
msg: string,
submsg?: string
}
@@ -16,6 +16,7 @@ export const alertStore = writable({
type: 'none',
msg: 'n',
} as alertData)
export const alertGenerationInfoStore = writable<MessageGenerationInfo>(null)
export function alertError(msg:string){
console.error(msg)
@@ -216,4 +217,12 @@ export async function alertInput(msg:string){
}
return get(alertStore).msg
}
// Opens the generation-info ('requestdata') alert for a message.
// The info object is published via alertGenerationInfoStore for the
// alert component; the generation id (or 'none' when absent) travels
// in msg so the Log tab can look up the matching fetch-log entry.
export function alertRequestData(info:MessageGenerationInfo){
    const logKey = info.generationId ?? 'none'
    alertGenerationInfoStore.set(info)
    alertStore.set({
        type: 'requestdata',
        msg: logKey
    })
}

View File

@@ -1,5 +1,5 @@
import { get, writable } from "svelte/store";
import { DataBase, setDatabase, type character } from "../storage/database";
import { DataBase, setDatabase, type character, type MessageGenerationInfo } from "../storage/database";
import { CharEmotion, selectedCharID } from "../stores";
import { ChatTokenizer, tokenize, tokenizeNum } from "../tokenizer";
import { language } from "../../lang";
@@ -907,32 +907,46 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
data: formated
})
{
//token rechecking
let tokens = 0
for(const chat of formated){
tokens += await tokenizer.tokenizeChat(chat)
}
//token rechecking
let inputTokens = 0
if(tokens > maxContextTokens){
let pointer = 0
while(tokens > maxContextTokens){
if(pointer >= formated.length){
alertError(language.errors.toomuchtoken + "\n\nAt token rechecking. Required Tokens: " + tokens)
return false
}
if(formated[pointer].removable){
tokens -= await tokenizer.tokenizeChat(formated[pointer])
formated[pointer].content = ''
}
pointer++
for(const chat of formated){
inputTokens += await tokenizer.tokenizeChat(chat)
}
if(inputTokens > maxContextTokens){
let pointer = 0
while(inputTokens > maxContextTokens){
if(pointer >= formated.length){
alertError(language.errors.toomuchtoken + "\n\nAt token rechecking. Required Tokens: " + inputTokens)
return false
}
formated = formated.filter((v) => {
return v.content !== ''
})
}
if(formated[pointer].removable){
inputTokens -= await tokenizer.tokenizeChat(formated[pointer])
formated[pointer].content = ''
}
pointer++
}
formated = formated.filter((v) => {
return v.content !== ''
})
}
//estimate tokens
let outputTokens = db.maxResponse
if(inputTokens + outputTokens > maxContextTokens){
outputTokens = maxContextTokens - inputTokens
}
const generationId = v4()
const generationModel = getGenerationModelString()
const generationInfo:MessageGenerationInfo = {
model: generationModel,
generationId: generationId,
inputTokens: inputTokens,
outputTokens: outputTokens,
maxContext: maxContextTokens,
}
const req = await requestChatData({
formated: formated,
biasString: biases,
@@ -941,6 +955,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
isGroupChat: nowChatroom.type === 'group',
bias: {},
continue: arg.continue,
chatId: generationId
}, 'model', abortSignal)
let result = ''
@@ -949,8 +964,6 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
if(abortSignal.aborted === true){
return false
}
const generationId = v4()
const generationModel = getGenerationModelString()
if(req.type === 'fail'){
alertError(req.result)
return false
@@ -969,10 +982,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
data: "",
saying: currentChar.chaId,
time: Date.now(),
generationInfo: {
model: generationModel,
generationId: generationId,
}
generationInfo,
})
}
db.characters[selectedChar].chats[selectedChat].isStreaming = true
@@ -1051,10 +1061,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
data: result,
saying: currentChar.chaId,
time: Date.now(),
generationInfo: {
model: generationModel,
generationId: generationId,
}
generationInfo
}
if(inlayResult.promise){
const p = await inlayResult.promise
@@ -1067,10 +1074,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
data: result,
saying: currentChar.chaId,
time: Date.now(),
generationInfo: {
model: generationModel,
generationId: generationId,
}
generationInfo
})
const ind = db.characters[selectedChar].chats[selectedChat].message.length - 1
if(inlayResult.promise){

View File

@@ -39,6 +39,7 @@ interface requestDataArgument{
isGroupChat?:boolean
useEmotion?:boolean
continue?:boolean
chatId?:string
}
type requestDataResponse = {
@@ -360,6 +361,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
"Authorization": "Bearer " + db.mistralKey,
},
abortSignal,
chatId: arg.chatId
})
const dat = res.data as any
@@ -526,7 +528,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
body: JSON.stringify(body),
method: "POST",
headers: headers,
signal: abortSignal
signal: abortSignal,
chatId: arg.chatId
})
if(da.status !== 200){
@@ -607,7 +610,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
body: body,
headers: headers,
abortSignal,
useRisuToken:throughProxi
useRisuToken:throughProxi,
chatId: arg.chatId
})
const dat = res.data as any
@@ -727,7 +731,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
headers: {
"Authorization": "Bearer " + db.novelai.token
},
abortSignal
abortSignal,
chatId: arg.chatId
})
if((!da.ok )|| (!da.data.output)){
@@ -775,8 +780,9 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
},
headers: {
"Content-Type": "application/json",
"Authorization": "Bearer " + db.openAIKey
"Authorization": "Bearer " + db.openAIKey,
},
chatId: arg.chatId
});
if(!response.ok){
@@ -883,7 +889,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
const res = await globalFetch(blockingUrl, {
body: bodyTemplate,
headers: headers,
abortSignal
abortSignal,
chatId: arg.chatId
})
const dat = res.data as any
@@ -947,6 +954,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
const response = await globalFetch(urlStr, {
body: bodyTemplate,
chatId: arg.chatId
})
if(!response.ok){
@@ -1024,7 +1032,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
"Content-Type": "application/json",
"Authorization": "Bearer " + db.google.accessToken
},
abortSignal
abortSignal,
chatId: arg.chatId
})
if(res.ok){
console.log(res.data)
@@ -1215,6 +1224,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
const res = await globalFetch(url, {
headers: headers,
body: body,
chatId: arg.chatId
})
if(!res.ok){
@@ -1276,7 +1286,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
headers: {
"content-type": "application/json",
},
abortSignal
abortSignal,
chatId: arg.chatId
})
if(!da.ok){
@@ -1329,7 +1340,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
const response = await globalFetch(api_server_url + '/api', {
method: 'POST',
headers: headers,
body: send_body
body: send_body,
chatId: arg.chatId
});
if(!response.ok){
@@ -1645,7 +1657,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
method: "POST",
body: params,
headers: signed.headers,
plainFetchForce: true
plainFetchForce: true,
chatId: arg.chatId
})
if(!res.ok){
@@ -1677,7 +1690,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
"anthropic-version": "2023-06-01",
"accept": "application/json",
},
method: "POST"
method: "POST",
chatId: arg.chatId
})
if(res.status !== 200){
@@ -1744,7 +1758,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
"anthropic-version": "2023-06-01",
"accept": "application/json"
},
method: "POST"
method: "POST",
chatId: arg.chatId
})
if(!res.ok){
@@ -1906,7 +1921,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
method: "POST",
body: params,
headers: signed.headers,
plainFetchForce: true
plainFetchForce: true,
chatId: arg.chatId
})
@@ -1940,7 +1956,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
"anthropic-version": "2023-06-01",
"accept": "application/json"
},
useRisuToken: aiModel === 'reverse_proxy'
useRisuToken: aiModel === 'reverse_proxy',
chatId: arg.chatId
})
if((!da.ok) || (da.data.error)){

View File

@@ -870,6 +870,9 @@ export interface Message{
export interface MessageGenerationInfo{
    // Display string of the model that produced the message.
    model?: string
    // Unique id linking this message to its in-memory fetch-log entry.
    generationId?: string
    // Estimated prompt token count — NOTE(review): UI warns this may be inaccurate.
    inputTokens?: number
    // Estimated response token budget at generation time.
    outputTokens?: number
    // Maximum context size (in tokens) used for this generation.
    maxContext?: number
}
interface AINsettings{

View File

@@ -44,6 +44,7 @@ interface fetchLog{
date:string
url:string
responseType?:string
chatId?:string
}
let fetchLog:fetchLog[] = []
@@ -494,7 +495,8 @@ export function addFetchLog(arg:{
response:any,
success:boolean,
url:string,
resType?:string
resType?:string,
chatId?:string
}){
fetchLog.unshift({
body: typeof(arg.body) === 'string' ? arg.body : JSON.stringify(arg.body, null, 2),
@@ -503,11 +505,21 @@ export function addFetchLog(arg:{
responseType: arg.resType ?? 'json',
success: arg.success,
date: (new Date()).toLocaleTimeString(),
url: arg.url
url: arg.url,
chatId: arg.chatId
})
return fetchLog.length - 1
}
/**
 * Looks up the in-memory fetch-log entry recorded for the given chat id.
 *
 * @param id - generation/chat id stored on the log entry (`chatId`)
 * @returns the matching log entry, or null when none exists — e.g. after
 *          a page reload, since the fetch log is not persisted.
 */
export async function getFetchData(id:string) {
    // Array.prototype.find replaces the hand-rolled linear scan;
    // coerce undefined to null to preserve the original contract.
    return fetchLog.find((log) => log.chatId === id) ?? null
}
export async function globalFetch(url:string, arg:{
plainFetchForce?:boolean,
body?:any,
@@ -515,7 +527,8 @@ export async function globalFetch(url:string, arg:{
rawResponse?:boolean,
method?:"POST"|"GET",
abortSignal?:AbortSignal,
useRisuToken?:boolean
useRisuToken?:boolean,
chatId?:string
} = {}): Promise<{
ok: boolean;
data: any;
@@ -542,7 +555,8 @@ export async function globalFetch(url:string, arg:{
response: JSON.stringify(response, null, 2),
success: success,
date: (new Date()).toLocaleTimeString(),
url: url
url: url,
chatId: arg.chatId
})
}
catch{
@@ -552,7 +566,8 @@ export async function globalFetch(url:string, arg:{
response: `${response}`,
success: success,
date: (new Date()).toLocaleTimeString(),
url: url
url: url,
chatId: arg.chatId
})
}
}
@@ -1397,7 +1412,8 @@ export async function fetchNative(url:string, arg:{
headers?:{[key:string]:string},
method?:"POST",
signal?:AbortSignal,
useRisuTk?:boolean
useRisuTk?:boolean,
chatId?:string
}):Promise<{ body: ReadableStream<Uint8Array>; headers: Headers; status: number }> {
let headers = arg.headers ?? {}
const db = get(DataBase)
@@ -1408,7 +1424,8 @@ export async function fetchNative(url:string, arg:{
response: 'Streamed Fetch',
success: true,
url: url,
resType: 'stream'
resType: 'stream',
chatId: arg.chatId
})
if(isTauri || Capacitor.isNativePlatform()){
fetchIndex++