diff --git a/src/lang/en.ts b/src/lang/en.ts
index 0c8a930f..68de0f3d 100644
--- a/src/lang/en.ts
+++ b/src/lang/en.ts
@@ -28,6 +28,8 @@ export const languageEnglish = {
networkFetch: "This happens when the network is unstable or the server is down.",
networkFetchWeb: "This can be a CORS error. this only happens when using web version dude to limitations of the browser. try using desktop local version, or other version of RisuAI.",
networkFetchPlain: "This can be a plain fetch error. try disabling force plain fetch option in settings.",
+ requestLogRemoved: "This request log is removed.",
+            requestLogRemovedDesc: "This request log is removed when the client is refreshed or reloaded.",
},
showHelp: "Show Help",
help:{
@@ -541,4 +543,7 @@ export const languageEnglish = {
enable: "Enable",
postFile: "Post File",
requestInfoInsideChat: "Show Request Info Inside Chat",
+ inputTokens: "Input Tokens",
+ outputTokens: "Output Tokens",
+ tokenWarning: "Token caculation can be inaccurate. it is recommended to use it as a reference.",
}
\ No newline at end of file
diff --git a/src/lib/ChatScreens/Chat.svelte b/src/lib/ChatScreens/Chat.svelte
index ea79480d..028ea97d 100644
--- a/src/lib/ChatScreens/Chat.svelte
+++ b/src/lib/ChatScreens/Chat.svelte
@@ -2,7 +2,7 @@
import { ArrowLeft, ArrowRight, PencilIcon, LanguagesIcon, RefreshCcwIcon, TrashIcon, CopyIcon, Volume2Icon, BotIcon } from "lucide-svelte";
import { ParseMarkdown, type simpleCharacterArgument } from "../../ts/parser";
import AutoresizeArea from "../UI/GUI/TextAreaResizable.svelte";
- import { alertConfirm, alertError } from "../../ts/alert";
+ import { alertConfirm, alertError, alertRequestData } from "../../ts/alert";
import { language } from "../../lang";
import { DataBase, type MessageGenerationInfo } from "../../ts/storage/database";
import { CurrentCharacter, CurrentChat } from "../../ts/stores";
@@ -205,6 +205,9 @@
{
+ alertRequestData(MessageGenerationInfo)
+ }}
>
diff --git a/src/lib/Others/AlertComp.svelte b/src/lib/Others/AlertComp.svelte
index e69c1790..c00f37e6 100644
--- a/src/lib/Others/AlertComp.svelte
+++ b/src/lib/Others/AlertComp.svelte
@@ -1,6 +1,5 @@
-
{
@@ -66,7 +75,7 @@
- {:else if $alertStore.type !== 'select'}
+ {:else if $alertStore.type !== 'select' && $alertStore.type !== 'requestdata'}
{$alertStore.msg}
{#if $alertStore.submsg}
{$alertStore.submsg}
@@ -162,6 +171,60 @@
{/if}
{/each}
+ {:else if $alertStore.type === 'requestdata'}
+
+ {generationInfoMenuIndex = 0}}>
+ Tokens
+
+ {generationInfoMenuIndex = 2}}>
+ Log
+
+ {
+ alertStore.set({
+ type: 'none',
+ msg: ''
+ })
+ }}>✖
+
+ {#if generationInfoMenuIndex === 0}
+
+
+ {language.inputTokens}
+ {$alertGenerationInfoStore.inputTokens ?? '?'} {language.tokens}
+ {language.outputTokens}
+ {$alertGenerationInfoStore.outputTokens ?? '?'} {language.tokens}
+ {language.maxContextSize}
+ {$alertGenerationInfoStore.maxContext ?? '?'} {language.tokens}
+
+ {language.tokenWarning}
+ {/if}
+ {#if generationInfoMenuIndex === 2}
+ {#await getFetchData($alertStore.msg) then data}
+ {#if !data}
+ {language.errors.requestLogRemoved}
+ {language.errors.requestLogRemovedDesc}
+ {:else}
+ URL
+ {data.url}
+ Request Body
+ {beautifyJSON(data.body)}
+ Response
+ {beautifyJSON(data.response)}
+ {/if}
+ {/await}
+ {/if}
{/if}
diff --git a/src/ts/alert.ts b/src/ts/alert.ts
index 479f9468..84526c87 100644
--- a/src/ts/alert.ts
+++ b/src/ts/alert.ts
@@ -3,10 +3,10 @@ import { sleep } from "./util"
import { language } from "../lang"
import { isNodeServer, isTauri } from "./storage/globalApi"
import { Capacitor } from "@capacitor/core"
-import { DataBase } from "./storage/database"
+import { DataBase, type MessageGenerationInfo } from "./storage/database"
interface alertData{
- type: 'error'| 'normal'|'none'|'ask'|'wait'|'selectChar'|'input'|'toast'|'wait2'|'markdown'|'select'|'login'|'tos'|'cardexport'
+ type: 'error'| 'normal'|'none'|'ask'|'wait'|'selectChar'|'input'|'toast'|'wait2'|'markdown'|'select'|'login'|'tos'|'cardexport'|'requestdata'
msg: string,
submsg?: string
}
@@ -16,6 +16,7 @@ export const alertStore = writable({
type: 'none',
msg: 'n',
} as alertData)
+export const alertGenerationInfoStore = writable(null)
export function alertError(msg:string){
console.error(msg)
@@ -216,4 +217,12 @@ export async function alertInput(msg:string){
}
return get(alertStore).msg
+}
+
+export function alertRequestData(info:MessageGenerationInfo){
+ alertGenerationInfoStore.set(info)
+ alertStore.set({
+ 'type': 'requestdata',
+ 'msg': info.generationId ?? 'none'
+ })
}
\ No newline at end of file
diff --git a/src/ts/process/index.ts b/src/ts/process/index.ts
index 07d1cf89..9dd7ec31 100644
--- a/src/ts/process/index.ts
+++ b/src/ts/process/index.ts
@@ -1,5 +1,5 @@
import { get, writable } from "svelte/store";
-import { DataBase, setDatabase, type character } from "../storage/database";
+import { DataBase, setDatabase, type character, type MessageGenerationInfo } from "../storage/database";
import { CharEmotion, selectedCharID } from "../stores";
import { ChatTokenizer, tokenize, tokenizeNum } from "../tokenizer";
import { language } from "../../lang";
@@ -907,32 +907,46 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
data: formated
})
- {
- //token rechecking
- let tokens = 0
- for(const chat of formated){
- tokens += await tokenizer.tokenizeChat(chat)
- }
+ //token rechecking
+ let inputTokens = 0
- if(tokens > maxContextTokens){
- let pointer = 0
- while(tokens > maxContextTokens){
- if(pointer >= formated.length){
- alertError(language.errors.toomuchtoken + "\n\nAt token rechecking. Required Tokens: " + tokens)
- return false
- }
- if(formated[pointer].removable){
- tokens -= await tokenizer.tokenizeChat(formated[pointer])
- formated[pointer].content = ''
- }
- pointer++
+ for(const chat of formated){
+ inputTokens += await tokenizer.tokenizeChat(chat)
+ }
+
+ if(inputTokens > maxContextTokens){
+ let pointer = 0
+ while(inputTokens > maxContextTokens){
+ if(pointer >= formated.length){
+ alertError(language.errors.toomuchtoken + "\n\nAt token rechecking. Required Tokens: " + inputTokens)
+ return false
}
- formated = formated.filter((v) => {
- return v.content !== ''
- })
- }
+ if(formated[pointer].removable){
+ inputTokens -= await tokenizer.tokenizeChat(formated[pointer])
+ formated[pointer].content = ''
+ }
+ pointer++
+ }
+ formated = formated.filter((v) => {
+ return v.content !== ''
+ })
}
+ //estimate tokens
+ let outputTokens = db.maxResponse
+ if(inputTokens + outputTokens > maxContextTokens){
+ outputTokens = maxContextTokens - inputTokens
+ }
+ const generationId = v4()
+ const generationModel = getGenerationModelString()
+
+ const generationInfo:MessageGenerationInfo = {
+ model: generationModel,
+ generationId: generationId,
+ inputTokens: inputTokens,
+ outputTokens: outputTokens,
+ maxContext: maxContextTokens,
+ }
const req = await requestChatData({
formated: formated,
biasString: biases,
@@ -941,6 +955,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
isGroupChat: nowChatroom.type === 'group',
bias: {},
continue: arg.continue,
+ chatId: generationId
}, 'model', abortSignal)
let result = ''
@@ -949,8 +964,6 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
if(abortSignal.aborted === true){
return false
}
- const generationId = v4()
- const generationModel = getGenerationModelString()
if(req.type === 'fail'){
alertError(req.result)
return false
@@ -969,10 +982,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
data: "",
saying: currentChar.chaId,
time: Date.now(),
- generationInfo: {
- model: generationModel,
- generationId: generationId,
- }
+ generationInfo,
})
}
db.characters[selectedChar].chats[selectedChat].isStreaming = true
@@ -1051,10 +1061,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
data: result,
saying: currentChar.chaId,
time: Date.now(),
- generationInfo: {
- model: generationModel,
- generationId: generationId,
- }
+ generationInfo
}
if(inlayResult.promise){
const p = await inlayResult.promise
@@ -1067,10 +1074,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
data: result,
saying: currentChar.chaId,
time: Date.now(),
- generationInfo: {
- model: generationModel,
- generationId: generationId,
- }
+ generationInfo
})
const ind = db.characters[selectedChar].chats[selectedChat].message.length - 1
if(inlayResult.promise){
diff --git a/src/ts/process/request.ts b/src/ts/process/request.ts
index ec8bbcfc..869ce04f 100644
--- a/src/ts/process/request.ts
+++ b/src/ts/process/request.ts
@@ -39,6 +39,7 @@ interface requestDataArgument{
isGroupChat?:boolean
useEmotion?:boolean
continue?:boolean
+ chatId?:string
}
type requestDataResponse = {
@@ -360,6 +361,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
"Authorization": "Bearer " + db.mistralKey,
},
abortSignal,
+ chatId: arg.chatId
})
const dat = res.data as any
@@ -526,7 +528,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
body: JSON.stringify(body),
method: "POST",
headers: headers,
- signal: abortSignal
+ signal: abortSignal,
+ chatId: arg.chatId
})
if(da.status !== 200){
@@ -607,7 +610,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
body: body,
headers: headers,
abortSignal,
- useRisuToken:throughProxi
+ useRisuToken:throughProxi,
+ chatId: arg.chatId
})
const dat = res.data as any
@@ -727,7 +731,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
headers: {
"Authorization": "Bearer " + db.novelai.token
},
- abortSignal
+ abortSignal,
+ chatId: arg.chatId
})
if((!da.ok )|| (!da.data.output)){
@@ -775,8 +780,9 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
},
headers: {
"Content-Type": "application/json",
- "Authorization": "Bearer " + db.openAIKey
+ "Authorization": "Bearer " + db.openAIKey,
},
+ chatId: arg.chatId
});
if(!response.ok){
@@ -883,7 +889,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
const res = await globalFetch(blockingUrl, {
body: bodyTemplate,
headers: headers,
- abortSignal
+ abortSignal,
+ chatId: arg.chatId
})
const dat = res.data as any
@@ -947,6 +954,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
const response = await globalFetch(urlStr, {
body: bodyTemplate,
+ chatId: arg.chatId
})
if(!response.ok){
@@ -1024,7 +1032,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
"Content-Type": "application/json",
"Authorization": "Bearer " + db.google.accessToken
},
- abortSignal
+ abortSignal,
+ chatId: arg.chatId
})
if(res.ok){
console.log(res.data)
@@ -1215,6 +1224,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
const res = await globalFetch(url, {
headers: headers,
body: body,
+ chatId: arg.chatId
})
if(!res.ok){
@@ -1276,7 +1286,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
headers: {
"content-type": "application/json",
},
- abortSignal
+ abortSignal,
+ chatId: arg.chatId
})
if(!da.ok){
@@ -1329,7 +1340,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
const response = await globalFetch(api_server_url + '/api', {
method: 'POST',
headers: headers,
- body: send_body
+ body: send_body,
+ chatId: arg.chatId
});
if(!response.ok){
@@ -1645,7 +1657,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
method: "POST",
body: params,
headers: signed.headers,
- plainFetchForce: true
+ plainFetchForce: true,
+ chatId: arg.chatId
})
if(!res.ok){
@@ -1677,7 +1690,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
"anthropic-version": "2023-06-01",
"accept": "application/json",
},
- method: "POST"
+ method: "POST",
+ chatId: arg.chatId
})
if(res.status !== 200){
@@ -1744,7 +1758,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
"anthropic-version": "2023-06-01",
"accept": "application/json"
},
- method: "POST"
+ method: "POST",
+ chatId: arg.chatId
})
if(!res.ok){
@@ -1906,7 +1921,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
method: "POST",
body: params,
headers: signed.headers,
- plainFetchForce: true
+ plainFetchForce: true,
+ chatId: arg.chatId
})
@@ -1940,7 +1956,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
"anthropic-version": "2023-06-01",
"accept": "application/json"
},
- useRisuToken: aiModel === 'reverse_proxy'
+ useRisuToken: aiModel === 'reverse_proxy',
+ chatId: arg.chatId
})
if((!da.ok) || (da.data.error)){
diff --git a/src/ts/storage/database.ts b/src/ts/storage/database.ts
index 5ed2e6b5..ab58f9d2 100644
--- a/src/ts/storage/database.ts
+++ b/src/ts/storage/database.ts
@@ -870,6 +870,9 @@ export interface Message{
export interface MessageGenerationInfo{
model?: string
generationId?: string
+ inputTokens?: number
+ outputTokens?: number
+ maxContext?: number
}
interface AINsettings{
diff --git a/src/ts/storage/globalApi.ts b/src/ts/storage/globalApi.ts
index 6992f6f6..5a343a9d 100644
--- a/src/ts/storage/globalApi.ts
+++ b/src/ts/storage/globalApi.ts
@@ -44,6 +44,7 @@ interface fetchLog{
date:string
url:string
responseType?:string
+ chatId?:string
}
let fetchLog:fetchLog[] = []
@@ -494,7 +495,8 @@ export function addFetchLog(arg:{
response:any,
success:boolean,
url:string,
- resType?:string
+ resType?:string,
+ chatId?:string
}){
fetchLog.unshift({
body: typeof(arg.body) === 'string' ? arg.body : JSON.stringify(arg.body, null, 2),
@@ -503,11 +505,21 @@ export function addFetchLog(arg:{
responseType: arg.resType ?? 'json',
success: arg.success,
date: (new Date()).toLocaleTimeString(),
- url: arg.url
+ url: arg.url,
+ chatId: arg.chatId
})
return fetchLog.length - 1
}
+export async function getFetchData(id:string) {
+ for(const log of fetchLog){
+ if(log.chatId === id){
+ return log
+ }
+ }
+ return null
+}
+
export async function globalFetch(url:string, arg:{
plainFetchForce?:boolean,
body?:any,
@@ -515,7 +527,8 @@ export async function globalFetch(url:string, arg:{
rawResponse?:boolean,
method?:"POST"|"GET",
abortSignal?:AbortSignal,
- useRisuToken?:boolean
+ useRisuToken?:boolean,
+ chatId?:string
} = {}): Promise<{
ok: boolean;
data: any;
@@ -542,7 +555,8 @@ export async function globalFetch(url:string, arg:{
response: JSON.stringify(response, null, 2),
success: success,
date: (new Date()).toLocaleTimeString(),
- url: url
+ url: url,
+ chatId: arg.chatId
})
}
catch{
@@ -552,7 +566,8 @@ export async function globalFetch(url:string, arg:{
response: `${response}`,
success: success,
date: (new Date()).toLocaleTimeString(),
- url: url
+ url: url,
+ chatId: arg.chatId
})
}
}
@@ -1397,7 +1412,8 @@ export async function fetchNative(url:string, arg:{
headers?:{[key:string]:string},
method?:"POST",
signal?:AbortSignal,
- useRisuTk?:boolean
+ useRisuTk?:boolean,
+ chatId?:string
}):Promise<{ body: ReadableStream; headers: Headers; status: number }> {
let headers = arg.headers ?? {}
const db = get(DataBase)
@@ -1408,7 +1424,8 @@ export async function fetchNative(url:string, arg:{
response: 'Streamed Fetch',
success: true,
url: url,
- resType: 'stream'
+ resType: 'stream',
+ chatId: arg.chatId
})
if(isTauri || Capacitor.isNativePlatform()){
fetchIndex++