add logging

author LightningHyperBlaze45654
date 2024-12-06 08:49:00 -08:00
parent e4e63dc237
commit 56646809a1
2 changed files with 9 additions and 5 deletions


@@ -776,9 +776,9 @@ export async function sendChat(chatProcessIndex = -1,arg:{
chats = hn.chats
currentTokens = hn.tokens
}
else if(DBState.db.hypav2){ //HypaV2 support needs to be changed like this.
else if(DBState.db.hypav2){
console.log("Current chat's hypaV2 Data: ", currentChat.hypaV2Data)
const sp = await hypaMemoryV2(chats, currentTokens, maxContextTokens, currentChat, nowChatroom, tokenizer)
console.log("All chats: ", chats)
if(sp.error){
console.log(sp)
alertError(sp.error)
@@ -788,7 +788,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{
currentTokens = sp.currentTokens
currentChat.hypaV2Data = sp.memory ?? currentChat.hypaV2Data
DBState.db.characters[selectedChar].chats[selectedChat].hypaV2Data = currentChat.hypaV2Data
console.log(currentChat.hypaV2Data)
console.log("Current chat's HypaV2Data: ", currentChat.hypaV2Data)
}
else{
const sp = await supaMemory(chats, currentTokens, maxContextTokens, currentChat, nowChatroom, tokenizer, {

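Taken together, the two hunks above only touch the HypaV2 branch of sendChat: the commit logs currentChat.hypaV2Data before hypaMemoryV2 runs, logs the chats array right after the call, and labels the post-persistence dump of currentChat.hypaV2Data. A minimal sketch of the result shape and the persist-and-log step follows; the field names (error, currentTokens, memory) come from the diff, while HypaV2Data and persistHypaV2Memory are illustrative names, not identifiers from the repo.

// Sketch only; HypaV2Data and persistHypaV2Memory are assumed names for illustration.
type HypaV2Data = unknown

interface HypaV2Result {
    error?: string        // checked via `if (sp.error)` in the first hunk
    currentTokens: number // copied back into sendChat's token counter
    memory?: HypaV2Data   // new summary state, persisted when present
}

function persistHypaV2Memory(current: HypaV2Data, sp: HypaV2Result): HypaV2Data {
    // Keep the existing memory when hypaMemoryV2 returned none (the `??` fallback in the diff)
    const next = sp.memory ?? current
    // The commit's change: label the log line instead of dumping the object bare
    console.log("Current chat's HypaV2Data: ", next)
    return next
}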

@@ -226,11 +226,14 @@ export async function hypaMemoryV2(
}
// Starting chat index of new mainChunk to be generated
// Token management loop(where using of )
// Token management loop(If current token exceeds allowed amount...)
while (currentTokens >= maxContextTokens) {
console.log("The current Token exceeded maxContextTokens. Current tokens: ", currentTokens, "\nMax Context Tokens: ", maxContextTokens)
const halfData: OpenAIChat[] = [];
let halfDataTokens = 0;
const startIdx = idx;
// Accumulate chats to summarize
while (
halfDataTokens < chunkSize &&
@@ -241,7 +244,8 @@ export async function hypaMemoryV2(
halfDataTokens += await tokenizer.tokenizeChat(chat);
halfData.push(chat);
}
const endIdx = idx - 1;
console.log(`Summarizing chats from index ${startIdx} to ${endIdx}.`);
if (halfData.length === 0) break;
const stringlizedChat = halfData
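The hypaMemoryV2 hunks add visibility into the summarization pass: while currentTokens is at or above maxContextTokens, the loop collects up to chunkSize tokens' worth of chats into halfData and now records the index range it is about to summarize. Below is a standalone sketch of that accumulation step, assuming a plain array of pre-tokenized messages in place of the project's tokenizer and OpenAIChat type; the inner loop's remaining conditions are cut off at the hunk boundary, so the bounds check here is an assumption.

// Sketch, not the repo's code: the same accumulate-then-log pattern with assumed types.
interface TokenizedChat {
    content: string
    tokens: number // stand-in for `await tokenizer.tokenizeChat(chat)`
}

function collectChunk(chats: TokenizedChat[], startIdx: number, chunkSize: number): { halfData: TokenizedChat[]; endIdx: number } {
    const halfData: TokenizedChat[] = []
    let halfDataTokens = 0
    let idx = startIdx
    // Accumulate chats to summarize until the chunk token budget is reached
    while (halfDataTokens < chunkSize && idx < chats.length) {
        const chat = chats[idx]
        halfDataTokens += chat.tokens
        halfData.push(chat)
        idx++
    }
    const endIdx = idx - 1
    // The commit's new logging: which slice of the history is being summarized
    console.log(`Summarizing chats from index ${startIdx} to ${endIdx}.`)
    return { halfData, endIdx }
}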