add logging
This commit is contained in:
@@ -226,11 +226,14 @@ export async function hypaMemoryV2(
|
||||
}
|
||||
// Starting chat index of new mainChunk to be generated
|
||||
|
||||
// Token management loop (summarizes the oldest chats while the token budget is exceeded)
|
||||
// Token management loop (runs while the current token count exceeds the allowed maximum)
|
||||
while (currentTokens >= maxContextTokens) {
|
||||
console.log("The current Token exceeded maxContextTokens. Current tokens: ", currentTokens, "\nMax Context Tokens: ", maxContextTokens)
|
||||
const halfData: OpenAIChat[] = [];
|
||||
let halfDataTokens = 0;
|
||||
|
||||
const startIdx = idx;
|
||||
|
||||
// Accumulate chats to summarize
|
||||
while (
|
||||
halfDataTokens < chunkSize &&
|
||||
@@ -241,7 +244,8 @@ export async function hypaMemoryV2(
|
||||
halfDataTokens += await tokenizer.tokenizeChat(chat);
|
||||
halfData.push(chat);
|
||||
}
|
||||
|
||||
const endIdx = idx - 1;
|
||||
console.log(`Summarizing chats from index ${startIdx} to ${endIdx}.`);
|
||||
if (halfData.length === 0) break;
|
||||
|
||||
const stringlizedChat = halfData
|
||||
|
||||
Reference in New Issue
Block a user