Merge branch 'main' of https://github.com/LightningHyperBlaze45654/RisuAI
@@ -776,9 +776,9 @@ export async function sendChat(chatProcessIndex = -1,arg:{
 chats = hn.chats
 currentTokens = hn.tokens
 }
-else if(DBState.db.hypav2){ //HypaV2 support needs to be changed like this.
+else if(DBState.db.hypav2){
+console.log("Current chat's hypaV2 Data: ", currentChat.hypaV2Data)
 const sp = await hypaMemoryV2(chats, currentTokens, maxContextTokens, currentChat, nowChatroom, tokenizer)
-console.log("All chats: ", chats)
 if(sp.error){
 console.log(sp)
 alertError(sp.error)
@@ -788,7 +788,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{
 currentTokens = sp.currentTokens
 currentChat.hypaV2Data = sp.memory ?? currentChat.hypaV2Data
 DBState.db.characters[selectedChar].chats[selectedChat].hypaV2Data = currentChat.hypaV2Data
-console.log(currentChat.hypaV2Data)
+console.log("Current chat's HypaV2Data: ", currentChat.hypaV2Data)
 }
 else{
 const sp = await supaMemory(chats, currentTokens, maxContextTokens, currentChat, nowChatroom, tokenizer, {
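For reference, the sendChat hunks above only touch logging; the result object sp is still consumed exactly as before. A minimal sketch of the result shape this call site relies on, inferred from the usage above rather than from the declared hypaMemoryV2 types:

    // Fields visible at this call site: error, currentTokens, memory.
    // HypaV2Data is the project's own type; `unknown` stands in for it here.
    type HypaV2Data = unknown;

    interface HypaMemoryV2CallSiteResult {
        error?: string;          // when set, sendChat logs sp and aborts via alertError(sp.error)
        currentTokens: number;   // replaces the running token count
        memory?: HypaV2Data;     // written back to currentChat.hypaV2Data when present
    }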
@@ -141,7 +141,7 @@ async function summary(
 return { success: true, data: result };
 } // No, I am not going to touch any http API calls.

-function isSubset<T>(subset: Set<T>, superset: Set<T>): boolean { // simple helper function. Check if subset IS a subset of superset given.
+function isSubset<T>(subset: Set<T>, superset: Set<T>): boolean {
 for (const item of subset) {
 if (!superset.has(item)) {
 return false;
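The helper does exactly what the name says; a quick usage sketch with hypothetical values:

    // true only when every element of the first set is also in the second.
    isSubset(new Set(["a"]), new Set(["a", "b"]));       // true
    isSubset(new Set(["a", "c"]), new Set(["a", "b"]));  // false ("c" is missing)
    isSubset(new Set<string>(), new Set(["a"]));         // true (the empty set is a subset of anything)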
@@ -154,7 +154,7 @@ function cleanInvalidChunks(
 chats: OpenAIChat[],
 data: HypaV2Data,
 ): void {
-const currentChatMemos = new Set(chats.map((chat) => chat.memo)); // if chunk's memo set is not subset of this, the chunk's content -> delete
+const currentChatMemos = new Set(chats.map((chat) => chat.memo));

 // mainChunks filtering
 data.mainChunks = data.mainChunks.filter((mainChunk) => {
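The deleted comment described the intent of the filter: a stored chunk survives only while every chat memo it was built from is still present in the current chat. A sketch of that predicate, with chatMemos used as an assumed field name purely for illustration:

    // Keep a chunk only if the memos it summarizes are all still in the chat.
    // mainChunk.chatMemos is an assumed field name, not taken from this diff.
    data.mainChunks = data.mainChunks.filter((mainChunk) =>
        isSubset(new Set(mainChunk.chatMemos), currentChatMemos)
    );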
@@ -225,11 +225,14 @@ export async function hypaMemoryV2(
 }
 // Starting chat index of new mainChunk to be generated

-// Token management loop(where using of )
+// Token management loop(If current token exceeds allowed amount...)
 while (currentTokens >= maxContextTokens) {
+console.log("The current Token exceeded maxContextTokens. Current tokens: ", currentTokens, "\nMax Context Tokens: ", maxContextTokens)
 const halfData: OpenAIChat[] = [];
 let halfDataTokens = 0;

+const startIdx = idx;
+
 // Accumulate chats to summarize
 while (
 halfDataTokens < chunkSize &&
@@ -240,7 +243,8 @@ export async function hypaMemoryV2(
 halfDataTokens += await tokenizer.tokenizeChat(chat);
 halfData.push(chat);
 }
-
+const endIdx = idx - 1;
+console.log(`Summarizing chats from index ${startIdx} to ${endIdx}.`);
 if (halfData.length === 0) break;

 const stringlizedChat = halfData
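The startIdx/endIdx additions are bookkeeping around the accumulation step, so the new log line can report which slice of the chat history each summarization pass consumed. A standalone sketch of that step, assuming the loop advances an index over the chats array (the surrounding control flow is outside this diff):

    // Minimal sketch of the accumulation/bookkeeping step shown in the hunks
    // above. Names not visible in the diff (ChatLike, TokenizerLike, the idx
    // advance) are assumptions for illustration.
    type ChatLike = { role: string; content: string };

    interface TokenizerLike {
        tokenizeChat(chat: ChatLike): Promise<number>;
    }

    async function collectChunk(
        chats: ChatLike[],
        startIdx: number,
        chunkSize: number,
        tokenizer: TokenizerLike,
    ): Promise<{ halfData: ChatLike[]; endIdx: number; tokens: number }> {
        const halfData: ChatLike[] = [];
        let halfDataTokens = 0;
        let idx = startIdx;

        // Accumulate chats until roughly chunkSize tokens are collected.
        while (halfDataTokens < chunkSize && idx < chats.length) {
            const chat = chats[idx];
            halfDataTokens += await tokenizer.tokenizeChat(chat);
            halfData.push(chat);
            idx++;
        }

        const endIdx = idx - 1;
        console.log(`Summarizing chats from index ${startIdx} to ${endIdx}.`);
        return { halfData, endIdx, tokens: halfDataTokens };
    }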