Merge branch 'main' of https://github.com/LightningHyperBlaze45654/RisuAI
@@ -1,10 +1,9 @@
 import { alertError } from "src/ts/alert";
-import type { OpenAIChat } from "..";
+import type { OpenAIChat } from "../index.svelte";
 import { HypaProcesser } from "./hypamemory";
 import { language } from "src/lang";
 import type { ChatTokenizer } from "src/ts/tokenizer";
-import { get } from "svelte/store";
-import { DataBase } from "src/ts/storage/database";
+import { getDatabase } from "src/ts/storage/database.svelte";
 
 const maxRecentChatQuery = 4;
 export async function hanuraiMemory(chats:OpenAIChat[],arg:{
@@ -12,7 +11,7 @@ export async function hanuraiMemory(chats:OpenAIChat[],arg:{
     maxContextTokens:number,
     tokenizer:ChatTokenizer
 }){
-    const db = get(DataBase)
+    const db = getDatabase()
     const tokenizer = arg.tokenizer
     const processer = new HypaProcesser('MiniLM')
     let addTexts:string[] = []
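
Note: both hunks in this file show the commit-wide storage migration: the database is no longer a svelte/store value read via get(DataBase), but a plain accessor getDatabase() exported from database.svelte.ts, so the svelte/store import goes away. A minimal sketch of the accessor pattern this implies, with illustrative field names that are assumptions rather than the repo's actual schema:

    // database.svelte.ts: hypothetical shape of the runes-based accessor
    let database = $state({
        hypaAllocatedTokens: 3000,    // read later in this diff by hypaMemoryV2
        supaModelType: 'distilbart',  // read later in this diff by summary()
    });

    export function getDatabase() {
        // a plain call replaces get(DataBase): callers read the reactive
        // object directly instead of unwrapping a store snapshot
        return database;
    }
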
@@ -1,5 +1,5 @@
 import localforage from "localforage";
-import { globalFetch } from "src/ts/storage/globalApi";
+import { globalFetch } from "src/ts/globalApi.svelte";
 import { runEmbedding } from "../transformers";
 import { alertError } from "src/ts/alert";
 import { appendLastPath } from "src/ts/util";
@@ -1,12 +1,11 @@
-import { DataBase, type Chat, type character, type groupChat } from "src/ts/storage/database";
-import type { OpenAIChat } from "..";
+import { getDatabase, type Chat, type character, type groupChat } from "src/ts/storage/database.svelte";
+import type { OpenAIChat } from "../index.svelte";
 import type { ChatTokenizer } from "src/ts/tokenizer";
-import { get } from "svelte/store";
 import { requestChatData } from "../request";
 import { HypaProcesser } from "./hypamemory";
-import { globalFetch } from "src/ts/storage/globalApi";
+import { globalFetch } from "src/ts/globalApi.svelte";
 import { runSummarizer } from "../transformers";
-import { last, remove } from "lodash";
+import { parseChatML } from "src/ts/parser.svelte";
 
 export interface HypaV2Data {
     chunks: {
@@ -20,7 +19,7 @@ export interface HypaV2Data {
 }
 
 async function summary(stringlizedChat: string): Promise<{ success: boolean; data: string }> {
-    const db = get(DataBase);
+    const db = getDatabase();
     console.log("Summarizing");
 
     if (db.supaModelType === 'distilbart') {
@@ -85,7 +84,10 @@ async function summary(stringlizedChat: string): Promise<{ success: boolean; data: string }> {
             };
         }
     } else {
-        const promptbody: OpenAIChat[] = [
+
+        let parsedPrompt = parseChatML(supaPrompt.replaceAll('{{slot}}', stringlizedChat))
+
+        const promptbody: OpenAIChat[] = parsedPrompt ?? [
             {
                 role: "user",
                 content: stringlizedChat
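
Note: parseChatML allows the user-configured summary prompt (supaPrompt) to be written as structured ChatML; when parsing fails and it returns null, the ?? fallback restores the previous behavior of sending the stringlized chat as a single user message. The pattern in isolation, with the OpenAIChat shape abbreviated here as an assumption:

    type OpenAIChat = { role: 'system' | 'user' | 'assistant'; content: string };

    // parseChatML is assumed to return OpenAIChat[] | null, so ?? keeps the
    // legacy single-message prompt whenever the template is not valid ChatML
    const promptbody: OpenAIChat[] =
        parseChatML(supaPrompt.replaceAll('{{slot}}', stringlizedChat)) ?? [
            { role: 'user', content: stringlizedChat },
        ];
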
@@ -101,7 +103,7 @@ async function summary(stringlizedChat: string): Promise<{ success: boolean; data: string }> {
             bias: {},
             useStreaming: false,
             noMultiGen: true
-        }, 'submodel');
+        }, 'memory');
         if (da.type === 'fail' || da.type === 'streaming' || da.type === 'multiline') {
             return {
                 success: false,
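
Note: the second argument to requestChatData selects which configured model handles the request; switching it from 'submodel' to 'memory' routes summarization to a dedicated memory-model slot rather than the general auxiliary model. Reading it as a user-configurable slot is an inference from the new summaryModel/hypaModel options on hypaMemoryV2 below, not something this diff states.
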
@@ -123,7 +125,7 @@ export async function hypaMemoryV2(
     arg: { asHyper?: boolean, summaryModel?: string, summaryPrompt?: string, hypaModel?: string } = {}
 ): Promise<{ currentTokens: number; chats: OpenAIChat[]; error?: string; memory?: HypaV2Data; }> {
 
-    const db = get(DataBase);
+    const db = getDatabase();
     const data: HypaV2Data = room.hypaV2Data ?? { chunks: [], mainChunks: [] };
 
     let allocatedTokens = db.hypaAllocatedTokens;
@@ -1,13 +1,13 @@
-import { get } from "svelte/store";
-import type { OpenAIChat } from "..";
-import { DataBase, type Chat, type character, type groupChat } from "../../storage/database";
+import type { OpenAIChat } from "../index.svelte";
+import { getDatabase, type Chat, type character, type groupChat } from "../../storage/database.svelte";
 import { tokenize, type ChatTokenizer } from "../../tokenizer";
 import { requestChatData } from "../request";
 import { HypaProcesser } from "./hypamemory";
 import { stringlizeChat } from "../stringlize";
-import { globalFetch } from "src/ts/storage/globalApi";
+import { globalFetch } from "src/ts/globalApi.svelte";
 import { runSummarizer } from "../transformers";
 import { getUserName } from "src/ts/util";
+import { parseChatML } from "src/ts/parser.svelte";
 
 export async function supaMemory(
     chats:OpenAIChat[],
@@ -18,7 +18,7 @@ export async function supaMemory(
     tokenizer:ChatTokenizer,
     arg:{asHyper?:boolean} = {}
 ): Promise<{ currentTokens: number; chats: OpenAIChat[]; error?:string; memory?:string;lastId?:string}>{
-    const db = get(DataBase)
+    const db = getDatabase()
 
     currentTokens += 10
 
@@ -74,7 +74,7 @@ export async function supaMemory(
     for(let j=0;j<HypaData.length;j++){
         let i =0;
         let countTokens = currentTokens
-        let countChats = structuredClone(chats)
+        let countChats = safeStructuredClone(chats)
         while(true){
             if(countChats.length === 0){
                 break
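
Note: native structuredClone throws a DataCloneError on non-cloneable values such as functions and proxies, and the reactive $state objects introduced elsewhere in this migration are proxy-backed, so a guarded clone is the safer default here. The body of safeStructuredClone is not shown in this diff; a plausible sketch under that assumption:

    function safeStructuredClone<T>(data: T): T {
        try {
            // fast path: native deep clone
            return structuredClone(data);
        } catch {
            // fallback: a JSON round-trip drops functions and undefined
            // fields, but does not throw DataCloneError on proxies
            return JSON.parse(JSON.stringify(data));
        }
    }
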
@@ -253,7 +253,8 @@ export async function supaMemory(
         }
     }
     else {
-        const promptbody:OpenAIChat[] = [
+        let parsedPrompt = parseChatML(supaPrompt.replaceAll('{{slot}}', stringlizedChat))
+        const promptbody:OpenAIChat[] = parsedPrompt ?? [
             {
                 role: "user",
                 content: stringlizedChat
@@ -268,7 +269,7 @@ export async function supaMemory(
             bias: {},
             useStreaming: false,
             noMultiGen: true
-        }, 'submodel')
+        }, 'memory')
         if(da.type === 'fail' || da.type === 'streaming' || da.type === 'multiline'){
             return {
                 currentTokens: currentTokens,
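
Note: these last two hunks mirror the hypav2.ts changes above, applying the same parseChatML ?? fallback and the same 'submodel' to 'memory' routing, so the supaMemory and hypaMemoryV2 summarization paths stay consistent.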