Update token length check (256 → 1024) in embedding transformer and error message in supaMemory

This commit is contained in:
kwaroran
2024-01-15 03:40:29 +09:00
parent a1a38d5ad2
commit 3dec23a99c
2 changed files with 3 additions and 3 deletions

View File

@@ -57,7 +57,7 @@ export const runEmbedding = async (text: string):Promise<Float32Array> => {
}
const tokenizer = await AutoTokenizer.from_pretrained('Xenova/all-MiniLM-L6-v2');
const tokens = tokenizer.encode(text)
if (tokens.length > 256) {
if (tokens.length > 1024) {
let chunks:string[] = []
let chunk:number[] = []
for (let i = 0; i < tokens.length; i++) {

View File

@@ -178,7 +178,7 @@ export async function supaMemory(
if(db.supaMemoryType === 'distilbart'){
try {
const sum = await runSummarizer(stringlizedChat)
return sum[0].summary_text
return sum
} catch (error) {
return {
currentTokens: currentTokens,
@@ -274,7 +274,7 @@ export async function supaMemory(
return {
currentTokens: currentTokens,
chats: chats,
error: "Not Enough Tokens"
error: "Not Enough Tokens to summarize in SupaMemory"
}
}
maxChunkSize = maxChunkSize * 0.7