[fix] tokenizer problems

This commit is contained in:
kwaroran
2023-05-31 05:26:10 +09:00
parent 7ea768cb5b
commit f98764776e
3 changed files with 53 additions and 14 deletions

View File

@@ -1,4 +1,5 @@
import type { OpenAIChat } from ".";
import { tokenize } from "../tokenizer";
export function multiChatReplacer(){
@@ -52,4 +53,15 @@ export function unstringlizeChat(text:string, formated:OpenAIChat[], char:string
}
return text
}
/**
 * Computes the largest token count among `names` when each is rendered as a
 * chat-name prefix (`"<name>: "`), plus one extra token of padding.
 *
 * Returns 0 when `names` is empty.
 *
 * @param names - the character/persona names to measure
 * @returns the maximum of `tokenize(name + ': ') + 1` over all names, or 0
 */
export async function getNameMaxTokens(names:string[]): Promise<number> {
    // Tokenize every name in parallel instead of awaiting one-by-one in a loop.
    const counts = await Promise.all(names.map((name) => tokenize(name + ': ')))
    let maxCharNameTokens = 0
    for (const count of counts) {
        // +1 — extra padding token per name (mirrors the original behavior).
        maxCharNameTokens = Math.max(maxCharNameTokens, count + 1)
    }
    return maxCharNameTokens
}