[fix] tokenizer problems
@@ -1,4 +1,5 @@
 import type { OpenAIChat } from ".";
+import { tokenize } from "../tokenizer";
 
 export function multiChatReplacer(){
 
@@ -52,4 +53,15 @@ export function unstringlizeChat(text:string, formated:OpenAIChat[], char:string
     }
 
     return text
 }
+
+export async function getNameMaxTokens(names:string[]){
+    let maxCharNameTokens = 0
+    for(const name of names){
+        const tokens = await tokenize(name + ': ') + 1
+        if(maxCharNameTokens < tokens){
+            maxCharNameTokens = tokens
+        }
+    }
+    return maxCharNameTokens
+}
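The new getNameMaxTokens helper tokenizes each speaker prefix (name + ': ') and returns the largest count, adding 1 token of headroom per name (the commit does not say what the extra token covers; a trailing separator is a plausible guess). A minimal usage sketch in TypeScript of what a caller might do with it, assuming tokenize resolves to a token count and a hypothetical module path ./stringlize:

    import { getNameMaxTokens } from "./stringlize"; // hypothetical path

    // Reserve room for the longest possible "Name: " prefix so that
    // prepending any speaker name never overflows the context window.
    async function promptBudget(names: string[], contextLimit: number): Promise<number> {
        const prefixTokens = await getNameMaxTokens(names);
        return contextLimit - prefixTokens;
    }

Called as promptBudget(["User", "Assistant"], 4096), this returns the usable budget once the widest name prefix is accounted for.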