This commit is contained in:
kwaroran
2024-05-14 21:45:29 +09:00
3 changed files with 10 additions and 4 deletions

View File

@@ -495,12 +495,12 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
if(supportsInlayImage()){
// inlay models don't support logit_bias
// gpt-4-turbo supports both logit_bias and inlay image
// OpenAI's GPT-based LLM models support both logit_bias and inlay image
if(!(
aiModel.startsWith('gpt4_turbo') ||
aiModel.startsWith('gpt') ||
(aiModel == 'reverse_proxy' && (
db.proxyRequestModel?.startsWith('gpt4_turbo') ||
(db.proxyRequestModel === 'custom' && db.customProxyRequestModel.startsWith('gpt-4-turbo'))
db.proxyRequestModel?.startsWith('gpt') ||
(db.proxyRequestModel === 'custom' && db.customProxyRequestModel.startsWith('gpt'))
)))){
// @ts-ignore
delete body.logit_bias

View File

@@ -35,6 +35,11 @@ export async function encode(data:string):Promise<(number[]|Uint32Array|Int32Arr
case 'llama3':
return await tokenizeWebTokenizers(data, 'llama')
default:
// Add exception for gpt-4o tokenizers on reverse_proxy
if(db.proxyRequestModel?.startsWith('gpt4o') ||
(db.proxyRequestModel === 'custom' && db.customProxyRequestModel.startsWith('gpt-4o'))) {
return await tikJS(data, 'o200k_base')
}
return await tikJS(data)
}
}