[fix] stop tokens

This commit is contained in:
kwaroran
2023-09-20 15:37:53 +09:00
parent c007f5b85a
commit a8fd602d5d

View File

@@ -471,15 +471,13 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
             }).join("") + `\n## Response\n`;
             const response = await globalFetch( "https://api.openai.com/v1/completions", {
-                rawResponse:false,
                 body: {
                     model: "gpt-3.5-turbo-instruct",
                     prompt: prompt,
                     max_tokens: maxTokens,
                     temperature: temperature,
                     top_p: 1,
-                    stream: false,
-                    stop:["\n### User:","### User:","User:"," User:", "user:", " user:",],
+                    stop:["User:"," User:", "user:", " user:"],
                     presence_penalty: arg.PresensePenalty || (db.PresensePenalty / 100),
                     frequency_penalty: arg.frequencyPenalty || (db.frequencyPenalty / 100),
                 },
@@ -495,10 +493,10 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
                     result: (language.errors.httpError + `${JSON.stringify(response.data)}`)
                 }
             }
-            const text = response.data.choices[0].text
+            const text:string = response.data.choices[0].text
             return {
                 type: 'success',
-                result: text
+                result: text.replace(/##\n/g, '')
             }
         }
         case "textgen_webui":