[feat] gpt 0613 additions, some fixes

Author: kwaroran
Date: 2023-06-14 23:14:44 +09:00
Parent: 1c170ef8a7
Commit: 0884c445a0
4 changed files with 19 additions and 13 deletions
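The core of the change is wiring the new dated 0613 snapshots through the model picker, the display-name switch, and the request builder. As a condensed, illustrative sketch only: the internal-ID to OpenAI-API-name mapping that the hunks below end up with could be read as a lookup table (the IDs and API names are taken verbatim from the diff; the table and the helper toOpenAIModelName are hypothetical, not code from this repository):

// Illustrative only: a lookup-table view of the ternary chain added in the
// request-building hunk below. Names are copied from the diff; the helper is hypothetical.
const OPENAI_MODEL_NAMES: Record<string, string> = {
    gpt35: 'gpt-3.5-turbo',
    gpt35_0613: 'gpt-3.5-turbo-0613',
    gpt35_16k: 'gpt-3.5-turbo-16k',
    gpt35_16k_0613: 'gpt-3.5-turbo-16k-0613',
    gpt4: 'gpt-4',
    gpt4_32k: 'gpt-4-32k',
    gpt4_0613: 'gpt-4-0613',
    gpt4_32k_0613: 'gpt-4-32k-0613',
}

// Unknown IDs fall back to '', as the new ternary chain in the diff does.
function toOpenAIModelName(aiModel: string): string {
    return OPENAI_MODEL_NAMES[aiModel] ?? ''
}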


@@ -12,7 +12,7 @@
</script>
<div class="absolute w-full h-full z-40 bg-black bg-opacity-50 flex justify-center items-center">
<div class="bg-darkbg p-4 break-any rounded-md flex flex-col max-w-3xl w-72">
<div class="bg-darkbg p-4 break-any rounded-md flex flex-col max-w-3xl w-72 max-h-full overflow-y-auto">
<div class="flex items-center text-neutral-200 mb-4">
<h2 class="mt-0 mb-0">{language.chatList}</h2>
<div class="flex-grow flex justify-end">


@@ -110,7 +110,7 @@
<span class="text-neutral-200">Claude {language.apiKey}</span>
<input class="text-neutral-200 mb-4 p-2 bg-transparent input-text focus:bg-selected text-sm" placeholder="..." bind:value={$DataBase.claudeAPIKey}>
{/if}
-{#if $DataBase.aiModel === 'gpt35' || $DataBase.aiModel === 'gpt35_16k_0613' || $DataBase.subModel === 'gpt35_16k_0613' || $DataBase.aiModel === 'gpt35_16k' || $DataBase.subModel === 'gpt35_16k' || $DataBase.aiModel === 'gpt4' || $DataBase.subModel === 'gpt4' || $DataBase.subModel === 'gpt35'|| $DataBase.aiModel === 'gpt4_32k' || $DataBase.subModel === 'gpt4_32k'}
+{#if $DataBase.aiModel.startsWith('gpt')}
<span class="text-neutral-200">OpenAI {language.apiKey} <Help key="oaiapikey"/></span>
<input class="text-neutral-200 p-2 bg-transparent input-text focus:bg-selected text-sm" placeholder="sk-XXXXXXXXXXXXXXXXXXXX" bind:value={$DataBase.openAIKey}>
<div class="flex items-center mt-2 mb-4">


@@ -13,6 +13,8 @@
switch(name){
case "gpt35":
return "GPT-3.5 Turbo"
case "gpt35_0613":
return "GPT-3.5 Turbo 0613"
case "gpt35_16k":
return "GPT-3.5 Turbo 16k"
case "gpt35_16k_0613":
@@ -21,6 +23,10 @@
return "GPT-4"
case "gpt4_32k":
return "GPT-4 32k"
case "gpt4_0613":
return "GPT-4 0613"
case "gpt4_32k_0613":
return "GPT-4 32k 0613"
case "palm2":
return "PaLM2"
case "textgen_webui":
@@ -59,7 +65,9 @@
<button class="p-2 hover:text-green-500" on:click={() => {changeModel('gpt35_16k')}}>GPT-3.5 Turbo 16K</button>
<button class="p-2 hover:text-green-500" on:click={() => {changeModel('gpt35_16k_0613')}}>GPT-3.5 Turbo 16K 0613</button>
<button class="p-2 hover:text-green-500" on:click={() => {changeModel('gpt4')}}>GPT-4</button>
<button class="p-2 hover:text-green-500" on:click={() => {changeModel('gpt4_0613')}}>GPT-4 0613</button>
<button class="p-2 hover:text-green-500" on:click={() => {changeModel('gpt4_32k')}}>GPT-4 32K</button>
<button class="p-2 hover:text-green-500" on:click={() => {changeModel('gpt4_32k_0613')}}>GPT-4 32K 0613</button>
</Arcodion>
<Arcodion name="Anthropic Claude">
<button class="p-2 hover:text-green-500" on:click={() => {changeModel('claude-v1')}}>claude-v1</button>


@@ -83,10 +83,13 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
switch(aiModel){
case 'gpt35':
+case 'gpt35_0613':
case 'gpt35_16k':
case 'gpt35_16k_0613':
case 'gpt4':
-case 'gpt4_32k':{
+case 'gpt4_32k':
+case 'gpt4_0613':
+case 'gpt4_32k_0613':{
for(let i=0;i<formated.length;i++){
if(formated[i].role !== 'function'){
@@ -131,9 +134,13 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
const oaiFunctionCall = oaiFunctions ? (arg.useEmotion ? {"name": "set_emotion"} : "auto") : undefined
const body = ({
model: aiModel === 'gpt35' ? 'gpt-3.5-turbo'
+: aiModel === 'gpt35_0613' ? 'gpt-3.5-turbo-0613'
: aiModel === 'gpt35_16k' ? 'gpt-3.5-turbo-16k'
: aiModel === 'gpt35_16k_0613' ? 'gpt-3.5-turbo-16k-0613'
-: aiModel === 'gpt4' ? 'gpt-4' : 'gpt-4-32k',
+: aiModel === 'gpt4' ? 'gpt-4'
+: aiModel === 'gpt4_32k' ? 'gpt-4-32k'
+: aiModel === "gpt4_0613" ? 'gpt-4-0613'
+: aiModel === "gpt4_32k_0613" ? 'gpt-4-32k-0613' : '',
messages: formated,
temperature: temperature,
max_tokens: maxTokens,
@@ -195,8 +202,6 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
control.enqueue(readed)
return
}
-console.log(rawChunk)
const chunk = JSON.parse(rawChunk).choices[0].delta.content
if(chunk){
readed += chunk
@@ -230,7 +235,6 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
const dat = res.data as any
if(res.ok){
try {
-console.log(dat)
const msg:OpenAIChatFull = (dat.choices[0].message)
return {
type: 'success',
@@ -374,8 +378,6 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
})
const dat = res.data as any
-console.log(DURL)
-console.log(res.data)
if(res.ok){
try {
let result:string = isNewAPI ? dat.results[0].text : dat.data[0].substring(proompt.length)
@@ -600,8 +602,6 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
return prefix + v.content
}).join('') + '\n\nAssistant: '
-console.log(requestPrompt)
const da = await globalFetch('https://api.anthropic.com/v1/complete', {
method: "POST",
body: {
@@ -626,7 +626,6 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
const res = da.data
-console.log(res)
return {
type: "success",
result: res.completion,
@@ -638,7 +637,6 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
const realModel = aiModel.split(":::")[1]
-console.log(realModel)
const argument = {
"prompt": proompt,
"params": {