[feat] novellist api

This commit is contained in:
kwaroran
2023-06-07 08:57:42 +09:00
parent f771fe3890
commit f37ae13d83
9 changed files with 464 additions and 20 deletions

View File

@@ -101,6 +101,10 @@
<span class="text-neutral-200">Palm2 {language.apiKey}</span>
<input class="text-neutral-200 mb-4 p-2 bg-transparent input-text focus:bg-selected text-sm" placeholder="..." bind:value={$DataBase.palmAPI}>
{/if}
{#if $DataBase.aiModel === 'novellist' || $DataBase.subModel === 'novellist'}
<span class="text-neutral-200">NovelList {language.apiKey}</span>
<input class="text-neutral-200 mb-4 p-2 bg-transparent input-text focus:bg-selected text-sm" placeholder="..." bind:value={$DataBase.novellistAPI}>
{/if}
{#if $DataBase.aiModel.startsWith('claude') || $DataBase.subModel.startsWith('claude')}
<span class="text-neutral-200">Claude {language.apiKey}</span>

View File

@@ -76,6 +76,7 @@
{/each}
{/await}
</Arcodion>
<button class="hover:bg-selected px-6 py-2 text-lg" on:click={() => {changeModel('novellist')}}>Novellist</button>
{#if $DataBase.plugins.length > 0}
<button on:click={() => {changeModel('custom')}} class="hover:bg-selected px-6 py-2 text-lg" >Plugin</button>
{/if}

View File

@@ -72,7 +72,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
: aiModel === 'gpt4' ? 'gpt-4' : 'gpt-4-32k',
messages: formated,
temperature: temperature,
max_tokens: arg.maxTokens ?? maxTokens,
max_tokens: maxTokens,
presence_penalty: arg.PresensePenalty ?? (db.PresensePenalty / 100),
frequency_penalty: arg.frequencyPenalty ?? (db.frequencyPenalty / 100),
logit_bias: bias,
@@ -460,6 +460,44 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
result: data.results[0].text
}
}
case "novellist":{
const auth_key = db.novellistAPI;
const api_server_url = 'https://api.tringpt.com/';
const headers = {
'Authorization': `Bearer ${auth_key}`,
'Content-Type': 'application/json'
};
const send_body = {
text: stringlizeChat(formated, currentChar?.name ?? ''),
length: maxTokens,
temperature: temperature,
top_p: 0.7,
tailfree: 1.0,
rep_pen: arg.frequencyPenalty ?? (db.frequencyPenalty / 100),
};
const response = await globalFetch(api_server_url + '/api', {
method: 'POST',
headers: headers,
body: send_body,
});
if(!response.ok){
return {
type: 'fail',
result: response.data
}
}
const result = response.data.data[0];
return {
'type': 'success',
'result': result
}
}
default:{
if(aiModel.startsWith('claude')){
for(let i=0;i<formated.length;i++){

View File

@@ -501,7 +501,8 @@ export interface Database{
advancedBotSettings:boolean
useAutoSuggestions:boolean
autoSuggestPrompt:string,
claudeAPIKey:string
claudeAPIKey:string,
novellistAPI:string
}
interface hordeConfig{

View File

@@ -416,17 +416,17 @@ export async function globalFetch(url:string, arg:{body?:any,headers?:{[key:stri
})
if(arg.rawResponse){
addFetchLog("Uint8Array Response", da.ok)
addFetchLog("Uint8Array Response", da.ok && da.status >= 200 && da.status < 300)
return {
ok: da.ok,
ok: da.ok && da.status >= 200 && da.status < 300,
data: new Uint8Array(await da.arrayBuffer())
}
}
else{
const dat = await da.json()
addFetchLog(dat, da.ok)
addFetchLog(dat, da.ok && da.status >= 200 && da.status < 300)
return {
ok: da.ok,
ok: da.ok && da.status >= 200 && da.status < 300,
data: dat
}
}
@@ -455,17 +455,17 @@ export async function globalFetch(url:string, arg:{body?:any,headers?:{[key:stri
})
if(arg.rawResponse){
addFetchLog("Uint8Array Response", da.ok)
addFetchLog("Uint8Array Response", da.ok && da.status >= 200 && da.status < 300)
return {
ok: da.ok,
ok: da.ok && da.status >= 200 && da.status < 300,
data: new Uint8Array(await da.arrayBuffer())
}
}
else{
const dat = await da.json()
addFetchLog(dat, da.ok)
addFetchLog(dat, da.ok && da.status >= 200 && da.status < 300)
return {
ok: da.ok,
ok: da.ok && da.status >= 200 && da.status < 300,
data: dat
}
}
@@ -567,9 +567,9 @@ export async function globalFetch(url:string, arg:{body?:any,headers?:{[key:stri
,signal: arg.abortSignal
})
addFetchLog("Uint8Array Response", da.ok)
addFetchLog("Uint8Array Response", da.ok && da.status >= 200 && da.status < 300)
return {
ok: da.ok,
ok: da.ok && da.status >= 200 && da.status < 300,
data: new Uint8Array(await da.arrayBuffer())
}
}
@@ -586,9 +586,9 @@ export async function globalFetch(url:string, arg:{body?:any,headers?:{[key:stri
})
const dat = await da.json()
addFetchLog(dat, da.ok)
addFetchLog(dat, da.ok && da.status >= 200 && da.status < 300)
return {
ok: da.ok,
ok: da.ok && da.status >= 200 && da.status < 300,
data: dat
}
}

View File

@@ -1,7 +1,13 @@
import type { Tiktoken } from "@dqbd/tiktoken";
import type { character } from "./storage/database";
import { DataBase, type character } from "./storage/database";
import { get } from "svelte/store";
import { tokenizeTransformers } from "./transformers/transformer";
/**
 * Tokenize `data` into token ids, routing by the active model.
 *
 * When the selected model is 'novellist', uses the transformers-based
 * 'trin' tokenizer (NovelList's custom vocabulary); otherwise falls back
 * to the tiktoken-based encoder `tikJS`.
 *
 * @param data - text to tokenize
 * @returns token ids; `number[]` or `Uint32Array` depending on the backend
 */
async function encode(data:string):Promise<(number[]|Uint32Array)>{
    // `const`: the database snapshot is read once and never reassigned
    const db = get(DataBase)
    if(db.aiModel === 'novellist'){
        return await tokenizeTransformers('trin',data)
    }
    return await tikJS(data)
}

View File

@@ -0,0 +1,30 @@
import type { PreTrainedTokenizer } from "@xenova/transformers"
type transformerLibType = typeof import("@xenova/transformers");
// Module-level cache: the currently loaded pretrained tokenizer instance.
// NOTE(review): typed `PreTrainedTokenizer` but initialized to null — under
// `strictNullChecks` this would need `PreTrainedTokenizer|null`; confirm tsconfig.
let tokenizer:PreTrainedTokenizer = null
// Lazily-imported '@xenova/transformers' module (set by loadTransformers).
let transformerLib:transformerLibType
// Maps a short tokenizer key to its Hugging Face model id.
const tokenizerDict = {
    'trin': 'naclbit/trin_tokenizer_v3',
} as const
// Union of the keys of tokenizerDict (currently just 'trin').
type tokenizerTypes = keyof(typeof tokenizerDict)
// Which tokenizer is currently cached; '' means none loaded yet.
let tokenizerType:tokenizerTypes|'' = ''
/**
 * Lazily import '@xenova/transformers' exactly once, caching the module
 * in the module-level `transformerLib` binding. Subsequent calls are no-ops.
 */
async function loadTransformers() {
    // Guard clause: already loaded, nothing to do.
    if (transformerLib) {
        return
    }
    transformerLib = await import('@xenova/transformers')
}
/**
 * Encode `text` with the tokenizer registered under `type`.
 *
 * Loads the pretrained tokenizer on first use (or when `type` differs from
 * the cached one) and reuses the cached instance otherwise.
 *
 * @param type - key into tokenizerDict selecting the tokenizer
 * @param text - text to encode into token ids
 */
export async function tokenizeTransformers(type:tokenizerTypes, text:string) {
    await loadTransformers()
    const cacheMiss = tokenizerType !== type
    if (cacheMiss) {
        // Fetch and cache the pretrained tokenizer for this type.
        tokenizer = await transformerLib.AutoTokenizer.from_pretrained(tokenizerDict[type])
        tokenizerType = type
    }
    return tokenizer.encode(text)
}