Merge branch 'dev' into autotranslateinput
@@ -101,6 +101,10 @@
 <span class="text-neutral-200">Palm2 {language.apiKey}</span>
 <input class="text-neutral-200 mb-4 p-2 bg-transparent input-text focus:bg-selected text-sm" placeholder="..." bind:value={$DataBase.palmAPI}>
 {/if}
+{#if $DataBase.aiModel === 'novellist' || $DataBase.subModel === 'novellist'}
+<span class="text-neutral-200">NovelList {language.apiKey}</span>
+<input class="text-neutral-200 mb-4 p-2 bg-transparent input-text focus:bg-selected text-sm" placeholder="..." bind:value={$DataBase.novellistAPI}>
+{/if}

 {#if $DataBase.aiModel.startsWith('claude') || $DataBase.subModel.startsWith('claude')}
 <span class="text-neutral-200">Claude {language.apiKey}</span>
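The new NovelList field mirrors the existing Palm2 input: the key is two-way bound to the store field this merge adds. In plain TypeScript terms, `bind:value={$DataBase.novellistAPI}` behaves roughly like the following sketch of Svelte store semantics (not code from the repo):

```ts
import { get } from "svelte/store"
import { DataBase } from "./storage/database"

// Read side of the binding: the current key, for display in the input.
function readNovellistKey(): string {
    return get(DataBase).novellistAPI
}

// Write side: every edit to the input writes back into the store.
function writeNovellistKey(next: string): void {
    DataBase.update(db => ({ ...db, novellistAPI: next }))
}
```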
@@ -72,7 +72,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
         : aiModel === 'gpt4' ? 'gpt-4' : 'gpt-4-32k',
         messages: formated,
         temperature: temperature,
-        max_tokens: arg.maxTokens ?? maxTokens,
+        max_tokens: maxTokens,
         presence_penalty: arg.PresensePenalty ?? (db.PresensePenalty / 100),
         frequency_penalty: arg.frequencyPenalty ?? (db.frequencyPenalty / 100),
         logit_bias: bias,
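The penalty fields in this request body follow a single pattern: a per-request argument wins when provided, otherwise the stored setting applies, with stored penalties kept as percentages and scaled down. A minimal sketch of that pattern (the helper is illustrative; the codebase inlines these expressions):

```ts
// Illustrative only: per-request overrides fall back to stored settings
// via nullish coalescing; stored penalties are percentages, hence /100.
interface GenArgs { PresensePenalty?: number, frequencyPenalty?: number }
interface Settings { PresensePenalty: number, frequencyPenalty: number }

function resolvePenalties(arg: GenArgs, db: Settings) {
    return {
        presence_penalty: arg.PresensePenalty ?? (db.PresensePenalty / 100),
        frequency_penalty: arg.frequencyPenalty ?? (db.frequencyPenalty / 100),
    }
}
```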
@@ -460,6 +460,44 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
                 result: data.results[0].text
             }
         }
+        case "novellist":{
+            const auth_key = db.novellistAPI;
+            const api_server_url = 'https://api.tringpt.com/';
+
+            const headers = {
+                'Authorization': `Bearer ${auth_key}`,
+                'Content-Type': 'application/json'
+            };
+
+            const send_body = {
+                text: stringlizeChat(formated, currentChar?.name ?? ''),
+                length: maxTokens,
+                temperature: temperature,
+                top_p: 0.7,
+                tailfree: 1.0,
+                rep_pen: arg.frequencyPenalty ?? (db.frequencyPenalty / 100),
+            };
+
+            const response = await globalFetch(api_server_url + '/api', {
+                method: 'POST',
+                headers: headers,
+                body: send_body,
+            });
+
+            if(!response.ok){
+                return {
+                    type: 'fail',
+                    result: response.data
+                }
+            }
+
+            const result = response.data.data[0];
+
+            return {
+                'type': 'success',
+                'result': unstringlizeChat(result, formated, currentChar?.name ?? '')
+            }
+        }
         default:{
             if(aiModel.startsWith('claude')){
                 for(let i=0;i<formated.length;i++){
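For context, the case above amounts to one JSON POST against the Tringpt endpoint; here is a standalone sketch using plain fetch instead of the app's globalFetch wrapper. The field names and the data[0] response shape are taken from the diff, not from official API documentation, and the literal sampling values are placeholders:

```ts
// Hedged sketch of the NovelList (Tringpt) call from the case above.
async function callNovelList(apiKey: string, prompt: string, maxTokens: number): Promise<string> {
    const res = await fetch('https://api.tringpt.com/api', {
        method: 'POST',
        headers: {
            'Authorization': `Bearer ${apiKey}`,
            'Content-Type': 'application/json'
        },
        body: JSON.stringify({
            text: prompt,      // the stringlized chat log
            length: maxTokens, // completion length budget
            temperature: 0.8,  // placeholder sampling values
            top_p: 0.7,
            tailfree: 1.0,
            rep_pen: 1.1
        })
    })
    if (!res.ok) {
        throw new Error(`NovelList request failed with status ${res.status}`)
    }
    const json = await res.json()
    return json.data[0] // the handler above reads the completion from data[0]
}
```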
@@ -78,7 +78,7 @@ export async function supaMemory(
     async function summarize(stringlizedChat:string){

         const supaPrompt = db.supaMemoryPrompt === '' ?
-        "[Summarize the ongoing role story, including as many events from the past as possible, using assistant as a narrative helper;do not analyze. include all of the characters' names, statuses, thoughts, relationships, and attire. Be sure to include dialogue exchanges and context by referencing previous statements and reactions. assistant's summary should provide an objective overview of the story while also considering relevant past conversations and events. It must also remove redundancy and unnecessary content from the prompt so that gpt3 and other sublanguage models]\n"
+        "[Summarize the ongoing role story, It must also remove redundancy and unnecessary text and content from the output to reduce tokens for gpt3 and other sublanguage models]\n"
         : db.supaMemoryPrompt

         let result = ''
@@ -8,7 +8,7 @@ import { defaultAutoSuggestPrompt, defaultJailbreak, defaultMainPrompt } from '.

 export const DataBase = writable({} as any as Database)
 export const loadedStore = writable(false)
-export let appVer = '1.24.0'
+export let appVer = '1.24.1'

 export function setDatabase(data:Database){
     if(checkNullish(data.characters)){
@@ -500,9 +500,10 @@ export interface Database{
     koboldURL:string
     advancedBotSettings:boolean
     useAutoSuggestions:boolean
-    autoSuggestPrompt:string
-    claudeAPIKey:string
-    useChatCopy:boolean
+    autoSuggestPrompt:string,
+    claudeAPIKey:string,
+    useChatCopy:boolean,
+    novellistAPI:string,
     useAutoTranslateInput:boolean
 }
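Old save files will not carry the novellistAPI field added above, so setDatabase (whose checkNullish backfilling is visible in this file's first hunk) presumably gains matching defaults. A sketch of that migration pattern under that assumption; checkNullish's real definition is not part of this diff:

```ts
// Assumed shape of checkNullish: true for values an old save never wrote.
function checkNullish(value: unknown): boolean {
    return value === null || value === undefined
}

// Hypothetical backfill for the fields this merge touches.
function migrate(data: { novellistAPI?: string, useAutoTranslateInput?: boolean }) {
    if (checkNullish(data.novellistAPI)) {
        data.novellistAPI = ''
    }
    if (checkNullish(data.useAutoTranslateInput)) {
        data.useAutoTranslateInput = false
    }
    return data
}
```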
@@ -416,17 +416,17 @@ export async function globalFetch(url:string, arg:{body?:any,headers?:{[key:stri
         })

         if(arg.rawResponse){
-            addFetchLog("Uint8Array Response", da.ok)
+            addFetchLog("Uint8Array Response", da.ok && da.status >= 200 && da.status < 300)
             return {
-                ok: da.ok,
+                ok: da.ok && da.status >= 200 && da.status < 300,
                 data: new Uint8Array(await da.arrayBuffer())
             }
         }
         else{
             const dat = await da.json()
-            addFetchLog(dat, da.ok)
+            addFetchLog(dat, da.ok && da.status >= 200 && da.status < 300)
             return {
-                ok: da.ok,
+                ok: da.ok && da.status >= 200 && da.status < 300,
                 data: dat
             }
         }
@@ -455,17 +455,17 @@ export async function globalFetch(url:string, arg:{body?:any,headers?:{[key:stri
         })

         if(arg.rawResponse){
-            addFetchLog("Uint8Array Response", da.ok)
+            addFetchLog("Uint8Array Response", da.ok && da.status >= 200 && da.status < 300)
             return {
-                ok: da.ok,
+                ok: da.ok && da.status >= 200 && da.status < 300,
                 data: new Uint8Array(await da.arrayBuffer())
             }
         }
         else{
             const dat = await da.json()
-            addFetchLog(dat, da.ok)
+            addFetchLog(dat, da.ok && da.status >= 200 && da.status < 300)
             return {
-                ok: da.ok,
+                ok: da.ok && da.status >= 200 && da.status < 300,
                 data: dat
             }
         }
@@ -567,9 +567,9 @@ export async function globalFetch(url:string, arg:{body?:any,headers?:{[key:stri
             ,signal: arg.abortSignal
         })

-        addFetchLog("Uint8Array Response", da.ok)
+        addFetchLog("Uint8Array Response", da.ok && da.status >= 200 && da.status < 300)
         return {
-            ok: da.ok,
+            ok: da.ok && da.status >= 200 && da.status < 300,
            data: new Uint8Array(await da.arrayBuffer())
        }
    }
@@ -586,9 +586,9 @@ export async function globalFetch(url:string, arg:{body?:any,headers?:{[key:stri
        })

        const dat = await da.json()
-        addFetchLog(dat, da.ok)
+        addFetchLog(dat, da.ok && da.status >= 200 && da.status < 300)
        return {
-            ok: da.ok,
+            ok: da.ok && da.status >= 200 && da.status < 300,
            data: dat
        }
    }
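The four hunks above make one and the same change: a response only counts as successful when the transport-level ok flag holds and the HTTP status is in the 2xx range. Factored into a helper for illustration (the codebase inlines the expression; the helper name is hypothetical):

```ts
// The recurring check from the hunks above, as a standalone predicate.
function isSuccess(res: { ok: boolean, status: number }): boolean {
    // A transport may report ok for any completed request, so the
    // HTTP status range is verified as well.
    return res.ok && res.status >= 200 && res.status < 300
}
```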
@@ -1,7 +1,13 @@
 import type { Tiktoken } from "@dqbd/tiktoken";
-import type { character } from "./storage/database";
+import { DataBase, type character } from "./storage/database";
+import { get } from "svelte/store";
+import { tokenizeTransformers } from "./transformers/transformer";

 async function encode(data:string):Promise<(number[]|Uint32Array)>{
+    let db = get(DataBase)
+    if(db.aiModel === 'novellist'){
+        return await tokenizeTransformers('naclbit/trin_tokenizer_v3',data)
+    }
     return await tikJS(data)
 }
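encode now dispatches on the selected model: NovelList text goes through the trin_tokenizer_v3 vocabulary via transformers, everything else through tiktoken. A hedged sketch of a caller inside the same module; tokenLength is hypothetical, only encode appears in the diff:

```ts
// Token budgeting only needs the sequence length, whichever tokenizer ran.
async function tokenLength(text: string): Promise<number> {
    const tokens = await encode(text) // number[] from tiktoken, Uint32Array from transformers
    return tokens.length
}
```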
src/ts/transformers/transformer.ts (new file, 24 lines)
@@ -0,0 +1,24 @@
+import type { PreTrainedTokenizer } from "@xenova/transformers"
+type transformerLibType = typeof import("@xenova/transformers");
+let tokenizer:PreTrainedTokenizer = null
+let transformerLib:transformerLibType
+
+let tokenizerType:string = ''
+
+
+async function loadTransformers() {
+    if(!transformerLib){
+        transformerLib = await import('@xenova/transformers')
+    }
+}
+
+export async function tokenizeTransformers(type:string, text:string) {
+    await loadTransformers()
+    if(tokenizerType !== type){
+        const AutoTokenizer = transformerLib.AutoTokenizer
+        tokenizer = await AutoTokenizer.from_pretrained(type)
+        tokenizerType = type
+    }
+
+    return tokenizer.encode(text)
+}
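tokenizeTransformers lazy-loads @xenova/transformers on first use and caches one tokenizer until a different model id is requested, so repeated calls stay cheap. An illustrative usage sketch (the demo function is hypothetical):

```ts
import { tokenizeTransformers } from "./transformers/transformer"

// First call downloads and caches the tokenizer; later calls with the
// same model id reuse it.
async function demo() {
    const ids = await tokenizeTransformers('naclbit/trin_tokenizer_v3', 'こんにちは')
    console.log(ids.length) // token count
}
```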