[feat] ooba mode

Author: kwaroran
Date: 2023-11-16 04:02:59 +09:00
Parent: c4efe0b149
Commit: 714c81017d
9 changed files with 254 additions and 1 deletion

src/ts/model/ooba.ts (new file, 47 lines)

@@ -0,0 +1,47 @@
export interface OobaChatCompletionRequestParams {
mode: 'instruct'|'chat'|'chat-instruct'
turn_template?: string
name1_instruct?: string
name2_instruct?: string
context_instruct?: string
system_message?: string
name1?: string
name2?: string
context?: string
greeting?: string
chat_instruct_command?: string
preset?: string; // The '?' denotes that the property is optional
min_p?: number;
top_k?: number;
repetition_penalty?: number;
repetition_penalty_range?: number;
typical_p?: number;
tfs?: number;
top_a?: number;
epsilon_cutoff?: number;
eta_cutoff?: number;
guidance_scale?: number;
negative_prompt?: string;
penalty_alpha?: number;
mirostat_mode?: number;
mirostat_tau?: number;
mirostat_eta?: number;
temperature_last?: boolean;
do_sample?: boolean;
seed?: number;
encoder_repetition_penalty?: number;
no_repeat_ngram_size?: number;
min_length?: number;
num_beams?: number;
length_penalty?: number;
early_stopping?: boolean;
truncation_length?: number;
max_tokens_second?: number;
custom_token_bans?: string;
auto_max_new_tokens?: boolean;
ban_eos_token?: boolean;
add_bos_token?: boolean;
skip_special_tokens?: boolean;
grammar_string?: string;
}
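
As a usage illustration (not part of this commit), the sketch below builds a minimal instruct-mode parameter object; the import path follows the one used elsewhere in this commit, and every value is an assumption rather than a project default.

import type { OobaChatCompletionRequestParams } from '../model/ooba'

// Illustrative sketch only: a small instruct-mode parameter set.
// All values are assumptions, not defaults taken from this commit.
const exampleOobaArgs: OobaChatCompletionRequestParams = {
    mode: 'instruct',
    top_k: 40,
    min_p: 0.05,
    repetition_penalty: 1.1,
    truncation_length: 4096,
    ban_eos_token: false,
    skip_special_tokens: true
}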


@@ -295,6 +295,18 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
body.user = getOpenUserString()
}
if(aiModel === 'reverse_proxy' && db.reverseProxyOobaMode){
const OobaBodyTemplate = db.reverseProxyOobaArgs
const keys = Object.keys(OobaBodyTemplate)
for(const key of keys){
if(OobaBodyTemplate[key] !== undefined && OobaBodyTemplate[key] !== null){
// @ts-ignore
body[key] = OobaBodyTemplate[key]
}
}
}
if(supportsInlayImage()){
// inlay models don't support logit_bias
// @ts-ignore
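
Extracted for clarity, the Ooba merge added in this hunk behaves like the standalone sketch below; applyOobaArgs is a hypothetical helper name, while the actual code inlines this loop in requestChatDataMain.

import type { OobaChatCompletionRequestParams } from '../model/ooba'

// Hypothetical helper mirroring the loop added above: copy every defined,
// non-null Ooba argument onto the outgoing request body so that unset
// fields fall back to the backend's own defaults.
function applyOobaArgs(
    body: Record<string, unknown>,
    args: OobaChatCompletionRequestParams
): Record<string, unknown> {
    for (const key of Object.keys(args)) {
        const value = (args as Record<string, unknown>)[key]
        if (value !== undefined && value !== null) {
            body[key] = value
        }
    }
    return body
}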


@@ -11,6 +11,7 @@ import type { NAISettings } from '../process/models/nai';
import { prebuiltNAIpresets, prebuiltPresets } from '../process/templates/templates';
import { defaultColorScheme, type ColorScheme } from '../gui/colorscheme';
import type { Proompt } from '../process/proompt';
import type { OobaChatCompletionRequestParams } from '../model/ooba';
export const DataBase = writable({} as any as Database)
export const loadedStore = writable(false)
@@ -320,6 +321,9 @@ export function setDatabase(data:Database){
data.generationSeed ??= -1
data.newOAIHandle ??= true
data.gptVisionQuality ??= 'low'
data.reverseProxyOobaArgs ??= {
mode: 'instruct'
}
changeLanguage(data.language)
DataBase.set(data)
}
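
The nullish-coalescing assignment above only supplies the default when no Ooba arguments have been stored yet; a rough equivalent of that single line, shown purely for clarity:

// Rough equivalent of `data.reverseProxyOobaArgs ??= { mode: 'instruct' }`:
// an existing value is left untouched, and only a missing (undefined or
// null) value receives the instruct-mode default.
if (data.reverseProxyOobaArgs === undefined || data.reverseProxyOobaArgs === null) {
    data.reverseProxyOobaArgs = { mode: 'instruct' }
}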
@@ -499,6 +503,8 @@ export interface Database{
putUserOpen: boolean
inlayImage:boolean
gptVisionQuality:string
reverseProxyOobaMode:boolean
reverseProxyOobaArgs: OobaChatCompletionRequestParams
}
export interface customscript{


@@ -17,9 +17,10 @@ async function encode(data:string):Promise<(number[]|Uint32Array|Int32Array)>{
if(db.aiModel.startsWith('novelai')){
return await tokenizeWebTokenizers(data, 'novelai')
}
- if(db.aiModel.startsWith('local_') || db.aiModel === 'mancer' || db.aiModel === 'textgen_webui'){
+ if(db.aiModel.startsWith('local_') || db.aiModel === 'mancer' || db.aiModel === 'textgen_webui' || (db.aiModel === 'reverse_proxy' && db.reverseProxyOobaMode)){
return await tokenizeWebTokenizers(data, 'llama')
}
return await tikJS(data)
}
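
The tokenizer change above can be summarized by the routing sketch below; tokenizerFor is a hypothetical standalone function, whereas the real encode function calls tokenizeWebTokenizers and tikJS directly as shown in the diff. With Ooba mode enabled, reverse_proxy requests are now counted with the llama web tokenizer instead of the default tikJS path.

// Hypothetical summary of the model-to-tokenizer routing in encode();
// 'tiktoken' stands in for the default tikJS branch.
function tokenizerFor(aiModel: string, reverseProxyOobaMode: boolean): 'novelai' | 'llama' | 'tiktoken' {
    if (aiModel.startsWith('novelai')) {
        return 'novelai'
    }
    if (
        aiModel.startsWith('local_') ||
        aiModel === 'mancer' ||
        aiModel === 'textgen_webui' ||
        (aiModel === 'reverse_proxy' && reverseProxyOobaMode)
    ) {
        return 'llama'
    }
    return 'tiktoken'
}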