diff --git a/src/lib/Setting/Pages/PromptSettings.svelte b/src/lib/Setting/Pages/PromptSettings.svelte index 19fa588e..ccb065fe 100644 --- a/src/lib/Setting/Pages/PromptSettings.svelte +++ b/src/lib/Setting/Pages/PromptSettings.svelte @@ -1,13 +1,13 @@ + +
+ + + + + + {language.type} + + { + if(promptItem.type === 'plain' || promptItem.type === 'jailbreak' || promptItem.type === 'cot'){ + promptItem.text = "" + promptItem.role = "system" + } + if(promptItem.type === 'chat'){ + promptItem.rangeStart = -2 + promptItem.rangeEnd = 'end' + } + }} > + {language.formating.plain} + {language.formating.jailbreak} + {language.Chat} + {language.formating.personaPrompt} + {language.formating.description} + {language.formating.authorNote} + {language.formating.lorebook} + {language.formating.memory} + {language.formating.postEverything} + {#if $DataBase.promptSettings.customChainOfThought} + {language.cot} + {/if} + + + {#if promptItem.type === 'plain' || promptItem.type === 'jailbreak' || promptItem.type === 'cot'} + {language.specialType} + + {language.noSpecialType} + {language.mainPrompt} + {language.globalNote} + + {language.prompt} + + {language.role} + + {language.user} + {language.character} + {language.systemPrompt} + + {/if} + {#if promptItem.type === 'chat'} + + + + {#if promptItem.rangeStart !== -2} + {language.rangeStart} + + {language.rangeEnd} + {#if promptItem.rangeEnd === 'end'} + { + if(promptItem.type === 'chat'){ + promptItem.rangeEnd = 0 + } + }} /> + {:else} + + { + if(promptItem.type === 'chat'){ + promptItem.rangeEnd = 'end' + } + }} /> + {/if} + {#if $DataBase.promptSettings.sendChatAsSystem} + + {/if} + {/if} + {/if} + {#if promptItem.type === 'authornote'} + {language.defaultPrompt} + + {/if} + {#if promptItem.type === 'persona' || promptItem.type === 'description' || promptItem.type === 'authornote' || promptItem.type === 'memory'} + {#if !promptItem.innerFormat} + { + if(promptItem.type === 'persona' || promptItem.type === 'description' || promptItem.type === 'authornote' || promptItem.type === 'memory'){ + promptItem.innerFormat = "{{slot}}" + } + }} /> + {:else} + {language.innerFormat} + + { + if(promptItem.type === 'persona' || promptItem.type === 'description' || promptItem.type === 'authornote' 
|| promptItem.type === 'memory'){ + promptItem.innerFormat = null + } + }} /> + {/if} + {/if} +
\ No newline at end of file diff --git a/src/lib/UI/ProomptItem.svelte b/src/lib/UI/ProomptItem.svelte deleted file mode 100644 index d862fcca..00000000 --- a/src/lib/UI/ProomptItem.svelte +++ /dev/null @@ -1,110 +0,0 @@ - - -
- - - - - - {language.type} - - { - if(proompt.type === 'plain' || proompt.type === 'jailbreak' || proompt.type === 'cot'){ - proompt.text = "" - proompt.role = "system" - } - if(proompt.type === 'chat'){ - proompt.rangeStart = 0 - proompt.rangeEnd = 'end' - } - }} > - {language.formating.plain} - {language.formating.jailbreak} - {language.Chat} - {language.formating.personaPrompt} - {language.formating.description} - {language.formating.authorNote} - {language.formating.lorebook} - {language.formating.memory} - {language.formating.postEverything} - {#if $DataBase.proomptSettings.customChainOfThought} - {language.cot} - {/if} - - - {#if proompt.type === 'plain' || proompt.type === 'jailbreak' || proompt.type === 'cot'} - {language.specialType} - - {language.noSpecialType} - {language.mainPrompt} - {language.globalNote} - - {language.prompt} - - {language.role} - - {language.user} - {language.character} - {language.systemPrompt} - - {/if} - {#if proompt.type === 'chat'} - {language.rangeStart} - - {language.rangeEnd} - {#if proompt.rangeEnd === 'end'} - { - if(proompt.type === 'chat'){ - proompt.rangeEnd = 0 - } - }} /> - {:else} - - { - if(proompt.type === 'chat'){ - proompt.rangeEnd = 'end' - } - }} /> - {/if} - {#if $DataBase.proomptSettings.sendChatAsSystem} - - {/if} - {/if} - {#if proompt.type === 'authornote'} - {language.defaultPrompt} - - {/if} - {#if proompt.type === 'persona' || proompt.type === 'description' || proompt.type === 'authornote' || proompt.type === 'memory'} - {#if !proompt.innerFormat} - { - if(proompt.type === 'persona' || proompt.type === 'description' || proompt.type === 'authornote' || proompt.type === 'memory'){ - proompt.innerFormat = "{{slot}}" - } - }} /> - {:else} - {language.innerFormat} - - { - if(proompt.type === 'persona' || proompt.type === 'description' || proompt.type === 'authornote' || proompt.type === 'memory'){ - proompt.innerFormat = null - } - }} /> - {/if} - {/if} -
\ No newline at end of file diff --git a/src/ts/process/index.ts b/src/ts/process/index.ts index cef757ab..af56b3c5 100644 --- a/src/ts/process/index.ts +++ b/src/ts/process/index.ts @@ -224,7 +224,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n }) } } - if(currentChar.utilityBot && (!(usingPromptTemplate && db.proomptSettings.utilOverride))){ + if(currentChar.utilityBot && (!(usingPromptTemplate && db.promptSettings.utilOverride))){ promptTemplate = [ { "type": "plain", @@ -300,7 +300,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n }) } - if(db.chainOfThought && (!(usingPromptTemplate && db.proomptSettings.customChainOfThought))){ + if(db.chainOfThought && (!(usingPromptTemplate && db.promptSettings.customChainOfThought))){ unformated.postEverything.push({ role: 'system', content: ` - before respond everything, Think step by step as a ai assistant how would you respond inside xml tag. this must be less than 5 paragraphs.` @@ -432,10 +432,10 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n } case 'postEverything':{ await tokenizeChatArray(unformated.postEverything) - if(usingPromptTemplate && db.proomptSettings.postEndInnerFormat){ + if(usingPromptTemplate && db.promptSettings.postEndInnerFormat){ await tokenizeChatArray([{ role: 'system', - content: db.proomptSettings.postEndInnerFormat + content: db.promptSettings.postEndInnerFormat }]) } break @@ -497,7 +497,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n } let chats = unformated.chats.slice(start, end) - if(usingPromptTemplate && db.proomptSettings.sendChatAsSystem && (!card.chatAsOriginalOnSystem)){ + if(usingPromptTemplate && db.promptSettings.sendChatAsSystem && (!card.chatAsOriginalOnSystem)){ chats = systemizeChat(chats) } await tokenizeChatArray(chats) @@ -545,7 +545,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n 
'editprocess')) } - if(usingPromptTemplate && db.proomptSettings.sendName){ + if(usingPromptTemplate && db.promptSettings.sendName){ chat.content = `${currentChar.name}: ${chat.content}` chat.attr = ['nameAdded'] } @@ -622,13 +622,13 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n let attr:string[] = [] - if(nowChatroom.type === 'group' || (usingPromptTemplate && db.proomptSettings.sendName)){ + if(nowChatroom.type === 'group' || (usingPromptTemplate && db.promptSettings.sendName)){ formatedChat = name + ': ' + formatedChat attr.push('nameAdded') } - if(usingPromptTemplate && db.proomptSettings.customChainOfThought && db.proomptSettings.maxThoughtTagDepth !== -1){ + if(usingPromptTemplate && db.promptSettings.customChainOfThought && db.promptSettings.maxThoughtTagDepth !== -1){ const depth = ms.length - index - if(depth >= db.proomptSettings.maxThoughtTagDepth){ + if(depth >= db.promptSettings.maxThoughtTagDepth){ formatedChat = formatedChat.replace(/(.+?)<\/Thoughts>/gm, '') } } @@ -837,10 +837,10 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n } case 'postEverything':{ pushPrompts(unformated.postEverything) - if(usingPromptTemplate && db.proomptSettings.postEndInnerFormat){ + if(usingPromptTemplate && db.promptSettings.postEndInnerFormat){ pushPrompts([{ role: 'system', - content: db.proomptSettings.postEndInnerFormat + content: db.promptSettings.postEndInnerFormat }]) } break @@ -902,7 +902,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n } let chats = unformated.chats.slice(start, end) - if(usingPromptTemplate && db.proomptSettings.sendChatAsSystem && (!card.chatAsOriginalOnSystem)){ + if(usingPromptTemplate && db.promptSettings.sendChatAsSystem && (!card.chatAsOriginalOnSystem)){ chats = systemizeChat(chats) } pushPrompts(chats) diff --git a/src/ts/process/proompt.ts b/src/ts/process/prompt.ts similarity index 61% rename from src/ts/process/proompt.ts 
rename to src/ts/process/prompt.ts index 2c8c8f09..1989d009 100644 --- a/src/ts/process/proompt.ts +++ b/src/ts/process/prompt.ts @@ -1,8 +1,8 @@ import { tokenizeAccurate } from "../tokenizer"; -export type Proompt = ProomptPlain|ProomptTyped|ProomptChat|ProomptAuthorNote; -export type ProomptType = Proompt['type']; -export type ProomptSettings = { +export type PromptItem = PromptItemPlain|PromptItemTyped|PromptItemChat|PromptItemAuthorNote; +export type PromptType = PromptItem['type']; +export type PromptSettings = { assistantPrefill: string postEndInnerFormat: string sendChatAsSystem: boolean @@ -12,39 +12,39 @@ export type ProomptSettings = { maxThoughtTagDepth?: number } -export interface ProomptPlain { +export interface PromptItemPlain { type: 'plain'|'jailbreak'|'cot'; type2: 'normal'|'globalNote'|'main' text: string; role: 'user'|'bot'|'system'; } -export interface ProomptTyped { +export interface PromptItemTyped { type: 'persona'|'description'|'lorebook'|'postEverything'|'memory' innerFormat?: string } -export interface ProomptAuthorNote { +export interface PromptItemAuthorNote { type : 'authornote' innerFormat?: string defaultText?: string } -export interface ProomptChat { +export interface PromptItemChat { type: 'chat'; rangeStart: number; rangeEnd: number|'end'; chatAsOriginalOnSystem?: boolean; } -export async function tokenizePreset(proompts:Proompt[], consti:boolean = false){ +export async function tokenizePreset(prompts:PromptItem[], consti:boolean = false){ let total = 0 - for(const proompt of proompts){ - switch(proompt.type){ + for(const prompt of prompts){ + switch(prompt.type){ case 'plain': case 'jailbreak':{ - total += await tokenizeAccurate(proompt.text, consti) + total += await tokenizeAccurate(prompt.text, consti) break } case 'persona': @@ -53,8 +53,8 @@ export async function tokenizePreset(proompts:Proompt[], consti:boolean = false) case 'postEverything': case 'authornote': case 'memory':{ - if(proompt.innerFormat){ - total += await 
tokenizeAccurate(proompt.innerFormat, consti) + if(prompt.innerFormat){ + total += await tokenizeAccurate(prompt.innerFormat, consti) } break } diff --git a/src/ts/process/request.ts b/src/ts/process/request.ts index 1663683f..da3efd61 100644 --- a/src/ts/process/request.ts +++ b/src/ts/process/request.ts @@ -742,7 +742,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model' case 'novelai': case 'novelai_kayra':{ console.log(arg.continue) - const proompt = stringlizeNAIChat(formated, currentChar?.name ?? '', arg.continue) + const prompt = stringlizeNAIChat(formated, currentChar?.name ?? '', arg.continue) let logit_bias_exp:{ sequence: number[], bias: number, ensure_sequence_finish: false, generate_once: true }[] = [] @@ -805,7 +805,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model' const body = { - "input": proompt, + "input": prompt, "model": aiModel === 'novelai_kayra' ? 'kayra-v1' : 'clio-v1', "parameters":payload } @@ -887,7 +887,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model' let blockingUrl = db.textgenWebUIBlockingURL.replace(/\/api.*/, "/api/v1/generate") let bodyTemplate:any const suggesting = model === "submodel" - const proompt = applyChatTemplate(formated) + const prompt = applyChatTemplate(formated) let stopStrings = getStopStrings(suggesting) if(db.localStopStrings){ stopStrings = db.localStopStrings.map((v) => { @@ -915,7 +915,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model' 'seed': -1, add_bos_token: db.ooba.add_bos_token, topP: db.top_p, - prompt: proompt + prompt: prompt } const headers = (aiModel === 'textgen_webui') ? 
{} : { @@ -1006,7 +1006,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model' case 'ooba': { const suggesting = model === "submodel" - const proompt = applyChatTemplate(formated) + const prompt = applyChatTemplate(formated) let stopStrings = getStopStrings(suggesting) if(db.localStopStrings){ stopStrings = db.localStopStrings.map((v) => { @@ -1014,7 +1014,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model' }) } let bodyTemplate:Record = { - 'prompt': proompt, + 'prompt': prompt, presence_penalty: arg.PresensePenalty || (db.PresensePenalty / 100), frequency_penalty: arg.frequencyPenalty || (db.frequencyPenalty / 100), logit_bias: {}, @@ -1355,7 +1355,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model' } case "kobold":{ - const proompt = stringlizeChat(formated, currentChar?.name ?? '', arg.continue) + const prompt = stringlizeChat(formated, currentChar?.name ?? '', arg.continue) const url = new URL(db.koboldURL) if(url.pathname.length < 3){ url.pathname = 'api/v1/generate' @@ -1364,7 +1364,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model' const da = await globalFetch(url.toString(), { method: "POST", body: { - "prompt": proompt, + "prompt": prompt, "temperature": (db.temperature / 100), "top_p": 0.9 }, @@ -2199,12 +2199,12 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model' } if(aiModel.startsWith("horde:::")){ - const proompt = stringlizeChat(formated, currentChar?.name ?? '', arg.continue) + const prompt = stringlizeChat(formated, currentChar?.name ?? 
'', arg.continue) const realModel = aiModel.split(":::")[1] const argument = { - "prompt": proompt, + "prompt": prompt, "params": { "n": 1, "max_context_length": db.maxContext + 100, @@ -2292,8 +2292,8 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model' if(aiModel.startsWith('hf:::')){ const realModel = aiModel.split(":::")[1] const suggesting = model === "submodel" - const proompt = applyChatTemplate(formated) - const v = await runTransformers(proompt, realModel, { + const prompt = applyChatTemplate(formated) + const v = await runTransformers(prompt, realModel, { temperature: temperature, max_new_tokens: maxTokens, top_k: db.ooba.top_k, @@ -2309,12 +2309,12 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model' if(aiModel.startsWith('local_')){ console.log('running local model') const suggesting = model === "submodel" - const proompt = applyChatTemplate(formated) + const prompt = applyChatTemplate(formated) const stopStrings = getStopStrings(suggesting) console.log(stopStrings) const modelPath = aiModel.replace('local_', '') const res = await runGGUFModel({ - prompt: proompt, + prompt: prompt, modelPath: modelPath, temperature: temperature, top_p: db.top_p, diff --git a/src/ts/process/stableDiff.ts b/src/ts/process/stableDiff.ts index 79a40dba..d7dd9e7a 100644 --- a/src/ts/process/stableDiff.ts +++ b/src/ts/process/stableDiff.ts @@ -15,7 +15,7 @@ export async function stableDiff(currentChar:character,prompt:string){ } - const proompt = `Chat:\n${prompt}` + const promptItem = `Chat:\n${prompt}` const promptbody:OpenAIChat[] = [ { @@ -25,7 +25,7 @@ export async function stableDiff(currentChar:character,prompt:string){ }, { role: 'user', - content: proompt + content: promptItem }, ] diff --git a/src/ts/process/templates/templates.ts b/src/ts/process/templates/templates.ts index f454036f..6eedac2b 100644 --- a/src/ts/process/templates/templates.ts +++ b/src/ts/process/templates/templates.ts @@ -780,7 +780,7 
@@ export const prebuiltPresets:{OAI:botPreset,ooba:botPreset,NAI:botPreset,oobaRp: "mode": "instruct" }, "top_p": 1, - "proomptSettings": { + "promptSettings": { "assistantPrefill": "", "postEndInnerFormat": "", "sendChatAsSystem": false, diff --git a/src/ts/storage/database.ts b/src/ts/storage/database.ts index c5a6b3b7..1cc646a7 100644 --- a/src/ts/storage/database.ts +++ b/src/ts/storage/database.ts @@ -10,7 +10,7 @@ import { alertNormal, alertSelect } from '../alert'; import type { NAISettings } from '../process/models/nai'; import { prebuiltNAIpresets, prebuiltPresets } from '../process/templates/templates'; import { defaultColorScheme, type ColorScheme } from '../gui/colorscheme'; -import type { Proompt, ProomptSettings } from '../process/proompt'; +import type { PromptItem, PromptSettings } from '../process/prompt'; import type { OobaChatCompletionRequestParams } from '../model/ooba'; export const DataBase = writable({} as any as Database) @@ -361,7 +361,7 @@ export function setDatabase(data:Database){ data.google.accessToken ??= '' data.google.projectId ??= '' data.genTime ??= 1 - data.proomptSettings ??= { + data.promptSettings ??= { assistantPrefill: '', postEndInnerFormat: '', sendChatAsSystem: false, @@ -372,7 +372,7 @@ export function setDatabase(data:Database){ } data.keiServerURL ??= '' data.top_k ??= 0 - data.proomptSettings.maxThoughtTagDepth ??= -1 + data.promptSettings.maxThoughtTagDepth ??= -1 data.openrouterFallback ??= true data.openrouterMiddleOut ??= false data.removePunctuationHypa ??= true @@ -568,7 +568,7 @@ export interface Database{ hideRealm:boolean colorScheme:ColorScheme colorSchemeName:string - promptTemplate?:Proompt[] + promptTemplate?:PromptItem[] forceProxyAsOpenAI?:boolean hypaModel:'ada'|'MiniLM' saveTime?:number @@ -609,7 +609,7 @@ export interface Database{ mistralKey?:string chainOfThought?:boolean genTime:number - proomptSettings: ProomptSettings + promptSettings: PromptSettings keiServerURL:string statistics: { 
newYear2024?: { @@ -840,14 +840,14 @@ export interface botPreset{ autoSuggestPrompt?: string autoSuggestPrefix?: string autoSuggestClean?: boolean - promptTemplate?:Proompt[] + promptTemplate?:PromptItem[] NAIadventure?: boolean NAIappendName?: boolean localStopStrings?: string[] customProxyRequestModel?: string reverseProxyOobaArgs?: OobaChatCompletionRequestParams top_p?: number - proomptSettings?: ProomptSettings + promptSettings?: PromptSettings repetition_penalty?:number min_p?:number top_a?:number @@ -1113,7 +1113,7 @@ export function saveCurrentPreset(){ customProxyRequestModel: db.customProxyRequestModel, reverseProxyOobaArgs: cloneDeep(db.reverseProxyOobaArgs) ?? null, top_p: db.top_p ?? 1, - proomptSettings: cloneDeep(db.proomptSettings) ?? null, + promptSettings: cloneDeep(db.promptSettings) ?? null, repetition_penalty: db.repetition_penalty, min_p: db.min_p, top_a: db.top_a, @@ -1188,7 +1188,8 @@ export function setPreset(db:Database, newPres: botPreset){ mode: 'instruct' } db.top_p = newPres.top_p ?? 1 - db.proomptSettings = cloneDeep(newPres.proomptSettings) ?? { + //@ts-ignore //for legacy mistypings + db.promptSettings = cloneDeep(newPres.promptSettings) ?? cloneDeep(newPres.proomptSettings) ?? { assistantPrefill: '', postEndInnerFormat: '', sendChatAsSystem: false, diff --git a/src/ts/storage/globalApi.ts b/src/ts/storage/globalApi.ts index 24b19186..5330725f 100644 --- a/src/ts/storage/globalApi.ts +++ b/src/ts/storage/globalApi.ts @@ -960,6 +960,8 @@ async function checkNewFormat() { if(db.mainPrompt === oldJailbreak){ db.mainPrompt = defaultJailbreak } + //@ts-ignore + if(db.proomptSettings){ db.promptSettings = db.proomptSettings; delete db.proomptSettings } setDatabase(db) checkCharOrder()