Add ChatML and Rework display
@@ -28,6 +28,7 @@ import { runImageEmbedding } from "./transformers";
 import { hanuraiMemory } from "./memory/hanuraiMemory";
 import { hypaMemoryV2 } from "./memory/hypav2";
 import { runLuaEditTrigger } from "./lua";
+import { parseChatML } from "../parser.svelte";

 export interface OpenAIChat{
     role: 'system'|'user'|'assistant'|'function'
@@ -548,6 +549,11 @@ export async function sendChat(chatProcessIndex = -1,arg:{
         await tokenizeChatArray([prompt])
         break
     }
+    case 'chatML':{
+        let prompts = parseChatML(card.text)
+        await tokenizeChatArray(prompts)
+        break
+    }
     case 'chat':{
         let start = card.rangeStart
         let end = (card.rangeEnd === 'end') ? unformated.chats.length : card.rangeEnd
@@ -1007,6 +1013,11 @@ export async function sendChat(chatProcessIndex = -1,arg:{
         pushPrompts([prompt])
         break
     }
+    case 'chatML':{
+        let prompts = parseChatML(card.text)
+        pushPrompts(prompts)
+        break
+    }
     case 'chat':{
         let start = card.rangeStart
         let end = (card.rangeEnd === 'end') ? unformated.chats.length : card.rangeEnd
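The two hunks above wire a new 'chatML' card type into sendChat: instead of tokenizing or pushing the card text as a single prompt, the text is first run through parseChatML and the resulting array of chats is tokenized or pushed as-is. The exact markup parseChatML accepts lives in ../parser.svelte and is not part of this diff; the sketch below is only an illustration, assuming standard ChatML delimiters (<|im_start|>role ... <|im_end|>) and a minimal OpenAIChat shape. The helper name toOpenAIChats is made up for the example.

// Minimal sketch, not the project's parser: assumes plain ChatML delimiters.
// parseChatML in ../parser.svelte may support different syntax, roles, or macros.
interface OpenAIChat {
    role: 'system' | 'user' | 'assistant' | 'function'
    content: string
}

function toOpenAIChats(text: string): OpenAIChat[] {
    const chats: OpenAIChat[] = []
    const block = /<\|im_start\|>(system|user|assistant|function)\n([\s\S]*?)<\|im_end\|>/g
    let match: RegExpExecArray | null
    // Each <|im_start|>role ... <|im_end|> block becomes one chat entry.
    while ((match = block.exec(text)) !== null) {
        chats.push({ role: match[1] as OpenAIChat['role'], content: match[2].trim() })
    }
    return chats
}

// Example of the kind of card text a 'chatML' item could hold:
const cardText = '<|im_start|>system\nYou are {{char}}.<|im_end|>\n<|im_start|>user\nHello!<|im_end|>'
// toOpenAIChats(cardText) -> [{ role: 'system', content: 'You are {{char}}.' },
//                             { role: 'user', content: 'Hello!' }]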
@@ -4,7 +4,7 @@ import { getDatabase, presetTemplate, setDatabase, type Database } from "../stor
 import { alertError, alertNormal } from "../alert";
 import type { OobaChatCompletionRequestParams } from "../model/ooba";

-export type PromptItem = PromptItemPlain|PromptItemTyped|PromptItemChat|PromptItemAuthorNote;
+export type PromptItem = PromptItemPlain|PromptItemTyped|PromptItemChat|PromptItemAuthorNote|PromptItemChatML
 export type PromptType = PromptItem['type'];
 export type PromptSettings = {
     assistantPrefill: string
@@ -21,17 +21,26 @@ export interface PromptItemPlain {
     type2: 'normal'|'globalNote'|'main'
     text: string;
     role: 'user'|'bot'|'system';
+    name?: string
 }

+export interface PromptItemChatML {
+    type: 'chatML'
+    text: string
+    name?: string
+}
+
 export interface PromptItemTyped {
     type: 'persona'|'description'|'lorebook'|'postEverything'|'memory'
-    innerFormat?: string
+    innerFormat?: string,
+    name?: string
 }

 export interface PromptItemAuthorNote {
     type : 'authornote'
     innerFormat?: string
     defaultText?: string
+    name?: string
 }

@@ -40,6 +49,7 @@ export interface PromptItemChat {
     rangeStart: number;
     rangeEnd: number|'end';
     chatAsOriginalOnSystem?: boolean;
+    name?: string
 }

 export async function tokenizePreset(prompts:PromptItem[], consti:boolean = false){
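Taken together, the type changes add one new variant (PromptItemChatML) to the PromptItem union and an optional name field to every existing variant (likely the display rework named in the commit title). Below is a hedged illustration of objects that satisfy the new definitions; the field values and the surrounding usage are invented, only the shapes follow the interfaces and switch cases shown in this diff.

// Illustrative literals only; values are made up.
const examplePrompts: PromptItem[] = [
    {
        type: 'chatML',                       // new variant added by this commit
        text: '<|im_start|>system\nStay in character.<|im_end|>',
        name: 'ChatML block'                  // new optional display name
    },
    {
        type: 'chat',                         // existing variant, now also nameable
        rangeStart: 0,
        rangeEnd: 'end',
        name: 'Chat history'
    }
]

// e.g. await tokenizePreset(examplePrompts)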