add chatml related parsing
@@ -2480,4 +2480,56 @@ export function applyMarkdownToNode(node: Node) {
             applyMarkdownToNode(child);
         }
     }
 }
+
+export function parseChatML(data:string):OpenAIChat[]|null{
+
+    const starter = '<|im_start|>'
+    const seperator = '<|im_sep|>'
+    const ender = '<|im_end|>'
+    const trimedData = data.trim()
+    if(!trimedData.startsWith(starter)){
+        return null
+    }
+
+    return trimedData.split(starter).filter((f) => f !== '').map((v) => {
+        let role:'system'|'user'|'assistant' = 'user'
+        //default separators
+        if(v.startsWith('user' + seperator)){
+            role = 'user'
+            v = v.substring(4 + seperator.length)
+        }
+        else if(v.startsWith('system' + seperator)){
+            role = 'system'
+            v = v.substring(6 + seperator.length)
+        }
+        else if(v.startsWith('assistant' + seperator)){
+            role = 'assistant'
+            v = v.substring(9 + seperator.length)
+        }
+        //space/newline separators
+        else if(v.startsWith('user ') || v.startsWith('user\n')){
+            role = 'user'
+            v = v.substring(5)
+        }
+        else if(v.startsWith('system ') || v.startsWith('system\n')){
+            role = 'system'
+            v = v.substring(7)
+        }
+        else if(v.startsWith('assistant ') || v.startsWith('assistant\n')){
+            role = 'assistant'
+            v = v.substring(10)
+        }
+
+        v = v.trim()
+
+        if(v.endsWith(ender)){
+            v = v.substring(0, v.length - ender.length)
+        }
+
+        return {
+            role: role,
+            content: v
+        }
+    })
+}
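
For reference, a minimal usage sketch of the parser added above (not part of the commit; it assumes parseChatML is exported from "../parser" and that OpenAIChat is the { role, content } message shape used by the request pipeline):

import { parseChatML } from "../parser";

// A ChatML string is split on <|im_start|>; the role prefix and optional
// <|im_sep|> are stripped, and a trailing <|im_end|> is removed per message.
const chatml =
    '<|im_start|>system<|im_sep|>You are a helpful assistant.<|im_end|>' +
    '<|im_start|>user<|im_sep|>Hello!<|im_end|>';

const messages = parseChatML(chatml);
// -> [
//      { role: 'system', content: 'You are a helpful assistant.' },
//      { role: 'user', content: 'Hello!' }
//    ]

// Input that does not start with <|im_start|> is not treated as ChatML:
parseChatML('plain text'); // -> null
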
@@ -1,4 +1,4 @@
-import { risuChatParser, risuCommandParser } from "../parser";
+import { parseChatML, risuChatParser, risuCommandParser } from "../parser";
 import { DataBase, type Chat, type character } from "../storage/database";
 import { tokenize } from "../tokenizer";
 import { getModuleTriggers } from "./modules";
@@ -426,41 +426,10 @@ export async function runTrigger(char:character,mode:triggerMode, arg:{
             }
             const effectValue = risuChatParser(effect.value,{chara:char})
             const varName = effect.inputVar
-            let promptbody:OpenAIChat[] = []
-            let currentRole:'user'|'assistant'|'system'
-
-            const splited = effectValue.split('\n')
-
-            for(let i = 0; i < splited.length; i++){
-                const line = splited[i]
-                if(line.startsWith('@@role ')){
-                    const role = line.split(' ')[1]
-                    switch(role){
-                        case 'user':
-                        case 'assistant':
-                        case 'system':
-                            currentRole = role
-                            break
-                        default:
-                            currentRole = 'system'
-                            break
-                    }
-                    promptbody.push({role: currentRole, content: ''})
-                    continue
-                }
-                else if(promptbody.length === 0){
-                    promptbody.push({role: 'system', content: line})
-                }
-                else{
-                    promptbody[promptbody.length - 1].content += line
-                }
-            }
-
-            promptbody = promptbody.map((e) => {
-                e.content = e.content.trim()
-                return e
-            }).filter((e) => e.content.length > 0)
-
+            let promptbody:OpenAIChat[] = parseChatML(effectValue)
+            if(!promptbody){
+                promptbody = [{role:'user', content:effectValue}]
+            }
             const result = await requestChatData({
                 formated: promptbody,
                 bias: {},
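
The effect of the change above, sketched outside the diff (illustrative values; parseChatML as defined in the first hunk): a trigger effect value written in ChatML now becomes a multi-role prompt, while any other value, including the old @@role-style text that the removed block handled, falls back to a single user message.

import { parseChatML } from "../parser";

const effectValue =
    '<|im_start|>system<|im_sep|>Summarize the chat so far.<|im_end|>' +
    '<|im_start|>user<|im_sep|>Keep it under 100 words.<|im_end|>';

// Mirrors the new runTrigger logic: try ChatML first, otherwise wrap the
// whole effect value in one user message.
const promptbody = parseChatML(effectValue)
    ?? [{ role: 'user' as const, content: effectValue }];
// ChatML input     -> two messages (system + user)
// non-ChatML input -> [{ role: 'user', content: effectValue }]
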
@@ -4,8 +4,8 @@ import { DataBase, type character, type customscript, type groupChat } from "../
 import { globalFetch, isTauri } from "../storage/globalApi"
 import { alertError } from "../alert"
 import { requestChatData } from "../process/request"
-import { doingChat } from "../process"
-import { applyMarkdownToNode, type simpleCharacterArgument } from "../parser"
+import { doingChat, OpenAIChat } from "../process"
+import { applyMarkdownToNode, parseChatML, type simpleCharacterArgument } from "../parser"
 import { selectedCharID } from "../stores"
 import { getModuleRegexScripts } from "../process/modules"
 import { getNodetextToSentence, sleep } from "../util"
@@ -449,10 +449,15 @@ async function translateLLM(text:string, arg:{to:string}){
         return llmCache.get(text)
     }
     const db = get(DataBase)
+    let formated:OpenAIChat[] = []
     let prompt = db.translatorPrompt || `You are a translator. translate the following html or text into {{slot}}. do not output anything other than the translation.`
-    prompt = prompt.replace('{{slot}}', arg.to)
-    const rq = await requestChatData({
-        formated: [
+    let parsedPrompt = parseChatML(prompt.replaceAll('{{slot}}', arg.to).replaceAll('{{solt::content}}', text))
+    if(parsedPrompt){
+        formated = parsedPrompt
+    }
+    else{
+        prompt = prompt.replaceAll('{{slot}}', arg.to)
+        formated = [
             {
                 'role': 'system',
                 'content': prompt
@@ -461,7 +466,10 @@ async function translateLLM(text:string, arg:{to:string}){
                 'role': 'user',
                 'content': text
             }
-        ],
+        ]
+    }
+    const rq = await requestChatData({
+        formated,
         bias: {},
         useStreaming: false,
         noMultiGen: true,
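
A sketch of how a ChatML-style translator prompt flows through the new branch above (illustrative only; 'Korean' and the sample text are made-up values, and {{slot}} / {{solt::content}} are the placeholders the code substitutes before parsing):

import { parseChatML } from "../parser";

// A db.translatorPrompt written in ChatML lets the user control message roles;
// the placeholders are replaced first, then the result is parsed.
const translatorPrompt =
    '<|im_start|>system<|im_sep|>Translate the following text into {{slot}}.<|im_end|>' +
    '<|im_start|>user<|im_sep|>{{solt::content}}<|im_end|>';

const formated = parseChatML(
    translatorPrompt.replaceAll('{{slot}}', 'Korean').replaceAll('{{solt::content}}', 'Hello there')
);
// -> [
//      { role: 'system', content: 'Translate the following text into Korean.' },
//      { role: 'user', content: 'Hello there' }
//    ]
// A plain (non-ChatML) prompt still takes the else branch and keeps the old
// two-message system/user layout.
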