[feat] continue response

kwaroran
2023-08-17 17:00:10 +09:00
parent 64ee71e2c6
commit 4a66a585f7
9 changed files with 294 additions and 214 deletions

View File

@@ -36,7 +36,7 @@ export interface OpenAIChatFull extends OpenAIChat{
 export const doingChat = writable(false)
 export const abortChat = writable(false)
-export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:number,signal?:AbortSignal} = {}):Promise<boolean> {
+export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:number,signal?:AbortSignal,continue?:boolean} = {}):Promise<boolean> {
     const abortSignal = arg.signal ?? (new AbortController()).signal
@@ -515,6 +515,14 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
         formatOrder.push('postEverything')
     }
+    //continue chat model
+    if(arg.continue && (db.aiModel.startsWith('claude') || db.aiModel.startsWith('gpt') || db.aiModel.startsWith('openrouter') || db.aiModel.startsWith('reverse_proxy'))){
+        unformated.postEverything.push({
+            role: 'system',
+            content: '[Continue the last response]'
+        })
+    }
     function pushPrompts(cha:OpenAIChat[]){
         for(const chat of cha){
@@ -670,19 +678,26 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
     }
     else if(req.type === 'streaming'){
         const reader = req.result.getReader()
-        const msgIndex = db.characters[selectedChar].chats[selectedChat].message.length
+        let msgIndex = db.characters[selectedChar].chats[selectedChat].message.length
+        let prefix = ''
+        if(arg.continue){
+            msgIndex -= 1
+            prefix = db.characters[selectedChar].chats[selectedChat].message[msgIndex].data
+        }
+        else{
+            db.characters[selectedChar].chats[selectedChat].message.push({
+                role: 'char',
+                data: "",
+                saying: currentChar.chaId,
+                time: Date.now()
+            })
+        }
         db.characters[selectedChar].chats[selectedChat].isStreaming = true
-        db.characters[selectedChar].chats[selectedChat].message.push({
-            role: 'char',
-            data: "",
-            saying: currentChar.chaId,
-            time: Date.now()
-        })
         while(abortSignal.aborted === false){
             const readed = (await reader.read())
             if(readed.value){
                 result = readed.value
-                const result2 = processScriptFull(nowChatroom, reformatContent(result), 'editoutput', msgIndex)
+                const result2 = processScriptFull(nowChatroom, reformatContent(prefix + result), 'editoutput', msgIndex)
                 db.characters[selectedChar].chats[selectedChat].message[msgIndex].data = result2.data
                 emoChanged = result2.emoChanged
                 db.characters[selectedChar].reloadKeys += 1
@@ -709,17 +724,33 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
         const msgs = (req.type === 'success') ? [['char',req.result]] as const
             : (req.type === 'multiline') ? req.result
             : []
-        for(const msg of msgs){
-            const msgIndex = db.characters[selectedChar].chats[selectedChat].message.length
-            const result2 = processScriptFull(nowChatroom, reformatContent(msg[1]), 'editoutput', msgIndex)
+        for(let i=0;i<msgs.length;i++){
+            const msg = msgs[i]
+            let msgIndex = db.characters[selectedChar].chats[selectedChat].message.length
+            let result2 = processScriptFull(nowChatroom, reformatContent(msg[1]), 'editoutput', msgIndex)
+            if(i === 0 && arg.continue){
+                msgIndex -= 1
+                let beforeChat = db.characters[selectedChar].chats[selectedChat].message[msgIndex]
+                result2 = processScriptFull(nowChatroom, reformatContent(beforeChat.data + msg[1]), 'editoutput', msgIndex)
+            }
             result = result2.data
             emoChanged = result2.emoChanged
-            db.characters[selectedChar].chats[selectedChat].message.push({
-                role: msg[0],
-                data: result,
-                saying: currentChar.chaId,
-                time: Date.now()
-            })
+            if(i === 0 && arg.continue){
+                db.characters[selectedChar].chats[selectedChat].message[msgIndex] = {
+                    role: 'char',
+                    data: result,
+                    saying: currentChar.chaId,
+                    time: Date.now()
+                }
+            }
+            else{
+                db.characters[selectedChar].chats[selectedChat].message.push({
+                    role: msg[0],
+                    data: result,
+                    saying: currentChar.chaId,
+                    time: Date.now()
+                })
+            }
             db.characters[selectedChar].reloadKeys += 1
             await sayTTS(currentChar, result)
             setDatabase(db)
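
Taken together, the changes above let a caller ask for a continuation instead of a fresh reply: the new `continue` flag injects a `[Continue the last response]` system prompt for chat-style models (claude, gpt, openrouter, reverse_proxy), and both the streaming and non-streaming paths reuse the last `char` message, prefixing its existing text, rather than pushing a new one. A minimal caller sketch, assuming a hypothetical "Continue" button handler that is not part of this diff:

// Hypothetical UI handler (illustration only, not part of this commit).
// sendChat(-1, { continue: true }) re-requests the last reply and appends
// the new tokens onto the existing message instead of creating a new one.
async function onContinuePressed(): Promise<void> {
    const ok = await sendChat(-1, { continue: true })
    if (!ok) {
        console.warn('continue request failed or was aborted')
    }
}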

View File

@@ -1,7 +1,7 @@
 import { invoke } from "@tauri-apps/api/tauri";
 import { globalFetch } from "src/ts/storage/globalApi";
 import { sleep } from "src/ts/util";
-import path from "@tauri-apps/api/path";
+import * as path from "@tauri-apps/api/path";
 import { exists } from "@tauri-apps/api/fs";
 import { alertClear, alertError, alertMd, alertWait } from "src/ts/alert";
 import { get } from "svelte/store";
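
The change above swaps a default import for a namespace import: `@tauri-apps/api/path` has no default export, so `import path from ...` does not reliably provide the module's functions, while `import * as path` does. A small sketch of how the namespace import is typically used (the joined file name and base directory are illustrative, not from this diff):

// With the namespace import, members of the Tauri path module resolve as expected.
import * as path from "@tauri-apps/api/path";

async function resolveDataFile(fileName: string): Promise<string> {
    // join() and appDataDir() are part of @tauri-apps/api/path.
    return await path.join(await path.appDataDir(), fileName)
}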

View File

@@ -5,7 +5,7 @@ import { globalFetch } from "src/ts/storage/globalApi"
 import { alertError, alertInput, alertNormal, alertWait } from "src/ts/alert"
 import { sleep } from "src/ts/util"
-export function stringlizeNAIChat(formated:OpenAIChat[], char:string = ''){
+export function stringlizeNAIChat(formated:OpenAIChat[], char:string, continued: boolean){
     const db = get(DataBase)
@@ -35,7 +35,12 @@ export function stringlizeNAIChat(formated:OpenAIChat[], char:string = ''){
         }
     }
-    return resultString.join(seperator) + `\n\n${char}:`
+    let res = resultString.join(seperator)
+    if(!continued){
+        res += `\n\n${char}:`
+    }
+    return res
 }
 export const novelLogin = async () => {

View File

@@ -23,6 +23,7 @@ interface requestDataArgument{
     useStreaming?:boolean
     isGroupChat?:boolean
     useEmotion?:boolean
+    continue?:boolean
 }
 type requestDataResponse = {
@@ -90,6 +91,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
     let temperature = arg.temperature ?? (db.temperature / 100)
     let bias = arg.bias
     let currentChar = arg.currentChar
+    arg.continue = arg.continue ?? false
     let biasString = arg.biasString ?? []
     const aiModel = (model === 'model' || (!db.advancedBotSettings)) ? db.aiModel : db.subModel
@@ -356,7 +358,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
         }
         case 'novelai':
         case 'novelai_kayra':{
-            const proompt = stringlizeNAIChat(formated, currentChar?.name ?? '')
+            const proompt = stringlizeNAIChat(formated, currentChar?.name ?? '', arg.continue)
             let logit_bias_exp:{
                 sequence: number[], bias: number, ensure_sequence_finish: false, generate_once: true
             }[] = []
@@ -439,7 +441,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
             let blockingUrl = db.textgenWebUIBlockingURL.replace(/\/api.*/, "/api/v1/generate")
             let bodyTemplate:any
             const suggesting = model === "submodel"
-            const proompt = stringlizeChatOba(formated, currentChar.name, suggesting)
+            const proompt = stringlizeChatOba(formated, currentChar.name, suggesting, arg.continue)
             const stopStrings = getStopStrings(suggesting)
             console.log(proompt)
             console.log(stopStrings)
@@ -583,7 +585,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
         case 'palm2':{
             const body = {
                 "prompt": {
-                    "text": stringlizeChat(formated, currentChar?.name ?? '')
+                    "text": stringlizeChat(formated, currentChar?.name ?? '', arg.continue)
                 },
                 "safetySettings":[
                     {
@@ -654,7 +656,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
             }
         }
         case "kobold":{
-            const proompt = stringlizeChat(formated, currentChar?.name ?? '')
+            const proompt = stringlizeChat(formated, currentChar?.name ?? '', arg.continue)
             const url = new URL(db.koboldURL)
             if(url.pathname.length < 3){
                 url.pathname = 'api/v1/generate'
@@ -704,7 +706,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
             };
             const send_body = {
-                text: stringlizeAINChat(formated, currentChar?.name ?? ''),
+                text: stringlizeAINChat(formated, currentChar?.name ?? '', arg.continue),
                 length: maxTokens,
                 temperature: temperature,
                 top_p: db.ainconfig.top_p,
@@ -764,7 +766,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
             const response = await createDeep([{
                 role: 'user',
-                content: stringlizeChat(formated, currentChar?.name ?? '')
+                content: stringlizeChat(formated, currentChar?.name ?? '', arg.continue)
             }])
             if(!response.ok){
@@ -862,7 +864,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
         }
         if(aiModel.startsWith("horde:::")){
-            const proompt = stringlizeChat(formated, currentChar?.name ?? '')
+            const proompt = stringlizeChat(formated, currentChar?.name ?? '', arg.continue)
             const realModel = aiModel.split(":::")[1]
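
For completion-style backends (NovelAI, text-gen-webui, PaLM 2, Kobold, AI Novelist, Horde), `arg.continue` is normalized to `false` and threaded into every prompt builder, which then omits the trailing speaker header so the model keeps writing the previous turn instead of starting a new one. A rough sketch of the difference, using made-up history and the character name "Alice" (role names are assumed from the OpenAI-style chat shape; exact output also depends on formatting settings not shown in this diff):

// Illustration only: how the continued flag changes the prompt tail.
const history: OpenAIChat[] = [
    { role: 'user', content: 'Tell me about the harbor.' },
    { role: 'assistant', content: 'The harbor at dawn was' },
]
// continued = false -> prompt ends with a fresh "\n\nAlice:" header (new reply)
// continued = true  -> no header is appended, so the model continues "...was"
const freshPrompt = stringlizeChat(history, 'Alice', false)
const continuedPrompt = stringlizeChat(history, 'Alice', true)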

View File

@@ -7,7 +7,7 @@ export function multiChatReplacer(){
 }
-export function stringlizeChat(formated:OpenAIChat[], char:string = ''){
+export function stringlizeChat(formated:OpenAIChat[], char:string, continued:boolean){
     let resultString:string[] = []
     for(const form of formated){
         if(form.role === 'system'){
@@ -20,7 +20,12 @@ export function stringlizeChat(formated:OpenAIChat[], char:string = ''){
             resultString.push(form.content)
         }
     }
-    return resultString.join('\n\n') + `\n\n${char}:`
+    let res = resultString.join('\n\n')
+    if(!continued){
+        res += `\n\n${char}:`
+    }
+    return res
 }
 function appendWhitespace(prefix:string, seperator:string=" ") {
@@ -29,7 +34,7 @@ function appendWhitespace(prefix:string, seperator:string=" ") {
     }
     return prefix
 }
-export function stringlizeChatOba(formated:OpenAIChat[], characterName:string='', suggesting:boolean=false){
+export function stringlizeChatOba(formated:OpenAIChat[], characterName:string, suggesting:boolean, continued:boolean){
     const db = get(DataBase)
     let resultString:string[] = []
     let { header, systemPrefix, userPrefix, assistantPrefix, seperator } = db.ooba.formating;
@@ -59,10 +64,12 @@ export function stringlizeChatOba(formated:OpenAIChat[], characterName:string=''
         }
         resultString.push(prefix + name + form.content)
     }
-    if (suggesting){
-        resultString.push(appendWhitespace(assistantPrefix, seperator) + `${db.username}:\n` + db.autoSuggestPrefix)
-    } else {
-        resultString.push(assistantPrefix + `${characterName}:`)
+    if(!continued){
+        if (suggesting){
+            resultString.push(appendWhitespace(assistantPrefix, seperator) + `${db.username}:\n` + db.autoSuggestPrefix)
+        } else {
+            resultString.push(assistantPrefix + `${characterName}:`)
+        }
     }
     return resultString.join(seperator)
 }
@@ -190,7 +197,7 @@ export function getUnstringlizerChunks(formated:OpenAIChat[], char:string, mode:
     return {chunks,extChunk:charNames.concat(chunks)}
 }
-export function stringlizeAINChat(formated:OpenAIChat[], char:string = ''){
+export function stringlizeAINChat(formated:OpenAIChat[], char:string, continued: boolean){
     let resultString:string[] = []
     const db = get(DataBase)
@@ -213,7 +220,14 @@ export function stringlizeAINChat(formated:OpenAIChat[], char:string = ''){
             resultString.push(form.content)
         }
     }
-    return resultString.join('\n\n') + `\n\n${char}`
+    let res = resultString.join('\n\n')
+    if(!continued){
+        res += `\n\n${char}`
+    }
+    else{
+        res += " 「"
+    }
+    return res
 }
 function extractAINOutputStrings(inputString:string, characters:string[]) {
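
stringlizeAINChat handles continuation slightly differently from the other builders: rather than only dropping the `\n\n${char}` speaker line, it appends an opening 「 so AI Novelist keeps writing inside the character's dialogue. A usage sketch with assumed input (only the appended tail is the point; the omitted middle of the function may add further formatting):

// Assumed example input; character name and content are illustrative.
const turns: OpenAIChat[] = [
    { role: 'user', content: 'Hello there.' },
]
stringlizeAINChat(turns, 'Alice', false) // ends with "\n\nAlice" (start a fresh turn)
stringlizeAINChat(turns, 'Alice', true)  // ends with " 「" (continue the dialogue)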