[feat] better group chat

kwaroran
2023-05-31 05:08:46 +09:00
parent 7805092992
commit 7ea768cb5b
9 changed files with 208 additions and 57 deletions

src/ts/process/group.ts (new file, 103 additions)
View File

@@ -0,0 +1,103 @@
import { shuffle } from "lodash";
import { findCharacterbyId } from "../util";
import { alertConfirm, alertError, alertSelectChar } from "../alert";
import { language } from "src/lang";
import { get } from "svelte/store";
import { DataBase, setDatabase } from "../storage/database";
import { selectedCharID } from "../stores";

export async function addGroupChar(){
    let db = get(DataBase)
    let selectedId = get(selectedCharID)
    let group = db.characters[selectedId]
    if(group.type === 'group'){
        const res = await alertSelectChar()
        if(res){
            if(group.characters.includes(res)){
                alertError(language.errors.alreadyCharInGroup)
            }
            else{
                if(await alertConfirm(language.askLoadFirstMsg)){
                    group.chats[group.chatPage].message.push({
                        role:'char',
                        data: findCharacterbyId(res).firstMessage,
                        saying: res,
                    })
                }
                group.characters.push(res)
                group.characterTalks.push(1 / 6 * 4)
                group.characterActive.push(true)
            }
        }
        setDatabase(db)
    }
}

export function rmCharFromGroup(index:number){
    let db = get(DataBase)
    let selectedId = get(selectedCharID)
    let group = db.characters[selectedId]
    if(group.type === 'group'){
        group.characters.splice(index, 1)
        group.characterTalks.splice(index, 1)
        group.characterActive.splice(index, 1)
        setDatabase(db)
    }
}

export type GroupOrder = {
    id: string,
    talkness: number,
    index: number
}
export function groupOrder(chars:GroupOrder[], input:string):GroupOrder[] {
    let order:GroupOrder[] = [];

    // Characters whose names appear in the input message reply first.
    if (input) {
        const words = getWords(input)
        for (const word of words) {
            for (let char of chars) {
                const charNameChunks = getWords(findCharacterbyId(char.id).name)
                console.log(charNameChunks)
                if (charNameChunks.includes(word)) {
                    order.push(char);
                    break;
                }
            }
        }
    }

    // The remaining characters are shuffled in, each with a flat 50% chance.
    const shuffled = shuffle(chars)
    for (const char of shuffled) {
        if(order.includes(char)){
            continue
        }
        //TODO
        const chance = 0.5
        if (chance >= Math.random()) {
            order.push(char);
        }
    }

    // Make sure at least one character speaks.
    while (order.length === 0) {
        order.push(chars[Math.floor(Math.random() * chars.length)]);
    }

    return order;
}
function getWords(data:string){
    // match() returns null when there are no word characters, so fall back to an empty list
    const matches = data.match(/\b\w+\b/gmi) ?? []
    let words:string[] = []
    for(const match of matches){
        words.push(match.toLocaleLowerCase())
    }
    return words
}
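For illustration, a minimal sketch of how the new groupOrder helper is meant to be driven. The member IDs and the message text are hypothetical; in the app the entries are built from a group chatroom's characters array, and each id must resolve through findCharacterbyId.

// Hypothetical usage sketch (IDs and text are invented).
const members: GroupOrder[] = [
    { id: 'char-alice', talkness: 1 / 6 * 4, index: 0 },
    { id: 'char-bob', talkness: 1 / 6 * 4, index: 1 },
]
// Members whose names appear in the message respond first; the rest are
// shuffled in with a flat 50% chance each, and at least one speaker is
// always returned.
const speakingOrder = groupOrder(members, "Alice, what do you think?")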

View File

@@ -13,6 +13,8 @@ import { exampleMessage } from "./exampleMessages";
import { sayTTS } from "./tts";
import { supaMemory } from "./supaMemory";
import { v4 } from "uuid";
+import { cloneDeep } from "lodash";
+import { groupOrder } from "./group";

export interface OpenAIChat{
    role: 'system'|'user'|'assistant'
@@ -58,8 +60,25 @@ export async function sendChat(chatProcessIndex = -1):Promise<boolean> {
    if(nowChatroom.type === 'group'){
        if(chatProcessIndex === -1){
-           for(let i=0;i<nowChatroom.characters.length;i++){
-               const r = await sendChat(i)
+           const messages = nowChatroom.chats[nowChatroom.chatPage].message
+           const lastMessage = messages[messages.length-1]
+           let order = nowChatroom.characters.map((v,i) => {
+               return {
+                   id: v,
+                   talkness: nowChatroom.characterActive[i] ? nowChatroom.characterTalks[i] : -1,
+                   index: i
+               }
+           })
+           if(!nowChatroom.orderByOrder){
+               order = groupOrder(order, lastMessage?.data).filter((v) => {
+                   if(v.id === lastMessage?.saying){
+                       return false
+                   }
+                   return true
+               })
+           }
+           for(let i=0;i<order.length;i++){
+               const r = await sendChat(order[i].index)
                if(!r){
                    return false
                }
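In other words, the fixed per-character loop is replaced by a per-message order. A hypothetical trace with invented member IDs (in the real code these values come from nowChatroom):

// 'char-b' is inactive, so it is mapped with talkness -1; 'char-c' sent the
// last message, so the filter drops it and it does not reply to itself.
const order = [
    { id: 'char-a', talkness: 1 / 6 * 4, index: 0 },
    { id: 'char-b', talkness: -1, index: 1 },
    { id: 'char-c', talkness: 1 / 6 * 4, index: 2 },
]
const lastSaying = 'char-c'
const stillSpeaking = order.filter((v) => v.id !== lastSaying)
// groupOrder() (omitted here) decides the final order of the remaining
// entries before sendChat(order[i].index) runs for each of them.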
@@ -103,6 +122,7 @@ export async function sendChat(chatProcessIndex = -1):Promise<boolean> {
        'authorNote':([] as OpenAIChat[]),
        'lastChat':([] as OpenAIChat[]),
        'description':([] as OpenAIChat[]),
+       'postEverything':([] as OpenAIChat[]),
    }

    if(!currentChar.utilityBot){
@@ -149,6 +169,13 @@ export async function sendChat(chatProcessIndex = -1):Promise<boolean> {
            content: description
        })

+       if(nowChatroom.type === 'group'){
+           const systemMsg = `[Write the next reply only as ${currentChar.name}]`
+           unformated.postEverything.push({
+               role: 'system',
+               content: systemMsg
+           })
+       }
    }

    unformated.lorebook.push({
@@ -220,15 +247,6 @@ export async function sendChat(chatProcessIndex = -1):Promise<boolean> {
        currentTokens += (await tokenize(formedChat) + 5)
    }

-   if(nowChatroom.type === 'group'){
-       const systemMsg = `[Write the next reply only as ${currentChar.name}]`
-       chats.push({
-           role: 'system',
-           content: systemMsg
-       })
-       currentTokens += (await tokenize(systemMsg) + 5)
-   }

    if(nowChatroom.supaMemory && db.supaMemoryType !== 'none'){
        const sp = await supaMemory(chats, currentTokens, maxContextTokens, currentChat, nowChatroom)
        if(sp.error){
@@ -252,7 +270,6 @@ export async function sendChat(chatProcessIndex = -1):Promise<boolean> {
            chats.splice(0, 1)
        }
        currentChat.lastMemory = chats[0].memo
-       console.log(currentChat.lastMemory)
    }

    let bias:{[key:number]:number} = {}
@@ -283,7 +300,8 @@ export async function sendChat(chatProcessIndex = -1):Promise<boolean> {
    //make into one
    let formated:OpenAIChat[] = []
-   const formatOrder = db.formatingOrder
+   const formatOrder = cloneDeep(db.formatingOrder)
+   formatOrder.push('postEverything')
    let sysPrompts:string[] = []
    for(let i=0;i<formatOrder.length;i++){
        const cha = unformated[formatOrder[i]]
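For reference, a small sketch of why formatingOrder is cloned before 'postEverything' is appended. The key list below is illustrative, not the exact stored value:

import { cloneDeep } from "lodash";

// cloneDeep keeps the stored setting untouched while the per-request copy
// gains 'postEverything' as its final slot, so the group instruction collected
// in unformated.postEverything is appended after every other section.
const stored = ['main', 'description', 'lorebook', 'lastChat']
const formatOrder = cloneDeep(stored)
formatOrder.push('postEverything')
// stored still has four entries; formatOrder now ends with 'postEverything'.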
@@ -443,7 +461,6 @@ export async function sendChat(chatProcessIndex = -1):Promise<boolean> {
            },
        ]

-       console.log('requesting chat')
        const rq = await requestChatData({
            formated: promptbody,
            bias: emobias,

View File

@@ -61,7 +61,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
        case 'gpt4_32k':{
            for(let i=0;i<formated.length;i++){
-               if(arg.isGroupChat){
+               if(arg.isGroupChat && formated[i].name){
                    formated[i].content = formated[i].name + ": " + formated[i].content
                }
                formated[i].name = undefined
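Finally, a minimal sketch of the tightened prefixing rule for group chats; the messages below are invented:

// With the added name check, only messages that actually carry a speaker name
// get the "Name: " prefix; the name field is then dropped either way before
// the request is sent.
type NamedChat = { role: string, content: string, name?: string }

const formated: NamedChat[] = [
    { role: 'system', content: 'Group chat between Alice and Bob.' }, // no name: left untouched
    { role: 'assistant', content: 'Hello!', name: 'Alice' },          // becomes "Alice: Hello!"
]
for (const msg of formated) {
    if (msg.name) { // stands in for `arg.isGroupChat && formated[i].name`
        msg.content = msg.name + ": " + msg.content
    }
    msg.name = undefined
}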