Merge branch 'dev' into dev

This commit is contained in:
kwaroran
2023-05-31 05:27:32 +09:00
committed by GitHub
11 changed files with 258 additions and 68 deletions

View File

@@ -38,7 +38,9 @@ export function createNewGroup(){
emotionImages: [],
customscript: [],
chaId: uuidv4(),
firstMsgIndex: -1
firstMsgIndex: -1,
characterTalks: [],
characterActive: []
})
setDatabase(db)
checkCharOrder()
@@ -300,6 +302,20 @@ export function characterFormatUpdate(index:number|character){
}
}
else{
if((!cha.characterTalks) || cha.characterTalks.length !== cha.characters.length){
cha.characterTalks = []
for(let i=0;i<cha.characters.length;i++){
cha.characterTalks.push(1 / 6 * 4)
}
}
if((!cha.characterActive) || cha.characterActive.length !== cha.characters.length){
cha.characterActive = []
for(let i=0;i<cha.characters.length;i++){
cha.characterActive.push(true)
}
}
}
if(checkNullish(cha.customscript)){
cha.customscript = []
}

103
src/ts/process/group.ts Normal file
View File

@@ -0,0 +1,103 @@
import { shuffle } from "lodash";
import { findCharacterbyId } from "../util";
import { alertConfirm, alertError, alertSelectChar } from "../alert";
import { language } from "src/lang";
import { get } from "svelte/store";
import { DataBase, setDatabase } from "../storage/database";
import { selectedCharID } from "../stores";
export async function addGroupChar(){
let db = get(DataBase)
let selectedId = get(selectedCharID)
let group = db.characters[selectedId]
if(group.type === 'group'){
const res = await alertSelectChar()
if(res){
if(group.characters.includes(res)){
alertError(language.errors.alreadyCharInGroup)
}
else{
if(await alertConfirm(language.askLoadFirstMsg)){
group.chats[group.chatPage].message.push({
role:'char',
data: findCharacterbyId(res).firstMessage,
saying: res,
})
}
group.characters.push(res)
group.characterTalks.push(1 / 6 * 4)
group.characterActive.push(true)
}
}
setDatabase(db)
}
}
export function rmCharFromGroup(index:number){
let db = get(DataBase)
let selectedId = get(selectedCharID)
let group = db.characters[selectedId]
if(group.type === 'group'){
group.characters.splice(index, 1)
group.characterTalks.splice(index, 1)
group.characterActive.splice(index, 1)
setDatabase(db)
}
}
/** One group member's entry in the reply-ordering computation. */
export type GroupOrder = {
    id: string,       // character id (resolvable via findCharacterbyId)
    talkness: number, // relative speak weight; callers pass -1 when the member is inactive
    index: number     // position within the group's `characters` array
}
/**
 * Decides which group members respond, and in what order, to the latest
 * message.
 *
 * Members whose names appear as words in `input` reply first, in mention
 * order. The remaining members are shuffled and each joins with a fixed
 * 50% chance (weighting by `talkness` is still TODO). At least one member
 * is always returned (unless `chars` is empty).
 *
 * @param chars group members with their talkness weights and indices
 * @param input the text of the last chat message (may be undefined/empty)
 * @returns the ordered subset of `chars` that should reply
 */
export function groupOrder(chars:GroupOrder[], input:string):GroupOrder[] {
    const order:GroupOrder[] = [];
    if (chars.length === 0) {
        // Nothing to order; also prevents the fallback below from
        // pushing `undefined` out of an empty array.
        return order
    }

    // Pass 1: members explicitly named in the input reply first.
    if (input) {
        const words = getWords(input)
        for (const word of words) {
            for (const char of chars) {
                const charNameChunks = getWords(findCharacterbyId(char.id).name)
                // Skip members already queued so a name mentioned twice
                // (or a multi-word name) doesn't produce duplicate replies.
                if (charNameChunks.includes(word) && !order.includes(char)) {
                    order.push(char);
                    break;
                }
            }
        }
    }

    // Pass 2: remaining members join in shuffled order with a fixed chance.
    const shuffled = shuffle(chars)
    for (const char of shuffled) {
        if(order.includes(char)){
            continue
        }
        //TODO: derive chance from char.talkness instead of a constant
        const chance = 0.5
        if (chance >= Math.random()) {
            order.push(char);
        }
    }

    // Guarantee at least one responder.
    if (order.length === 0) {
        order.push(chars[Math.floor(Math.random() * chars.length)]);
    }
    return order;
}
/**
 * Splits `data` into lowercase word tokens for name matching.
 * @param data arbitrary text (a chat message or a character name)
 * @returns every \w+ run in `data`, lowercased; empty array when none
 */
function getWords(data:string){
    const matches = data.match(/\b\w+\b/gmi)
    // String.match returns null (not []) when nothing matches — e.g. an
    // empty or punctuation-only string — which would crash the for..of.
    if(!matches){
        return [] as string[]
    }
    return matches.map((m) => m.toLocaleLowerCase())
}

View File

@@ -13,6 +13,9 @@ import { exampleMessage } from "./exampleMessages";
import { sayTTS } from "./tts";
import { supaMemory } from "./supaMemory";
import { v4 } from "uuid";
import { cloneDeep } from "lodash";
import { groupOrder } from "./group";
import { getNameMaxTokens } from "./stringlize";
export interface OpenAIChat{
role: 'system'|'user'|'assistant'
@@ -23,7 +26,7 @@ export interface OpenAIChat{
export const doingChat = writable(false)
export async function sendChat(chatProcessIndex = -1):Promise<boolean> {
export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:number} = {}):Promise<boolean> {
let findCharCache:{[key:string]:character} = {}
function findCharacterbyIdwithCache(id:string){
@@ -55,11 +58,40 @@ export async function sendChat(chatProcessIndex = -1):Promise<boolean> {
let selectedChar = get(selectedCharID)
const nowChatroom = db.characters[selectedChar]
let currentChar:character
let caculatedChatTokens = 0
if(db.aiModel.startsWith('gpt')){
caculatedChatTokens += 5
}
else{
caculatedChatTokens += 3
}
if(nowChatroom.type === 'group'){
if(chatProcessIndex === -1){
for(let i=0;i<nowChatroom.characters.length;i++){
const r = await sendChat(i)
const charNames =nowChatroom.characters.map((v) => findCharacterbyIdwithCache(v).name)
caculatedChatTokens += await getNameMaxTokens([...charNames, db.username])
const messages = nowChatroom.chats[nowChatroom.chatPage].message
const lastMessage = messages[messages.length-1]
let order = nowChatroom.characters.map((v,i) => {
return {
id: v,
talkness: nowChatroom.characterActive[i] ? nowChatroom.characterTalks[i] : -1,
index: i
}
})
if(!nowChatroom.orderByOrder){
order = groupOrder(order, lastMessage?.data).filter((v) => {
if(v.id === lastMessage?.saying){
return false
}
return true
})
}
for(let i=0;i<order.length;i++){
const r = await sendChat(order[i].index, {
chatAdditonalTokens: caculatedChatTokens
})
if(!r){
return false
}
@@ -76,7 +108,13 @@ export async function sendChat(chatProcessIndex = -1):Promise<boolean> {
}
else{
currentChar = nowChatroom
if(!db.aiModel.startsWith('gpt')){
caculatedChatTokens += await getNameMaxTokens([currentChar.name, db.username])
}
}
let chatAdditonalTokens = arg.chatAdditonalTokens ?? caculatedChatTokens
let selectedChat = nowChatroom.chatPage
let currentChat = nowChatroom.chats[selectedChat]
@@ -103,6 +141,7 @@ export async function sendChat(chatProcessIndex = -1):Promise<boolean> {
'authorNote':([] as OpenAIChat[]),
'lastChat':([] as OpenAIChat[]),
'description':([] as OpenAIChat[]),
'postEverything':([] as OpenAIChat[]),
}
if(!currentChar.utilityBot){
@@ -149,6 +188,13 @@ export async function sendChat(chatProcessIndex = -1):Promise<boolean> {
content: description
})
if(nowChatroom.type === 'group'){
const systemMsg = `[Write the next reply only as ${currentChar.name}]`
unformated.postEverything.push({
role: 'system',
content: systemMsg
})
}
}
unformated.lorebook.push({
@@ -161,13 +207,13 @@ export async function sendChat(chatProcessIndex = -1):Promise<boolean> {
return (unformated[key] as OpenAIChat[]).map((d) => {
return d.content
}).join('\n\n')
}).join('\n\n')) + db.maxResponse) + 150
}).join('\n\n')) + db.maxResponse) + 100
const examples = exampleMessage(currentChar)
for(const example of examples){
currentTokens += await tokenize(example.content) + 5
currentTokens += await tokenize(example.content) + chatAdditonalTokens
}
let chats:OpenAIChat[] = examples
@@ -217,20 +263,11 @@ export async function sendChat(chatProcessIndex = -1):Promise<boolean> {
memo: msg.chatId,
name: name
})
currentTokens += (await tokenize(formedChat) + 5)
}
if(nowChatroom.type === 'group'){
const systemMsg = `[Write the next reply only as ${currentChar.name}]`
chats.push({
role: 'system',
content: systemMsg
})
currentTokens += (await tokenize(systemMsg) + 5)
currentTokens += (await tokenize(formedChat) + chatAdditonalTokens)
}
if(nowChatroom.supaMemory && db.supaMemoryType !== 'none'){
const sp = await supaMemory(chats, currentTokens, maxContextTokens, currentChat, nowChatroom)
const sp = await supaMemory(chats, currentTokens, maxContextTokens, currentChat, nowChatroom, chatAdditonalTokens)
if(sp.error){
alertError(sp.error)
return false
@@ -248,11 +285,10 @@ export async function sendChat(chatProcessIndex = -1):Promise<boolean> {
return false
}
currentTokens -= (await tokenize(chats[0].content) + 5)
currentTokens -= (await tokenize(chats[0].content) + chatAdditonalTokens)
chats.splice(0, 1)
}
currentChat.lastMemory = chats[0].memo
console.log(currentChat.lastMemory)
}
let bias:{[key:number]:number} = {}
@@ -283,7 +319,8 @@ export async function sendChat(chatProcessIndex = -1):Promise<boolean> {
//make into one
let formated:OpenAIChat[] = []
const formatOrder = db.formatingOrder
const formatOrder = cloneDeep(db.formatingOrder)
formatOrder.push('postEverything')
let sysPrompts:string[] = []
for(let i=0;i<formatOrder.length;i++){
const cha = unformated[formatOrder[i]]
@@ -443,7 +480,6 @@ export async function sendChat(chatProcessIndex = -1):Promise<boolean> {
},
]
console.log('requesting chat')
const rq = await requestChatData({
formated: promptbody,
bias: emobias,

View File

@@ -60,7 +60,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
case 'gpt4_32k':{
for(let i=0;i<formated.length;i++){
if(arg.isGroupChat){
if(arg.isGroupChat && formated[i].name){
formated[i].content = formated[i].name + ": " + formated[i].content
}
formated[i].name = undefined

View File

@@ -1,4 +1,5 @@
import type { OpenAIChat } from ".";
import { tokenize } from "../tokenizer";
export function multiChatReplacer(){
@@ -52,4 +53,15 @@ export function unstringlizeChat(text:string, formated:OpenAIChat[], char:string
}
return text
}
/**
 * Returns the largest token count among `names` when each is rendered as
 * a `"name: "` chat-line prefix, plus one token of slack.
 */
export async function getNameMaxTokens(names:string[]){
    let widest = 0
    for(const name of names){
        const cost = await tokenize(`${name}: `) + 1
        widest = Math.max(widest, cost)
    }
    return widest
}

View File

@@ -5,9 +5,17 @@ import { tokenize } from "../tokenizer";
import { findCharacterbyId } from "../util";
import { requestChatData } from "./request";
export async function supaMemory(chats:OpenAIChat[],currentTokens:number,maxContextTokens:number,room:Chat,char:character|groupChat): Promise<{ currentTokens: number; chats: OpenAIChat[]; error?:string; memory?:string;lastId?:string}>{
export async function supaMemory(
chats:OpenAIChat[],
currentTokens:number,
maxContextTokens:number,
room:Chat,
char:character|groupChat,
chatAdditonalTokens:number
): Promise<{ currentTokens: number; chats: OpenAIChat[]; error?:string; memory?:string;lastId?:string}>{
const db = get(DataBase)
console.log("Memory: " + currentTokens)
currentTokens += 10
if(currentTokens > maxContextTokens){
let coIndex = -1
@@ -19,7 +27,7 @@ export async function supaMemory(chats:OpenAIChat[],currentTokens:number,maxCont
}
if(coIndex !== -1){
for(let i=0;i<coIndex;i++){
currentTokens -= (await tokenize(chats[0].content) + 1)
currentTokens -= (await tokenize(chats[0].content) + chatAdditonalTokens)
chats.splice(0, 1)
}
}
@@ -45,13 +53,13 @@ export async function supaMemory(chats:OpenAIChat[],currentTokens:number,maxCont
lastId = id
break
}
currentTokens -= (await tokenize(chats[0].content) + 1)
currentTokens -= (await tokenize(chats[0].content) + chatAdditonalTokens)
chats.splice(0, 1)
i += 1
}
supaMemory = data
currentTokens += await tokenize(supaMemory) + 1
currentTokens += await tokenize(supaMemory) + chatAdditonalTokens
}
@@ -171,7 +179,7 @@ export async function supaMemory(chats:OpenAIChat[],currentTokens:number,maxCont
}
continue
}
const tokens = await tokenize(cont.content) + 5
const tokens = await tokenize(cont.content) + chatAdditonalTokens
if((chunkSize + tokens) > maxChunkSize){
if(stringlizedChat === ''){
stringlizedChat += `${cont.role === 'assistant' ? char.type === 'group' ? '' : char.name : db.username}: ${cont.content}\n\n`
@@ -193,7 +201,7 @@ export async function supaMemory(chats:OpenAIChat[],currentTokens:number,maxCont
return result
}
const tokenz = await tokenize(result + '\n\n') + 5
const tokenz = await tokenize(result + '\n\n') + chatAdditonalTokens
currentTokens += tokenz
supaMemory += result.replace(/\n+/g,'\n') + '\n\n'

View File

@@ -353,6 +353,8 @@ export interface groupChat{
name:string
viewScreen: 'single'|'multiple'|'none'|'emp',
characters:string[]
characterTalks:number[]
characterActive:boolean[]
globalLore: loreBook[]
autoMode: boolean
useCharacterLore :boolean
@@ -367,6 +369,7 @@ export interface groupChat{
supaMemory?:boolean
ttsMode?:string
suggestMessages?:string[]
orderByOrder?:boolean
}
export interface botPreset{
@@ -522,7 +525,7 @@ interface sdConfig{
hr_upscaler:string
}
export type FormatingOrderItem = 'main'|'jailbreak'|'chats'|'lorebook'|'globalNote'|'authorNote'|'lastChat'|'description'
export type FormatingOrderItem = 'main'|'jailbreak'|'chats'|'lorebook'|'globalNote'|'authorNote'|'lastChat'|'description'|'postEverything'
export interface Chat{
message: Message[]