[feat] models base info

This commit is contained in:
kwaroran
2023-11-17 23:00:46 +09:00
parent 7359ed6302
commit d69c8eb6b6
3 changed files with 38 additions and 3 deletions

View File

@@ -20,6 +20,7 @@ import { HypaProcesser } from "./memory/hypamemory";
import { additionalInformations } from "./embedding/addinfo";
import { cipherChat, decipherChat } from "./cipherChat";
import { getInlayImage, supportsInlayImage } from "../image";
import { getGenerationModelString } from "./models/modelString";
export interface OpenAIChat{
role: 'system'|'user'|'assistant'|'function'
@@ -861,6 +862,8 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
if(abortSignal.aborted === true){
return false
}
const generationId = v4()
const generationModel = getGenerationModelString()
if(req.type === 'fail'){
alertError(req.result)
return false
@@ -878,7 +881,11 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
role: 'char',
data: "",
saying: currentChar.chaId,
time: Date.now()
time: Date.now(),
generationInfo: {
model: generationModel,
generationId: generationId,
}
})
}
db.characters[selectedChar].chats[selectedChat].isStreaming = true
@@ -936,7 +943,11 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
role: 'char',
data: result,
saying: currentChar.chaId,
time: Date.now()
time: Date.now(),
generationInfo: {
model: generationModel,
generationId: generationId,
}
}
}
else{
@@ -944,7 +955,11 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
role: msg[0],
data: result,
saying: currentChar.chaId,
time: Date.now()
time: Date.now(),
generationInfo: {
model: generationModel,
generationId: generationId,
}
})
}
db.characters[selectedChar].reloadKeys += 1

View File

@@ -0,0 +1,14 @@
import { DataBase } from "src/ts/storage/database";
import { get } from "svelte/store";
/**
 * Builds a human-readable identifier string for the model currently selected
 * in the database, for recording in a message's generation metadata.
 *
 * For proxied backends the base model name alone is ambiguous, so the proxy
 * kind is prefixed:
 * - `reverse_proxy`: `"reverse_proxy-ooba"` when ooba mode is enabled,
 *   otherwise `"reverse_proxy-<proxyRequestModel>"`.
 * - `openrouter`: `"openrouter-<openrouterRequestModel>"`.
 * - any other backend: the raw `db.aiModel` value.
 *
 * @returns the model identifier string described above.
 */
export function getGenerationModelString(): string {
    const db = get(DataBase)
    switch (db.aiModel){
        case 'reverse_proxy':
            // In ooba mode the proxy target model is not tracked in
            // proxyRequestModel, so a fixed 'ooba' tag is used instead.
            return `reverse_proxy-${db.reverseProxyOobaMode ? 'ooba' : db.proxyRequestModel}`
        case 'openrouter':
            return `openrouter-${db.openrouterRequestModel}`
        default:
            return db.aiModel
    }
}

View File

@@ -734,6 +734,12 @@ export interface Message{
saying?: string
chatId?:string
time?: number
generationInfo?: MessageGenerationInfo
}
/**
 * Metadata recorded on a message about how it was generated.
 */
export interface MessageGenerationInfo{
// Identifier of the model that produced the message
// (e.g. "openrouter-<model>"); presumably the value of
// getGenerationModelString() at send time — confirm against sendChat.
model?: string
// Unique id for this generation attempt; looks like a UUID (v4) created
// per request — NOTE(review): confirm against the v4() call in sendChat.
generationId?: string
}
interface AINsettings{