Merge branch 'main' of https://github.com/kwaroran/RisuAI
@@ -1181,7 +1181,9 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
db.characters[selectedChar].chats[selectedChat] = currentChat
setDatabase(db)
}
await sayTTS(currentChar, result)
if(db.ttsAutoSpeech){
await sayTTS(currentChar, result)
}
}
else{
const msgs = (req.type === 'success') ? [['char',req.result]] as const
@@ -1240,7 +1242,9 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
mrerolls.push(result)
}
db.characters[selectedChar].reloadKeys += 1
await sayTTS(currentChar, result)
if(db.ttsAutoSpeech){
await sayTTS(currentChar, result)
}
setDatabase(db)
}
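
Note: these two hunks appear to replace the unconditional await sayTTS(currentChar, result) call with one gated behind the new db.ttsAutoSpeech setting, so a finished reply is only spoken aloud when auto-speech is turned on. A minimal sketch of the resulting call site, assuming db, currentChar and result are in scope as above:

if (db.ttsAutoSpeech) {
    await sayTTS(currentChar, result)
}
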
@@ -235,12 +235,12 @@ export async function sayTTS(character:character,text:string) {
const audioContext = new AudioContext();

const audio: Uint8Array = await loadAsset(character.gptSoVitsConfig.ref_audio_data.assetId);
const base64Audio = btoa(new Uint8Array(audio).reduce((data, byte) => data + String.fromCharCode(byte), ''));
const base64Audio = btoa(new Uint8Array(audio).reduce((data, byte) => data + String.fromCharCode(byte), ''));

const body = {
text: text,
text_lang: character.gptSoVitsConfig.text_lang,
ref_audio_path: character.gptSoVitsConfig.ref_audio_path + '/public/audio/' + character.gptSoVitsConfig.ref_audio_data.fileName,
ref_audio_path: undefined,
ref_audio_name: character.gptSoVitsConfig.ref_audio_data.fileName,
ref_audio_data: base64Audio,
prompt_text: undefined,
@@ -250,18 +250,41 @@ export async function sayTTS(character:character,text:string) {
speed_factor: character.gptSoVitsConfig.speed,
top_k: character.gptSoVitsConfig.top_k,
text_split_method: character.gptSoVitsConfig.text_split_method,
parallel_infer: false,
parallel_infer: true,
// media_type: character.gptSoVitsConfig.ref_audio_data.fileName.split('.')[1],
ref_free: character.gptSoVitsConfig.use_long_audio || !character.gptSoVitsConfig.use_prompt,
}

if (character.gptSoVitsConfig.use_prompt){
body.prompt_text = character.gptSoVitsConfig.prompt
}

if (character.gptSoVitsConfig.use_auto_path){
console.log('auto')
const path = await globalFetch(`${character.gptSoVitsConfig.url}/get_path`, {
method: 'GET',
headers: {
'Content-Type': 'application/json'
},
rawResponse: false,

})
console.log(path)
if(path.ok){
body.ref_audio_path = path.data.message + '/public/audio/' + character.gptSoVitsConfig.ref_audio_data.fileName
}
else{
throw new Error('Failed to Auto get path')
}
} else {
body.ref_audio_path = character.gptSoVitsConfig.ref_audio_path + '/public/audio/' + character.gptSoVitsConfig.ref_audio_data.fileName
}
console.log(body)

const response = await globalFetch(`${character.gptSoVitsConfig.url}/tts`, {
method: 'POST',
headers: {
"Content-Type": "application/json",
'Content-Type': 'application/json'
},
body: body,
rawResponse: true,
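
Note: in sayTTS the reference audio is now shipped with the request itself: the stored asset is loaded, base64-encoded and sent as ref_audio_data together with ref_audio_name, while ref_audio_path is either resolved by asking the GPT-SoVITS server via GET /get_path (when use_auto_path is set) or built from the configured ref_audio_path as before; parallel_infer also flips from false to true. A rough, consolidated sketch of the body that ends up POSTed to the server's /tts endpoint; buildTtsBody, refAudioPath and the loose cfg typing are illustrative, not project code:

// Sketch only (hypothetical helper, not in the repo): approximate JSON body for POST /tts.
// refAudioPath is whatever GET /get_path returned (use_auto_path) or the configured
// ref_audio_path, suffixed with '/public/audio/' + the reference file name.
function buildTtsBody(cfg: any, text: string, refAudioPath: string, base64Audio: string) {
    return {
        text: text,
        text_lang: cfg.text_lang,
        ref_audio_path: refAudioPath,
        ref_audio_name: cfg.ref_audio_data.fileName,
        ref_audio_data: base64Audio,                      // base64 of the loaded reference asset
        prompt_text: cfg.use_prompt ? cfg.prompt : undefined,
        speed_factor: cfg.speed,
        top_k: cfg.top_k,
        text_split_method: cfg.text_split_method,
        parallel_infer: true,                             // was false before this change
        ref_free: cfg.use_long_audio || !cfg.use_prompt,
    }
}
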
@@ -430,7 +430,8 @@ export function setDatabase(data:Database){
}
data.hideApiKey ??= true
data.unformatQuotes ??= false

data.ttsAutoSpeech ??= false
data.translatorInputLanguage ??= 'auto'
changeLanguage(data.language)
DataBase.set(data)
}
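
Note: setDatabase backfills the two new settings with nullish assignment, so databases saved before this change pick up defaults without overwriting anything the user has already chosen. The pattern in isolation (a self-contained sketch, not project code):

// ??= assigns only when the current value is null or undefined.
const data: { ttsAutoSpeech?: boolean, translatorInputLanguage?: string } = {}
data.ttsAutoSpeech ??= false             // auto-speech stays off unless explicitly enabled
data.translatorInputLanguage ??= 'auto'  // source language defaults to automatic detection
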
@@ -510,6 +511,7 @@ export interface Database{
NAII2I:boolean
NAIREF:boolean
NAIImgConfig:NAIImgConfig
ttsAutoSpeech?:boolean
runpodKey:string
promptPreprocess:boolean
bias: [string, number][]
@@ -614,6 +616,7 @@ export interface Database{
emotionProcesser:'submodel'|'embedding',
showMenuChatList?:boolean,
translatorType:'google'|'deepl'|'none'|'llm'|'deeplX',
translatorInputLanguage?:string
NAIadventure?:boolean,
NAIappendName?:boolean,
deeplOptions:{
@@ -809,7 +812,9 @@ export interface character{
}
gptSoVitsConfig?:{
url?:string
use_auto_path?:boolean
ref_audio_path?:string
use_long_audio?:boolean
ref_audio_data?: {
fileName:string
assetId:string
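
Note: this per-character gptSoVitsConfig is what sayTTS above reads from. A purely illustrative value; every name, URL and id here is hypothetical, and only the fields visible in this hunk are shown:

const exampleGptSoVitsConfig = {
    url: 'http://127.0.0.1:9880',            // address of a locally running GPT-SoVITS server
    use_auto_path: true,                     // let sayTTS resolve the path via GET /get_path
    ref_audio_path: '',                      // unused when use_auto_path is set
    use_long_audio: false,
    ref_audio_data: {
        fileName: 'reference.wav',           // reference clip stored as a Risu asset
        assetId: 'example-asset-id',
    },
}
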
@@ -165,7 +165,7 @@ async function translateMain(text:string, arg:{from:string, to:string, host:stri
}


const url = `https://${arg.host}/translate_a/single?client=gtx&dt=t&sl=auto&tl=${arg.to}&q=` + encodeURIComponent(text)
const url = `https://${arg.host}/translate_a/single?client=gtx&dt=t&sl=${db.translatorInputLanguage}&tl=${arg.to}&q=` + encodeURIComponent(text)
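
Note: the source-language parameter (sl) of the Google translate request is now taken from the new translatorInputLanguage setting instead of being hard-coded to auto; since setDatabase above defaults it to 'auto', behaviour only changes when a user picks an explicit input language. A minimal sketch of the URL construction with placeholder values:

// All values below are placeholders; db.translatorInputLanguage defaults to 'auto'.
const host = 'translate.googleapis.com'
const inputLanguage = 'auto'   // stands in for db.translatorInputLanguage
const to = 'en'
const text = 'Bonjour'
const url = `https://${host}/translate_a/single?client=gtx&dt=t&sl=${inputLanguage}&tl=${to}&q=` + encodeURIComponent(text)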