Fix streaming response handling in sendChat function

kwaroran
2024-01-13 10:41:12 +09:00
parent 88e88b9ce6
commit b402195e18
3 changed files with 31 additions and 11 deletions

View File

@@ -951,10 +951,15 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
             })
         }
         db.characters[selectedChar].chats[selectedChat].isStreaming = true
+        let lastResponseChunk:{[key:string]:string} = {}
         while(abortSignal.aborted === false){
             const readed = (await reader.read())
             if(readed.value){
-                result = readed.value
+                console.log(lastResponseChunk)
+                lastResponseChunk = readed.value
+                const firstChunkKey = Object.keys(lastResponseChunk)[0]
+                result = lastResponseChunk[firstChunkKey]
                 if(db.cipherChat){
                     result = decipherChat(result)
                 }
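
Note: after this change, each value read from the stream is no longer a plain string but a map from choice index to that choice's accumulated text. A minimal sketch of the shape (the chunk contents here are illustrative, not from the commit):

    // Hypothetical chunk as the reader now yields it: keys are
    // OpenAI choice indexes as strings, values are accumulated text.
    const chunk: {[key:string]:string} = {
        "0": "Hello! How can I",
        "1": "Hi there! What"
    }
    // The loop above shows the first choice while streaming:
    const firstChunkKey = Object.keys(chunk)[0]
    const result = chunk[firstChunkKey] // "Hello! How can I"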
@@ -972,6 +977,9 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
             }
         }
+        console.log(lastResponseChunk)
+        addRerolls(generationId, Object.values(lastResponseChunk))
         currentChat = db.characters[selectedChar].chats[selectedChat]
         const triggerResult = await runTrigger(currentChar, 'output', {chat:currentChat})
         if(triggerResult && triggerResult.chat){
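
Note: once the stream closes, every accumulated choice is handed to addRerolls (an app-internal helper), so the completions that were not displayed survive as reroll candidates. A sketch with illustrative data:

    // lastResponseChunk after the stream ends, e.g. with two choices requested:
    const lastResponseChunk: {[key:string]:string} = {
        "0": "First completion",
        "1": "Second completion"
    }
    // Integer-like string keys enumerate in ascending numeric order,
    // so the rerolls arrive in choice order:
    const rerolls = Object.values(lastResponseChunk) // ["First completion", "Second completion"]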

View File

@@ -158,7 +158,6 @@ export async function runLocalModel(prompt:string){
 export async function installPython(){
     const appDir = await path.appDataDir()
     const completedPath = await path.join(appDir, 'python', 'completed.txt')
-    console.log(await resolveResource('/src-python/'))
     if(await exists(completedPath)){
         alertMd("Python is already installed")
         return
@@ -176,7 +175,11 @@ export async function installPython(){
     await invoke('post_py_install', {
         path: appDir
     })
+    const srvPath = await resolveResource('/src-python/')
+    await invoke('run_py_server', {
+        path: srvPath
+    })
     alertClear()
 }
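
Note: the added lines resolve the bundled Python sources from the app's resource directory and pass the path to run_py_server, a command defined in this app's Rust backend; resolveResource and invoke are standard Tauri v1 APIs. The same pattern in isolation (the wrapper function name is illustrative):

    import { invoke } from '@tauri-apps/api/tauri'
    import { resolveResource } from '@tauri-apps/api/path'

    // Resolves a path bundled via tauri.conf.json "resources" to an
    // absolute path at runtime, then invokes the app-defined command.
    async function startBundledPyServer(): Promise<void> {
        const srvPath = await resolveResource('/src-python/')
        await invoke('run_py_server', { path: srvPath })
    }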

View File

@@ -49,7 +49,7 @@ type requestDataResponse = {
     }
 }|{
     type: "streaming",
-    result: ReadableStream<string>,
+    result: ReadableStream<StreamResponseChunk>,
     special?: {
         emotion?: string
     }
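
Note: with the union's streaming arm retyped, callers narrow on type before reading, and each read now yields a keyed chunk rather than a string. A short sketch of a conforming consumer (the local names are illustrative):

    async function consume(res: requestDataResponse): Promise<void> {
        if(res.type === 'streaming'){
            const reader = res.result.getReader()
            const readed = await reader.read()
            // readed.value, when present, maps choice index -> accumulated text
        }
    }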
@@ -61,6 +61,8 @@ type requestDataResponse = {
     }
 }
+
+interface StreamResponseChunk{[key:string]:string}
 interface OaiFunctions {
     name: string;
     description: string;
@@ -503,7 +505,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
         body.n = db.genTime
     }
     let throughProxi = (!isTauri) && (!isNodeServer) && (!db.usePlainFetch) && (!Capacitor.isNativePlatform())
-    if(db.useStreaming && arg.useStreaming && (!multiGen)){
+    if(db.useStreaming && arg.useStreaming){
         body.stream = true
         let urlHost = new URL(replacerURL).host
         if(urlHost.includes("localhost") || urlHost.includes("127.0.0.1") || urlHost.includes("0.0.0.0")){
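
Note: dropping (!multiGen) enables streaming even when several completions are requested at once; the per-index accumulation below is what makes that safe. A sketch of the resulting request body for an OpenAI-compatible endpoint (field values illustrative):

    const body = {
        model: 'gpt-3.5-turbo',
        messages: [{ role: 'user', content: 'Hello' }],
        n: 2,          // two choices; their deltas stream interleaved, tagged by "index"
        stream: true
    }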
@@ -556,12 +558,12 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
         let dataUint = new Uint8Array([])
-        const transtream = new TransformStream<Uint8Array, string>( {
+        const transtream = new TransformStream<Uint8Array, StreamResponseChunk>( {
             async transform(chunk, control) {
                 dataUint = Buffer.from(new Uint8Array([...dataUint, ...chunk]))
                 try {
                     const datas = dataUint.toString().split('\n')
-                    let readed = ''
+                    let readed:{[key:string]:string} = {}
                     for(const data of datas){
                         if(data.startsWith("data: ")){
                             try {
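
Note: the TransformStream's output type parameter changes in step, so everything transform() enqueues must now be a StreamResponseChunk. The generic contract in miniature:

    // TransformStream<Input, Output>: transform() consumes Input chunks
    // and enqueues Output chunks downstream.
    const ts = new TransformStream<Uint8Array, {[key:string]:string}>({
        transform(chunk, controller){
            controller.enqueue({ "0": new TextDecoder().decode(chunk) })
        }
    })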
@@ -570,9 +572,16 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
                                 control.enqueue(readed)
                                 return
                             }
-                            const chunk = JSON.parse(rawChunk).choices[0].delta.content
-                            if(chunk){
-                                readed += chunk
+                            const choices = JSON.parse(rawChunk).choices
+                            for(const choice of choices){
+                                const chunk = choice.delta.content
+                                const ind = choice.index.toString()
+                                if(chunk){
+                                    if(!readed[ind]){
+                                        readed[ind] = ""
+                                    }
+                                    readed[ind] += chunk
+                                }
+                            }
                         } catch (error) {}
                     }
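
Note: the parser now fans each SSE event out by choice index instead of concatenating everything into a single string. What one buffered read produces, with illustrative OpenAI-style payloads:

    // Buffered SSE lines, one event per choice:
    //   data: {"choices":[{"index":0,"delta":{"content":"Hel"}}]}
    //   data: {"choices":[{"index":1,"delta":{"content":"Hi"}}]}
    // After the loop above, the value enqueued downstream is:
    const readed: {[key:string]:string} = { "0": "Hel", "1": "Hi" }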