Fix streaming response handling in sendChat function

@@ -951,10 +951,15 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
             })
         }
         db.characters[selectedChar].chats[selectedChat].isStreaming = true
+        let lastResponseChunk:{[key:string]:string} = {}
         while(abortSignal.aborted === false){
            const readed = (await reader.read())
            if(readed.value){
-                result = readed.value
+                console.log(lastResponseChunk)
+
+                lastResponseChunk = readed.value
+                const firstChunkKey = Object.keys(lastResponseChunk)[0]
+                result = lastResponseChunk[firstChunkKey]
                if(db.cipherChat){
                    result = decipherChat(result)
                }
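
Note: this hunk switches sendChat from reading plain string chunks to keyed
chunk records, where each read yields an object mapping a choice index to its
accumulated text and the first key is treated as the primary response. A
minimal consumer sketch under that assumption (the helper name is
illustrative; the chunk shape comes from the StreamResponseChunk interface
added later in this commit):

    // Sketch: drain a keyed stream and return the primary (first-keyed) choice.
    async function readPrimaryChoice(
        reader: ReadableStreamDefaultReader<{[key:string]:string}>,
        abortSignal: AbortSignal
    ): Promise<string> {
        let lastResponseChunk:{[key:string]:string} = {}
        while(!abortSignal.aborted){
            const readed = await reader.read()
            if(readed.value){
                // each chunk already carries the accumulated text per choice
                lastResponseChunk = readed.value
            }
            if(readed.done){
                break
            }
        }
        const firstChunkKey = Object.keys(lastResponseChunk)[0]
        return lastResponseChunk[firstChunkKey] ?? ''
    }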

@@ -972,6 +977,9 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
            }
        }
 
+        console.log(lastResponseChunk)
+        addRerolls(generationId, Object.values(lastResponseChunk))
+
        currentChat = db.characters[selectedChar].chats[selectedChat]
        const triggerResult = await runTrigger(currentChar, 'output', {chat:currentChat})
        if(triggerResult && triggerResult.chat){
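
Note: this hunk runs once the stream has ended. Every choice collected in
lastResponseChunk is handed to addRerolls, so the non-primary generations
become reroll candidates. Illustrative values only:

    // With two streamed choices the final chunk might look like this;
    // Object.values() passes both texts to addRerolls.
    const lastResponseChunk:{[key:string]:string} = {
        "0": "Hello there.",        // hypothetical choice 0
        "1": "Hi! Good to see you." // hypothetical choice 1
    }
    const rerollCandidates = Object.values(lastResponseChunk)
    // ["Hello there.", "Hi! Good to see you."]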

@@ -158,7 +158,6 @@ export async function runLocalModel(prompt:string){
 export async function installPython(){
     const appDir = await path.appDataDir()
     const completedPath = await path.join(appDir, 'python', 'completed.txt')
-    console.log(await resolveResource('/src-python/'))
     if(await exists(completedPath)){
         alertMd("Python is already installed")
         return

@@ -176,7 +175,11 @@ export async function installPython(){
     await invoke('post_py_install', {
         path: appDir
     })
+    const srvPath = await resolveResource('/src-python/')
+    await invoke('run_py_server', {
+        path: srvPath
+    })
 
     alertClear()
 
 }
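
Note: these two hunks drop a leftover debug log and make installPython start
the bundled Python server right after post-install. A standalone sketch of
that flow, assuming Tauri v1 imports and a run_py_server command registered
on the Rust side (mirroring the existing post_py_install command):

    import { invoke } from '@tauri-apps/api/tauri'
    import { resolveResource } from '@tauri-apps/api/path'

    // Resolve the bundled Python sources, then ask the Rust side to run the server.
    async function startPyServer(): Promise<void> {
        const srvPath = await resolveResource('/src-python/')
        await invoke('run_py_server', { path: srvPath })
    }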

@@ -49,7 +49,7 @@ type requestDataResponse = {
     }
 }|{
     type: "streaming",
-    result: ReadableStream<string>,
+    result: ReadableStream<StreamResponseChunk>,
     special?: {
         emotion?: string
     }

@@ -61,6 +61,8 @@ type requestDataResponse = {
     }
 }
 
+interface StreamResponseChunk{[key:string]:string}
+
 interface OaiFunctions {
     name: string;
     description: string;
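
Note: the streaming arm of requestDataResponse now carries
ReadableStream<StreamResponseChunk> instead of ReadableStream<string>, with
the new interface keyed by choice index. A narrowing sketch; the "success"
arm below is an assumed stand-in for the union members the hunks do not show:

    interface StreamResponseChunk{[key:string]:string}

    type requestDataResponse = {
        type: "success",   // assumed stand-in for the non-streaming arms
        result: string
    }|{
        type: "streaming",
        result: ReadableStream<StreamResponseChunk>
    }

    function handleResponse(res: requestDataResponse){
        if(res.type === "streaming"){
            // res.result now streams keyed chunks, one entry per choice index
            const reader = res.result.getReader()
            void reader
        }
    }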

@@ -503,7 +505,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
         body.n = db.genTime
     }
     let throughProxi = (!isTauri) && (!isNodeServer) && (!db.usePlainFetch) && (!Capacitor.isNativePlatform())
-    if(db.useStreaming && arg.useStreaming && (!multiGen)){
+    if(db.useStreaming && arg.useStreaming){
         body.stream = true
         let urlHost = new URL(replacerURL).host
         if(urlHost.includes("localhost") || urlHost.includes("172.0.0.1") || urlHost.includes("0.0.0.0")){
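
Note: dropping (!multiGen) from the guard lets streaming and multi-generation
coexist, since the transform below keeps the choices apart by index. An
illustrative request body only (the model name is hypothetical):

    // body.n comes from db.genTime (set just above this hunk); under the old
    // guard, n > 1 forced streaming off.
    const body: Record<string, unknown> = {
        model: "some-chat-model", // hypothetical
        n: 2,                     // multi-generation
        stream: true              // now allowed alongside n > 1
    }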

@@ -556,12 +558,12 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
 
         let dataUint = new Uint8Array([])
 
-        const transtream = new TransformStream<Uint8Array, string>( {
+        const transtream = new TransformStream<Uint8Array, StreamResponseChunk>( {
             async transform(chunk, control) {
                 dataUint = Buffer.from(new Uint8Array([...dataUint, ...chunk]))
                 try {
                     const datas = dataUint.toString().split('\n')
-                    let readed = ''
+                    let readed:{[key:string]:string} = {}
                     for(const data of datas){
                         if(data.startsWith("data: ")){
                             try {

@@ -570,9 +572,16 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
                                     control.enqueue(readed)
                                     return
                                 }
-                                const chunk = JSON.parse(rawChunk).choices[0].delta.content
-                                if(chunk){
-                                    readed += chunk
+                                const choices = JSON.parse(rawChunk).choices
+                                for(const choice of choices){
+                                    const chunk = choice.delta.content
+                                    const ind = choice.index.toString()
+                                    if(chunk){
+                                        if(!readed[ind]){
+                                            readed[ind] = ""
+                                        }
+                                        readed[ind] += chunk
+                                    }
                                 }
                             } catch (error) {}
                         }
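
Note: the last two hunks retype the TransformStream and accumulate text per
choice instead of concatenating everything into one string: each "data: "
line may carry several choices, and every delta is appended under its
choice.index. A self-contained sketch of that accumulation, assuming
OpenAI-style SSE payloads ("[DONE]" handling and byte buffering are
simplified away):

    interface StreamResponseChunk{[key:string]:string}

    // Fold one batch of SSE lines into the per-choice accumulator.
    function accumulateSse(lines: string[], readed: StreamResponseChunk): void {
        for(const data of lines){
            if(!data.startsWith("data: ")){
                continue
            }
            try {
                const rawChunk = data.replace("data: ", "")
                const choices = JSON.parse(rawChunk).choices
                for(const choice of choices){
                    const chunk = choice.delta.content
                    const ind = choice.index.toString()
                    if(chunk){
                        readed[ind] = (readed[ind] ?? "") + chunk
                    }
                }
            } catch (error) { /* partial JSON: wait for more bytes */ }
        }
    }

With n = 2, interleaved deltas such as {index: 0, delta: {content: "Hel"}} and
{index: 1, delta: {content: "Hi"}} accumulate to {"0": "Hel", "1": "Hi"}.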