Fix nativeFetch and comfy

Kwaroran
2024-12-25 21:06:52 +09:00
parent 1c51afc626
commit 28d8bfdd22
4 changed files with 67 additions and 39 deletions

View File

@@ -1821,20 +1821,18 @@ const pipeFetchLog = (fetchLogIndex: number, readableStream: ReadableStream<Uint
  * @throws {Error} - Throws an error if the request is aborted or if there is an error in the response.
  */
 export async function fetchNative(url:string, arg:{
-    body:string|Uint8Array|ArrayBuffer,
+    body?:string|Uint8Array|ArrayBuffer,
     headers?:{[key:string]:string},
     method?:"POST"|"GET"|"PUT"|"DELETE",
     signal?:AbortSignal,
     useRisuTk?:boolean,
     chatId?:string
-}):Promise<{
-    body: ReadableStream<Uint8Array>;
-    headers: Headers;
-    status: number;
-    json: () => Promise<any>;
-    text: () => Promise<string>;
-    arrayBuffer: () => Promise<ArrayBuffer>;
-}> {
+}):Promise<Response> {
+    console.log(arg.body,'body')
+    if(arg.body === undefined && (arg.method === 'POST' || arg.method === 'PUT') ){
+        throw new Error('Body is required for POST and PUT requests')
+    }
     const jsonizer = (body:ReadableStream<Uint8Array>) => {
         return async () => {
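
With body now optional and the return type widened to the platform Response, a GET or DELETE call no longer needs a placeholder body, and callers read the result with the standard Response helpers. A minimal usage sketch against this signature (the endpoint and headers below are illustrative, not taken from the commit):

    // Usage sketch only; the URL and headers are placeholders.
    // GET and DELETE may omit body; POST/PUT without a body now throws.
    const res = await fetchNative('https://example.com/api/models', {
        method: 'GET',
        headers: { 'Accept': 'application/json' },
    })
    // res is a standard Response, so json()/text()/arrayBuffer() are built in.
    const data = await res.json()
    console.log(res.status, data)
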
@@ -1876,9 +1874,9 @@ export async function fetchNative(url:string, arg:{
     let realBody:Uint8Array
     if(arg.method === 'GET' || arg.method === 'DELETE'){
-        realBody = new Uint8Array(0)
+        realBody = undefined
     }
-    if(typeof arg.body === 'string'){
+    else if(typeof arg.body === 'string'){
         realBody = new TextEncoder().encode(arg.body)
     }
     else if(arg.body instanceof Uint8Array){
@@ -1990,18 +1988,15 @@ export async function fetchNative(url:string, arg:{
             throw new Error(error)
         }
-        return {
-            body: readableStream,
+        return new Response(readableStream, {
             headers: new Headers(resHeaders),
-            status: status,
-            json: jsonizer(readableStream),
-            text: textizer(readableStream),
-            arrayBuffer: arrayBufferizer(readableStream)
-        }
+            status: status
+        })
     }
     else if(throughProxy){
         const r = await fetch(hubURL + `/proxy2`, {
             body: realBody,
             headers: arg.useRisuTk ? {
@@ -2018,14 +2013,10 @@ export async function fetchNative(url:string, arg:{
             signal: arg.signal
         })
-        return {
-            body: pipeFetchLog(fetchLogIndex, r.body),
+        return new Response(r.body, {
             headers: r.headers,
-            status: r.status,
-            json: jsonizer(r.body),
-            text: textizer(r.body),
-            arrayBuffer: arrayBufferizer(r.body)
-        }
+            status: r.status
+        })
     }
     else{
         return await fetch(url, {
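
The hand-rolled jsonizer/textizer/arrayBufferizer wrappers are no longer needed because the Response constructor already provides those readers when given a ReadableStream. A standalone sketch of the pattern (the payload and header are illustrative):

    // Sketch: a Response built from a ReadableStream exposes the usual readers.
    const stream = new Blob([JSON.stringify({ ok: true })]).stream()
    const wrapped = new Response(stream, {
        headers: new Headers({ 'Content-Type': 'application/json' }),
        status: 200,
    })
    console.log(await wrapped.json()) // { ok: true }
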

View File

@@ -2,7 +2,7 @@ import { get } from "svelte/store"
 import { getDatabase, type character } from "../storage/database.svelte"
 import { requestChatData } from "./request"
 import { alertError } from "../alert"
-import { globalFetch, readImage } from "../globalApi.svelte"
+import { fetchNative, globalFetch, readImage } from "../globalApi.svelte"
 import { CharEmotion } from "../stores.svelte"
 import type { OpenAIChat } from "./index.svelte"
 import { processZip } from "./processzip"
@@ -415,12 +415,14 @@ export async function generateAIImage(genPrompt:string, currentChar:character, n
     }
-    if(db.sdProvider === 'comfy'){
+    if(db.sdProvider === 'comfy' || db.sdProvider === 'comfyui'){
+        const legacy = db.sdProvider === 'comfy' // Legacy Comfy mode
         const {workflow, posNodeID, posInputName, negNodeID, negInputName} = db.comfyConfig
         const baseUrl = new URL(db.comfyUiUrl)
         const createUrl = (pathname: string, params: Record<string, string> = {}) => {
-            const url = new URL(pathname, baseUrl)
+            const url = db.comfyUiUrl.endsWith('/api') ? new URL(`${db.comfyUiUrl}${pathname}`) : new URL(pathname, baseUrl)
             url.search = new URLSearchParams(params).toString()
             return url.toString()
         }
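
The endsWith('/api') branch exists because new URL(pathname, base) replaces the base URL's path when pathname starts with a slash, which would silently drop an /api prefix configured in db.comfyUiUrl. A small sketch of the two cases (the ComfyUI addresses are placeholders):

    // Sketch of both createUrl branches; the addresses are placeholders.
    new URL('/prompt', 'http://127.0.0.1:8188/api').toString()
    // -> 'http://127.0.0.1:8188/prompt'      (the '/api' path is discarded)
    new URL('http://127.0.0.1:8188/api' + '/prompt').toString()
    // -> 'http://127.0.0.1:8188/api/prompt'  (the prefix is kept)
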
@@ -437,8 +439,27 @@ export async function generateAIImage(genPrompt:string, currentChar:character, n
         try {
             const prompt = JSON.parse(workflow)
-            prompt[posNodeID].inputs[posInputName] = genPrompt
-            prompt[negNodeID].inputs[negInputName] = neg
+            if(legacy){
+                prompt[posNodeID].inputs[posInputName] = genPrompt
+                prompt[negNodeID].inputs[negInputName] = neg
+            }
+            else{
+                //search all nodes for the prompt and negative prompt
+                const keys = Object.keys(prompt)
+                for(let i = 0; i < keys.length; i++){
+                    const node = prompt[keys[i]]
+                    const inputKeys = Object.keys(node.inputs)
+                    for(let j = 0; j < inputKeys.length; j++){
+                        let input = node.inputs[inputKeys[j]]
+                        if(typeof input === 'string'){
+                            input = input.replaceAll('{{risu_prompt}}', genPrompt)
+                            input = input.replaceAll('{{risu_neg}}', neg)
+                        }
+                        node.inputs[inputKeys[j]] = input
+                    }
+                }
+            }
             const { prompt_id: id } = await fetchWrapper(createUrl('/prompt'), {
                 method: 'POST',
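
In the non-legacy branch the node ids are not configured; instead every string input of every node is scanned for the {{risu_prompt}} and {{risu_neg}} placeholders. A minimal workflow shape the loop above would rewrite (node ids, class types and input names are illustrative):

    // Sketch of a workflow fragment the placeholder scan would rewrite.
    const workflow = {
        "6": { class_type: "CLIPTextEncode", inputs: { text: "{{risu_prompt}}" } },
        "7": { class_type: "CLIPTextEncode", inputs: { text: "{{risu_neg}}" } },
    }
    // After the scan, node "6" carries the generated prompt and node "7" the negative prompt.
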
@@ -451,9 +472,10 @@ export async function generateAIImage(genPrompt:string, currentChar:character, n
             const startTime = Date.now()
             const timeout = db.comfyConfig.timeout * 1000
-            while (!(item = (await (await fetch(createUrl('/history'), {
+            while (!(item = (await (await fetchNative(createUrl('/history'), {
                 headers: { 'Content-Type': 'application/json' },
-                method: 'GET'})).json())[id])) {
+                method: 'GET'
+            })).json())[id])) {
                 console.log("Checking /history...")
                 if (Date.now() - startTime >= timeout) {
                     alertError("Error: Image generation took longer than expected.");
@@ -463,13 +485,14 @@ export async function generateAIImage(genPrompt:string, currentChar:character, n
             } // Check history until the generation is complete.
             const genImgInfo = Object.values(item.outputs).flatMap((output: any) => output.images)[0];
-            const imgResponse = await fetch(createUrl('/view', {
+            const imgResponse = await fetchNative(createUrl('/view', {
                 filename: genImgInfo.filename,
                 subfolder: genImgInfo.subfolder,
                 type: genImgInfo.type
             }), {
                 headers: { 'Content-Type': 'application/json' },
-                method: 'GET'})
+                method: 'GET'
+            })
             const img64 = Buffer.from(await imgResponse.arrayBuffer()).toString('base64')
             if(returnSdData === 'inlay'){
@@ -552,7 +575,6 @@ export async function generateAIImage(genPrompt:string, currentChar:character, n
         if(db.falModel === 'fal-ai/flux-pro'){
             delete body.enable_safety_checker
         }
-        console.log(body)
         const res = await globalFetch('https://fal.run/' + model, {
             headers: {
@@ -563,8 +585,6 @@ export async function generateAIImage(genPrompt:string, currentChar:character, n
             body: body
         })
-        console.log(res)
         if(!res.ok){
             alertError(JSON.stringify(res.data))
             return false