Fix nativeFetch and comfy

This commit is contained in:
Kwaroran
2024-12-25 21:06:52 +09:00
parent 1c51afc626
commit 28d8bfdd22
4 changed files with 67 additions and 39 deletions

View File

@@ -173,6 +173,7 @@ export const languageEnglish = {
autoTranslateCachedOnly: "If enabled, it will automatically translate only the text that the user has translated previously.", autoTranslateCachedOnly: "If enabled, it will automatically translate only the text that the user has translated previously.",
presetChain: "If it is not blank, the preset will be changed and applied randomly every time when user sends a message in the preset list in this input. preset list should be separated by comma, for example, `preset1,preset2`.", presetChain: "If it is not blank, the preset will be changed and applied randomly every time when user sends a message in the preset list in this input. preset list should be separated by comma, for example, `preset1,preset2`.",
legacyMediaFindings: "If enabled, it will use the old method to find media assets, without using the additional search algorithm.", legacyMediaFindings: "If enabled, it will use the old method to find media assets, without using the additional search algorithm.",
comfyWorkflow: "Put the API workflow of comfy UI. you can get your API workflow in comfy UI by pressing the 'Workflow > Export (API)' button. you must also put {{risu_prompt}} in your workflow text. the {{risu_prompt}} will be replaced with the prompt provided by Risu.",
}, },
setup: { setup: {
chooseProvider: "Choose AI Provider", chooseProvider: "Choose AI Provider",

View File

@@ -77,8 +77,13 @@
<OptionInput value="novelai" >Novel AI</OptionInput> <OptionInput value="novelai" >Novel AI</OptionInput>
<OptionInput value="dalle" >Dall-E</OptionInput> <OptionInput value="dalle" >Dall-E</OptionInput>
<OptionInput value="stability" >Stability API</OptionInput> <OptionInput value="stability" >Stability API</OptionInput>
<OptionInput value="comfy" >ComfyUI</OptionInput>
<OptionInput value="fal" >Fal.ai</OptionInput> <OptionInput value="fal" >Fal.ai</OptionInput>
<OptionInput value="comfyui" >ComfyUI</OptionInput>
<!-- Legacy -->
{#if DBState.db.sdProvider === 'comfy'}
<OptionInput value="comfy" >ComfyUI (Legacy)</OptionInput>
{/if}
</SelectInput> </SelectInput>
{#if DBState.db.sdProvider === 'webui'} {#if DBState.db.sdProvider === 'webui'}
@@ -277,8 +282,19 @@
{/if} {/if}
{/if} {/if}
{#if DBState.db.sdProvider === 'comfyui'}
<span class="text-textcolor mt-2">ComfyUI {language.providerURL}</span>
<TextInput size="sm" marginBottom placeholder="http://127.0.0.1:8188" bind:value={DBState.db.comfyUiUrl}/>
<span class="text-textcolor">Workflow <Help key="comfyWorkflow" /></span>
<TextInput size="sm" marginBottom bind:value={DBState.db.comfyConfig.workflow}/>
<span class="text-textcolor">Timeout (sec)</span>
<NumberInput size="sm" marginBottom bind:value={DBState.db.comfyConfig.timeout} min={1} max={120} />
{/if}
{#if DBState.db.sdProvider === 'comfy'} {#if DBState.db.sdProvider === 'comfy'}
<span class="text-draculared text-xs mb-2">The first image generated by the prompt will be selected. </span> <span class="text-draculared text-xs mb-2">The first image generated by the prompt will be selected. </span>
{#if !isTauri} {#if !isTauri}
<span class="text-draculared text-xs mb-2">"Please run comfyUI with --enable-cors-header."</span> <span class="text-draculared text-xs mb-2">"Please run comfyUI with --enable-cors-header."</span>
{/if} {/if}

View File

@@ -1821,20 +1821,18 @@ const pipeFetchLog = (fetchLogIndex: number, readableStream: ReadableStream<Uint
* @throws {Error} - Throws an error if the request is aborted or if there is an error in the response. * @throws {Error} - Throws an error if the request is aborted or if there is an error in the response.
*/ */
export async function fetchNative(url:string, arg:{ export async function fetchNative(url:string, arg:{
body:string|Uint8Array|ArrayBuffer, body?:string|Uint8Array|ArrayBuffer,
headers?:{[key:string]:string}, headers?:{[key:string]:string},
method?:"POST"|"GET"|"PUT"|"DELETE", method?:"POST"|"GET"|"PUT"|"DELETE",
signal?:AbortSignal, signal?:AbortSignal,
useRisuTk?:boolean, useRisuTk?:boolean,
chatId?:string chatId?:string
}):Promise<{ }):Promise<Response> {
body: ReadableStream<Uint8Array>;
headers: Headers; console.log(arg.body,'body')
status: number; if(arg.body === undefined && (arg.method === 'POST' || arg.method === 'PUT') ){
json: () => Promise<any>; throw new Error('Body is required for POST and PUT requests')
text: () => Promise<string>; }
arrayBuffer: () => Promise<ArrayBuffer>;
}> {
const jsonizer = (body:ReadableStream<Uint8Array>) => { const jsonizer = (body:ReadableStream<Uint8Array>) => {
return async () => { return async () => {
@@ -1876,9 +1874,9 @@ export async function fetchNative(url:string, arg:{
let realBody:Uint8Array let realBody:Uint8Array
if(arg.method === 'GET' || arg.method === 'DELETE'){ if(arg.method === 'GET' || arg.method === 'DELETE'){
realBody = new Uint8Array(0) realBody = undefined
} }
if(typeof arg.body === 'string'){ else if(typeof arg.body === 'string'){
realBody = new TextEncoder().encode(arg.body) realBody = new TextEncoder().encode(arg.body)
} }
else if(arg.body instanceof Uint8Array){ else if(arg.body instanceof Uint8Array){
@@ -1990,18 +1988,15 @@ export async function fetchNative(url:string, arg:{
throw new Error(error) throw new Error(error)
} }
return { return new Response(readableStream, {
body: readableStream,
headers: new Headers(resHeaders), headers: new Headers(resHeaders),
status: status, status: status
json: jsonizer(readableStream), })
text: textizer(readableStream),
arrayBuffer: arrayBufferizer(readableStream)
}
} }
else if(throughProxy){ else if(throughProxy){
const r = await fetch(hubURL + `/proxy2`, { const r = await fetch(hubURL + `/proxy2`, {
body: realBody, body: realBody,
headers: arg.useRisuTk ? { headers: arg.useRisuTk ? {
@@ -2018,14 +2013,10 @@ export async function fetchNative(url:string, arg:{
signal: arg.signal signal: arg.signal
}) })
return { return new Response(r.body, {
body: pipeFetchLog(fetchLogIndex, r.body),
headers: r.headers, headers: r.headers,
status: r.status, status: r.status
json: jsonizer(r.body), })
text: textizer(r.body),
arrayBuffer: arrayBufferizer(r.body)
}
} }
else{ else{
return await fetch(url, { return await fetch(url, {

View File

@@ -2,7 +2,7 @@ import { get } from "svelte/store"
import { getDatabase, type character } from "../storage/database.svelte" import { getDatabase, type character } from "../storage/database.svelte"
import { requestChatData } from "./request" import { requestChatData } from "./request"
import { alertError } from "../alert" import { alertError } from "../alert"
import { globalFetch, readImage } from "../globalApi.svelte" import { fetchNative, globalFetch, readImage } from "../globalApi.svelte"
import { CharEmotion } from "../stores.svelte" import { CharEmotion } from "../stores.svelte"
import type { OpenAIChat } from "./index.svelte" import type { OpenAIChat } from "./index.svelte"
import { processZip } from "./processzip" import { processZip } from "./processzip"
@@ -415,12 +415,14 @@ export async function generateAIImage(genPrompt:string, currentChar:character, n
} }
if(db.sdProvider === 'comfy'){
if(db.sdProvider === 'comfy' || db.sdProvider === 'comfyui'){
const legacy = db.sdProvider === 'comfy' // Legacy Comfy mode
const {workflow, posNodeID, posInputName, negNodeID, negInputName} = db.comfyConfig const {workflow, posNodeID, posInputName, negNodeID, negInputName} = db.comfyConfig
const baseUrl = new URL(db.comfyUiUrl) const baseUrl = new URL(db.comfyUiUrl)
const createUrl = (pathname: string, params: Record<string, string> = {}) => { const createUrl = (pathname: string, params: Record<string, string> = {}) => {
const url = new URL(pathname, baseUrl) const url = db.comfyUiUrl.endsWith('/api') ? new URL(`${db.comfyUiUrl}${pathname}`) : new URL(pathname, baseUrl)
url.search = new URLSearchParams(params).toString() url.search = new URLSearchParams(params).toString()
return url.toString() return url.toString()
} }
@@ -437,8 +439,27 @@ export async function generateAIImage(genPrompt:string, currentChar:character, n
try { try {
const prompt = JSON.parse(workflow) const prompt = JSON.parse(workflow)
prompt[posNodeID].inputs[posInputName] = genPrompt if(legacy){
prompt[negNodeID].inputs[negInputName] = neg prompt[posNodeID].inputs[posInputName] = genPrompt
prompt[negNodeID].inputs[negInputName] = neg
}
else{
//search all nodes for the prompt and negative prompt
const keys = Object.keys(prompt)
for(let i = 0; i < keys.length; i++){
const node = prompt[keys[i]]
const inputKeys = Object.keys(node.inputs)
for(let j = 0; j < inputKeys.length; j++){
let input = node.inputs[inputKeys[j]]
if(typeof input === 'string'){
input = input.replaceAll('{{risu_prompt}}', genPrompt)
input = input.replaceAll('{{risu_neg}}', neg)
}
node.inputs[inputKeys[j]] = input
}
}
}
const { prompt_id: id } = await fetchWrapper(createUrl('/prompt'), { const { prompt_id: id } = await fetchWrapper(createUrl('/prompt'), {
method: 'POST', method: 'POST',
@@ -451,9 +472,10 @@ export async function generateAIImage(genPrompt:string, currentChar:character, n
const startTime = Date.now() const startTime = Date.now()
const timeout = db.comfyConfig.timeout * 1000 const timeout = db.comfyConfig.timeout * 1000
while (!(item = (await (await fetch(createUrl('/history'), { while (!(item = (await (await fetchNative(createUrl('/history'), {
headers: { 'Content-Type': 'application/json' }, headers: { 'Content-Type': 'application/json' },
method: 'GET'})).json())[id])) { method: 'GET'
})).json())[id])) {
console.log("Checking /history...") console.log("Checking /history...")
if (Date.now() - startTime >= timeout) { if (Date.now() - startTime >= timeout) {
alertError("Error: Image generation took longer than expected."); alertError("Error: Image generation took longer than expected.");
@@ -463,13 +485,14 @@ export async function generateAIImage(genPrompt:string, currentChar:character, n
} // Check history until the generation is complete. } // Check history until the generation is complete.
const genImgInfo = Object.values(item.outputs).flatMap((output: any) => output.images)[0]; const genImgInfo = Object.values(item.outputs).flatMap((output: any) => output.images)[0];
const imgResponse = await fetch(createUrl('/view', { const imgResponse = await fetchNative(createUrl('/view', {
filename: genImgInfo.filename, filename: genImgInfo.filename,
subfolder: genImgInfo.subfolder, subfolder: genImgInfo.subfolder,
type: genImgInfo.type type: genImgInfo.type
}), { }), {
headers: { 'Content-Type': 'application/json' }, headers: { 'Content-Type': 'application/json' },
method: 'GET'}) method: 'GET'
})
const img64 = Buffer.from(await imgResponse.arrayBuffer()).toString('base64') const img64 = Buffer.from(await imgResponse.arrayBuffer()).toString('base64')
if(returnSdData === 'inlay'){ if(returnSdData === 'inlay'){
@@ -552,7 +575,6 @@ export async function generateAIImage(genPrompt:string, currentChar:character, n
if(db.falModel === 'fal-ai/flux-pro'){ if(db.falModel === 'fal-ai/flux-pro'){
delete body.enable_safety_checker delete body.enable_safety_checker
} }
console.log(body)
const res = await globalFetch('https://fal.run/' + model, { const res = await globalFetch('https://fal.run/' + model, {
headers: { headers: {
@@ -563,8 +585,6 @@ export async function generateAIImage(genPrompt:string, currentChar:character, n
body: body body: body
}) })
console.log(res)
if(!res.ok){ if(!res.ok){
alertError(JSON.stringify(res.data)) alertError(JSON.stringify(res.data))
return false return false