Fix nativeFetch and comfy
@@ -173,6 +173,7 @@ export const languageEnglish = {
autoTranslateCachedOnly: "If enabled, it will automatically translate only the text that the user has translated previously.",
presetChain: "If it is not blank, a preset from the preset list in this input will be randomly chosen and applied every time the user sends a message. The preset list should be separated by commas, for example, `preset1,preset2`.",
legacyMediaFindings: "If enabled, it will use the old method to find media assets, without using the additional search algorithm.",
comfyWorkflow: "Put the API workflow of ComfyUI. You can get your API workflow in ComfyUI by pressing the 'Workflow > Export (API)' button. You must also put {{risu_prompt}} in your workflow text. The {{risu_prompt}} will be replaced with the prompt provided by Risu.",
},
setup: {
chooseProvider: "Choose AI Provider",
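To illustrate the comfyWorkflow help text above: the exported API workflow is a JSON object keyed by node ID, and only the {{risu_prompt}} (and, per the generation code below, {{risu_neg}}) placeholders inside string inputs are picked up. A minimal hypothetical excerpt, where the node IDs, class types, and clip references are examples only:

    // Hypothetical excerpt of an exported ComfyUI API workflow; node IDs ("6", "7"),
    // class types, and the clip references are illustrative. Only the placeholders matter:
    // {{risu_prompt}} receives the positive prompt and {{risu_neg}} the negative prompt.
    const exampleWorkflow = {
        "6": { class_type: "CLIPTextEncode", inputs: { text: "{{risu_prompt}}", clip: ["4", 1] } },
        "7": { class_type: "CLIPTextEncode", inputs: { text: "{{risu_neg}}", clip: ["4", 1] } }
    }
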
@@ -77,8 +77,13 @@
<OptionInput value="novelai" >Novel AI</OptionInput>
<OptionInput value="dalle" >Dall-E</OptionInput>
<OptionInput value="stability" >Stability API</OptionInput>
<OptionInput value="comfy" >ComfyUI</OptionInput>
<OptionInput value="fal" >Fal.ai</OptionInput>
<OptionInput value="comfyui" >ComfyUI</OptionInput>

<!-- Legacy -->
{#if DBState.db.sdProvider === 'comfy'}
<OptionInput value="comfy" >ComfyUI (Legacy)</OptionInput>
{/if}
</SelectInput>

{#if DBState.db.sdProvider === 'webui'}

@@ -277,6 +282,17 @@
{/if}
{/if}

{#if DBState.db.sdProvider === 'comfyui'}
<span class="text-textcolor mt-2">ComfyUI {language.providerURL}</span>
<TextInput size="sm" marginBottom placeholder="http://127.0.0.1:8188" bind:value={DBState.db.comfyUiUrl}/>

<span class="text-textcolor">Workflow <Help key="comfyWorkflow" /></span>
<TextInput size="sm" marginBottom bind:value={DBState.db.comfyConfig.workflow}/>

<span class="text-textcolor">Timeout (sec)</span>
<NumberInput size="sm" marginBottom bind:value={DBState.db.comfyConfig.timeout} min={1} max={120} />
{/if}

{#if DBState.db.sdProvider === 'comfy'}
<span class="text-draculared text-xs mb-2">The first image generated by the prompt will be selected. </span>
{#if !isTauri}

@@ -1821,20 +1821,18 @@ const pipeFetchLog = (fetchLogIndex: number, readableStream: ReadableStream<Uint
* @throws {Error} - Throws an error if the request is aborted or if there is an error in the response.
*/
export async function fetchNative(url:string, arg:{
body:string|Uint8Array|ArrayBuffer,
body?:string|Uint8Array|ArrayBuffer,
headers?:{[key:string]:string},
method?:"POST"|"GET"|"PUT"|"DELETE",
signal?:AbortSignal,
useRisuTk?:boolean,
chatId?:string
}):Promise<{
body: ReadableStream<Uint8Array>;
headers: Headers;
status: number;
json: () => Promise<any>;
text: () => Promise<string>;
arrayBuffer: () => Promise<ArrayBuffer>;
}> {
}):Promise<Response> {

console.log(arg.body,'body')
if(arg.body === undefined && (arg.method === 'POST' || arg.method === 'PUT') ){
throw new Error('Body is required for POST and PUT requests')
}

const jsonizer = (body:ReadableStream<Uint8Array>) => {
return async () => {
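For readability, here is how the changed signature reads once the hunk above is applied (a consolidated sketch of the diff, not additional code): the body becomes optional, the hand-rolled result object is replaced by a standard Response, and a missing body is rejected only for POST and PUT.

    // Consolidated view of the signature after this change (assembled from the hunk above).
    export async function fetchNative(url: string, arg: {
        body?: string | Uint8Array | ArrayBuffer,
        headers?: { [key: string]: string },
        method?: "POST" | "GET" | "PUT" | "DELETE",
        signal?: AbortSignal,
        useRisuTk?: boolean,
        chatId?: string
    }): Promise<Response> {
        // Only POST and PUT are required to carry a body.
        if (arg.body === undefined && (arg.method === 'POST' || arg.method === 'PUT')) {
            throw new Error('Body is required for POST and PUT requests')
        }
        // ... body normalization and transport continue below ...
    }
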
@@ -1876,9 +1874,9 @@ export async function fetchNative(url:string, arg:{
let realBody:Uint8Array

if(arg.method === 'GET' || arg.method === 'DELETE'){
realBody = new Uint8Array(0)
realBody = undefined
}
if(typeof arg.body === 'string'){
else if(typeof arg.body === 'string'){
realBody = new TextEncoder().encode(arg.body)
}
else if(arg.body instanceof Uint8Array){
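Putting the hunk above together with its unchanged neighbours, the body normalization now behaves roughly as sketched below. The Uint8Array branch body is not visible in the diff and is assumed to be a plain pass-through, and the variable is typed as possibly undefined here only so the sketch stands alone.

    // Sketch of body normalization after this change (assumption: the Uint8Array branch
    // simply passes the bytes through; its body is not shown in the hunk above).
    let realBody: Uint8Array | undefined
    if (arg.method === 'GET' || arg.method === 'DELETE') {
        realBody = undefined          // GET/DELETE now send no body instead of an empty buffer
    }
    else if (typeof arg.body === 'string') {
        realBody = new TextEncoder().encode(arg.body)
    }
    else if (arg.body instanceof Uint8Array) {
        realBody = arg.body           // assumed pass-through
    }
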
@@ -1990,18 +1988,15 @@ export async function fetchNative(url:string, arg:{
throw new Error(error)
}

return {
body: readableStream,
return new Response(readableStream, {
headers: new Headers(resHeaders),
status: status,
json: jsonizer(readableStream),
text: textizer(readableStream),
arrayBuffer: arrayBufferizer(readableStream)
}
status: status
})


}
else if(throughProxy){

const r = await fetch(hubURL + `/proxy2`, {
body: realBody,
headers: arg.useRisuTk ? {
@@ -2018,14 +2013,10 @@ export async function fetchNative(url:string, arg:{
signal: arg.signal
})

return {
body: pipeFetchLog(fetchLogIndex, r.body),
return new Response(r.body, {
headers: r.headers,
status: r.status,
json: jsonizer(r.body),
text: textizer(r.body),
arrayBuffer: arrayBufferizer(r.body)
}
status: r.status
})
}
else{
return await fetch(url, {

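Because every branch of fetchNative now resolves to a standard Response, callers consume it with the built-in accessors. A minimal usage sketch (the URL and logging are illustrative; fetchNative is assumed to be imported from globalApi.svelte as in the file below):

    // Minimal usage sketch: fetchNative now returns a plain Response, so status,
    // json(), text() and arrayBuffer() behave exactly as with the native fetch().
    async function exampleHistoryCheck() {
        const res = await fetchNative('http://127.0.0.1:8188/history', { method: 'GET' })
        if (res.status === 200) {
            const history = await res.json()
            console.log(history)
        }
    }
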
@@ -2,7 +2,7 @@ import { get } from "svelte/store"
import { getDatabase, type character } from "../storage/database.svelte"
import { requestChatData } from "./request"
import { alertError } from "../alert"
import { globalFetch, readImage } from "../globalApi.svelte"
import { fetchNative, globalFetch, readImage } from "../globalApi.svelte"
import { CharEmotion } from "../stores.svelte"
import type { OpenAIChat } from "./index.svelte"
import { processZip } from "./processzip"

@@ -415,12 +415,14 @@ export async function generateAIImage(genPrompt:string, currentChar:character, n


}
if(db.sdProvider === 'comfy'){
if(db.sdProvider === 'comfy' || db.sdProvider === 'comfyui'){
const legacy = db.sdProvider === 'comfy' // Legacy Comfy mode
const {workflow, posNodeID, posInputName, negNodeID, negInputName} = db.comfyConfig
const baseUrl = new URL(db.comfyUiUrl)

const createUrl = (pathname: string, params: Record<string, string> = {}) => {
const url = new URL(pathname, baseUrl)
const url = db.comfyUiUrl.endsWith('/api') ? new URL(`${db.comfyUiUrl}${pathname}`) : new URL(pathname, baseUrl)
url.search = new URLSearchParams(params).toString()
return url.toString()
}
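The endsWith('/api') branch above exists because resolving an absolute pathname against a base URL discards the base's path, so a ComfyUI URL that already ends in '/api' would lose that prefix. A small illustration with example values:

    // Illustration with example values: URL resolution drops the '/api' path of the base,
    // so the code concatenates instead when db.comfyUiUrl ends with '/api'.
    console.log(new URL('/prompt', 'http://127.0.0.1:8188/api').toString())
    // -> http://127.0.0.1:8188/prompt        ('/api' prefix lost)
    console.log('http://127.0.0.1:8188/api' + '/prompt')
    // -> http://127.0.0.1:8188/api/prompt    (what the special case produces)
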
@@ -437,8 +439,27 @@ export async function generateAIImage(genPrompt:string, currentChar:character, n

try {
const prompt = JSON.parse(workflow)
if(legacy){
prompt[posNodeID].inputs[posInputName] = genPrompt
prompt[negNodeID].inputs[negInputName] = neg
}
else{
//search all nodes for the prompt and negative prompt
const keys = Object.keys(prompt)
for(let i = 0; i < keys.length; i++){
const node = prompt[keys[i]]
const inputKeys = Object.keys(node.inputs)
for(let j = 0; j < inputKeys.length; j++){
let input = node.inputs[inputKeys[j]]
if(typeof input === 'string'){
input = input.replaceAll('{{risu_prompt}}', genPrompt)
input = input.replaceAll('{{risu_neg}}', neg)
}

node.inputs[inputKeys[j]] = input
}
}
}

const { prompt_id: id } = await fetchWrapper(createUrl('/prompt'), {
method: 'POST',
@@ -451,9 +472,10 @@ export async function generateAIImage(genPrompt:string, currentChar:character, n

const startTime = Date.now()
const timeout = db.comfyConfig.timeout * 1000
while (!(item = (await (await fetch(createUrl('/history'), {
while (!(item = (await (await fetchNative(createUrl('/history'), {
headers: { 'Content-Type': 'application/json' },
method: 'GET'})).json())[id])) {
method: 'GET'
})).json())[id])) {
console.log("Checking /history...")
if (Date.now() - startTime >= timeout) {
alertError("Error: Image generation took longer than expected.");
@@ -463,13 +485,14 @@ export async function generateAIImage(genPrompt:string, currentChar:character, n
} // Check history until the generation is complete.
const genImgInfo = Object.values(item.outputs).flatMap((output: any) => output.images)[0];

const imgResponse = await fetch(createUrl('/view', {
const imgResponse = await fetchNative(createUrl('/view', {
filename: genImgInfo.filename,
subfolder: genImgInfo.subfolder,
type: genImgInfo.type
}), {
headers: { 'Content-Type': 'application/json' },
method: 'GET'})
method: 'GET'
})
const img64 = Buffer.from(await imgResponse.arrayBuffer()).toString('base64')

if(returnSdData === 'inlay'){
@@ -552,7 +575,6 @@ export async function generateAIImage(genPrompt:string, currentChar:character, n
if(db.falModel === 'fal-ai/flux-pro'){
delete body.enable_safety_checker
}
console.log(body)

const res = await globalFetch('https://fal.run/' + model, {
headers: {
@@ -563,8 +585,6 @@ export async function generateAIImage(genPrompt:string, currentChar:character, n
body: body
})

console.log(res)

if(!res.ok){
alertError(JSON.stringify(res.data))
return false