diff --git a/src/lib/SideBars/DevTool.svelte b/src/lib/SideBars/DevTool.svelte
index 0a1ca717..95978ee8 100644
--- a/src/lib/SideBars/DevTool.svelte
+++ b/src/lib/SideBars/DevTool.svelte
@@ -13,7 +13,7 @@
import TextAreaInput from "../UI/GUI/TextAreaInput.svelte";
import { FolderUpIcon, PlusIcon, TrashIcon } from "lucide-svelte";
import { selectSingleFile } from "src/ts/util";
- import { doingChat, previewFormated, sendChat } from "src/ts/process/index.svelte";
+ import { doingChat, previewFormated, previewBody, sendChat } from "src/ts/process/index.svelte";
import SelectInput from "../UI/GUI/SelectInput.svelte";
import { applyChatTemplate, chatTemplates } from "src/ts/process/templates/chatTemplate";
import OptionInput from "../UI/GUI/OptionInput.svelte";
@@ -31,7 +31,8 @@
}
alertWait("Loading...")
await sendChat(-1, {
- preview: true
+ preview: previewJoin !== 'prompt',
+ previewPrompt: previewJoin === 'prompt'
})
let md = ''
@@ -41,6 +42,15 @@
"system": "⚙️ System",
"assistant": "✨ Assistant",
}
+
+ if(previewJoin === 'prompt'){
+ md += '### Prompt\n'
+ md += '```json\n' + JSON.stringify(JSON.parse(previewBody), null, 2).replaceAll('```', '\\`\\`\\`') + '\n```\n'
+ $doingChat = false
+ alertMd(md)
+ return
+ }
+
let formated = safeStructuredClone(previewFormated)
if(previewJoin === 'yes'){
@@ -252,6 +262,7 @@
<OptionInput value="yes">With Join</OptionInput>
<OptionInput value="no">Without Join</OptionInput>
+ <OptionInput value="prompt">As Request</OptionInput>
diff --git a/src/ts/process/index.svelte.ts b/src/ts/process/index.svelte.ts
index 8bda6569..0cd1c49e 100644
--- a/src/ts/process/index.svelte.ts
+++ b/src/ts/process/index.svelte.ts
@@ -63,6 +63,7 @@ export const doingChat = writable(false)
export const chatProcessStage = writable(0)
export const abortChat = writable(false)
export let previewFormated:OpenAIChat[] = []
+export let previewBody:string = ''
export async function sendChat(chatProcessIndex = -1,arg:{
chatAdditonalTokens?:number,
@@ -70,6 +71,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{
continue?:boolean,
usedContinueTokens?:number,
preview?:boolean
+ previewPrompt?:boolean
} = {}):Promise<boolean> {
chatProcessStage.set(0)
@@ -1303,9 +1305,15 @@ export async function sendChat(chatProcessIndex = -1,arg:{
bias: {},
continue: arg.continue,
chatId: generationId,
- imageResponse: DBState.db.outputImageModal
+ imageResponse: DBState.db.outputImageModal,
+ previewBody: arg.previewPrompt
}, 'model', abortSignal)
+ if(arg.previewPrompt && req.type === 'success'){
+ previewBody = req.result
+ return true
+ }
+
let result = ''
let emoChanged = false
let resendChat = false
diff --git a/src/ts/process/request.ts b/src/ts/process/request.ts
index d2e76f71..b4b44474 100644
--- a/src/ts/process/request.ts
+++ b/src/ts/process/request.ts
@@ -43,6 +43,7 @@ interface requestDataArgument{
schema?:string
extractJson?:string
imageResponse?:boolean
+ previewBody?:boolean
}
interface RequestDataArgumentExtended extends requestDataArgument{
@@ -726,8 +727,8 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise<requestDataResponse> {
@@ -1572,6 +1638,17 @@ async function requestOoba(arg:RequestDataArgumentExtended):Promise<requestDataResponse> {
logit_bias: (logit_bias.length > 0) ? logit_bias.join("<<|>>") : undefined,
logit_bias_values: (logit_bias_values.length > 0) ? logit_bias_values.join("|") : undefined,
};
+
+
+ if(arg.previewBody){
+ return {
+ type: 'success',
+ result: JSON.stringify({
+ url: api_server_url + '/api',
+ body: send_body,
+ headers: headers
+ })
+ }
+ }
const response = await globalFetch(arg.customURL ?? api_server_url + '/api', {
method: 'POST',
headers: headers,
@@ -2344,6 +2466,15 @@ async function requestOllama(arg:RequestDataArgumentExtended):Promise<requestDataResponse> {
if(claudeChat.length > 0 && claudeChat[claudeChat.length-1].role === chat.role){
let content = claudeChat[claudeChat.length-1].content
if(multimodals && multimodals.length > 0 && !Array.isArray(content)){
- content = [{
+ content = [{
type: 'text',
text: content
}]
@@ -2788,6 +2933,18 @@ async function requestClaude(arg:RequestDataArgumentExtended):Promise<requestDataResponse> {