From 73e94658fdd97ea845c4d711b009975eb8fa539d Mon Sep 17 00:00:00 2001 From: Kwaroran Date: Fri, 20 Dec 2024 02:15:48 +0900 Subject: [PATCH] Add thoughts --- src/ts/model/modellist.ts | 12 ++++++++++- src/ts/parser.svelte.ts | 12 +++++++++++ src/ts/process/index.svelte.ts | 7 +------ src/ts/process/request.ts | 38 +++++++++++++++++++++++++++++----- 4 files changed, 57 insertions(+), 12 deletions(-) diff --git a/src/ts/model/modellist.ts b/src/ts/model/modellist.ts index bb335228..223dac58 100644 --- a/src/ts/model/modellist.ts +++ b/src/ts/model/modellist.ts @@ -16,7 +16,7 @@ export enum LLMFlags{ poolSupported, hasVideoInput, OAICompletionTokens, - DeveloperRole + DeveloperRole, } export enum LLMProvider{ @@ -809,6 +809,16 @@ export const LLMModels: LLMModel[] = [ tokenizer: LLMTokenizer.GoogleCloud, recommended: true }, + { + name: "Gemini Flash 2.0 Thinking 1219", + id: 'gemini-2.0-flash-thinking-exp-1219', + provider: LLMProvider.GoogleCloud, + format: LLMFormat.GoogleCloud, + flags: [LLMFlags.hasImageInput, LLMFlags.hasFirstSystemPrompt, LLMFlags.poolSupported, LLMFlags.hasAudioInput, LLMFlags.hasVideoInput, LLMFlags.hasStreaming], + parameters: ['temperature', 'top_k', 'top_p', 'presence_penalty', 'frequency_penalty'], + tokenizer: LLMTokenizer.GoogleCloud, + recommended: true + }, { name: "Gemini Pro 1.5", id: 'gemini-1.5-pro-latest', diff --git a/src/ts/parser.svelte.ts b/src/ts/parser.svelte.ts index 71ae8af0..2bf650da 100644 --- a/src/ts/parser.svelte.ts +++ b/src/ts/parser.svelte.ts @@ -14,6 +14,7 @@ import { getModuleAssets, getModuleLorebooks } from './process/modules'; import type { OpenAIChat } from './process/index.svelte'; import hljs from 'highlight.js/lib/core' import 'highlight.js/styles/atom-one-dark.min.css' +import { language } from 'src/lang'; const markdownItOptions = { html: true, @@ -495,6 +496,11 @@ export interface simpleCharacterArgument{ triggerscript?: triggerscript[] } +function parseThoughts(data:string){ + return 
data.replace(/<Thoughts>(.+)<\/Thoughts>/gms, (full, txt) => { return `
${language.cot}${txt}
` + }) +} export async function ParseMarkdown( data:string, @@ -506,18 +512,24 @@ export async function ParseMarkdown( let firstParsed = '' const additionalAssetMode = (mode === 'back') ? 'back' : 'normal' let char = (typeof(charArg) === 'string') ? (findCharacterbyId(charArg)) : (charArg) + if(char && char.type !== 'group'){ data = await parseAdditionalAssets(data, char, additionalAssetMode, 'pre') firstParsed = data } + if(char){ data = (await processScriptFull(char, data, 'editdisplay', chatID, cbsConditions)).data } + if(firstParsed !== data && char && char.type !== 'group'){ data = await parseAdditionalAssets(data, char, additionalAssetMode, 'post') } + data = await parseInlayAssets(data ?? '') + data = parseThoughts(data) + data = encodeStyle(data) if(mode === 'normal'){ data = await renderHighlightableMarkdown(data) diff --git a/src/ts/process/index.svelte.ts b/src/ts/process/index.svelte.ts index c2edba4d..b10d701d 100644 --- a/src/ts/process/index.svelte.ts +++ b/src/ts/process/index.svelte.ts @@ -752,12 +752,7 @@ export async function sendChat(chatProcessIndex = -1,arg:{ break } } - if(usingPromptTemplate && DBState.db.promptSettings.maxThoughtTagDepth !== -1){ - const depth = ms.length - index - if(depth >= DBState.db.promptSettings.maxThoughtTagDepth){ - formatedChat = formatedChat.replace(/(.+?)<\/Thoughts>/gm, '') - } - } + formatedChat = formatedChat.replace(/(.+?)<\/Thoughts>/gm, '') const chat:OpenAIChat = { role: role, diff --git a/src/ts/process/request.ts b/src/ts/process/request.ts index 2120535e..e79ed17c 100644 --- a/src/ts/process/request.ts +++ b/src/ts/process/request.ts @@ -1665,8 +1665,28 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise const data = JSON.parse(reformatted) let r = '' + let r2 = '' + let bump = false for(const d of data){ - r += d.candidates[0].content.parts[0].text + const parts = d.candidates[0].content?.parts + for(let i=0;i${r}\n\n${r2}` } control.enqueue({ '0': r @@ -1697,11 
+1717,14 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise } } - let fullRes = '' - + let r = '' + let r2 = '' const processDataItem = (data:any) => { if(data?.candidates?.[0]?.content?.parts?.[0]?.text){ - fullRes += data.candidates[0].content.parts[0].text + r += data.candidates[0].content.parts[0].text + } + if(data?.candidates?.[0]?.content?.parts?.[1]?.text){ + r2 += data.candidates[0].content.parts[1].text } else if(data?.errors){ return { @@ -1726,9 +1749,14 @@ async function requestGoogleCloudVertex(arg:RequestDataArgumentExtended):Promise processDataItem(res.data) } + + if(r2){ + r = `${r}\n\n${r2}` + } + return { type: 'success', - result: fullRes + result: r } }