Add cache point

Kwaroran
2025-03-08 17:49:19 +09:00
parent 02fb780395
commit 93a53da983
6 changed files with 70 additions and 6 deletions

View File

@@ -40,6 +40,7 @@ export interface OpenAIChat{
    attr?:string[]
    multimodals?: MultiModal[]
    thoughts?: string[]
+    cachePoint?: boolean
}
export interface MultiModal{
@@ -635,6 +636,9 @@ export async function sendChat(chatProcessIndex = -1,arg:{
                supaMemoryCardUsed = true
                break
            }
+            case 'cache':{
+                break
+            }
        }
    }
}
@@ -1142,6 +1146,22 @@ export async function sendChat(chatProcessIndex = -1,arg:{
            }
            pushPrompts(pmt)
            break
        }
+        case 'cache':{
+            // walk backward from the newest message, flagging up to `depth` messages whose role matches the card
+            let pointer = formated.length - 1
+            let depthRemaining = card.depth
+            while(pointer >= 0){
+                if(depthRemaining === 0){
+                    break
+                }
+                if(formated[pointer].role === card.role || card.role === 'all'){
+                    formated[pointer].cachePoint = true
+                    depthRemaining--
+                }
+                pointer--
+            }
+            break
+        }
    }
}
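The new 'cache' card scans the formatted chat from the end and flags the newest matching messages as cache points. A minimal standalone sketch of the same pass (the Msg type and markCachePoints helper are hypothetical names; in the commit the loop runs inline over formated inside sendChat):

interface Msg { role: 'user'|'assistant'|'system'; content: string; cachePoint?: boolean }

function markCachePoints(messages: Msg[], depth: number, role: Msg['role']|'all'): void {
    // scan from the newest message backward
    let pointer = messages.length - 1
    let remaining = depth
    while(pointer >= 0 && remaining > 0){
        if(messages[pointer].role === role || role === 'all'){
            messages[pointer].cachePoint = true
            remaining--
        }
        pointer--
    }
}

const messages: Msg[] = [
    { role: 'system', content: 'persona' },
    { role: 'user', content: 'hi' },
    { role: 'assistant', content: 'hello' },
]
markCachePoints(messages, 2, 'all')
// messages[1].cachePoint === true, messages[2].cachePoint === true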

View File

@@ -4,7 +4,7 @@ import { getDatabase, presetTemplate, setDatabase, type Database } from "../stor
import { alertError, alertNormal } from "../alert";
import type { OobaChatCompletionRequestParams } from "../model/ooba";
-export type PromptItem = PromptItemPlain|PromptItemTyped|PromptItemChat|PromptItemAuthorNote|PromptItemChatML
+export type PromptItem = PromptItemPlain|PromptItemTyped|PromptItemChat|PromptItemAuthorNote|PromptItemChatML|PromptItemCache
export type PromptType = PromptItem['type'];
export type PromptSettings = {
assistantPrefill: string
@@ -52,6 +52,14 @@ export interface PromptItemChat {
    name?: string
}
+export interface PromptItemCache {
+    type: 'cache';
+    name: string
+    depth: number
+    role: 'user'|'assistant'|'system'|'all'
+}
export async function tokenizePreset(prompts:PromptItem[], consti:boolean = false){
    let total = 0
    for(const prompt of prompts){
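For illustration, a cache card in a preset could look like the value below (a hypothetical example, not taken from the commit); depth is how many matching messages, counted from the newest, get flagged:

// hypothetical example card following the PromptItemCache interface above
const cacheCard: PromptItemCache = {
    type: 'cache',
    name: 'Cache recent turns',
    depth: 2,
    role: 'all',
}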

View File

@@ -370,6 +370,7 @@ export interface OpenAIChatExtra {
    thoughts?:string[]
    prefix?:boolean
    reasoning_content?:string
+    cachePoint?:boolean
}
function reformater(formated:OpenAIChat[],modelInfo:LLMModel){
@@ -570,6 +571,7 @@ async function requestOpenAI(arg:RequestDataArgumentExtended):Promise<requestDat
            delete formatedChat[i].attr
            delete formatedChat[i].multimodals
            delete formatedChat[i].thoughts
+            delete formatedChat[i].cachePoint
        }
        if(aiModel === 'reverse_proxy' && db.reverseProxyOobaMode && formatedChat[i].role === 'system'){
            const cont = formatedChat[i].content
@@ -2490,7 +2492,8 @@ async function requestClaude(arg:RequestDataArgumentExtended):Promise<requestDat
    const addClaudeChat = (chat:{
        role: 'user'|'assistant'
-        content: string
+        content: string,
+        cache: boolean
    }, multimodals?:MultiModal[]) => {
        if(claudeChat.length > 0 && claudeChat[claudeChat.length-1].role === chat.role){
            let content = claudeChat[claudeChat.length-1].content
@@ -2533,6 +2536,11 @@ async function requestClaude(arg:RequestDataArgumentExtended):Promise<requestDat
                    }
                }
            }
+            if(chat.cache){
+                content[content.length-1].cache_control = {
+                    type: 'ephemeral'
+                }
+            }
            claudeChat[claudeChat.length-1].content = content
        }
        else{
@@ -2566,6 +2574,11 @@ async function requestClaude(arg:RequestDataArgumentExtended):Promise<requestDat
                }
            }
+            if(chat.cache){
+                formatedChat.content[0].cache_control = {
+                    type: 'ephemeral'
+                }
+            }
            claudeChat.push(formatedChat)
        }
    }
@@ -2574,14 +2587,16 @@ async function requestClaude(arg:RequestDataArgumentExtended):Promise<requestDat
            case 'user':{
                addClaudeChat({
                    role: 'user',
-                    content: chat.content
+                    content: chat.content,
+                    cache: chat.cachePoint
                }, chat.multimodals)
                break
            }
            case 'assistant':{
                addClaudeChat({
                    role: 'assistant',
-                    content: chat.content
+                    content: chat.content,
+                    cache: chat.cachePoint
                }, chat.multimodals)
                break
            }
@@ -2592,7 +2607,8 @@ async function requestClaude(arg:RequestDataArgumentExtended):Promise<requestDat
            else{
                addClaudeChat({
                    role: 'user',
-                    content: "System: " + chat.content
+                    content: "System: " + chat.content,
+                    cache: chat.cachePoint
                })
            }
            break
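Anthropic's prompt caching marks a content block with cache_control: { type: 'ephemeral' }; the prefix up to and including that block becomes reusable across requests. Roughly the request shape this produces (a hand-written illustration, not output captured from the commit; the model id is a placeholder):

const body = {
    model: 'claude-3-5-sonnet-20241022', // placeholder model id
    max_tokens: 1024,
    messages: [
        {
            role: 'user',
            content: [
                {
                    type: 'text',
                    text: 'Long persona and lore text…',
                    cache_control: { type: 'ephemeral' } // the cache point set via cachePoint
                }
            ]
        }
    ]
}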