[feat] added streaming

kwaroran
2023-05-18 04:43:54 +09:00
parent ab16a526fa
commit 8de92e6cd3
12 changed files with 103 additions and 18 deletions

View File

@@ -248,5 +248,5 @@ export const languageEnglish = {
     useExperimental: "Able Experimental Features",
     showMemoryLimit: "Show Memory Limit",
     roundIcons: "Round Icons",
-    useStreaming: "Use Streaming"
+    streaming: "Streaming"
 }

View File

@@ -225,5 +225,14 @@ export const languageKorean = {
     selective: "멀티플 키",
     SecondaryKeys: '두번째 키',
     useGlobalSettings: "글로벌 설정 사용",
-    recursiveScanning: "재귀 검색"
+    recursiveScanning: "재귀 검색",
+    creator: "제작자",
+    CharVersion: "캐릭터 버전",
+    Speech: "음성",
+    ToggleSuperMemory: "SupaMemory 토글",
+    SuperMemory:"SupaMemory",
+    useExperimental: "실험적 요소 보이기",
+    showMemoryLimit: "기억 한계치 보이기",
+    roundIcons: "둥근 아이콘",
+    streaming: "스트리밍"
 }

View File

@@ -121,7 +121,11 @@
 {/if}
 {#if $DataBase.aiModel === 'gpt35' || $DataBase.aiModel === 'gpt4' || $DataBase.subModel === 'gpt4' || $DataBase.subModel === 'gpt35'}
     <span class="text-neutral-200">OpenAI {language.apiKey} <Help key="oaiapikey"/></span>
-    <input class="text-neutral-200 mb-4 p-2 bg-transparent input-text focus:bg-selected text-sm" placeholder="sk-XXXXXXXXXXXXXXXXXXXX" bind:value={$DataBase.openAIKey}>
+    <input class="text-neutral-200 p-2 bg-transparent input-text focus:bg-selected text-sm" placeholder="sk-XXXXXXXXXXXXXXXXXXXX" bind:value={$DataBase.openAIKey}>
+    <div class="flex items-center mt-2 mb-4">
+        <Check bind:check={$DataBase.useStreaming}/>
+        <span>OpenAI {language.streaming}</span>
+    </div>
 {/if}
 {#if $DataBase.aiModel === 'custom'}
     <span class="text-neutral-200 mt-2">{language.plugin}</span>

View File

@@ -1,12 +1,17 @@
import "./styles.css";
import App from "./App.svelte";
import { loadData } from "./ts/globalApi";
import { ReadableStream, WritableStream, TransformStream } from "web-streams-polyfill/ponyfill/es2018";
import { Buffer as BufferPolyfill } from 'buffer'
import { initHotkey } from "./ts/hotkey";
//Polyfills
declare var Buffer: typeof BufferPolyfill;
globalThis.Buffer = BufferPolyfill
//@ts-ignore
globalThis.WritableStream = globalThis.WritableStream ?? WritableStream
globalThis.ReadableStream = globalThis.ReadableStream ?? ReadableStream
globalThis.TransformStream = globalThis.TransformStream ?? TransformStream
const app = new App({
target: document.getElementById("app"),
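The `??` guards above only install the web-streams ponyfill when the host is missing the native classes, so environments that already provide streams keep their built-in implementations. A minimal sketch of the same pattern, assuming the `web-streams-polyfill` package is installed (the renamed import just makes the shadowing explicit):

    import { TransformStream as PonyfillTransformStream } from "web-streams-polyfill/ponyfill/es2018";

    // Assign only when the global is absent; native implementations win when present.
    //@ts-ignore
    globalThis.TransformStream = globalThis.TransformStream ?? PonyfillTransformStream;

    // Either implementation can now be constructed the same way, e.g. an identity transform:
    const identity = new TransformStream<string, string>();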

View File

@@ -7,7 +7,7 @@ import { cloneDeep } from 'lodash';
 export const DataBase = writable({} as any as Database)
 export const loadedStore = writable(false)
-export let appVer = '1.10.1'
+export let appVer = '1.11.0'
 export function setDatabase(data:Database){

View File

@@ -328,7 +328,8 @@ export async function sendChat(chatProcessIndex = -1):Promise<boolean> {
 const req = await requestChatData({
     formated: formated,
     bias: bias,
-    currentChar: currentChar
+    currentChar: currentChar,
+    useStreaming: true
 }, 'model')
 let result = ''
@@ -339,7 +340,23 @@ export async function sendChat(chatProcessIndex = -1):Promise<boolean> {
     return false
 }
+else if(req.type === 'streaming'){
+    const reader = req.result.getReader()
+    const msgIndex = db.characters[selectedChar].chats[selectedChat].message.length
+    db.characters[selectedChar].chats[selectedChat].message.push({
+        role: 'char',
+        data: "",
+        saying: currentChar.chaId
+    })
+    while(true){
+        const readed = (await reader.read())
+        if(readed.value){
+            db.characters[selectedChar].chats[selectedChat].message[msgIndex].data =readed.value
+            setDatabase(db)
+        }
+        if(readed.done){
+            break
+        }
+    }
+}
 else{
     const result2 = processScriptFull(currentChar, reformatContent(req.result), 'editoutput')
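Each value read here is the full reply accumulated so far (the transform in request.ts re-parses its whole buffer and enqueues the running total), which is why the loop assigns to `message[msgIndex].data` instead of appending. A stripped-down sketch of that consumption pattern, with a hypothetical `render` callback standing in for the database update:

    // Consume a ReadableStream<string> whose chunks are cumulative snapshots
    // of the reply, passing the latest text to `render` (hypothetical helper).
    async function consumeCumulativeStream(
        stream: ReadableStream<string>,
        render: (text: string) => void
    ): Promise<string> {
        const reader = stream.getReader()
        let latest = ''
        while(true){
            const { value, done } = await reader.read()
            if(value !== undefined){
                latest = value   // replace, not append: each chunk is the whole text so far
                render(latest)
            }
            if(done){
                break
            }
        }
        return latest
    }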

View File

@@ -13,7 +13,8 @@ interface requestDataArgument{
     temperature?: number
     maxTokens?:number
     PresensePenalty?: number
-    frequencyPenalty?: number
+    frequencyPenalty?: number,
+    useStreaming?:boolean
 }
 type requestDataResponse = {
@@ -21,7 +22,7 @@ type requestDataResponse = {
     result: string
 }|{
     type: "streaming",
-    result: ReadableStreamDefaultReader<Uint8Array>
+    result: ReadableStream<string>
 }
 export async function requestChatData(arg:requestDataArgument, model:'model'|'submodel'):Promise<requestDataResponse> {
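Since `requestDataResponse` is a discriminated union on `type`, callers can branch on it and TypeScript narrows `result` to either a string or a `ReadableStream<string>` without casts. A rough sketch (only the 'fail' and 'streaming' variants are visible in this diff; the assumption that the remaining variants carry plain strings is mine):

    // Sketch of handling a requestChatData() result via narrowing on `type`.
    async function handleResponse(res: requestDataResponse): Promise<void> {
        if(res.type === 'streaming'){
            // res.result narrows to ReadableStream<string>
            await res.result.pipeTo(new WritableStream<string>({
                write(text){ console.log('partial reply:', text) }
            }))
        }
        else if(res.type === 'fail'){
            console.error('request failed:', res.result)
        }
        else{
            console.log('reply:', res.result)
        }
    }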
@@ -60,6 +61,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
     presence_penalty: arg.PresensePenalty ?? (db.PresensePenalty / 100),
     frequency_penalty: arg.frequencyPenalty ?? (db.frequencyPenalty / 100),
     logit_bias: bias,
+    stream: false
 })
 let replacerURL = replacer === '' ? 'https://api.openai.com/v1/chat/completions' : replacer
@@ -71,19 +73,59 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
     replacerURL += 'chat/completions'
 }
-if(db.useStreaming){
+if(db.useStreaming && arg.useStreaming){
     body.stream = true
     const da = await fetch(replacerURL, {
         body: JSON.stringify(body),
         method: "POST",
         headers: {
-            "Authorization": "Bearer " + db.openAIKey
+            "Authorization": "Bearer " + db.openAIKey,
+            "Content-Type": "application/json"
         },
     })
-    const reader = da.body.getReader()
+    if(da.status !== 200){
+        return {
+            type: "fail",
+            result: await da.text()
+        }
+    }
+    let dataUint = new Uint8Array([])
+    const transtream = new TransformStream<Uint8Array, string>( {
+        async transform(chunk, control) {
+            dataUint = Buffer.from(new Uint8Array([...dataUint, ...chunk]))
+            try {
+                const datas = dataUint.toString().split('\n')
+                let readed = ''
+                for(const data of datas){
+                    if(data.startsWith("data: ")){
+                        try {
+                            const rawChunk = data.replace("data: ", "")
+                            if(rawChunk === "[DONE]"){
+                                control.enqueue(readed)
+                                return
+                            }
+                            const chunk = JSON.parse(rawChunk).choices[0].delta.content
+                            if(chunk){
+                                readed += chunk
+                            }
+                        } catch (error) {}
+                    }
+                }
+                control.enqueue(readed)
+            } catch (error) {
+            }
+        }
+    },)
+    da.body.pipeTo(transtream.writable)
     return {
         type: 'streaming',
-        result: reader
+        result: transtream.readable
     }
 }
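The TransformStream above keeps appending raw bytes to `dataUint`, re-decodes the whole buffer, and rebuilds the reply from the `data: ` lines that the OpenAI Chat Completions API emits when `stream: true` is set: each line carries a JSON chunk whose `choices[0].delta.content` holds the next piece of text, and a final `data: [DONE]` line ends the stream. A minimal standalone sketch of that parsing step, using `TextDecoder` rather than the `Buffer` polyfill (an equivalent choice, not what this commit uses):

    // Rebuild the accumulated reply text from everything received so far on an
    // OpenAI server-sent-events stream. Incomplete trailing JSON is skipped and
    // picked up on the next call, once more bytes have arrived.
    function parseSseBuffer(raw: Uint8Array): string {
        const text = new TextDecoder().decode(raw)
        let reply = ''
        for(const line of text.split('\n')){
            if(!line.startsWith('data: ')){ continue }
            const payload = line.slice('data: '.length)
            if(payload === '[DONE]'){ break }
            try {
                const delta = JSON.parse(payload).choices?.[0]?.delta?.content
                if(delta){ reply += delta }
            } catch (error) {}
        }
        return reply
    }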

View File

@@ -46,7 +46,7 @@ function versionStringToNumber(versionString:string):number {
     return Number(
         versionString
             .split(".")
-            .map((component) => component.padStart(2, "0"))
+            .map((component) => component.padStart(4, "0"))
             .join("")
     );
 }
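Widening the pad from 2 to 4 digits matters once a version component can grow past two digits: with a 2-digit pad, components of different lengths can collide after concatenation (the likely motivation here, though the commit itself only changes the width). A worked example:

    // Same scheme as versionStringToNumber, parameterised on the pad width.
    function versionToNumber(v: string, pad: number): number {
        return Number(v.split(".").map((component) => component.padStart(pad, "0")).join(""))
    }

    versionToNumber("11.0.0", 2)   // 110000
    versionToNumber("1.100.0", 2)  // 110000  <- collision: two different versions compare equal
    versionToNumber("11.0.0", 4)   // 1100000000
    versionToNumber("1.100.0", 4)  // 101000000 <- distinct, and ordering is preserved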