Remove deep
@@ -1,40 +0,0 @@
-import md5 from "blueimp-md5";
-import { globalFetch } from "../storage/globalApi";
-import type { OpenAIChat } from ".";
-
-function randomBytes(size: number): Uint8Array {
-    const array = new Uint8Array(size);
-    return crypto.getRandomValues(array);
-}
-export async function createDeep(messages: OpenAIChat[]) {
-    const userAgent = navigator.userAgent;
-
-    const part1 = Math.floor(Math.random() * Math.pow(10, 11)).toString();
-
-    const md5Text = (text: string): string => {
-        return md5(text).split('').reverse().join('');
-    }
-
-    const part2 = md5Text(userAgent + md5Text(userAgent + md5Text(userAgent + part1 + "x")));
-
-    const apiKey = `tryit-${part1}-${part2}`;
-
-    const headers = {
-        "api-key": apiKey,
-        "user-agent": userAgent
-    };
-
-    const body = new URLSearchParams();
-    body.append("chat_style", "chat");
-    console.log(messages);
-    body.append("chatHistory", JSON.stringify(messages));
-
-    const response = await globalFetch("https://api.deepai.org/chat_response", {
-        method: 'POST',
-        headers: headers,
-        body: body,
-        rawResponse: true
-    });
-
-    return response;
-}
@@ -1,13 +1,11 @@
 import { get } from "svelte/store";
 import type { MultiModal, OpenAIChat, OpenAIChatFull } from ".";
-import { DataBase, setDatabase, type character } from "../storage/database";
+import { DataBase, type character } from "../storage/database";
 import { pluginProcess } from "../plugins/plugins";
 import { language } from "../../lang";
 import { stringlizeAINChat, stringlizeChat, getStopStrings, unstringlizeAIN, unstringlizeChat } from "./stringlize";
 import { addFetchLog, fetchNative, globalFetch, isNodeServer, isTauri, textifyReadableStream } from "../storage/globalApi";
 import { sleep } from "../util";
-import { createDeep } from "./deepai";
-import { hubURL } from "../characterCards";
 import { NovelAIBadWordIds, stringlizeNAIChat } from "./models/nai";
 import { strongBan, tokenize, tokenizeNum } from "../tokenizer";
 import { runGGUFModel } from "./models/local";
@@ -22,7 +20,7 @@ import { OaifixBias } from "../plugins/fixer";
 import { Capacitor } from "@capacitor/core";
 import { getFreeOpenRouterModel } from "../model/openrouter";
 import { runTransformers } from "./transformers";
-import {createParser, type ParsedEvent, type ReconnectInterval} from 'eventsource-parser'
+import {createParser} from 'eventsource-parser'
 import {Ollama} from 'ollama/dist/browser.mjs'
 import { applyChatTemplate } from "./templates/chatTemplate";
 
@@ -1450,40 +1448,6 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
                 'result': unstr
             }
         }
-        case "deepai":{
-
-            for(let i=0;i<formated.length;i++){
-                delete formated[i].memo
-                delete formated[i].name
-                if(arg.isGroupChat && formated[i].name && formated[i].role === 'assistant'){
-                    formated[i].content = formated[i].name + ": " + formated[i].content
-                }
-                if(formated[i].role !== 'assistant' && formated[i].role !== 'user'){
-                    formated[i].content = formated[i].role + ": " + formated[i].content
-                    formated[i].role = 'assistant'
-                }
-                formated[i].name = undefined
-            }
-
-            const response = await createDeep([{
-                role: 'user',
-                content: stringlizeChat(formated, currentChar?.name ?? '', arg.continue)
-            }])
-
-            if(!response.ok){
-                return {
-                    type: 'fail',
-                    result: response.data
-                }
-            }
-
-            const result = Buffer.from(response.data).toString('utf-8')
-
-            return {
-                'type': 'success',
-                'result': result
-            }
-        }
         case 'risullm-proto':{
             const res = await globalFetch('https://sv.risuai.xyz/risullm', {
                 body: {