diff --git a/package.json b/package.json
index fcf73bab..8fb18f08 100644
--- a/package.json
+++ b/package.json
@@ -19,6 +19,7 @@
     "@msgpack/msgpack": "3.0.0-beta2",
     "@tauri-apps/api": "1.3.0",
     "@xenova/transformers": "^2.1.1",
+    "blueimp-md5": "^2.19.0",
     "body-parser": "^1.20.2",
     "buffer": "^6.0.3",
     "core-js": "^3.30.2",
@@ -50,6 +51,7 @@
     "@tailwindcss/typography": "^0.5.9",
     "@tauri-apps/cli": "1.3.1",
     "@tsconfig/svelte": "^3.0.0",
+    "@types/blueimp-md5": "^2.18.0",
     "@types/dompurify": "^3.0.1",
     "@types/lodash": "^4.14.194",
     "@types/lodash.clonedeep": "^4.5.7",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 670f657c..a955b2e4 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -16,6 +16,9 @@ dependencies:
   '@xenova/transformers':
     specifier: ^2.1.1
     version: 2.1.1
+  blueimp-md5:
+    specifier: ^2.19.0
+    version: 2.19.0
   body-parser:
     specifier: ^1.20.2
     version: 1.20.2
@@ -105,6 +108,9 @@ devDependencies:
   '@tsconfig/svelte':
     specifier: ^3.0.0
     version: 3.0.0
+  '@types/blueimp-md5':
+    specifier: ^2.18.0
+    version: 2.18.0
   '@types/dompurify':
     specifier: ^3.0.1
     version: 3.0.1
@@ -731,6 +737,10 @@ packages:
     resolution: {integrity: sha512-pYrtLtOwku/7r1i9AMONsJMVYAtk3hzOfiGNekhtq5tYBGA7unMve8RvUclKLMT3PrihvJqUmzsRGh0RP84hKg==}
     dev: true
 
+  /@types/blueimp-md5@2.18.0:
+    resolution: {integrity: sha512-f4A+++lGZGJvVSgeyMkqA7BEf2BVQli6F+qEykKb49c5ieWQBkfpn6CP5c1IZr2Yi2Ofl6Fj+v0e1fN18Z8Cnw==}
+    dev: true
+
   /@types/dompurify@3.0.1:
     resolution: {integrity: sha512-ubq8VKmf8W+U48jUOiZO4BoSGS7NnbITPMvrF+7HgMN4L+eezCKv8QBPB8p3o4YPicLMmNeTyDkE5X4c2ViHJQ==}
     dependencies:
@@ -912,6 +922,10 @@ packages:
       readable-stream: 3.6.2
     dev: false
 
+  /blueimp-md5@2.19.0:
+    resolution: {integrity: sha512-DRQrD6gJyy8FbiE4s+bDoXS9hiW3Vbx5uCdwvcCf3zLHL+Iv7LtGHLpr+GZV8rHG8tK766FGYBwRbu8pELTt+w==}
+    dev: false
+
   /body-parser@1.20.1:
     resolution: {integrity: sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==}
     engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16}
diff --git a/src/ts/process/deepai.ts b/src/ts/process/deepai.ts
new file mode 100644
index 00000000..8d117df1
--- /dev/null
+++ b/src/ts/process/deepai.ts
@@ -0,0 +1,40 @@
+import md5 from "blueimp-md5";
+import { globalFetch } from "../storage/globalApi";
+import type { OpenAIChat } from ".";
+
+function randomBytes(size: number): Uint8Array {
+    const array = new Uint8Array(size);
+    return crypto.getRandomValues(array);
+}
+export async function createDeep(messages: OpenAIChat[]) {
+    const userAgent = navigator.userAgent;
+
+    const part1 = Math.floor(Math.random() * Math.pow(10, 11)).toString();
+
+    const md5Text = (text: string): string => {
+        return md5(text).split('').reverse().join('');
+    }
+
+    const part2 = md5Text(userAgent + md5Text(userAgent + md5Text(userAgent + part1 + "x")));
+
+    const apiKey = `tryit-${part1}-${part2}`;
+
+    const headers = {
+        "api-key": apiKey,
+        "user-agent": userAgent
+    };
+
+    const body = new URLSearchParams();
+    body.append("chat_style", "chat");
+    console.log(messages);
+    body.append("chatHistory", JSON.stringify(messages));
+
+    const response = await globalFetch("https://api.deepai.org/chat_response", {
+        method: 'POST',
+        headers: headers,
+        body: body,
+        rawResponse: true
+    });
+
+    return response;
+}
\ No newline at end of file
diff --git a/src/ts/process/index.ts b/src/ts/process/index.ts
index 2f0fb543..3797ec00 100644
--- a/src/ts/process/index.ts
+++ b/src/ts/process/index.ts
@@ -139,6 +139,11 @@ export async function sendChat(chatProcessIndex = -1,arg:{chatAdditonalTokens?:n
             maxContextTokens = 8000
         }
     }
+    if(db.aiModel === 'deepai'){
+        if(maxContextTokens > 3000){
+            maxContextTokens = 3000
+        }
+    }
 
 
     let unformated = {
diff --git a/src/ts/process/request.ts b/src/ts/process/request.ts
index 8bfa6506..b81eeadd 100644
--- a/src/ts/process/request.ts
+++ b/src/ts/process/request.ts
@@ -6,6 +6,7 @@ import { language } from "../../lang";
 import { stringlizeChat, unstringlizeChat } from "./stringlize";
 import { globalFetch, isTauri } from "../storage/globalApi";
 import { sleep } from "../util";
+import { createDeep } from "./deepai";
 
 interface requestDataArgument{
     formated: OpenAIChat[]
@@ -579,6 +580,40 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
                 'result': unstringlizeChat(result, formated, currentChar?.name ?? '')
             }
         }
+        case "deepai":{
+
+            for(let i=0;i= 200 && da.status < 300)
-            return {
-                ok: da.ok && da.status >= 200 && da.status < 300,
-                data: dat,
-                headers: Object.fromEntries(da.headers)
+            const daText = await da.text()
+            try {
+                const dat = JSON.parse(daText)
+                addFetchLog(dat, da.ok && da.status >= 200 && da.status < 300)
+                return {
+                    ok: da.ok && da.status >= 200 && da.status < 300,
+                    data: dat,
+                    headers: Object.fromEntries(da.headers)
+                }
+            } catch (error) {
+                addFetchLog(daText, false)
+                return {
+                    ok:false,
+                    data: daText,
+                    headers: Object.fromEntries(da.headers)
+                }
             }
         }
     } catch (error) {
@@ -853,4 +883,14 @@ export function openURL(url:string){
     else{
         window.open(url, "_blank")
     }
-}
\ No newline at end of file
+}
+
+function formDataToString(formData: FormData): string {
+    const params: string[] = [];
+
+    for (const [name, value] of formData.entries()) {
+        params.push(`${encodeURIComponent(name)}=${encodeURIComponent(value.toString())}`);
+    }
+
+    return params.join('&');
+}
\ No newline at end of file
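
Note on the key derivation in src/ts/process/deepai.ts above: createDeep builds a throwaway DeepAI "tryit" key from the browser user agent and a random 11-digit number via a nested, character-reversed MD5. Below is a minimal standalone sketch of that scheme, assuming only the blueimp-md5 package added in package.json; the names md5Reversed and buildTryitKey and the sample user-agent string are illustrative, not part of the commit.

// Sketch only: mirrors the md5Text/apiKey logic of createDeep in src/ts/process/deepai.ts.
import md5 from "blueimp-md5";

// MD5 hex digest with its characters reversed, as deepai.ts does in md5Text.
function md5Reversed(text: string): string {
    return md5(text).split("").reverse().join("");
}

// Derives a `tryit-<random 11-digit part>-<nested reversed hash>` key from a user-agent string.
function buildTryitKey(userAgent: string): string {
    const part1 = Math.floor(Math.random() * Math.pow(10, 11)).toString();
    const part2 = md5Reversed(
        userAgent + md5Reversed(userAgent + md5Reversed(userAgent + part1 + "x"))
    );
    return `tryit-${part1}-${part2}`;
}

// Example usage with an arbitrary user-agent string:
console.log(buildTryitKey("Mozilla/5.0 (X11; Linux x86_64)"));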