diff --git a/src/lib/Setting/Pages/BotSettings.svelte b/src/lib/Setting/Pages/BotSettings.svelte
index c8f6fb51..13c830a7 100644
--- a/src/lib/Setting/Pages/BotSettings.svelte
+++ b/src/lib/Setting/Pages/BotSettings.svelte
@@ -329,7 +329,7 @@
{($DataBase.top_k).toFixed(0)}
{/if}
-{#if $DataBase.aiModel === 'textgen_webui' || $DataBase.aiModel === 'mancer' || $DataBase.aiModel.startsWith('local_')}
+{#if $DataBase.aiModel === 'textgen_webui' || $DataBase.aiModel === 'mancer' || $DataBase.aiModel.startsWith('local_') || $DataBase.aiModel.startsWith('hf:::')}
Repetition Penalty
{($DataBase.ooba.repetition_penalty).toFixed(2)}
diff --git a/src/lib/UI/ModelList.svelte b/src/lib/UI/ModelList.svelte
index d7077cd6..a06e6955 100644
--- a/src/lib/UI/ModelList.svelte
+++ b/src/lib/UI/ModelList.svelte
@@ -3,7 +3,6 @@
import { getHordeModels } from "src/ts/horde/getModels";
import Arcodion from "./Arcodion.svelte";
import { language } from "src/lang";
- import { isNodeServer, isTauri } from "src/ts/storage/globalApi";
import Help from "../Others/Help.svelte";
import CheckInput from "./GUI/CheckInput.svelte";
@@ -86,6 +85,10 @@
const split = name.split(":::")
return `Horde ${split[1]}`
}
+    if(name.startsWith('hf:::')){
+ const split = name.split(":::")
+ return `${split[1]}`
+ }
return name
}
}
@@ -190,7 +193,14 @@
{/each}
{/await}
-
+
+ {#if showUnrec}
+
+
+
+
+
+ {/if}
{#if $DataBase.plugins.length > 0}
{/if}
diff --git a/src/ts/process/embedding/transformers.ts b/src/ts/process/embedding/transformers.ts
index 09874b33..f4898500 100644
--- a/src/ts/process/embedding/transformers.ts
+++ b/src/ts/process/embedding/transformers.ts
@@ -1,6 +1,6 @@
import {env, AutoTokenizer, pipeline, type SummarizationOutput, type TextGenerationConfig, type TextGenerationOutput, FeatureExtractionPipeline, TextToAudioPipeline } from '@xenova/transformers';
import { unzip } from 'fflate';
-import { loadAsset, saveAsset } from 'src/ts/storage/globalApi';
+import { globalFetch, loadAsset, saveAsset } from 'src/ts/storage/globalApi';
import { selectSingleFile } from 'src/ts/util';
import { v4 } from 'uuid';
let tfCache:Cache = null
diff --git a/src/ts/process/request.ts b/src/ts/process/request.ts
index 6eb45635..dc1d7bcf 100644
--- a/src/ts/process/request.ts
+++ b/src/ts/process/request.ts
@@ -21,6 +21,7 @@ import { supportsInlayImage } from "../image";
import { OaifixEmdash } from "../plugins/fixer";
import { Capacitor } from "@capacitor/core";
import { getFreeOpenRouterModel } from "../model/openrouter";
+import { runTransformers } from "./embedding/transformers";
@@ -1632,6 +1633,23 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
}
+ }
+ if(aiModel.startsWith('hf:::')){
+ const realModel = aiModel.split(":::")[1]
+ const suggesting = model === "submodel"
+ const proompt = stringlizeChatOba(formated, currentChar.name, suggesting, arg.continue)
+ const v = await runTransformers(proompt, realModel, {
+ temperature: temperature,
+ max_new_tokens: maxTokens,
+ top_k: db.ooba.top_k,
+ top_p: db.ooba.top_p,
+ repetition_penalty: db.ooba.repetition_penalty,
+ typical_p: db.ooba.typical_p,
+ })
+ return {
+ type: 'success',
+ result: unstringlizeChat(v.generated_text, formated, currentChar?.name ?? '')
+ }
}
if(aiModel.startsWith('local_')){
console.log('running local model')