diff --git a/src-tauri/src-python/main.py b/src-tauri/src-python/main.py
index cb49a1ce..c91cbf3b 100644
--- a/src-tauri/src-python/main.py
+++ b/src-tauri/src-python/main.py
@@ -66,17 +66,44 @@ def stream_chat_llamacpp(item:LlamaItem):
     chunks = app.llm.create_completion(
         prompt = item.prompt,
         temperature = item.temperature,
-        top_p = item.top_p,
-        top_k = item.top_k,
-        max_tokens = item.max_tokens,
-        presence_penalty = item.presence_penalty,
-        frequency_penalty = item.frequency_penalty,
-        repeat_penalty = item.repeat_penalty,
-        stop=item.stop,
-        stream=True
+        # top_p = item.top_p,
+        # top_k = item.top_k,
+        # max_tokens = item.max_tokens,
+        # presence_penalty = item.presence_penalty,
+        # frequency_penalty = item.frequency_penalty,
+        # repeat_penalty = item.repeat_penalty,
+        # stop=item.stop,
+        stream=False,
     )
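+    # With stream=False, create_completion returns a single completion rather
+    # than a generator, so handle the one-shot shapes (str, bytes, or a
+    # completion dict with "choices") before falling back to iteration.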
+    if isinstance(chunks, str):
+        print(chunks, end="")
+        yield chunks
+        return
+    if isinstance(chunks, bytes):
+        print(chunks.decode('utf-8'), end="")
+        yield chunks.decode('utf-8')
+        return
+    if isinstance(chunks, dict) and "choices" in chunks:
+        print(chunks["choices"][0]["text"], end="")
+        yield chunks["choices"][0]["text"]
+        return
+
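+    # Anything else is assumed to be iterable (e.g. the streaming generator);
+    # normalize each chunk, which may be str, bytes, or a CompletionChunk dict.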
     for chunk in chunks:
+        if isinstance(chunk, str):
+            print(chunk, end="")
+            yield chunk
+            continue
+        if isinstance(chunk, bytes):
+            print(chunk.decode('utf-8'), end="")
+            yield chunk.decode('utf-8')
+            continue
         cont:CompletionChunk = chunk
+        print(cont)
         encoded = cont["choices"][0]["text"]
         print(encoded, end="")
         yield encoded
diff --git a/src/lib/UI/ModelList.svelte b/src/lib/UI/ModelList.svelte
index bc2c40d5..72c29bb8 100644
--- a/src/lib/UI/ModelList.svelte
+++ b/src/lib/UI/ModelList.svelte
@@ -5,6 +5,9 @@
     import { language } from "src/lang";
     import Help from "../Others/Help.svelte";
     import CheckInput from "./GUI/CheckInput.svelte";
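+    // Tauri-only helpers: the isTauri flag and the native file-open dialog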
+    import { isTauri } from "src/ts/storage/globalApi";
+    import { open } from '@tauri-apps/api/dialog';
 
     export let value = ""
     export let onChange: (v:string) => void = (v) => {}
@@ -89,6 +92,11 @@
             const split = name.split(":::")
             return `${split[1]}`
         }
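+        // "local_" entries hold a filesystem path to a GGUF file; show just the file name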
+        if(name.startsWith('local_')){
+            const realName = name.replace('local_', '').split(/(\\|\/)/g).at(-1)
+            return `GGUF ${realName}`
+        }
         return name
     }
 }
@@ -143,9 +151,17 @@
{/if}
- {#if import.meta.env.DEV}
+ {#if $DataBase.tpo && isTauri}
{/if}
diff --git a/src/ts/process/request.ts b/src/ts/process/request.ts
index e2abe7d5..f8697ad4 100644
--- a/src/ts/process/request.ts
+++ b/src/ts/process/request.ts
@@ -1685,6 +1685,7 @@ export async function requestChatDataMain(arg:requestDataArgument, model:'model'
         const suggesting = model === "submodel"
         const proompt = stringlizeChatOba(formated, currentChar.name, suggesting, arg.continue)
         const stopStrings = getStopStrings(suggesting)
+        console.log(stopStrings) // debug: inspect the stop strings passed to the local GGUF run
         const modelPath = aiModel.replace('local_', '')
         const res = await runGGUFModel({
             prompt: proompt,