Update version to 1.73.0
This commit is contained in:
@@ -8,7 +8,7 @@
|
||||
},
|
||||
"package": {
|
||||
"productName": "RisuAI",
|
||||
"version": "1.72.0"
|
||||
"version": "1.73.0"
|
||||
},
|
||||
"tauri": {
|
||||
"allowlist": {
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
export const patchNote = {
|
||||
version: "1.72",
|
||||
version: "1.73",
|
||||
content:
|
||||
`
|
||||
# Update 1.72
|
||||
- Added custom chain of thoughts
|
||||
- Added thought tag depth
|
||||
- Added Openrouter fallback option
|
||||
- Added Openrouter middle-out option
|
||||
# Update 1.73
|
||||
- Added WebLLM Local
|
||||
- WebLLM Local is an option for users who want to use an LLM directly on their computer, without sending any data to the server.
|
||||
- WebLLM Local is experimental, and may not work on all devices.
|
||||
- Currently WebLLM Local only supports three models, but more will be added in the future.
|
||||
- Also, in future updates, you may be able to use WebLLM Local with any transformer model.
|
||||
- Currently WebLLM Local only supports CPU, but GPU support with WebGPU will be added in the future.
|
||||
`
|
||||
}
|
||||
|
||||
|
||||
@@ -15,7 +15,7 @@ import type { OobaChatCompletionRequestParams } from '../model/ooba';
|
||||
|
||||
export const DataBase = writable({} as any as Database)
|
||||
export const loadedStore = writable(false)
|
||||
export let appVer = "1.72.0"
|
||||
export let appVer = "1.73.0"
|
||||
export let webAppSubVer = ''
|
||||
|
||||
export function setDatabase(data:Database){
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"version":"1.72.0"}
|
||||
{"version":"1.73.0"}
|
||||
Reference in New Issue
Block a user