mirror of https://github.com/lencx/ChatGPT.git (synced 2024-10-01)

refactor: global shortcut

parent e1f8030009
commit ba1fe9a603
@@ -3,6 +3,7 @@
 ## v0.7.4

 fix:

 - trying to resolve linux errors: `error while loading shared libraries`
+- customize global shortcuts (`Menu -> Preferences -> Control Center -> General -> Global Shortcut`)

 ## v0.7.3
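The new Global Shortcut preference stores an accelerator string that the setup code below parses with wry's `Accelerator` type. A minimal standalone sketch of that validation, assuming example values such as `CmdOrCtrl+Shift+O` (the actual default, if any, is not shown in this diff):

```rust
// Standalone sketch: check a user-entered shortcut string with the same
// parser the app uses. "CmdOrCtrl+Shift+O" is only an illustrative value.
use wry::application::accelerator::Accelerator;

fn is_valid_shortcut(s: &str) -> bool {
    s.parse::<Accelerator>().is_ok()
}

fn main() {
    assert!(is_valid_shortcut("CmdOrCtrl+Shift+O"));
    assert!(!is_valid_shortcut("NotAModifier+O")); // unknown modifier fails to parse
}
```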
@@ -26,6 +26,7 @@ walkdir = "2.3.2"
 regex = "1.7.0"
 tokio = { version = "1.23.0", features = ["macros"] }
 reqwest = "0.11.13"
+wry = "0.23.4"

 [dependencies.tauri-plugin-log]
 git = "https://github.com/tauri-apps/tauri-plugin-log"
@@ -88,9 +88,18 @@ pub fn parse_prompt(data: String) -> Vec<PromptRecord> {
     let mut rdr = csv::Reader::from_reader(data.as_bytes());
     let mut list = vec![];
     for result in rdr.deserialize() {
-        let record: PromptRecord = result.unwrap();
+        let record: PromptRecord = result.unwrap_or_else(|err| {
+            info!("parse_prompt_error: {}", err);
+            PromptRecord {
+                cmd: None,
+                act: "".to_string(),
+                prompt: "".to_string(),
+            }
+        });
         if !record.act.is_empty() {
             list.push(record);
         }
     }
     list
 }
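The change above replaces a hard `unwrap()` with a fallback record, so one malformed CSV row is logged and skipped (via the empty `act` check) instead of aborting the whole prompt import. A standalone sketch of that sentinel-and-filter pattern, using simplified types rather than the project's `PromptRecord`:

```rust
// Rows that fail to parse become an empty sentinel record and are then
// filtered out, mirroring the unwrap_or_else fallback in the diff above.
#[derive(Debug, Default)]
struct Record {
    act: String,
    prompt: String,
}

fn main() {
    let rows = ["Linux Terminal,Act as a linux terminal", "broken-row"];
    let list: Vec<Record> = rows
        .iter()
        .map(|row| {
            row.split_once(',')
                .map(|(act, prompt)| Record { act: act.into(), prompt: prompt.into() })
                .unwrap_or_default() // parse failure -> empty sentinel
        })
        .filter(|r| !r.act.is_empty()) // drop the sentinel rows
        .collect();
    assert_eq!(list.len(), 1);
    println!("{:?}", list);
}
```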
@@ -222,19 +231,22 @@ pub async fn sync_prompts(app: AppHandle, time: u64) -> Option<Vec<ModelRecord>>

 #[command]
 pub async fn sync_user_prompts(url: String, data_type: String) -> Option<Vec<ModelRecord>> {
-    let res = utils::get_data(&url, None).await.unwrap();
+    let res = utils::get_data(&url, None).await.unwrap_or_else(|err| {
+        info!("chatgpt_http_error: {}", err);
+        None
+    });

     info!("chatgpt_http_url: {}", url);

     if let Some(v) = res {
         let data;
         if data_type == "csv" {
-            info!("chatgpt_http_csv_parser");
+            info!("chatgpt_http_csv_parse");
             data = parse_prompt(v);
         } else if data_type == "json" {
-            info!("chatgpt_http_json_parser");
+            info!("chatgpt_http_json_parse");
             data = serde_json::from_str(&v).unwrap_or_else(|err| {
-                info!("chatgpt_http_json_parser_error: {}", err);
+                info!("chatgpt_http_json_parse_error: {}", err);
                 vec![]
             });
         } else {
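The HTTP fetch gets the same treatment: a failed request is logged and collapsed to `None`, so the command returns no data instead of panicking. A minimal sketch of that Result-to-Option fallback, assuming `utils::get_data` returns something like `Result<Option<String>, E>` (its exact signature is not part of this diff):

```rust
// Error -> log -> None, mirroring the unwrap_or_else fallback above.
fn recover<T, E: std::fmt::Display>(res: Result<Option<T>, E>) -> Option<T> {
    res.unwrap_or_else(|err| {
        eprintln!("chatgpt_http_error: {}", err);
        None
    })
}

fn main() {
    let ok: Result<Option<i32>, String> = Ok(Some(1));
    let failed: Result<Option<i32>, String> = Err("connection timed out".into());
    assert_eq!(recover(ok), Some(1));
    assert_eq!(recover(failed), None);
}
```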
@@ -1,6 +1,7 @@
 use crate::{app::window, conf::ChatConfJson, utils};
 use log::info;
 use tauri::{utils::config::WindowUrl, window::WindowBuilder, App, GlobalShortcutManager, Manager};
+use wry::application::accelerator::Accelerator;

 pub fn init(app: &mut App) -> std::result::Result<(), Box<dyn std::error::Error>> {
     info!("stepup");
@@ -14,7 +15,10 @@ pub fn init(app: &mut App) -> std::result::Result<(), Box<dyn std::error::Error>
     });

     if let Some(v) = chat_conf.global_shortcut {
-        info!("global_shortcut");
+        info!("global_shortcut: `{}`", v);
+        match v.parse::<Accelerator>() {
+            Ok(_) => {
+                info!("global_shortcut_register");
                 let handle = app.app_handle();
                 let mut shortcut = app.global_shortcut_manager();
                 shortcut
@@ -31,6 +35,11 @@ pub fn init(app: &mut App) -> std::result::Result<(), Box<dyn std::error::Error>
                     .unwrap_or_else(|err| {
                         info!("global_shortcut_register_error: {}", err);
                     });
+            }
+            Err(err) => {
+                info!("global_shortcut_parse_error: {}", err);
+            }
+        }
     } else {
         info!("global_shortcut_unregister");
     };
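Taken together, the two hunks above validate the configured string with wry's `Accelerator` parser before touching Tauri's global shortcut manager, so an invalid value only produces a log line. A rough sketch of that flow outside the project; the handler closure is a hypothetical stand-in, since the real callback is not part of this hunk:

```rust
use tauri::{AppHandle, GlobalShortcutManager};
use wry::application::accelerator::Accelerator;

// Validate first, then register; both failure paths are logged rather than
// propagated, matching the Err(err) and unwrap_or_else branches above.
fn register_global_shortcut(handle: &AppHandle, shortcut: &str) {
    match shortcut.parse::<Accelerator>() {
        Ok(_) => {
            let mut manager = handle.global_shortcut_manager();
            manager
                .register(shortcut, || println!("global shortcut pressed")) // hypothetical handler
                .unwrap_or_else(|err| println!("global_shortcut_register_error: {}", err));
        }
        Err(err) => println!("global_shortcut_parse_error: {}", err),
    }
}
```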
@@ -30,7 +30,7 @@ pub fn control_window(handle: &tauri::AppHandle) {
     let app = handle.clone();
     tokio::spawn(async move {
         WindowBuilder::new(&app, "main", WindowUrl::App("index.html".into()))
-            .title("ChatGPT")
+            .title("Control Center")
             .resizable(true)
             .fullscreen(false)
             .inner_size(800.0, 600.0)
src/view/model/SyncCustom/index.tsx
@@ -34,7 +34,9 @@ export default function SyncCustom() {
     if (!opInfo.opType) return;
     if (opInfo.opType === 'sync') {
       const filename = `${opInfo?.opRecord?.id}.json`;
-      handleSync(filename).then(() => {
+      handleSync(filename).then((isOk: boolean) => {
+        opInfo.resetRecord();
+        if (!isOk) return;
         const data = opReplace(opInfo?.opRecord?.[opSafeKey], { ...opInfo?.opRecord, last_updated: Date.now() });
         modelSet(data);
         opInfo.resetRecord();
@@ -70,10 +72,11 @@ export default function SyncCustom() {
       await modelCacheSet(data as [], file);
       await modelCacheCmd();
       message.success('ChatGPT Prompts data has been synchronized!');
+      return true;
     } else {
       message.error('ChatGPT Prompts data sync failed, please try again!');
+      return false;
     }
-    return;
   }
   // local
   if (isJson) {
@@ -87,6 +90,7 @@ export default function SyncCustom() {
       await modelCacheSet(fmtData(list), file);
     }
     await modelCacheCmd();
+    return true;
   };

   const handleOk = () => {