From 02fb4dd3b7dcbf4f03d5f9806754a78ee0dba480 Mon Sep 17 00:00:00 2001 From: lencx Date: Sun, 18 Dec 2022 13:30:27 +0800 Subject: [PATCH 1/3] chore: windows conf --- src-tauri/tauri.conf.json | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json index ec1c936..710d767 100644 --- a/src-tauri/tauri.conf.json +++ b/src-tauri/tauri.conf.json @@ -51,13 +51,16 @@ "shortDescription": "ChatGPT", "targets": "all", "windows": { - "webviewInstallMode": { - "silent": true, - "type": "downloadBootstrapper" - }, "certificateThumbprint": null, "digestAlgorithm": "sha256", - "timestampUrl": "" + "timestampUrl": "", + "webviewInstallMode": { + "silent": true, + "type": "embedBootstrapper" + }, + "wix": { + "language": ["zh-CN", "en-US"] + } } }, "security": { From c54aec88c01da0736b277c66fdd24088a272310e Mon Sep 17 00:00:00 2001 From: lencx Date: Mon, 19 Dec 2022 02:56:53 +0800 Subject: [PATCH 2/3] feat: chatgpt-prompts sync --- README-ZH_CN.md | 6 +- README.md | 6 +- UPDATE_LOG.md | 4 + package.json | 1 + src-tauri/Cargo.toml | 2 + src-tauri/src/app/cmd.rs | 17 +++++ src-tauri/src/app/fs_extra.rs | 123 +++++++++++++++++++++++++++++++ src-tauri/src/app/mod.rs | 1 + src-tauri/src/assets/cmd.js | 8 +- src-tauri/src/main.rs | 5 +- src-tauri/tauri.conf.json | 5 ++ src/hooks/useChatModel.ts | 8 +- src/hooks/useInit.ts | 2 +- src/layout/index.scss | 5 ++ src/layout/index.tsx | 27 ++++++- src/routes.tsx | 10 +++ src/utils.ts | 14 +++- src/view/LanguageModel/index.tsx | 2 +- src/view/SyncPrompts/config.tsx | 45 +++++++++++ src/view/SyncPrompts/index.scss | 28 +++++++ src/view/SyncPrompts/index.tsx | 71 ++++++++++++++++++ 21 files changed, 367 insertions(+), 23 deletions(-) create mode 100644 src-tauri/src/app/fs_extra.rs create mode 100644 src/view/SyncPrompts/config.tsx create mode 100644 src/view/SyncPrompts/index.scss create mode 100644 src/view/SyncPrompts/index.tsx diff --git a/README-ZH_CN.md b/README-ZH_CN.md index 6f0f581..17ca33e 100644 --- a/README-ZH_CN.md +++ b/README-ZH_CN.md @@ -22,9 +22,9 @@ **最新版:** -- `Mac`: [ChatGPT_0.4.2_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.4.2/ChatGPT_0.4.2_x64.dmg) -- `Linux`: [chat-gpt_0.4.2_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.4.2/chat-gpt_0.4.2_amd64.deb) -- `Windows`: [ChatGPT_0.4.2_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.4.2/ChatGPT_0.4.2_x64_en-US.msi) +- `Mac`: [ChatGPT_0.5.0_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.5.0/ChatGPT_0.5.0_x64.dmg) +- `Linux`: [chat-gpt_0.5.0_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.5.0/chat-gpt_0.5.0_amd64.deb) +- `Windows`: [ChatGPT_0.5.0_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.5.0/ChatGPT_0.5.0_x64_en-US.msi) [其他版本...](https://github.com/lencx/ChatGPT/releases) diff --git a/README.md b/README.md index 723b5f9..1911f04 100644 --- a/README.md +++ b/README.md @@ -23,9 +23,9 @@ **Latest:** -- `Mac`: [ChatGPT_0.4.2_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.4.2/ChatGPT_0.4.2_x64.dmg) -- `Linux`: [chat-gpt_0.4.2_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.4.2/chat-gpt_0.4.2_amd64.deb) -- `Windows`: [ChatGPT_0.4.2_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.4.2/ChatGPT_0.4.2_x64_en-US.msi) +- `Mac`: [ChatGPT_0.5.0_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.5.0/ChatGPT_0.5.0_x64.dmg) +- `Linux`: 
[chat-gpt_0.5.0_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.5.0/chat-gpt_0.5.0_amd64.deb) +- `Windows`: [ChatGPT_0.5.0_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.5.0/ChatGPT_0.5.0_x64_en-US.msi) [Other version...](https://github.com/lencx/ChatGPT/releases) diff --git a/UPDATE_LOG.md b/UPDATE_LOG.md index 4c124fb..21d5253 100644 --- a/UPDATE_LOG.md +++ b/UPDATE_LOG.md @@ -1,5 +1,9 @@ # UPDATE LOG +## v0.5.0 + +feat: `Control Center` added `chatgpt-prompts` synchronization + ## v0.4.2 add chatgpt log (path: `~/.chatgpt/chatgpt.log`) diff --git a/package.json b/package.json index dea3025..a026e54 100644 --- a/package.json +++ b/package.json @@ -33,6 +33,7 @@ "@ant-design/icons": "^4.8.0", "@tauri-apps/api": "^1.2.0", "antd": "^5.0.6", + "dayjs": "^1.11.7", "lodash": "^4.17.21", "react": "^18.2.0", "react-dom": "^18.2.0", diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml index c9aa894..5cbf212 100644 --- a/src-tauri/Cargo.toml +++ b/src-tauri/Cargo.toml @@ -20,6 +20,8 @@ serde = { version = "1.0", features = ["derive"] } tauri = { version = "1.2.2", features = ["api-all", "devtools", "system-tray", "updater"] } tauri-plugin-positioner = { version = "1.0.4", features = ["system-tray"] } log = "0.4.17" +csv = "1.1.6" +thiserror = "1.0.38" [dependencies.tauri-plugin-log] git = "https://github.com/tauri-apps/tauri-plugin-log" diff --git a/src-tauri/src/app/cmd.rs b/src-tauri/src/app/cmd.rs index 17968c3..0d4adb5 100644 --- a/src-tauri/src/app/cmd.rs +++ b/src-tauri/src/app/cmd.rs @@ -71,3 +71,20 @@ pub fn get_chat_model() -> serde_json::Value { let content = fs::read_to_string(path).unwrap_or_else(|_| r#"{"data":[]}"#.to_string()); serde_json::from_str(&content).unwrap() } + +#[derive(Debug, serde::Serialize, serde::Deserialize)] +pub struct PromptRecord { + pub act: String, + pub prompt: String, +} + +#[command] +pub fn parse_prompt(data: String) -> Vec { + let mut rdr = csv::Reader::from_reader(data.as_bytes()); + let mut list = vec![]; + for result in rdr.deserialize() { + let record: PromptRecord = result.unwrap(); + list.push(record); + } + list +} diff --git a/src-tauri/src/app/fs_extra.rs b/src-tauri/src/app/fs_extra.rs new file mode 100644 index 0000000..72453fb --- /dev/null +++ b/src-tauri/src/app/fs_extra.rs @@ -0,0 +1,123 @@ +// https://github.com/tauri-apps/tauri-plugin-fs-extra/blob/dev/src/lib.rs + +// Copyright 2019-2021 Tauri Programme within The Commons Conservancy +// SPDX-License-Identifier: Apache-2.0 +// SPDX-License-Identifier: MIT + +use serde::{ser::Serializer, Serialize}; +use std::{ + path::PathBuf, + time::{SystemTime, UNIX_EPOCH}, +}; +use tauri::command; + +#[cfg(unix)] +use std::os::unix::fs::{MetadataExt, PermissionsExt}; +#[cfg(windows)] +use std::os::windows::fs::MetadataExt; + +type Result = std::result::Result; + +#[derive(Debug, thiserror::Error)] +pub enum Error { + #[error(transparent)] + Io(#[from] std::io::Error), +} + +impl Serialize for Error { + fn serialize(&self, serializer: S) -> std::result::Result + where + S: Serializer, + { + serializer.serialize_str(self.to_string().as_ref()) + } +} + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +struct Permissions { + readonly: bool, + #[cfg(unix)] + mode: u32, +} + +#[cfg(unix)] +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +struct UnixMetadata { + dev: u64, + ino: u64, + mode: u32, + nlink: u64, + uid: u32, + gid: u32, + rdev: u64, + blksize: u64, + blocks: u64, +} + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +pub 
struct Metadata { + accessed_at_ms: u64, + created_at_ms: u64, + modified_at_ms: u64, + is_dir: bool, + is_file: bool, + is_symlink: bool, + size: u64, + permissions: Permissions, + #[cfg(unix)] + #[serde(flatten)] + unix: UnixMetadata, + #[cfg(windows)] + file_attributes: u32, +} + +fn system_time_to_ms(time: std::io::Result) -> u64 { + time.map(|t| { + let duration_since_epoch = t.duration_since(UNIX_EPOCH).unwrap(); + duration_since_epoch.as_millis() as u64 + }) + .unwrap_or_default() +} + +#[command] +pub async fn metadata(path: PathBuf) -> Result { + let metadata = std::fs::metadata(path)?; + let file_type = metadata.file_type(); + let permissions = metadata.permissions(); + Ok(Metadata { + accessed_at_ms: system_time_to_ms(metadata.accessed()), + created_at_ms: system_time_to_ms(metadata.created()), + modified_at_ms: system_time_to_ms(metadata.modified()), + is_dir: file_type.is_dir(), + is_file: file_type.is_file(), + is_symlink: file_type.is_symlink(), + size: metadata.len(), + permissions: Permissions { + readonly: permissions.readonly(), + #[cfg(unix)] + mode: permissions.mode(), + }, + #[cfg(unix)] + unix: UnixMetadata { + dev: metadata.dev(), + ino: metadata.ino(), + mode: metadata.mode(), + nlink: metadata.nlink(), + uid: metadata.uid(), + gid: metadata.gid(), + rdev: metadata.rdev(), + blksize: metadata.blksize(), + blocks: metadata.blocks(), + }, + #[cfg(windows)] + file_attributes: metadata.file_attributes(), + }) +} + +// #[command] +// pub async fn exists(path: PathBuf) -> bool { +// path.exists() +// } diff --git a/src-tauri/src/app/mod.rs b/src-tauri/src/app/mod.rs index 5466c45..46d47f9 100644 --- a/src-tauri/src/app/mod.rs +++ b/src-tauri/src/app/mod.rs @@ -1,4 +1,5 @@ pub mod cmd; +pub mod fs_extra; pub mod menu; pub mod setup; pub mod window; diff --git a/src-tauri/src/assets/cmd.js b/src-tauri/src/assets/cmd.js index 2bfe285..71c8cfe 100644 --- a/src-tauri/src/assets/cmd.js +++ b/src-tauri/src/assets/cmd.js @@ -62,8 +62,10 @@ function init() { async function cmdTip() { const chatModelJson = await invoke('get_chat_model') || {}; - if (!chatModelJson.data && chatModelJson.data.length <= 0) return; - const data = chatModelJson.data || []; + const user_custom = chatModelJson.user_custom || []; + const sys_sync_prompts = chatModelJson.sys_sync_prompts || []; + const data = [...user_custom, ...sys_sync_prompts]; + if (data.length <= 0) return; const modelDom = document.createElement('div'); modelDom.classList.add('chat-model-cmd-list'); @@ -74,7 +76,7 @@ async function cmdTip() { } document.querySelector('form').appendChild(modelDom); - const itemDom = (v) => `
/${v.cmd}${v.act}
`; + const itemDom = (v) => `
/${v.cmd}${v.act}
`; const searchInput = document.querySelector('form textarea'); // Enter a command starting with `/` and press a space to automatically fill `chatgpt prompt`. diff --git a/src-tauri/src/main.rs b/src-tauri/src/main.rs index a4f2d21..04ce341 100644 --- a/src-tauri/src/main.rs +++ b/src-tauri/src/main.rs @@ -7,7 +7,7 @@ mod app; mod conf; mod utils; -use app::{cmd, menu, setup}; +use app::{cmd, fs_extra, menu, setup}; use conf::{ChatConfJson, ChatState}; use tauri::api::path; use tauri_plugin_log::{fern::colors::ColoredLevelConfig, LogTarget, LoggerBuilder}; @@ -22,6 +22,7 @@ fn main() { // https://github.com/tauri-apps/tauri/pull/2736 .plugin( LoggerBuilder::new() + // .level(log::LevelFilter::Error) .with_colors(colors) .targets([ // LogTarget::LogDir, @@ -44,6 +45,8 @@ fn main() { cmd::form_msg, cmd::open_file, cmd::get_chat_model, + cmd::parse_prompt, + fs_extra::metadata, ]) .setup(setup::init) .plugin(tauri_plugin_positioner::init()) diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json index 710d767..1096648 100644 --- a/src-tauri/tauri.conf.json +++ b/src-tauri/tauri.conf.json @@ -12,6 +12,11 @@ "tauri": { "allowlist": { "all": true, + "http": { + "scope": [ + "https://raw.githubusercontent.com/*" + ] + }, "fs": { "all": true, "scope": [ diff --git a/src/hooks/useChatModel.ts b/src/hooks/useChatModel.ts index e6f3ffe..dca7058 100644 --- a/src/hooks/useChatModel.ts +++ b/src/hooks/useChatModel.ts @@ -4,20 +4,20 @@ import { clone } from 'lodash'; import { CHAT_MODEL_JSON, readJSON, writeJSON } from '@/utils'; import useInit from '@/hooks/useInit'; -export default function useChatModel() { +export default function useChatModel(key: string) { const [modelJson, setModelJson] = useState>({}); useInit(async () => { - const data = await readJSON(CHAT_MODEL_JSON, { name: 'ChatGPT Model', data: [] }); + const data = await readJSON(CHAT_MODEL_JSON, { name: 'ChatGPT Model', [key]: [] }); setModelJson(data); }); const modelSet = async (data: Record[]) => { const oData = clone(modelJson); - oData.data = data; + oData[key] = data; await writeJSON(CHAT_MODEL_JSON, oData); setModelJson(oData); } - return { modelJson, modelSet, modelData: modelJson?.data || [] } + return { modelJson, modelSet, modelData: modelJson?.[key] || [] } } \ No newline at end of file diff --git a/src/hooks/useInit.ts b/src/hooks/useInit.ts index 3443d54..b5438ad 100644 --- a/src/hooks/useInit.ts +++ b/src/hooks/useInit.ts @@ -8,5 +8,5 @@ export default function useInit(callback: () => void) { callback(); isInit.current = false; } - }, []) + }) } \ No newline at end of file diff --git a/src/layout/index.scss b/src/layout/index.scss index 9ef7327..338149f 100644 --- a/src/layout/index.scss +++ b/src/layout/index.scss @@ -8,6 +8,11 @@ } } +.ant-layout-sider-trigger { + user-select: none; + -webkit-user-select: none; +} + .chat-container { padding: 20px; overflow: hidden; diff --git a/src/layout/index.tsx b/src/layout/index.tsx index 2bf2691..e799981 100644 --- a/src/layout/index.tsx +++ b/src/layout/index.tsx @@ -17,13 +17,32 @@ const ChatLayout: FC = ({ children }) => { const go = useNavigate(); return ( - - setCollapsed(value)}> + + setCollapsed(value)} + style={{ + overflow: 'auto', + height: '100vh', + position: 'fixed', + left: 0, + top: 0, + bottom: 0, + zIndex: 999, + }} + >
go(i.key)} /> - - + +
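
Per the diffstat, `src/view/SyncPrompts/index.tsx` is the file that consumes the pieces introduced above (the `parse_prompt` command and the `https://raw.githubusercontent.com/*` http scope), but it is not included in this excerpt. As a rough sketch of how the two sides might fit together on the React end — the CSV URL, the `syncPrompts` helper, and the `PromptRecord` interface below are illustrative assumptions, not code from the patch:

```ts
import { invoke } from '@tauri-apps/api/tauri';
import { fetch, ResponseType } from '@tauri-apps/api/http';

// Mirrors the Rust `PromptRecord` struct added in src-tauri/src/app/cmd.rs.
interface PromptRecord {
  act: string;
  prompt: string;
}

// Assumed CSV source; it must fall under the `https://raw.githubusercontent.com/*`
// scope added to the http allowlist in tauri.conf.json.
const PROMPTS_CSV =
  'https://raw.githubusercontent.com/f/awesome-chatgpt-prompts/main/prompts.csv';

export async function syncPrompts(): Promise<PromptRecord[]> {
  // Download the raw CSV text through Tauri's http API (permitted by the new scope).
  const res = await fetch<string>(PROMPTS_CSV, {
    method: 'GET',
    responseType: ResponseType.Text,
  });
  // Hand the CSV to the Rust side; `parse_prompt` deserializes each row
  // into { act, prompt } via the `csv` crate added in Cargo.toml.
  return invoke<PromptRecord[]>('parse_prompt', { data: res.data });
}
```

The parsed records would then presumably be persisted under the `sys_sync_prompts` key that the updated `useChatModel(key)` hook and `cmd.js` now read alongside `user_custom`.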