diff --git a/.gitattributes b/.gitattributes index 4fabca3..95f78c3 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,3 +1,4 @@ *.js linguist-vendored *.tsx linguist-vendored -*.scss linguist-vendored \ No newline at end of file +*.scss linguist-vendored +src/**/*.ts linguist-vendored \ No newline at end of file diff --git a/README-ZH_CN.md b/README-ZH_CN.md index 0661453..3e64e9a 100644 --- a/README-ZH_CN.md +++ b/README-ZH_CN.md @@ -22,9 +22,9 @@ **最新版:** -- `Mac`: [ChatGPT_0.3.0_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.3.0/ChatGPT_0.3.0_x64.dmg) -- `Linux`: [chat-gpt_0.3.0_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.3.0/chat-gpt_0.3.0_amd64.deb) -- `Windows`: [ChatGPT_0.3.0_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.3.0/ChatGPT_0.3.0_x64_en-US.msi) +- `Mac`: [ChatGPT_0.4.0_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.4.0/ChatGPT_0.4.0_x64.dmg) +- `Linux`: [chat-gpt_0.4.0_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.4.0/chat-gpt_0.4.0_amd64.deb) +- `Windows`: [ChatGPT_0.4.0_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.4.0/ChatGPT_0.4.0_x64_en-US.msi) [其他版本...](https://github.com/lencx/ChatGPT/releases) @@ -47,6 +47,25 @@ tap repo, "https://github.com/#{repo}.git" cask "popcorn-time", args: { "no-quarantine": true } ~~~ +## 📢 公告 + +这是一个令人兴奋的重大更新。像 `Telegram 机器人指令` 那样工作,帮助你快速填充自定模型,来让 ChatGPT 按照你想要的方式去工作。这个项目倾注了我大量业余时间,如果它对你有所帮助,宣传转发,或者 star 都是对我的巨大鼓励。我希望我可以持续更新下去,加入更多有趣的功能。 + +### 如何使用指令? + +你可以从 [awesome-chatgpt-prompts](https://github.com/f/awesome-chatgpt-prompts) 来寻找有趣的功能来导入到应用。 + +![chat cmd](./assets/chat-cmd-1.png) +![chat cmd](./assets/chat-cmd-2.png) + +数据导入完成后,可以重新启动应用来使配置生效(`Menu -> Preferences -> Restart ChatGPT`)。 + +项目会维护一份常用命令,您也可以直接将 [chat.model.json](https://github.com/lencx/ChatGPT/blob/main/chat.model.json) 复制到你的本地目录 `~/.chatgpt/chat.model.json`。 + +在 ChatGPT 文本输入区域,键入 `/` 开头的字符,则会弹出指令提示,按下空格键,它会默认将命令关联的文本填充到输入区域(注意:如果包含多个指令提示,它只会选择第一个作为填充,你可以持续输入,直到第一个提示命令为你想要时,再按下空格键。或者使用鼠标来点击多条指令中的某一个)。填充完成后,你只需要按下回车键即可。 + +![chatgpt](assets/chatgpt.gif) + ## ✨ 功能概览 - 跨平台: `macOS` `Linux` `Windows` @@ -60,11 +79,13 @@ cask "popcorn-time", args: { "no-quarantine": true } - **Preferences (喜好)** - `Theme` - `Light`, `Dark` (仅支持 macOS 和 Windows) - - `Always On Top`: 窗口置顶 + - `Stay On Top`: 窗口置顶 - `Titlebar`: 是否显示 `Titlebar`,仅 macOS 支持 - `Inject Script`: 用于修改网站的用户自定义脚本 + - `Hide Dock Icon` ([#35](https://github.com/lencx/ChatGPT/issues/35)): 隐藏 Dock 中的应用图标 (仅 macOS 支持) + - 系统图盘右键单击打开菜单,然后在菜单项中点击 `Show Dock Icon` 可以重新将应用图标显示在 Dock(`SystemTrayMenu -> Show Dock Icon`) - `Control Center`: ChatGPT 应用的控制中心,它将为应用提供无限的可能 - - 设置 `Theme`,`Always on Top`,`Titlebar` 等 + - 设置 `Theme`,`Stay On Top`,`Titlebar` 等 - `User Agent` ([#17](https://github.com/lencx/ChatGPT/issues/17)): 自定义 `user agent` 防止网站安全检测,默认值为空 - `Switch Origin` ([#14](https://github.com/lencx/ChatGPT/issues/14)): 切换网站源地址,默认为 `https://chat.openai.com`。需要注意的是镜像网站的 UI 需要和原网站一致,否则可能会导致某些功能不工作 - `Go to Config`: 打开 ChatGPT 配置目录 (`path: ~/.chatgpt/*`) @@ -141,6 +162,7 @@ yarn build ## ❤️ 感谢 - 分享按钮的代码从 [@liady](https://github.com/liady) 的插件获得,并做了一些本地化修改 +- 感谢 [Awesome ChatGPT Prompts](https://github.com/f/awesome-chatgpt-prompts) 项目为这个应用自定义指令功能所带来的启发 --- diff --git a/README.md b/README.md index 959bfa1..c81ce6c 100644 --- a/README.md +++ b/README.md @@ -23,9 +23,9 @@ **Latest:** -- `Mac`: [ChatGPT_0.3.0_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.3.0/ChatGPT_0.3.0_x64.dmg) -- `Linux`: 
[chat-gpt_0.3.0_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.3.0/chat-gpt_0.3.0_amd64.deb)
-- `Windows`: [ChatGPT_0.3.0_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.3.0/ChatGPT_0.3.0_x64_en-US.msi)
+- `Mac`: [ChatGPT_0.4.0_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.4.0/ChatGPT_0.4.0_x64.dmg)
+- `Linux`: [chat-gpt_0.4.0_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.4.0/chat-gpt_0.4.0_amd64.deb)
+- `Windows`: [ChatGPT_0.4.0_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.4.0/ChatGPT_0.4.0_x64_en-US.msi)

[Other version...](https://github.com/lencx/ChatGPT/releases)

@@ -48,6 +48,25 @@ tap repo, "https://github.com/#{repo}.git"
cask "popcorn-time", args: { "no-quarantine": true }
~~~

+## 📢 Announcement
+
+This is a major and exciting update. It works like a `Telegram bot command`, helping you quickly fill in custom prompts so that ChatGPT works the way you want. This project has taken a lot of my spare time, so if it helps you, spreading the word or starring the repository would be a great encouragement to me. I hope I can keep updating it and adding more interesting features.
+
+### How does it work?
+
+You can look at [awesome-chatgpt-prompts](https://github.com/f/awesome-chatgpt-prompts) to find interesting prompts to import into the app.
+
+![chat cmd](./assets/chat-cmd-1.png)
+![chat cmd](./assets/chat-cmd-2.png)
+
+After the data import is done, you can restart the app to make the configuration take effect (`Menu -> Preferences -> Restart ChatGPT`).
+
+The project maintains a list of common commands; you can also copy [chat.model.json](https://github.com/lencx/ChatGPT/blob/main/chat.model.json) directly to your local directory `~/.chatgpt/chat.model.json`.
+
+In the ChatGPT text input area, type a string starting with `/` to bring up the command prompt, then press the spacebar to fill the input area with the text associated with the command (note: if multiple commands match, only the first one is used as the fill; keep typing until the first match is the one you want and then press the spacebar, or click one of the listed commands with the mouse). When the fill is complete, simply press the Enter key.
+
+![chatgpt](assets/chatgpt.gif)
+
 ## ✨ Features

 - Multi-platform: `macOS` `Linux` `Windows`
@@ -61,11 +80,13 @@ cask "popcorn-time", args: { "no-quarantine": true }
 - **Preferences**
   - `Theme` - `Light`, `Dark` (Only macOS and Windows are supported).
-  - `Always on Top`: The window is always on top of other windows.
+  - `Stay On Top`: The window stays on top of other windows.
   - `Titlebar`: Whether to display the titlebar, supported by macOS only.
+  - `Hide Dock Icon` ([#35](https://github.com/lencx/ChatGPT/issues/35)): Hide the application icon from the Dock (macOS only).
+    - Right-click the system tray icon to open the menu, then click `Show Dock Icon` to show the application icon in the Dock again (`SystemTrayMenu -> Show Dock Icon`).
   - `Inject Script`: Using scripts to modify pages.
 - `Control Center`: The control center of ChatGPT application, it will give unlimited imagination to the application.
-  - `Theme`, `Always on Top`, `Titlebar`, ...
+  - `Theme`, `Stay On Top`, `Titlebar`, ...
 - `User Agent` ([#17](https://github.com/lencx/ChatGPT/issues/17)): Custom `user agent`, which may be required in some scenarios. The default value is the empty string.
- `Switch Origin` ([#14](https://github.com/lencx/ChatGPT/issues/14)): Switch the site source address, the default is `https://chat.openai.com`, please make sure the mirror site UI is the same as the original address. Otherwise, some functions may not be available. - `Go to Config`: Open the configuration file directory (`path: ~/.chatgpt/*`). @@ -145,6 +166,7 @@ yarn build ## ❤️ Thanks - The core implementation of the share button code was copied from the [@liady](https://github.com/liady) extension with some modifications. +- Thanks to the [Awesome ChatGPT Prompts](https://github.com/f/awesome-chatgpt-prompts) repository for inspiring the custom command function for this application. --- diff --git a/UPDATE_LOG.md b/UPDATE_LOG.md index 53f128c..f2bda9b 100644 --- a/UPDATE_LOG.md +++ b/UPDATE_LOG.md @@ -1,5 +1,11 @@ # UPDATE LOG +## v0.4.0 + +feat: +- customize the ChatGPT prompts command (https://github.com/lencx/ChatGPT#-announcement) +- menu enhancement: hide application icons from the Dock (support macOS only) + ## v0.3.0 fix: can't open ChatGPT @@ -36,7 +42,7 @@ feat: tray window ## v0.1.6 feat: -- always on top +- stay on top - export ChatGPT history ## v0.1.5 diff --git a/assets/chat-cmd-1.png b/assets/chat-cmd-1.png new file mode 100644 index 0000000..e83d4c2 Binary files /dev/null and b/assets/chat-cmd-1.png differ diff --git a/assets/chat-cmd-2.png b/assets/chat-cmd-2.png new file mode 100644 index 0000000..36e971c Binary files /dev/null and b/assets/chat-cmd-2.png differ diff --git a/assets/chatgpt.gif b/assets/chatgpt.gif new file mode 100644 index 0000000..d9be47a Binary files /dev/null and b/assets/chatgpt.gif differ diff --git a/chat.model.json b/chat.model.json new file mode 100644 index 0000000..c5a08bd --- /dev/null +++ b/chat.model.json @@ -0,0 +1,82 @@ +{ + "name": "ChatGPT Model", + "link": "https://github.com/lencx/ChatGPT/blob/main/chat.model.md", + "data": [ + { + "cmd": "cfm", + "act": "‘Character’ from ‘Movie/Book/Anything’", + "tags": [], + "enable": true, + "prompt": "I want you to act like {Character} from {series}. I want you to respond and answer like {Character}. do not write any explanations. only answer like {character}. You must know all of the knowledge of {character}. My first sentence is “Hi Character”" + }, + { + "cmd": "pc", + "act": "Plagiarism Checker", + "tags": [], + "enable": true, + "prompt": "I want you to act as a plagiarism checker. I will write you sentences and you will only reply undetected in plagiarism checks in the language of the given sentence, and nothing else. Do not write explanations on replies. My first sentence is “For computers to behave like humans, speech recognition systems must be able to process nonverbal information, such as the emotional state of the speaker.”" + }, + { + "cmd": "drunk", + "act": "Drunk", + "tags": [], + "enable": true, + "prompt": "Let’s play a game. You will role play as if you were drunk. Make your answer’s incoherent." + }, + { + "cmd": "tg", + "act": "Travel Guide", + "tags": [], + "enable": true, + "prompt": "I want you to act as a travel guide. I will write you my location and you will suggest a place to visit near my location. In some cases, I will also give you the type of places I will visit. You will also suggest me places of similar type that are close to my first location. 
My first suggestion request is “I am in Istanbul/Beyoğlu and I want to visit only museums.”" + }, + { + "cmd": "eph", + "act": "English Pronunciation Helper", + "tags": [], + "enable": true, + "prompt": "I want you to act as an English pronunciation assistant for Turkish speaking people. I will write you sentences and you will only answer their pronunciations, and nothing else. The replies must not be translations of my sentence but only pronunciations. Pronunciations should use Turkish Latin letters for phonetics. Do not write explanations on replies. My first sentence is “how the weather is in Istanbul?”" + }, + { + "cmd": "excel", + "act": "Excel Sheet", + "tags": [], + "enable": true, + "prompt": "I want you to act as a text based excel. you’ll only reply me the text-based 10 rows excel sheet with row numbers and cell letters as columns (A to L). First column header should be empty to reference row number. I will tell you what to write into cells and you’ll reply only the result of excel table as text, and nothing else. Do not write explanations. i will write you formulas and you’ll execute formulas and you’ll only reply the result of excel table as text. First, reply me the empty sheet." + }, + { + "cmd": "console", + "act": "JavaScript Console", + "tags": [], + "enable": true, + "prompt": "I want you to act as a javascript console. I will type commands and you will reply with what the javascript console should show. I want you to only reply with the terminal output inside one unique code block, and nothing else. do not write explanations. do not type commands unless I instruct you to do so. when i need to tell you something in english, i will do so by putting text inside curly brackets {like this}. my first command is console.log(“Hello World”);" + }, + { + "cmd": "pi", + "act": "position Interviewer", + "tags": [], + "enable": true, + "prompt": "I want you to act as an interviewer. I will be the candidate and you will ask me the interview questions for the position position. I want you to only reply as the interviewer. Do not write all the conservation at once. I want you to only do the interview with me. Ask me the questions and wait for my answers. Do not write explanations. Ask me the questions one by one like an interviewer does and wait for my answers. My first sentence is “Hi”" + }, + { + "cmd": "trans", + "act": "English Translator and Improver", + "tags": [ + "tools", + "cx", + "x" + ], + "enable": true, + "prompt": "I want you to act as an English translator, spelling corrector and improver. I will speak to you in any language and you will detect the language, translate it and answer in the corrected and improved version of my text, in English. I want you to replace my simplified A0-level words and sentences with more beautiful and elegant, upper level English words and sentences. Keep the meaning same, but make them more literary. I want you to only reply the correction, the improvements and nothing else, do not write explanations. My first sentence is \"istanbulu cok seviyom burada olmak cok guzel\"" + }, + { + "cmd": "terminal", + "act": "Linux Terminal", + "tags": [ + "dev" + ], + "enable": true, + "prompt": "i want you to act as a linux terminal. I will type commands and you will reply with what the terminal should show. I want you to only reply with the terminal output inside one unique code block, and nothing else. do not write explanations. do not type commands unless I instruct you to do so. 
when i need to tell you something in english, i will do so by putting text inside curly brackets {like this}. my first command is pwd" + } + ] +} \ No newline at end of file diff --git a/chat.model.md b/chat.model.md new file mode 100644 index 0000000..9a80fc8 --- /dev/null +++ b/chat.model.md @@ -0,0 +1,3 @@ +# ChatGPT Model + +- [Awesome ChatGPT Prompts](https://github.com/f/awesome-chatgpt-prompts) diff --git a/package.json b/package.json index 7ee7be4..dea3025 100644 --- a/package.json +++ b/package.json @@ -36,7 +36,8 @@ "lodash": "^4.17.21", "react": "^18.2.0", "react-dom": "^18.2.0", - "react-router-dom": "^6.4.5" + "react-router-dom": "^6.4.5", + "uuid": "^9.0.0" }, "devDependencies": { "@tauri-apps/cli": "^1.2.2", @@ -45,6 +46,7 @@ "@types/node": "^18.7.10", "@types/react": "^18.0.15", "@types/react-dom": "^18.0.6", + "@types/uuid": "^9.0.0", "@vitejs/plugin-react": "^3.0.0", "sass": "^1.56.2", "typescript": "^4.9.4", diff --git a/scripts/download.js b/scripts/download.js index 41e3347..5ed93f4 100644 --- a/scripts/download.js +++ b/scripts/download.js @@ -25,7 +25,7 @@ async function rewrite(filename) { async function init() { rewrite('README.md'); - rewrite('README-ZH.md'); + rewrite('README-ZH_CN.md'); } init().catch(console.error); \ No newline at end of file diff --git a/src-tauri/src/app/cmd.rs b/src-tauri/src/app/cmd.rs index b407979..b770a81 100644 --- a/src-tauri/src/app/cmd.rs +++ b/src-tauri/src/app/cmd.rs @@ -1,5 +1,5 @@ use crate::{conf::ChatConfJson, utils}; -use std::fs; +use std::{fs, path::PathBuf}; use tauri::{api, command, AppHandle, Manager}; #[command] @@ -59,3 +59,15 @@ pub fn form_msg(app: AppHandle, label: &str, title: &str, msg: &str) { let win = app.app_handle().get_window(label); tauri::api::dialog::message(win.as_ref(), title, msg); } + +#[command] +pub fn open_file(path: PathBuf) { + utils::open_file(path); +} + +#[command] +pub fn get_chat_model() -> serde_json::Value { + let path = utils::chat_root().join("chat.model.json"); + let content = fs::read_to_string(path).unwrap_or_else(|_| r#"{"data":[]}"#.to_string()); + serde_json::from_str(&content).unwrap() +} \ No newline at end of file diff --git a/src-tauri/src/app/menu.rs b/src-tauri/src/app/menu.rs index cb769ad..453df65 100644 --- a/src-tauri/src/app/menu.rs +++ b/src-tauri/src/app/menu.rs @@ -3,15 +3,15 @@ use crate::{ utils, }; use tauri::{ - utils::assets::EmbeddedAssets, AboutMetadata, AppHandle, Context, CustomMenuItem, Manager, - Menu, MenuItem, Submenu, SystemTray, SystemTrayEvent, SystemTrayMenu, WindowMenuEvent, + AboutMetadata, AppHandle, CustomMenuItem, Manager, Menu, MenuItem, Submenu, SystemTray, + SystemTrayEvent, SystemTrayMenu, WindowMenuEvent, SystemTrayMenuItem, }; use tauri_plugin_positioner::{on_tray_event, Position, WindowExt}; // --- Menu -pub fn init(context: &Context) -> Menu { +pub fn init() -> Menu { let chat_conf = ChatConfJson::get_chat_conf(); - let name = &context.package_info().name; + let name = "ChatGPT"; let app_menu = Submenu::new( name, Menu::new() @@ -25,18 +25,18 @@ pub fn init(context: &Context) -> Menu { .add_native_item(MenuItem::Quit), ); - let always_on_top = CustomMenuItem::new("always_on_top".to_string(), "Always on Top") - .accelerator("CmdOrCtrl+T"); + let stay_on_top = + CustomMenuItem::new("stay_on_top".to_string(), "Stay On Top").accelerator("CmdOrCtrl+T"); let titlebar = CustomMenuItem::new("titlebar".to_string(), "Titlebar").accelerator("CmdOrCtrl+B"); let theme_light = CustomMenuItem::new("theme_light".to_string(), "Light"); let theme_dark = 
CustomMenuItem::new("theme_dark".to_string(), "Dark"); let is_dark = chat_conf.theme == "Dark"; - let always_on_top_menu = if chat_conf.always_on_top { - always_on_top.selected() + let stay_on_top_menu = if chat_conf.stay_on_top { + stay_on_top.selected() } else { - always_on_top + stay_on_top }; let titlebar_menu = if chat_conf.titlebar { titlebar.selected() @@ -62,9 +62,11 @@ pub fn init(context: &Context) -> Menu { }), ) .into(), - always_on_top_menu.into(), + stay_on_top_menu.into(), #[cfg(target_os = "macos")] titlebar_menu.into(), + #[cfg(target_os = "macos")] + CustomMenuItem::new("hide_dock_icon".to_string(), "Hide Dock Icon").into(), MenuItem::Separator.into(), CustomMenuItem::new("inject_script".to_string(), "Inject Script") .accelerator("CmdOrCtrl+J") @@ -119,7 +121,6 @@ pub fn init(context: &Context) -> Menu { CustomMenuItem::new("scroll_bottom".to_string(), "Scroll to Bottom of Screen") .accelerator("CmdOrCtrl+Down"), ) - .add_native_item(MenuItem::Zoom) .add_native_item(MenuItem::Separator) .add_item( CustomMenuItem::new("reload".to_string(), "Refresh the Screen") @@ -127,6 +128,13 @@ pub fn init(context: &Context) -> Menu { ), ); + let window_menu = Submenu::new( + "Window", + Menu::new() + .add_native_item(MenuItem::Minimize) + .add_native_item(MenuItem::Zoom), + ); + let help_menu = Submenu::new( "Help", Menu::new() @@ -143,6 +151,7 @@ pub fn init(context: &Context) -> Menu { .add_submenu(preferences_menu) .add_submenu(edit_menu) .add_submenu(view_menu) + .add_submenu(window_menu) .add_submenu(help_menu) } @@ -165,6 +174,9 @@ pub fn menu_handler(event: WindowMenuEvent) { "go_conf" => utils::open_file(utils::chat_root()), "clear_conf" => utils::clear_conf(&app), "awesome" => open(&app, conf::AWESOME_URL.to_string()), + "hide_dock_icon" => { + ChatConfJson::amend(&serde_json::json!({ "hide_dock_icon": true }), Some(app)).unwrap() + } "titlebar" => { let chat_conf = conf::ChatConfJson::get_chat_conf(); ChatConfJson::amend( @@ -182,19 +194,15 @@ pub fn menu_handler(event: WindowMenuEvent) { }; ChatConfJson::amend(&serde_json::json!({ "theme": theme }), Some(app)).unwrap(); } - "always_on_top" => { - let mut always_on_top = state.always_on_top.lock().unwrap(); - *always_on_top = !*always_on_top; + "stay_on_top" => { + let mut stay_on_top = state.stay_on_top.lock().unwrap(); + *stay_on_top = !*stay_on_top; menu_handle .get_item(menu_id) - .set_selected(*always_on_top) + .set_selected(*stay_on_top) .unwrap(); - win.set_always_on_top(*always_on_top).unwrap(); - ChatConfJson::amend( - &serde_json::json!({ "always_on_top": *always_on_top }), - None, - ) - .unwrap(); + win.set_always_on_top(*stay_on_top).unwrap(); + ChatConfJson::amend(&serde_json::json!({ "stay_on_top": *stay_on_top }), None).unwrap(); } // View "reload" => win.eval("window.location.reload()").unwrap(), @@ -230,24 +238,64 @@ pub fn menu_handler(event: WindowMenuEvent) { // --- SystemTray Menu pub fn tray_menu() -> SystemTray { - SystemTray::new().with_menu(SystemTrayMenu::new()) + SystemTray::new().with_menu( + SystemTrayMenu::new() + .add_item(CustomMenuItem::new("control_center".to_string(), "Control Center")) + .add_item(CustomMenuItem::new("show_dock_icon".to_string(), "Show Dock Icon")) + .add_item(CustomMenuItem::new("hide_dock_icon".to_string(), "Hide Dock Icon")) + .add_native_item(SystemTrayMenuItem::Separator) + .add_item(CustomMenuItem::new("quit".to_string(), "Quit ChatGPT")) + ) } // --- SystemTray Event pub fn tray_handler(handle: &AppHandle, event: SystemTrayEvent) { - let core_win = 
handle.get_window("core").unwrap(); on_tray_event(handle, &event); - if let SystemTrayEvent::LeftClick { .. } = event { - core_win.minimize().unwrap(); - let mini_win = handle.get_window("mini").unwrap(); - mini_win.move_window(Position::TrayCenter).unwrap(); + let app = handle.clone(); - if mini_win.is_visible().unwrap() { - mini_win.hide().unwrap(); - } else { - mini_win.show().unwrap(); + match event { + SystemTrayEvent::LeftClick { .. } => { + let chat_conf = conf::ChatConfJson::get_chat_conf(); + + if !chat_conf.hide_dock_icon { + let core_win = handle.get_window("core").unwrap(); + core_win.minimize().unwrap(); + } + + let tray_win = handle.get_window("tray").unwrap(); + tray_win.move_window(Position::TrayCenter).unwrap(); + + if tray_win.is_visible().unwrap() { + tray_win.hide().unwrap(); + } else { + tray_win.show().unwrap(); + } } + SystemTrayEvent::MenuItemClick { id, .. } => match id.as_str() { + "control_center" => app.get_window("main").unwrap().show().unwrap(), + "restart" => tauri::api::process::restart(&handle.env()), + "show_dock_icon" => { + ChatConfJson::amend( + &serde_json::json!({ "hide_dock_icon": false }), + Some(app), + ) + .unwrap(); + }, + "hide_dock_icon" => { + let chat_conf = conf::ChatConfJson::get_chat_conf(); + if !chat_conf.hide_dock_icon { + ChatConfJson::amend( + &serde_json::json!({ "hide_dock_icon": true }), + Some(app), + ) + .unwrap(); + } + }, + "quit" => std::process::exit(0), + _ => (), + } + _ => (), } } diff --git a/src-tauri/src/app/setup.rs b/src-tauri/src/app/setup.rs index d7beb2e..d12cdab 100644 --- a/src-tauri/src/app/setup.rs +++ b/src-tauri/src/app/setup.rs @@ -5,40 +5,55 @@ pub fn init(app: &mut App) -> std::result::Result<(), Box let chat_conf = ChatConfJson::get_chat_conf(); let url = chat_conf.origin.to_string(); let theme = ChatConfJson::theme(); - window::mini_window(&app.app_handle()); + let handle = app.app_handle(); - #[cfg(target_os = "macos")] - WindowBuilder::new(app, "core", WindowUrl::App(url.into())) - .resizable(true) - .fullscreen(false) - .inner_size(800.0, 600.0) - .hidden_title(true) - .theme(theme) - .always_on_top(chat_conf.always_on_top) - .title_bar_style(ChatConfJson::titlebar()) - .initialization_script(&utils::user_script()) - .initialization_script(include_str!("../assets/html2canvas.js")) - .initialization_script(include_str!("../assets/jspdf.js")) - .initialization_script(include_str!("../assets/core.js")) - .initialization_script(include_str!("../assets/export.js")) - .user_agent(&chat_conf.ua_window) - .build()?; + std::thread::spawn(move || { + window::tray_window(&handle); + }); - #[cfg(not(target_os = "macos"))] - WindowBuilder::new(app, "core", WindowUrl::App(url.into())) - .title("ChatGPT") - .resizable(true) - .fullscreen(false) - .inner_size(800.0, 600.0) - .theme(theme) - .always_on_top(chat_conf.always_on_top) - .initialization_script(&utils::user_script()) - .initialization_script(include_str!("../assets/html2canvas.js")) - .initialization_script(include_str!("../assets/jspdf.js")) - .initialization_script(include_str!("../assets/core.js")) - .initialization_script(include_str!("../assets/export.js")) - .user_agent(&chat_conf.ua_window) - .build()?; + if chat_conf.hide_dock_icon { + #[cfg(target_os = "macos")] + app.set_activation_policy(tauri::ActivationPolicy::Accessory); + } else { + let app = app.handle(); + std::thread::spawn(move || { + #[cfg(target_os = "macos")] + WindowBuilder::new(&app, "core", WindowUrl::App(url.into())) + .title("ChatGPT") + .resizable(true) + .fullscreen(false) + 
.inner_size(800.0, 600.0) + .hidden_title(true) + .theme(theme) + .always_on_top(chat_conf.stay_on_top) + .title_bar_style(ChatConfJson::titlebar()) + .initialization_script(&utils::user_script()) + .initialization_script(include_str!("../assets/html2canvas.js")) + .initialization_script(include_str!("../assets/jspdf.js")) + .initialization_script(include_str!("../assets/core.js")) + .initialization_script(include_str!("../assets/export.js")) + .initialization_script(include_str!("../assets/cmd.js")) + .user_agent(&chat_conf.ua_window) + .build().unwrap(); + + #[cfg(not(target_os = "macos"))] + WindowBuilder::new(&app, "core", WindowUrl::App(url.into())) + .title("ChatGPT") + .resizable(true) + .fullscreen(false) + .inner_size(800.0, 600.0) + .theme(theme) + .always_on_top(chat_conf.stay_on_top) + .initialization_script(&utils::user_script()) + .initialization_script(include_str!("../assets/html2canvas.js")) + .initialization_script(include_str!("../assets/jspdf.js")) + .initialization_script(include_str!("../assets/core.js")) + .initialization_script(include_str!("../assets/export.js")) + .initialization_script(include_str!("../assets/cmd.js")) + .user_agent(&chat_conf.ua_window) + .build().unwrap(); + }); + } Ok(()) } diff --git a/src-tauri/src/app/window.rs b/src-tauri/src/app/window.rs index ea6b27e..e3c6413 100644 --- a/src-tauri/src/app/window.rs +++ b/src-tauri/src/app/window.rs @@ -1,25 +1,30 @@ use crate::{conf, utils}; use tauri::{utils::config::WindowUrl, window::WindowBuilder}; -pub fn mini_window(handle: &tauri::AppHandle) { +pub fn tray_window(handle: &tauri::AppHandle) { let chat_conf = conf::ChatConfJson::get_chat_conf(); let theme = conf::ChatConfJson::theme(); + let app = handle.clone(); - WindowBuilder::new(handle, "mini", WindowUrl::App(chat_conf.origin.into())) - .resizable(false) - .fullscreen(false) - .inner_size(360.0, 540.0) - .decorations(false) - .always_on_top(true) - .theme(theme) - .initialization_script(&utils::user_script()) - .initialization_script(include_str!("../assets/html2canvas.js")) - .initialization_script(include_str!("../assets/jspdf.js")) - .initialization_script(include_str!("../assets/core.js")) - .initialization_script(include_str!("../assets/export.js")) - .user_agent(&chat_conf.ua_tray) - .build() - .unwrap() - .hide() - .unwrap(); + std::thread::spawn(move || { + WindowBuilder::new(&app, "tray", WindowUrl::App(chat_conf.origin.into())) + .title("ChatGPT") + .resizable(false) + .fullscreen(false) + .inner_size(360.0, 540.0) + .decorations(false) + .always_on_top(true) + .theme(theme) + .initialization_script(&utils::user_script()) + .initialization_script(include_str!("../assets/html2canvas.js")) + .initialization_script(include_str!("../assets/jspdf.js")) + .initialization_script(include_str!("../assets/core.js")) + .initialization_script(include_str!("../assets/export.js")) + .initialization_script(include_str!("../assets/cmd.js")) + .user_agent(&chat_conf.ua_tray) + .build() + .unwrap() + .hide() + .unwrap(); + }); } diff --git a/src-tauri/src/assets/cmd.js b/src-tauri/src/assets/cmd.js new file mode 100644 index 0000000..97cb6bc --- /dev/null +++ b/src-tauri/src/assets/cmd.js @@ -0,0 +1,150 @@ +// *** Core Script - CMD *** + +function init() { + const styleDom = document.createElement('style'); + styleDom.innerHTML = `form { + position: relative; + } + .chat-model-cmd-list { + position: absolute; + width: 400px; + bottom: 60px; + max-height: 100px; + overflow: auto; + z-index: 9999; + } + .chat-model-cmd-list>div { + border: solid 2px 
#d8d8d8;
+    border-radius: 5px;
+    background-color: #fff;
+  }
+  .chat-model-cmd-list .cmd-item {
+    font-size: 12px;
+    border-bottom: solid 1px #888;
+    padding: 2px 4px;
+    display: flex;
+    user-select: none;
+    cursor: pointer;
+  }
+  .chat-model-cmd-list .cmd-item:last-child {
+    border-bottom: none;
+  }
+  .chat-model-cmd-list .cmd-item b {
+    display: inline-block;
+    width: 120px;
+    border-radius: 4px;
+    margin-right: 10px;
+    color: #2a2a2a;
+  }
+  .chat-model-cmd-list .cmd-item i {
+    width: 270px;
+    overflow: hidden;
+    text-overflow: ellipsis;
+    white-space: nowrap;
+    text-align: right;
+    color: #888;
+  }`;
+  document.head.append(styleDom);
+
+  if (window.formInterval) {
+    clearInterval(window.formInterval);
+  }
+  window.formInterval = setInterval(() => {
+    const form = document.querySelector("form");
+    if (!form) return;
+    clearInterval(window.formInterval);
+    cmdTip();
+  }, 200);
+}
+
+async function cmdTip() {
+  const chatModelJson = await invoke('get_chat_model') || {};
+  if (!chatModelJson.data || chatModelJson.data.length <= 0) return;
+  const data = chatModelJson.data || [];
+
+  const modelDom = document.createElement('div');
+  modelDom.classList.add('chat-model-cmd-list');
+  document.querySelector('form').appendChild(modelDom);
+  const itemDom = (v) => `
<div class="cmd-item" data-prompt="${encodeURIComponent(v.prompt)}"><b>/${v.cmd}</b><i>${v.act}</i></div>
`; + const searchInput = document.querySelector('form textarea'); + + // Enter a command starting with `/` and press a space to automatically fill `chatgpt prompt`. + // If more than one command appears in the search results, the first one will be used by default. + searchInput.addEventListener('keydown', (event) => { + if (!window.__CHAT_MODEL_CMD__) { + return; + } + + if (event.keyCode === 32) { + searchInput.value = window.__CHAT_MODEL_CMD__; + modelDom.innerHTML = ''; + delete window.__CHAT_MODEL_CMD__; + } + if (event.keyCode === 13) { + modelDom.innerHTML = ''; + delete window.__CHAT_MODEL_CMD__; + } + }); + + searchInput.addEventListener('input', (event) => { + const query = searchInput.value; + if (!query || !/^\//.test(query)) { + modelDom.innerHTML = ''; + return; + } + + // all cmd result + if (query === '/') { + const result = data.filter(i => i.enable); + modelDom.innerHTML = `
<div>${result.map(itemDom).join('')}</div>
`; + window.__CHAT_MODEL_CMD__ = result[0]?.prompt.trim(); + return; + } + + const result = data.filter(i => i.enable && new RegExp(query.substring(1)).test(i.cmd)); + if (result.length > 0) { + modelDom.innerHTML = `
<div>${result.map(itemDom).join('')}</div>
`; + window.__CHAT_MODEL_CMD__ = result[0]?.prompt.trim(); + } else { + modelDom.innerHTML = ''; + delete window.__CHAT_MODEL_CMD__; + } + }, { + capture: false, + passive: true, + once: false + }); + + if (window.searchInterval) { + clearInterval(window.searchInterval); + } + window.searchInterval = setInterval(() => { + // The `chatgpt prompt` fill can be done by clicking on the event. + const searchDom = document.querySelector("form .chat-model-cmd-list>div"); + if (!searchDom) return; + searchDom.addEventListener('click', (event) => { + // .cmd-item + const item = event.target.closest("div"); + if (item) { + const val = decodeURIComponent(item.getAttribute('data-prompt')); + searchInput.value = val; + document.querySelector('form textarea').focus(); + window.__CHAT_MODEL_CMD__ = val; + modelDom.innerHTML = ''; + } + }, { + capture: false, + passive: true, + once: false + }); + }, 200); +} + +if ( + document.readyState === "complete" || + document.readyState === "interactive" +) { + init(); +} else { + document.addEventListener("DOMContentLoaded", init); +} \ No newline at end of file diff --git a/src-tauri/src/assets/core.js b/src-tauri/src/assets/core.js index 143c1a8..3a2a433 100644 --- a/src-tauri/src/assets/core.js +++ b/src-tauri/src/assets/core.js @@ -41,7 +41,7 @@ window.invoke = invoke; window.transformCallback = transformCallback; async function init() { - if (__TAURI_METADATA__.__currentWindow.label === 'mini') { + if (__TAURI_METADATA__.__currentWindow.label === 'tray') { document.getElementsByTagName('html')[0].style['font-size'] = '70%'; } diff --git a/src-tauri/src/conf.rs b/src-tauri/src/conf.rs index 482a37e..921804f 100644 --- a/src-tauri/src/conf.rs +++ b/src-tauri/src/conf.rs @@ -14,18 +14,20 @@ pub const ISSUES_URL: &str = "https://github.com/lencx/ChatGPT/issues"; pub const UPDATE_LOG_URL: &str = "https://github.com/lencx/ChatGPT/blob/main/UPDATE_LOG.md"; pub const AWESOME_URL: &str = "https://github.com/lencx/ChatGPT/blob/main/AWESOME.md"; pub const DEFAULT_CHAT_CONF: &str = r#"{ - "always_on_top": false, + "stay_on_top": false, "theme": "Light", "titlebar": true, + "hide_dock_icon": false, "default_origin": "https://chat.openai.com", "origin": "https://chat.openai.com", "ua_window": "", "ua_tray": "" }"#; pub const DEFAULT_CHAT_CONF_MAC: &str = r#"{ - "always_on_top": false, + "stay_on_top": false, "theme": "Light", "titlebar": false, + "hide_dock_icon": false, "default_origin": "https://chat.openai.com", "origin": "https://chat.openai.com", "ua_window": "", @@ -33,22 +35,27 @@ pub const DEFAULT_CHAT_CONF_MAC: &str = r#"{ }"#; pub struct ChatState { - pub always_on_top: Mutex, + pub stay_on_top: Mutex, } impl ChatState { pub fn default(chat_conf: ChatConfJson) -> Self { ChatState { - always_on_top: Mutex::new(chat_conf.always_on_top), + stay_on_top: Mutex::new(chat_conf.stay_on_top), } } } #[derive(serde::Serialize, serde::Deserialize, Debug, Clone)] pub struct ChatConfJson { + // support macOS only pub titlebar: bool, - pub always_on_top: bool, + pub hide_dock_icon: bool, + + // macOS and Windows pub theme: String, + + pub stay_on_top: bool, pub default_origin: String, pub origin: String, pub ua_window: String, diff --git a/src-tauri/src/main.rs b/src-tauri/src/main.rs index d1cab6c..cd53040 100644 --- a/src-tauri/src/main.rs +++ b/src-tauri/src/main.rs @@ -26,10 +26,12 @@ fn main() { cmd::form_cancel, cmd::form_confirm, cmd::form_msg, + cmd::open_file, + cmd::get_chat_model, ]) .setup(setup::init) .plugin(tauri_plugin_positioner::init()) - 
.menu(menu::init(&context)) + .menu(menu::init()) .system_tray(menu::tray_menu()) .on_menu_event(menu::menu_handler) .on_system_tray_event(menu::tray_handler) diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json index 6e31ac3..b2a8956 100644 --- a/src-tauri/tauri.conf.json +++ b/src-tauri/tauri.conf.json @@ -7,15 +7,22 @@ }, "package": { "productName": "ChatGPT", - "version": "0.3.0" + "version": "0.4.0" }, "tauri": { "allowlist": { - "all": true + "all": true, + "fs": { + "all": true, + "scope": [ + "$HOME/.chatgpt/*" + ] + } }, "systemTray": { "iconPath": "icons/tray-icon.png", - "iconAsTemplate": true + "iconAsTemplate": true, + "menuOnLeftClick": false }, "bundle": { "active": true, @@ -71,7 +78,9 @@ "title": "ChatGPT", "visible": false, "width": 800, - "height": 600 + "height": 600, + "minWidth": 800, + "minHeight": 600 } ] } diff --git a/src/components/Tags/index.tsx b/src/components/Tags/index.tsx new file mode 100644 index 0000000..b88fa88 --- /dev/null +++ b/src/components/Tags/index.tsx @@ -0,0 +1,98 @@ +import { FC, useEffect, useRef, useState } from 'react'; +import { PlusOutlined } from '@ant-design/icons'; +import { Input, Tag } from 'antd'; +import type { InputRef } from 'antd'; + +import { DISABLE_AUTO_COMPLETE } from '@/utils'; + +interface TagsProps { + value?: string[]; + onChange?: (v: string[]) => void; +} + +const Tags: FC = ({ value = [], onChange }) => { + const [tags, setTags] = useState(value); + const [inputVisible, setInputVisible] = useState(false); + const [inputValue, setInputValue] = useState(''); + const inputRef = useRef(null); + + useEffect(() => { + setTags(value); + }, [value]) + + useEffect(() => { + if (inputVisible) { + inputRef.current?.focus(); + } + }, [inputVisible]); + + const handleClose = (removedTag: string) => { + const newTags = tags.filter((tag) => tag !== removedTag); + setTags(newTags); + }; + + const showInput = () => { + setInputVisible(true); + }; + + const handleInputChange = (e: React.ChangeEvent) => { + setInputValue(e.target.value); + }; + + const handleInputConfirm = () => { + if (inputValue && tags.indexOf(inputValue) === -1) { + const val = [...tags, inputValue]; + setTags(val); + onChange && onChange(val); + } + setInputVisible(false); + setInputValue(''); + }; + + const forMap = (tag: string) => { + const tagElem = ( + { + e.preventDefault(); + handleClose(tag); + }} + > + {tag} + + ); + return ( + + {tagElem} + + ); + }; + + const tagChild = tags.map(forMap); + + return ( + <> + {tagChild} + {inputVisible && ( + + )} + {!inputVisible && ( + + New Tag + + )} + + ); +}; + +export default Tags; \ No newline at end of file diff --git a/src/hooks/useChatModel.ts b/src/hooks/useChatModel.ts new file mode 100644 index 0000000..e6f3ffe --- /dev/null +++ b/src/hooks/useChatModel.ts @@ -0,0 +1,23 @@ +import { useState } from 'react'; +import { clone } from 'lodash'; + +import { CHAT_MODEL_JSON, readJSON, writeJSON } from '@/utils'; +import useInit from '@/hooks/useInit'; + +export default function useChatModel() { + const [modelJson, setModelJson] = useState>({}); + + useInit(async () => { + const data = await readJSON(CHAT_MODEL_JSON, { name: 'ChatGPT Model', data: [] }); + setModelJson(data); + }); + + const modelSet = async (data: Record[]) => { + const oData = clone(modelJson); + oData.data = data; + await writeJSON(CHAT_MODEL_JSON, oData); + setModelJson(oData); + } + + return { modelJson, modelSet, modelData: modelJson?.data || [] } +} \ No newline at end of file diff --git a/src/hooks/useColumns.ts 
b/src/hooks/useColumns.ts new file mode 100644 index 0000000..ed6c19a --- /dev/null +++ b/src/hooks/useColumns.ts @@ -0,0 +1,44 @@ +import { useState, useCallback } from 'react'; + +export default function useColumns(columns: any[] = []) { + const [opType, setOpType] = useState(''); + const [opRecord, setRecord] = useState | null>(null); + const [opTime, setNow] = useState(null); + const [opExtra, setExtra] = useState(null); + + const handleRecord = useCallback((row: Record | null, type: string) => { + setOpType(type); + setRecord(row); + setNow(Date.now()); + }, []); + + const resetRecord = useCallback(() => { + setRecord(null); + setOpType(''); + setNow(Date.now()); + }, []); + + const opNew = useCallback(() => handleRecord(null, 'new'), [handleRecord]); + + const cols = columns.map((i: any) => { + if (i.render) { + const opRender = i.render; + i.render = (text: string, row: Record) => { + return opRender(text, row, { setRecord: handleRecord, setExtra }); + }; + } + return i; + }); + + return { + opTime, + opType, + opNew, + columns: cols, + opRecord, + setRecord: handleRecord, + resetRecord, + setExtra, + opExtra, + }; +} \ No newline at end of file diff --git a/src/hooks/useData.ts b/src/hooks/useData.ts new file mode 100644 index 0000000..be70c6e --- /dev/null +++ b/src/hooks/useData.ts @@ -0,0 +1,35 @@ +import { useState, useEffect } from 'react'; +import { v4 } from 'uuid'; + +const safeKey = Symbol('chat-id'); + +export default function useData(oData: any[]) { + const [opData, setData] = useState([]); + + useEffect(() => { + const nData = oData.map(i => ({ [safeKey]: v4(), ...i })); + setData(nData); + }, [oData]) + + const opAdd = (val: any) => { + const v = [val, ...opData]; + setData(v); + return v; + }; + + const opRemove = (id: string) => { + const nData = opData.filter(i => i[safeKey] !== id); + setData(nData); + return nData; + }; + + const opReplace = (id: string, data: any) => { + const nData = [...opData]; + const idx = opData.findIndex(v => v[safeKey] === id); + nData[idx] = data; + setData(nData); + return nData; + }; + + return { opSafeKey: safeKey, opReplace, opAdd, opRemove, opData }; +} \ No newline at end of file diff --git a/src/hooks/useInit.ts b/src/hooks/useInit.ts new file mode 100644 index 0000000..3443d54 --- /dev/null +++ b/src/hooks/useInit.ts @@ -0,0 +1,12 @@ +import { useRef, useEffect } from 'react'; + +// fix: Two interface requests will be made in development mode +export default function useInit(callback: () => void) { + const isInit = useRef(true); + useEffect(() => { + if (isInit.current) { + callback(); + isInit.current = false; + } + }, []) +} \ No newline at end of file diff --git a/src/layout/index.scss b/src/layout/index.scss index 0da7e39..9ef7327 100644 --- a/src/layout/index.scss +++ b/src/layout/index.scss @@ -10,6 +10,7 @@ .chat-container { padding: 20px; + overflow: hidden; } .ant-menu { diff --git a/src/layout/index.tsx b/src/layout/index.tsx index f6afada..2bf2691 100644 --- a/src/layout/index.tsx +++ b/src/layout/index.tsx @@ -1,9 +1,8 @@ import { FC, useState } from 'react'; import { Layout, Menu } from 'antd'; -import { useNavigate } from 'react-router-dom'; +import { useNavigate, useLocation } from 'react-router-dom'; import Routes, { menuItems } from '@/routes'; - import './index.scss'; const { Content, Footer, Sider } = Layout; @@ -14,13 +13,14 @@ interface ChatLayoutProps { const ChatLayout: FC = ({ children }) => { const [collapsed, setCollapsed] = useState(false); + const location = useLocation(); const go = useNavigate(); 
return ( setCollapsed(value)}>
- go(i.key)} /> + go(i.key)} /> diff --git a/src/routes.tsx b/src/routes.tsx index c885548..e73aca8 100644 --- a/src/routes.tsx +++ b/src/routes.tsx @@ -1,13 +1,13 @@ import { useRoutes } from 'react-router-dom'; import { DesktopOutlined, - BulbOutlined + BulbOutlined, } from '@ant-design/icons'; import type { RouteObject } from 'react-router-dom'; import type { MenuProps } from 'antd'; import General from '@view/General'; -import ChatGPTPrompts from '@view/ChatGPTPrompts'; +import LanguageModel from '@/view/LanguageModel'; export type ChatRouteObject = { label: string; @@ -24,10 +24,10 @@ export const routes: Array = [ }, }, { - path: '/chatgpt-prompts', - element: , + path: '/language-model', + element: , meta: { - label: 'ChatGPT Prompts', + label: 'Language Model', icon: , }, }, diff --git a/src/utils.ts b/src/utils.ts new file mode 100644 index 0000000..116afc2 --- /dev/null +++ b/src/utils.ts @@ -0,0 +1,43 @@ +import { readTextFile, writeTextFile, exists } from '@tauri-apps/api/fs'; +import { homeDir, join } from '@tauri-apps/api/path'; + +export const CHAT_MODEL_JSON = 'chat.model.json'; +export const DISABLE_AUTO_COMPLETE = { + autoCapitalize: 'off', + autoComplete: 'off', + spellCheck: false +}; + +export const chatRoot = async () => { + return join(await homeDir(), '.chatgpt') +} + +export const chatModelPath = async () => { + return join(await chatRoot(), CHAT_MODEL_JSON); +} + +export const readJSON = async (path: string, defaultVal = {}) => { + const root = await chatRoot(); + const file = await join(root, path); + + if (!await exists(file)) { + writeTextFile(file, JSON.stringify({ + name: 'ChatGPT', + link: 'https://github.com/lencx/ChatGPT/blob/main/chat.model.md', + data: null, + ...defaultVal, + }, null, 2)) + } + + try { + return JSON.parse(await readTextFile(file)); + } catch(e) { + return {}; + } +} + +export const writeJSON = async (path: string, data: Record) => { + const root = await chatRoot(); + const file = await join(root, path); + await writeTextFile(file, JSON.stringify(data, null, 2)); +} \ No newline at end of file diff --git a/src/view/ChatGPTPrompts.tsx b/src/view/ChatGPTPrompts.tsx deleted file mode 100644 index 3e4087f..0000000 --- a/src/view/ChatGPTPrompts.tsx +++ /dev/null @@ -1,7 +0,0 @@ -export default function Dashboard() { - return ( -
- TODO: ChatGPT Prompts -
- ) -} \ No newline at end of file diff --git a/src/view/General.tsx b/src/view/General.tsx index d1934ec..5e7dbbe 100644 --- a/src/view/General.tsx +++ b/src/view/General.tsx @@ -7,6 +7,8 @@ import { ask } from '@tauri-apps/api/dialog'; import { relaunch } from '@tauri-apps/api/process'; import { clone, omit, isEqual } from 'lodash'; +import { DISABLE_AUTO_COMPLETE } from '@/utils'; + const OriginLabel = ({ url }: { url: string }) => { return ( @@ -15,12 +17,6 @@ const OriginLabel = ({ url }: { url: string }) => { ) } -const disableAuto = { - autoCapitalize: 'off', - autoComplete: 'off', - spellCheck: false -} - export default function General() { const [form] = Form.useForm(); const [platformInfo, setPlatform] = useState(''); @@ -72,7 +68,7 @@ export default function General() { Dark - + {platformInfo === 'darwin' && ( @@ -81,13 +77,13 @@ export default function General() { )} } name="origin"> - + - + - + diff --git a/src/view/LanguageModel/Form.tsx b/src/view/LanguageModel/Form.tsx new file mode 100644 index 0000000..87bc39f --- /dev/null +++ b/src/view/LanguageModel/Form.tsx @@ -0,0 +1,66 @@ +import { useEffect, ForwardRefRenderFunction, useImperativeHandle, forwardRef } from 'react'; +import { Form, Input, Switch } from 'antd'; +import type { FormProps } from 'antd'; + +import Tags from '@comps/Tags'; +import { DISABLE_AUTO_COMPLETE } from '@/utils'; + +interface LanguageModelProps { + record?: Record | null; +} + +const initFormValue = { + act: '', + enable: true, + tags: [], + prompt: '', +}; + +const LanguageModel: ForwardRefRenderFunction = ({ record }, ref) => { + const [form] = Form.useForm(); + useImperativeHandle(ref, () => ({ form })); + + useEffect(() => { + if (record) { + form.setFieldsValue(record); + } + }, [record]); + + return ( +
+ + + + + + + + + + + + + + + +
+ ) +} + +export default forwardRef(LanguageModel); diff --git a/src/view/LanguageModel/config.tsx b/src/view/LanguageModel/config.tsx new file mode 100644 index 0000000..4667f96 --- /dev/null +++ b/src/view/LanguageModel/config.tsx @@ -0,0 +1,55 @@ +import { Tag, Switch, Tooltip, Space } from 'antd'; + +export const modelColumns = () => [ + { + title: '/{cmd}', + dataIndex: 'cmd', + fixed: 'left', + width: 120, + key: 'cmd', + render: (v: string) => /{v} + }, + { + title: 'Act', + dataIndex: 'act', + key: 'act', + width: 200, + }, + { + title: 'Tags', + dataIndex: 'tags', + key: 'tags', + width: 150, + render: (v: string[]) => ( + {v?.map(i => {i})} + ), + }, + { + title: 'Enable', + dataIndex: 'enable', + key: 'enable', + width: 80, + render: (v: boolean = false) => , + }, + { + title: 'Prompt', + dataIndex: 'prompt', + key: 'prompt', + width: 300, + render: (v: string) => ( + {v} + ), + }, + { + title: 'Action', + key: 'action', + fixed: 'right', + width: 120, + render: (_: any, row: any, actions: any) => ( + + actions.setRecord(row, 'edit')}>Edit + actions.setRecord(row, 'delete')}>Delete + + ), + } +]; diff --git a/src/view/LanguageModel/index.scss b/src/view/LanguageModel/index.scss new file mode 100644 index 0000000..f4be422 --- /dev/null +++ b/src/view/LanguageModel/index.scss @@ -0,0 +1,39 @@ +.chat-prompts-val { + display: inline-block; + width: 100%; + max-width: 300px; + overflow: hidden; + text-overflow: ellipsis; + display: -webkit-box; + -webkit-line-clamp: 3; + -webkit-box-orient: vertical; +} + +.chat-prompts-tags { + .ant-tag { + margin: 2px; + } +} + +.add-btn { + margin-bottom: 5px; +} + +.chat-model-path { + font-size: 12px; + font-weight: bold; + color: #888; + margin-bottom: 5px; + + span { + display: inline-block; + // background-color: #d8d8d8; + color: #4096ff; + padding: 0 8px; + height: 20px; + line-height: 20px; + border-radius: 4px; + cursor: pointer; + text-decoration: underline; + } +} \ No newline at end of file diff --git a/src/view/LanguageModel/index.tsx b/src/view/LanguageModel/index.tsx new file mode 100644 index 0000000..d9a3b2f --- /dev/null +++ b/src/view/LanguageModel/index.tsx @@ -0,0 +1,95 @@ +import { useState, useRef, useEffect } from 'react'; +import { Table, Button, Modal, message } from 'antd'; +import { invoke } from '@tauri-apps/api'; + +import useChatModel from '@/hooks/useChatModel'; +import useColumns from '@/hooks/useColumns'; +import useData from '@/hooks/useData'; +import { chatModelPath } from '@/utils'; +import { modelColumns } from './config'; +import LanguageModelForm from './Form'; +import './index.scss'; + +export default function LanguageModel() { + const [isVisible, setVisible] = useState(false); + const [modelPath, setChatModelPath] = useState(''); + const { modelData, modelSet } = useChatModel(); + const { opData, opAdd, opRemove, opReplace, opSafeKey } = useData(modelData); + const { columns, ...opInfo } = useColumns(modelColumns()); + const formRef = useRef(null); + + useEffect(() => { + if (!opInfo.opType) return; + if (['edit', 'new'].includes(opInfo.opType)) { + setVisible(true); + } + if (['delete'].includes(opInfo.opType)) { + const data = opRemove(opInfo?.opRecord?.[opSafeKey]); + modelSet(data); + opInfo.resetRecord(); + } + }, [opInfo.opType, formRef]); + + const hide = () => { + setVisible(false); + opInfo.resetRecord(); + }; + + const handleOk = () => { + formRef.current?.form?.validateFields() + .then((vals: Record) => { + if (modelData.map((i: any) => i.cmd).includes(vals.cmd) && opInfo?.opRecord?.cmd !== 
vals.cmd) { + message.warning(`"cmd: /${vals.cmd}" already exists, please change the "${vals.cmd}" name and resubmit.`); + return; + } + let data = []; + switch (opInfo.opType) { + case 'new': data = opAdd(vals); break; + case 'edit': data = opReplace(opInfo?.opRecord?.[opSafeKey], vals); break; + default: break; + } + modelSet(data) + hide(); + }) + }; + + const handleOpenFile = async () => { + const path = await chatModelPath(); + setChatModelPath(path); + invoke('open_file', { path }); + }; + + const modalTitle = `${({ new: 'Create', edit: 'Edit' })[opInfo.opType]} Language Model`; + + return ( +
+ +
PATH: {modelPath}
+ Total {total} items, + }} + /> + + + + + ) +} \ No newline at end of file
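A rough sketch of how the `chat.model.json` data introduced by this diff could be extended programmatically, using the `readJSON`/`writeJSON` helpers and the `CHAT_MODEL_JSON` constant added in `src/utils.ts`. The `ChatModelEntry` type, the `addCustomCommand` function, and the example values are assumptions made up for illustration; only the field names (`cmd`, `act`, `tags`, `enable`, `prompt`) come from `chat.model.json` above.

~~~ts
// Illustrative sketch only — not part of this diff. It reuses the helpers
// added in src/utils.ts; the entry type and values below are hypothetical.
import { CHAT_MODEL_JSON, readJSON, writeJSON } from '@/utils';

interface ChatModelEntry {
  cmd: string;     // typed as `/cmd` in the ChatGPT input area
  act: string;     // label shown in the command list
  tags: string[];
  enable: boolean;
  prompt: string;  // text filled into the input when the command is applied
}

export async function addCustomCommand(entry: ChatModelEntry) {
  // readJSON creates ~/.chatgpt/chat.model.json with the default value if it is missing.
  const model = await readJSON(CHAT_MODEL_JSON, { name: 'ChatGPT Model', data: [] });
  const data: ChatModelEntry[] = model.data ?? [];
  // Keep `cmd` unique, mirroring the duplicate check in the Language Model form.
  if (data.some((i) => i.cmd === entry.cmd)) {
    throw new Error(`cmd "/${entry.cmd}" already exists`);
  }
  await writeJSON(CHAT_MODEL_JSON, { ...model, data: [entry, ...data] });
}

// Hypothetical usage:
addCustomCommand({
  cmd: 'rev',
  act: 'Code Reviewer',
  tags: ['dev'],
  enable: true,
  prompt: 'I want you to act as a code reviewer. I will paste code and you will point out bugs and style issues.',
}).catch(console.error);
~~~

Per the README section above, restart the app (`Menu -> Preferences -> Restart ChatGPT`) for a newly added command to show up in the `/` prompt list.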