diff --git a/src-tauri/src/main.rs b/src-tauri/src/main.rs
index b628ae7..e53314f 100644
--- a/src-tauri/src/main.rs
+++ b/src-tauri/src/main.rs
@@ -17,6 +17,8 @@ use tauri_plugin_log::{
 
 fn main() {
   ChatConfJson::init();
+  // If the prompts cache file does not exist, menu synchronization will block, so create it at startup
+  utils::create_chatgpt_prompts();
   let chat_conf = ChatConfJson::get_chat_conf();
   let context = tauri::generate_context!();
   let colors = ColoredLevelConfig {
diff --git a/src-tauri/src/utils.rs b/src-tauri/src/utils.rs
index 6191596..e8fe0fb 100644
--- a/src-tauri/src/utils.rs
+++ b/src-tauri/src/utils.rs
@@ -30,6 +30,14 @@ pub fn create_file(path: &Path) -> Result<File> {
   File::create(path).map_err(Into::into)
 }
 
+pub fn create_chatgpt_prompts() {
+  let sync_file = chat_root().join("cache_model").join("chatgpt_prompts.json");
+  if !exists(&sync_file) {
+    create_file(&sync_file).unwrap();
+    fs::write(&sync_file, "[]").unwrap();
+  }
+}
+
 pub fn script_path() -> PathBuf {
   let script_file = chat_root().join("main.js");
   if !exists(&script_file) {
diff --git a/src/hooks/useEvent.ts b/src/hooks/useEvent.ts
index d230fc4..3a4146e 100644
--- a/src/hooks/useEvent.ts
+++ b/src/hooks/useEvent.ts
@@ -1,11 +1,12 @@
-import { invoke, http, fs, dialog } from '@tauri-apps/api';
+import { invoke, path, http, fs, dialog } from '@tauri-apps/api';
 import useInit from '@/hooks/useInit';
-import useChatModel from '@/hooks/useChatModel';
-import { GITHUB_PROMPTS_CSV_URL, chatPromptsPath, genCmd } from '@/utils';
+import useChatModel, { useCacheModel } from '@/hooks/useChatModel';
+import { GITHUB_PROMPTS_CSV_URL, chatRoot, genCmd } from '@/utils';
 
 export default function useEvent() {
   const { modelSet } = useChatModel('sync_prompts');
+  const { modelCacheSet } = useCacheModel();
   // Using `emit` and `listen` will be triggered multiple times in development mode.
   // So here we use `eval` to call `__sync_prompt`
   useInit(() => {
@@ -16,9 +17,14 @@ export default function useEvent() {
     });
     const data = (res.data || '') as string;
     if (res.ok) {
-      await fs.writeTextFile(await chatPromptsPath(), data);
+      const file = await path.join(await chatRoot(), 'cache_model', 'chatgpt_prompts.json');
       const list: Record<string, any>[] = await invoke('parse_prompt', { data });
-      modelSet(list.map(i => ({ cmd: genCmd(i.act), enable: true, tags: ['chatgpt-prompts'], ...i })));
+      const fmtList = list.map(i => ({ ...i, cmd: i.cmd ? i.cmd : genCmd(i.act), enable: true, tags: ['chatgpt-prompts'] }));
+      await modelCacheSet(fmtList, file);
+      modelSet({
+        id: 'chatgpt_prompts',
+        last_updated: Date.now(),
+      });
       dialog.message('ChatGPT Prompts data has been synchronized!');
     } else {
       dialog.message('ChatGPT Prompts data sync failed, please try again!');
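
Note: `modelCacheSet` comes from the `useCacheModel` hook, which this patch imports from '@/hooks/useChatModel' but does not define. A minimal sketch of what that hook might look like, inferred purely from the call site `modelCacheSet(fmtList, file)` above — the implementation below is an assumption, not part of this patch; only `fs.writeTextFile` is the real @tauri-apps/api v1 call:

// Hypothetical sketch of useCacheModel, assumed from its usage in useEvent.ts.
// modelCacheSet(list, file) persists the formatted prompt list to the given
// cache file as JSON, matching the "[]" placeholder created on the Rust side
// by create_chatgpt_prompts().
import { fs } from '@tauri-apps/api';

export function useCacheModel() {
  const modelCacheSet = async (list: Record<string, any>[], file: string) => {
    // Overwrite the cache file created at startup with the synced prompt list.
    await fs.writeTextFile(file, JSON.stringify(list));
  };
  return { modelCacheSet };
}

With this split, the heavyweight prompt list lives in the per-model cache file, while `modelSet` stores only a small `{ id, last_updated }` record for the 'sync_prompts' model.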