1
0
Mirror of https://github.com/lencx/ChatGPT.git — synced 2024-10-01 01:06:13 -04:00

chore: menu sync

This commit is contained in:
lencx 2022-12-23 18:52:56 +08:00
parent e4be2bc2f3
commit 2a9fba7d27
3 changed files with 21 additions and 5 deletions

View File

@ -17,6 +17,8 @@ use tauri_plugin_log::{
fn main() {
ChatConfJson::init();
// If the file does not exist, creating the file will block menu synchronization
utils::create_chatgpt_prompts();
let chat_conf = ChatConfJson::get_chat_conf();
let context = tauri::generate_context!();
let colors = ColoredLevelConfig {

View File

@ -30,6 +30,14 @@ pub fn create_file(path: &Path) -> Result<File> {
File::create(path).map_err(Into::into)
}
/// Ensure the cached ChatGPT prompts file exists, seeding it with an
/// empty JSON array (`[]`) on first run so menu synchronization can
/// read it immediately.
pub fn create_chatgpt_prompts() {
    let prompts_file = chat_root().join("cache_model").join("chatgpt_prompts.json");
    // Already present — nothing to do.
    if exists(&prompts_file) {
        return;
    }
    // `create_file` is the project helper that also prepares parent
    // directories; then seed the file with an empty JSON list.
    create_file(&prompts_file).unwrap();
    fs::write(&prompts_file, "[]").unwrap();
}
pub fn script_path() -> PathBuf {
let script_file = chat_root().join("main.js");
if !exists(&script_file) {

16
src/hooks/useEvent.ts vendored
View File

@ -1,11 +1,12 @@
import { invoke, http, fs, dialog } from '@tauri-apps/api';
import { invoke, path, http, fs, dialog } from '@tauri-apps/api';
import useInit from '@/hooks/useInit';
import useChatModel from '@/hooks/useChatModel';
import { GITHUB_PROMPTS_CSV_URL, chatPromptsPath, genCmd } from '@/utils';
import useChatModel, { useCacheModel } from '@/hooks/useChatModel';
import { GITHUB_PROMPTS_CSV_URL, chatRoot, genCmd } from '@/utils';
export default function useEvent() {
const { modelSet } = useChatModel('sync_prompts');
const { modelCacheSet } = useCacheModel();
// Using `emit` and `listen` will be triggered multiple times in development mode.
// So here we use `eval` to call `__sync_prompt`
useInit(() => {
@ -16,9 +17,14 @@ export default function useEvent() {
});
const data = (res.data || '') as string;
if (res.ok) {
await fs.writeTextFile(await chatPromptsPath(), data);
const file = await path.join(await chatRoot(), 'cache_model', 'chatgpt_prompts.json');
const list: Record<string, string>[] = await invoke('parse_prompt', { data });
modelSet(list.map(i => ({ cmd: genCmd(i.act), enable: true, tags: ['chatgpt-prompts'], ...i })));
const fmtList = list.map(i => ({ ...i, cmd: i.cmd ? i.cmd : genCmd(i.act), enable: true, tags: ['chatgpt-prompts'] }));
await modelCacheSet(fmtList, file);
modelSet({
id: 'chatgpt_prompts',
last_updated: Date.now(),
});
dialog.message('ChatGPT Prompts data has been synchronized!');
} else {
dialog.message('ChatGPT Prompts data sync failed, please try again!');