mirror of https://github.com/lencx/ChatGPT.git synced 2024-10-01 01:06:13 -04:00

Merge pull request #70 from lencx/dev

lencx authored 2022-12-24 20:26:23 +08:00, committed by GitHub
commit 0b0b832130
GPG Key ID: 4AEE18F83AFDEB23
22 changed files with 192 additions and 112 deletions

View File

@@ -22,9 +22,9 @@
**Latest:**
- `Mac`: [ChatGPT_0.6.4_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.6.4/ChatGPT_0.6.4_x64.dmg)
- `Linux`: [chat-gpt_0.6.4_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.6.4/chat-gpt_0.6.4_amd64.deb)
- `Windows`: [ChatGPT_0.6.4_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.6.4/ChatGPT_0.6.4_x64_en-US.msi)
- `Mac`: [ChatGPT_0.6.5_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.6.5/ChatGPT_0.6.5_x64.dmg)
- `Linux`: [chat-gpt_0.6.5_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.6.5/chat-gpt_0.6.5_amd64.deb)
- `Windows`: [ChatGPT_0.6.5_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.6.5/ChatGPT_0.6.5_x64_en-US.msi)
[Other versions...](https://github.com/lencx/ChatGPT/releases)

View File

@@ -24,9 +24,9 @@
**Latest:**
- `Mac`: [ChatGPT_0.6.4_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.6.4/ChatGPT_0.6.4_x64.dmg)
- `Linux`: [chat-gpt_0.6.4_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.6.4/chat-gpt_0.6.4_amd64.deb)
- `Windows`: [ChatGPT_0.6.4_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.6.4/ChatGPT_0.6.4_x64_en-US.msi)
- `Mac`: [ChatGPT_0.6.5_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.6.5/ChatGPT_0.6.5_x64.dmg)
- `Linux`: [chat-gpt_0.6.5_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.6.5/chat-gpt_0.6.5_amd64.deb)
- `Windows`: [ChatGPT_0.6.5_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.6.5/ChatGPT_0.6.5_x64_en-US.msi)
[Other versions...](https://github.com/lencx/ChatGPT/releases)

View File

@@ -1,5 +1,9 @@
# UPDATE LOG
## v0.6.5
fix: unable to synchronize
## v0.6.4
fix: path not allowed on the configured scope

View File

@@ -23,6 +23,7 @@ log = "0.4.17"
csv = "1.1.6"
thiserror = "1.0.38"
walkdir = "2.3.2"
regex = "1.7.0"
# tokio = { version = "1.23.0", features = ["macros"] }
# reqwest = "0.11.13"

View File

@@ -1,5 +1,5 @@
use crate::{conf::ChatConfJson, utils};
use std::{fs, path::PathBuf};
use std::{collections::HashMap, fs, path::PathBuf};
use tauri::{api, command, AppHandle, Manager};
#[command]
@@ -72,7 +72,7 @@ pub fn get_chat_model_cmd() -> serde_json::Value {
serde_json::from_str(&content).unwrap()
}
#[derive(Debug, serde::Serialize, serde::Deserialize)]
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct PromptRecord {
pub cmd: Option<String>,
pub act: String,
@@ -99,9 +99,8 @@ pub fn window_reload(app: AppHandle, label: &str) {
.unwrap();
}
use walkdir::WalkDir;
use utils::chat_root;
use walkdir::WalkDir;
#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)]
pub struct ModelRecord {
@@ -115,12 +114,14 @@ pub struct ModelRecord {
#[command]
pub fn cmd_list() -> Vec<ModelRecord> {
let mut list = vec![];
for entry in WalkDir::new(chat_root().join("cache_model")).into_iter().filter_map(|e| e.ok()) {
for entry in WalkDir::new(chat_root().join("cache_model"))
.into_iter()
.filter_map(|e| e.ok())
{
let file = fs::read_to_string(entry.path().display().to_string());
if let Ok(v) = file {
let data: Vec<ModelRecord> = serde_json::from_str(&v).unwrap_or_else(|_| vec![]);
let enable_list = data.into_iter()
.filter(|v| v.enable);
let enable_list = data.into_iter().filter(|v| v.enable);
list.extend(enable_list)
}
}
@@ -128,3 +129,65 @@ pub fn cmd_list() -> Vec<ModelRecord> {
list.sort_by(|a, b| a.cmd.len().cmp(&b.cmd.len()));
list
}
#[command]
pub fn sync_prompts(app: AppHandle, data: String, time: u64) {
let data = parse_prompt(data)
.iter()
.map(move |i| ModelRecord {
cmd: if i.cmd.is_some() {
i.cmd.clone().unwrap()
} else {
utils::gen_cmd(i.act.clone())
},
act: i.act.clone(),
prompt: i.prompt.clone(),
tags: vec!["chatgpt-prompts".to_string()],
enable: true,
})
.collect::<Vec<ModelRecord>>();
let model = chat_root().join("chat.model.json");
let model_cmd = chat_root().join("chat.model.cmd.json");
let chatgpt_prompts = chat_root().join("cache_model").join("chatgpt_prompts.json");
// chatgpt_prompts.json
fs::write(
chatgpt_prompts,
serde_json::to_string_pretty(&data).unwrap(),
)
.unwrap();
let cmd_data = cmd_list();
// chat.model.cmd.json
fs::write(
model_cmd,
serde_json::to_string_pretty(&serde_json::json!({
"name": "ChatGPT CMD",
"last_updated": time,
"data": cmd_data,
}))
.unwrap(),
)
.unwrap();
let mut kv = HashMap::new();
kv.insert(
"sync_prompts".to_string(),
serde_json::json!({ "id": "chatgpt_prompts", "last_updated": time }),
);
let model_data = utils::merge(
&serde_json::from_str(&fs::read_to_string(&model).unwrap()).unwrap(),
&kv,
);
// chat.model.json
fs::write(model, serde_json::to_string_pretty(&model_data).unwrap()).unwrap();
// refresh window
api::dialog::message(
app.get_window("core").as_ref(),
"Sync Prompts",
"ChatGPT Prompts data has been synchronized!",
);
window_reload(app, "core");
}
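
For reference, here is a minimal, self-contained sketch of the two JSON shapes `sync_prompts` writes. It is a sandbox example, not part of the commit: it assumes only `serde` (with the `derive` feature) and `serde_json` as dependencies, the field names mirror the `ModelRecord` construction above, and the concrete values are illustrative.

```rust
// Sketch: the record shape written to cache_model/chatgpt_prompts.json and the
// wrapper object written to chat.model.cmd.json. Field names follow the diff above;
// the sample values are made up for illustration.
use serde::{Deserialize, Serialize};
use serde_json::json;

#[derive(Serialize, Deserialize, Debug, Clone)]
struct ModelRecord {
    cmd: String,
    act: String,
    prompt: String,
    tags: Vec<String>,
    enable: bool,
}

fn main() {
    let record = ModelRecord {
        // sync_prompts falls back to utils::gen_cmd(act) when the CSV row has no cmd
        cmd: "linux_terminal".into(),
        act: "Linux Terminal".into(),
        prompt: "I want you to act as a linux terminal.".into(),
        tags: vec!["chatgpt-prompts".into()],
        enable: true,
    };

    // cache_model/chatgpt_prompts.json: a plain array of records
    println!("{}", serde_json::to_string_pretty(&vec![&record]).unwrap());

    // chat.model.cmd.json: the enabled records (from cmd_list) wrapped with a name
    // and the sync timestamp; the frontend passes Date.now() as `time`
    let payload = json!({
        "name": "ChatGPT CMD",
        "last_updated": 1671884783000u64, // illustrative epoch millis
        "data": [record],
    });
    println!("{}", serde_json::to_string_pretty(&payload).unwrap());
}
```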

View File

@@ -8,6 +8,8 @@ use tauri::{
};
use tauri_plugin_positioner::{on_tray_event, Position, WindowExt};
use super::window;
// --- Menu
pub fn init() -> Menu {
let chat_conf = ChatConfJson::get_chat_conf();
@@ -174,7 +176,7 @@ pub fn menu_handler(event: WindowMenuEvent<tauri::Wry>) {
match menu_id {
// Preferences
"control_center" => app.get_window("main").unwrap().show().unwrap(),
"control_center" => window::control_window(&app),
"restart" => tauri::api::process::restart(&app.env()),
"inject_script" => open(&app, script_path),
"go_conf" => utils::open_file(utils::chat_root()),
@@ -182,12 +184,12 @@ pub fn menu_handler(event: WindowMenuEvent<tauri::Wry>) {
"awesome" => open(&app, conf::AWESOME_URL.to_string()),
"sync_prompts" => {
tauri::api::dialog::ask(
app.get_window("main").as_ref(),
app.get_window("core").as_ref(),
"Sync Prompts",
"Data sync will enable all prompts, are you sure you want to sync?",
move |is_restart| {
if is_restart {
app.get_window("main")
app.get_window("core")
.unwrap()
.eval("window.__sync_prompts && window.__sync_prompts()")
.unwrap()
@@ -304,7 +306,7 @@ pub fn tray_handler(handle: &AppHandle, event: SystemTrayEvent) {
}
}
SystemTrayEvent::MenuItemClick { id, .. } => match id.as_str() {
"control_center" => app.get_window("main").unwrap().show().unwrap(),
"control_center" => window::control_window(&app),
"restart" => tauri::api::process::restart(&handle.env()),
"show_dock_icon" => {
ChatConfJson::amend(&serde_json::json!({ "hide_dock_icon": false }), Some(app))

View File

@@ -5,11 +5,11 @@ pub fn init(app: &mut App) -> std::result::Result<(), Box<dyn std::error::Error>
let chat_conf = ChatConfJson::get_chat_conf();
let url = chat_conf.origin.to_string();
let theme = ChatConfJson::theme();
let handle = app.app_handle();
// let handle = app.app_handle();
std::thread::spawn(move || {
window::tray_window(&handle);
});
// std::thread::spawn(move || {
// window::tray_window(&handle);
// });
if chat_conf.hide_dock_icon {
#[cfg(target_os = "macos")]

View File

@@ -28,3 +28,17 @@ pub fn tray_window(handle: &tauri::AppHandle) {
.unwrap();
});
}
pub fn control_window(handle: &tauri::AppHandle) {
let app = handle.clone();
std::thread::spawn(move || {
WindowBuilder::new(&app, "main", WindowUrl::App("index.html".into()))
.title("ChatGPT")
.resizable(false)
.fullscreen(false)
.inner_size(800.0, 600.0)
.min_inner_size(800.0, 600.0)
.build()
.unwrap();
});
}

View File

@@ -106,8 +106,6 @@ async function cmdTip() {
// input text
if (window.__CHAT_MODEL_STATUS__ === 2 && event.keyCode === 9) {
console.log('«110» /src/assets/cmd.js ~> ', __CHAT_MODEL_STATUS__);
searchInput.value = window.__CHAT_MODEL_CMD_PROMPT__;
modelDom.innerHTML = '';
delete window.__CHAT_MODEL_STATUS__;

View File

@@ -86,6 +86,26 @@ async function init() {
}
}
});
window.__sync_prompts = async function() {
const res = await fetch('https://raw.githubusercontent.com/f/awesome-chatgpt-prompts/main/prompts.csv');
if (res.ok) {
const data = await res.text();
console.log('«94» /src/assets/core.js ~> ', data);
await invoke('sync_prompts', { data, time: Date.now() });
} else {
invoke('messageDialog', {
__tauriModule: 'Dialog',
message: {
cmd: 'messageDialog',
message: 'ChatGPT Prompts data sync failed, please try again!'.toString(),
title: 'Sync Prompts'.toString(),
type: 'error'
}
})
}
}
}
if (

View File

@@ -61,6 +61,7 @@ fn main() {
cmd::open_file,
cmd::get_chat_model_cmd,
cmd::parse_prompt,
cmd::sync_prompts,
cmd::window_reload,
cmd::cmd_list,
fs_extra::metadata,
@@ -76,7 +77,7 @@ fn main() {
if let tauri::WindowEvent::CloseRequested { api, .. } = event.event() {
let win = event.window();
if win.label() == "main" {
win.hide().unwrap();
win.close().unwrap();
} else {
// TODO: https://github.com/tauri-apps/tauri/issues/3084
// event.window().hide().unwrap();

View File

@@ -1,6 +1,9 @@
use anyhow::Result;
use log::info;
use regex::Regex;
use serde_json::Value;
use std::{
collections::HashMap,
fs::{self, File},
path::{Path, PathBuf},
process::Command,
@@ -89,3 +92,21 @@ pub fn clear_conf(app: &tauri::AppHandle) {
},
);
}
pub fn merge(v: &Value, fields: &HashMap<String, Value>) -> Value {
match v {
Value::Object(m) => {
let mut m = m.clone();
for (k, v) in fields {
m.insert(k.clone(), v.clone());
}
Value::Object(m)
}
v => v.clone(),
}
}
pub fn gen_cmd(name: String) -> String {
let re = Regex::new(r"[^a-zA-Z0-9]").unwrap();
re.replace_all(&name, "_").to_lowercase()
}
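
The two helpers added above are small enough to pin down with tests. Below is a hypothetical `#[cfg(test)]` module, not part of this commit, that could sit next to `merge` and `gen_cmd` in `utils.rs`; it assumes only what the diff shows: `merge` overwrites top-level keys of a JSON object and returns non-objects unchanged, and `gen_cmd` replaces non-alphanumeric characters with `_` and lowercases the result.

```rust
// Hypothetical tests for the helpers above; names and expectations are taken
// from the merge/gen_cmd definitions in this diff.
#[cfg(test)]
mod merge_and_gen_cmd_tests {
    use super::{gen_cmd, merge};
    use serde_json::{json, Value};
    use std::collections::HashMap;

    #[test]
    fn merge_overwrites_top_level_keys_only() {
        let model: Value = json!({ "name": "ChatGPT", "link": "https://github.com/lencx/ChatGPT" });
        let mut kv = HashMap::new();
        kv.insert(
            "sync_prompts".to_string(),
            json!({ "id": "chatgpt_prompts", "last_updated": 0 }),
        );
        let merged = merge(&model, &kv);
        assert_eq!(merged["name"], "ChatGPT"); // existing keys are kept
        assert_eq!(merged["sync_prompts"]["id"], "chatgpt_prompts"); // new key is inserted
    }

    #[test]
    fn merge_returns_non_objects_unchanged() {
        let merged = merge(&json!(42), &HashMap::new());
        assert_eq!(merged, json!(42));
    }

    #[test]
    fn gen_cmd_slugifies_prompt_names() {
        // non-alphanumeric characters become underscores, result is lowercased
        assert_eq!(gen_cmd("Linux Terminal".to_string()), "linux_terminal");
        assert_eq!(gen_cmd("English Translator".to_string()), "english_translator");
    }
}
```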

View File

@@ -7,7 +7,7 @@
},
"package": {
"productName": "ChatGPT",
"version": "0.6.4"
"version": "0.6.5"
},
"tauri": {
"allowlist": {
@@ -22,9 +22,7 @@
"fs": {
"all": true,
"scope": [
"$HOME/.chatgpt/*",
"$HOME/.chatgpt/**",
"$HOME/.chatgpt/cache_model/*"
"*"
]
}
},
@@ -79,18 +77,6 @@
"https://lencx.github.io/ChatGPT/install.json"
],
"pubkey": "dW50cnVzdGVkIGNvbW1lbnQ6IG1pbmlzaWduIHB1YmxpYyBrZXk6IEIxMjY4OUI5MTVFNjBEMDUKUldRRkRlWVZ1WWttc1NGWEE0RFNSb0RqdnhsekRJZTkwK2hVLzhBZTZnaHExSEZ1ZEdzWkpXTHkK"
},
"windows": [
{
"label": "main",
"url": "index.html",
"title": "ChatGPT",
"visible": false,
"width": 800,
"height": 600,
"minWidth": 800,
"minHeight": 600
}
]
}
}
}

View File

@@ -18,8 +18,6 @@ export default function useData(oData: any[]) {
const opInit = (val: any[] = []) => {
if (!val || !Array.isArray(val)) return;
console.log('«20» /src/hooks/useData.ts ~> ', val);
const nData = val.map(i => ({ [safeKey]: v4(), ...i }));
setData(nData);
};

src/hooks/useEvent.ts vendored (34 changed lines)
View File

@@ -1,34 +0,0 @@
import { invoke, path, http, fs, dialog } from '@tauri-apps/api';
import useInit from '@/hooks/useInit';
import useChatModel, { useCacheModel } from '@/hooks/useChatModel';
import { GITHUB_PROMPTS_CSV_URL, chatRoot, genCmd } from '@/utils';
export default function useEvent() {
const { modelSet } = useChatModel('sync_prompts');
const { modelCacheSet } = useCacheModel();
// Using `emit` and `listen` will be triggered multiple times in development mode.
// So here we use `eval` to call `__sync_prompt`
useInit(() => {
(window as any).__sync_prompts = async () => {
const res = await http.fetch(GITHUB_PROMPTS_CSV_URL, {
method: 'GET',
responseType: http.ResponseType.Text,
});
const data = (res.data || '') as string;
if (res.ok) {
const file = await path.join(await chatRoot(), 'cache_model', 'chatgpt_prompts.json');
const list: Record<string, string>[] = await invoke('parse_prompt', { data });
const fmtList = list.map(i => ({ ...i, cmd: i.cmd ? i.cmd : genCmd(i.act), enable: true, tags: ['chatgpt-prompts'] }));
await modelCacheSet(fmtList, file);
modelSet({
id: 'chatgpt_prompts',
last_updated: Date.now(),
});
dialog.message('ChatGPT Prompts data has been synchronized!');
} else {
dialog.message('ChatGPT Prompts data sync failed, please try again!');
}
}
})
}

src/main.tsx vendored (14 changed lines)
View File

@@ -2,23 +2,15 @@ import { StrictMode, Suspense } from 'react';
import { BrowserRouter } from 'react-router-dom';
import ReactDOM from 'react-dom/client';
import useEvent from '@/hooks/useEvent';
import Layout from '@/layout';
import './main.scss';
const App = () => {
useEvent();
return (
<BrowserRouter>
<Layout/>
</BrowserRouter>
);
}
ReactDOM.createRoot(document.getElementById('root') as HTMLElement).render(
<StrictMode>
<Suspense fallback={null}>
<App />
<BrowserRouter>
<Layout/>
</BrowserRouter>
</Suspense>
</StrictMode>
);

src/utils.ts vendored (6 changed lines)
View File

@@ -20,10 +20,6 @@ export const chatModelPath = async (): Promise<string> => {
return join(await chatRoot(), CHAT_MODEL_JSON);
}
// export const chatModelSyncPath = async (): Promise<string> => {
// return join(await chatRoot(), CHAT_MODEL_SYNC_JSON);
// }
export const chatPromptsPath = async (): Promise<string> => {
return join(await chatRoot(), CHAT_PROMPTS_CSV);
}
@@ -35,7 +31,9 @@ export const readJSON = async (path: string, opts: readJSONOpts = {}) => {
const file = await join(isRoot ? '' : root, path);
if (!await exists(file)) {
if (await dirname(file) !== root) {
await createDir(await dirname(file), { recursive: true });
}
await writeTextFile(file, isList ? '[]' : JSON.stringify({
name: 'ChatGPT',
link: 'https://github.com/lencx/ChatGPT',

View File

@@ -8,6 +8,7 @@ import useInit from '@/hooks/useInit';
interface SyncFormProps {
record?: Record<string|symbol, any> | null;
type: string;
}
const initFormValue = {
@@ -17,7 +18,8 @@ const initFormValue = {
prompt: '',
};
const SyncForm: ForwardRefRenderFunction<FormProps, SyncFormProps> = ({ record }, ref) => {
const SyncForm: ForwardRefRenderFunction<FormProps, SyncFormProps> = ({ record, type }, ref) => {
const isDisabled = type === 'edit';
const [form] = Form.useForm();
useImperativeHandle(ref, () => ({ form }));
const [root, setRoot] = useState('');
@@ -34,7 +36,7 @@ const SyncForm: ForwardRefRenderFunction<FormProps, SyncFormProps> = ({ record }
const pathOptions = (
<Form.Item noStyle name="protocol" initialValue="https">
<Select>
<Select disabled={isDisabled}>
<Select.Option value="local">{root}</Select.Option>
<Select.Option value="http">http://</Select.Option>
<Select.Option value="https">https://</Select.Option>
@@ -43,7 +45,7 @@ const SyncForm: ForwardRefRenderFunction<FormProps, SyncFormProps> = ({ record }
);
const extOptions = (
<Form.Item noStyle name="ext" initialValue="json">
<Select>
<Select disabled={isDisabled}>
<Select.Option value="csv">.csv</Select.Option>
<Select.Option value="json">.json</Select.Option>
</Select>
@@ -91,7 +93,12 @@ const SyncForm: ForwardRefRenderFunction<FormProps, SyncFormProps> = ({ record }
name="path"
rules={[{ required: true, message: 'Please input path!' }]}
>
<Input placeholder="YOUR_PATH" addonBefore={pathOptions} addonAfter={extOptions} {...DISABLE_AUTO_COMPLETE} />
<Input
placeholder="YOUR_PATH"
addonBefore={pathOptions}
addonAfter={extOptions}
{...DISABLE_AUTO_COMPLETE}
/>
</Form.Item>
<Form.Item style={{ display: 'none' }} name="id" initialValue={v4().replace(/-/g, '')}><input /></Form.Item>
</Form>

View File

@@ -34,7 +34,7 @@ export const syncColumns = () => [
key: 'last_updated',
width: 140,
render: (v: number) => (
<div style={{ textAlign: 'center' }}>
<div>
<HistoryOutlined style={{ marginRight: 5, color: v ? '#52c41a' : '#ff4d4f' }} />
{ v ? fmtDate(v) : ''}
</div>
@@ -47,7 +47,15 @@ export const syncColumns = () => [
render: (_: any, row: any, actions: any) => {
return (
<Space>
<a onClick={() => actions.setRecord(row, 'sync')}>Sync</a>
<Popconfirm
overlayStyle={{ width: 250 }}
title="Sync will overwrite the previous data, confirm to sync?"
onConfirm={() => actions.setRecord(row, 'sync')}
okText="Yes"
cancelText="No"
>
<a>Sync</a>
</Popconfirm>
{row.last_updated && <Link to={`${row.id}`} state={row}>View</Link>}
<a onClick={() => actions.setRecord(row, 'edit')}>Edit</a>
<Popconfirm

View File

@@ -128,12 +128,12 @@ export default function SyncCustom() {
<Modal
open={isVisible}
onCancel={hide}
title="Model PATH"
title="Sync PATH"
onOk={handleOk}
destroyOnClose
maskClosable={false}
>
<SyncForm ref={formRef} record={opInfo?.opRecord} />
<SyncForm ref={formRef} record={opInfo?.opRecord} type={opInfo.opType} />
</Modal>
</div>
)

View File

@@ -69,6 +69,16 @@ export default function SyncPrompts() {
return (
<div>
<div className="chat-table-btns">
<Popconfirm
overlayStyle={{ width: 250 }}
title="Sync will overwrite the previous data, confirm to sync?"
placement="topLeft"
onConfirm={handleSync}
okText="Yes"
cancelText="No"
>
<Button type="primary">Sync</Button>
</Popconfirm>
<div>
{selectedItems.length > 0 && (
<>
@@ -78,15 +88,6 @@
</>
)}
</div>
<Popconfirm
title={<span>Data sync will enable all prompts,<br/>are you sure you want to sync?</span>}
placement="topLeft"
onConfirm={handleSync}
okText="Yes"
cancelText="No"
>
<Button type="primary">Sync</Button>
</Popconfirm>
</div>
<div className="chat-table-tip">
<div className="chat-sync-path">

View File

@@ -10,7 +10,7 @@ export const syncColumns = () => [
// width: 120,
key: 'cmd',
render: (_: string, row: Record<string, string>) => (
<Tag color="#2a2a2a">/{genCmd(row.act)}</Tag>
<Tag color="#2a2a2a">/{row.cmd ? row.cmd : genCmd(row.act)}</Tag>
),
},
{