mirror of https://github.com/lencx/ChatGPT.git synced 2024-10-01 01:06:13 -04:00

Merge pull request #46 from lencx/dev

commit 8966ebbd03
Author: lencx
Date: 2022-12-19 03:12:07 +08:00 (committed by GitHub)
GPG Key ID: 4AEE18F83AFDEB23 (no known key found for this signature in the database)
21 changed files with 379 additions and 29 deletions


@@ -22,9 +22,9 @@
 **Latest:**
-- `Mac`: [ChatGPT_0.4.2_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.4.2/ChatGPT_0.4.2_x64.dmg)
-- `Linux`: [chat-gpt_0.4.2_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.4.2/chat-gpt_0.4.2_amd64.deb)
-- `Windows`: [ChatGPT_0.4.2_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.4.2/ChatGPT_0.4.2_x64_en-US.msi)
+- `Mac`: [ChatGPT_0.5.0_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.5.0/ChatGPT_0.5.0_x64.dmg)
+- `Linux`: [chat-gpt_0.5.0_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.5.0/chat-gpt_0.5.0_amd64.deb)
+- `Windows`: [ChatGPT_0.5.0_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.5.0/ChatGPT_0.5.0_x64_en-US.msi)
 [Other versions...](https://github.com/lencx/ChatGPT/releases)


@@ -23,9 +23,9 @@
 **Latest:**
-- `Mac`: [ChatGPT_0.4.2_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.4.2/ChatGPT_0.4.2_x64.dmg)
-- `Linux`: [chat-gpt_0.4.2_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.4.2/chat-gpt_0.4.2_amd64.deb)
-- `Windows`: [ChatGPT_0.4.2_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.4.2/ChatGPT_0.4.2_x64_en-US.msi)
+- `Mac`: [ChatGPT_0.5.0_x64.dmg](https://github.com/lencx/ChatGPT/releases/download/v0.5.0/ChatGPT_0.5.0_x64.dmg)
+- `Linux`: [chat-gpt_0.5.0_amd64.deb](https://github.com/lencx/ChatGPT/releases/download/v0.5.0/chat-gpt_0.5.0_amd64.deb)
+- `Windows`: [ChatGPT_0.5.0_x64_en-US.msi](https://github.com/lencx/ChatGPT/releases/download/v0.5.0/ChatGPT_0.5.0_x64_en-US.msi)
 [Other version...](https://github.com/lencx/ChatGPT/releases)


@@ -1,5 +1,9 @@
 # UPDATE LOG

+## v0.5.0
+
+feat: `Control Center` added `chatgpt-prompts` synchronization
+
 ## v0.4.2

 add chatgpt log (path: `~/.chatgpt/chatgpt.log`)


@@ -33,6 +33,7 @@
     "@ant-design/icons": "^4.8.0",
     "@tauri-apps/api": "^1.2.0",
     "antd": "^5.0.6",
+    "dayjs": "^1.11.7",
     "lodash": "^4.17.21",
     "react": "^18.2.0",
     "react-dom": "^18.2.0",


@@ -20,6 +20,8 @@ serde = { version = "1.0", features = ["derive"] }
 tauri = { version = "1.2.2", features = ["api-all", "devtools", "system-tray", "updater"] }
 tauri-plugin-positioner = { version = "1.0.4", features = ["system-tray"] }
 log = "0.4.17"
+csv = "1.1.6"
+thiserror = "1.0.38"

 [dependencies.tauri-plugin-log]
 git = "https://github.com/tauri-apps/tauri-plugin-log"


@@ -71,3 +71,20 @@ pub fn get_chat_model() -> serde_json::Value {
     let content = fs::read_to_string(path).unwrap_or_else(|_| r#"{"data":[]}"#.to_string());
     serde_json::from_str(&content).unwrap()
 }
+
+#[derive(Debug, serde::Serialize, serde::Deserialize)]
+pub struct PromptRecord {
+    pub act: String,
+    pub prompt: String,
+}
+
+#[command]
+pub fn parse_prompt(data: String) -> Vec<PromptRecord> {
+    let mut rdr = csv::Reader::from_reader(data.as_bytes());
+    let mut list = vec![];
+    for result in rdr.deserialize() {
+        let record: PromptRecord = result.unwrap();
+        list.push(record);
+    }
+    list
+}
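For orientation only (not part of the commit): a minimal TypeScript sketch of calling this command from the renderer through Tauri's `invoke`. The helper name is hypothetical; the command name and `{ data }` payload match the code above and the Sync Prompts view later in this diff.

```ts
import { invoke } from '@tauri-apps/api';

// Hypothetical helper: hand a CSV string to the Rust `parse_prompt` command
// and get back the deserialized records ({ act, prompt }).
async function parsePromptsCsv(data: string) {
  const records: { act: string; prompt: string }[] = await invoke('parse_prompt', { data });
  return records;
}
```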


@@ -0,0 +1,123 @@
// https://github.com/tauri-apps/tauri-plugin-fs-extra/blob/dev/src/lib.rs
// Copyright 2019-2021 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT

use serde::{ser::Serializer, Serialize};
use std::{
    path::PathBuf,
    time::{SystemTime, UNIX_EPOCH},
};
use tauri::command;

#[cfg(unix)]
use std::os::unix::fs::{MetadataExt, PermissionsExt};
#[cfg(windows)]
use std::os::windows::fs::MetadataExt;

type Result<T> = std::result::Result<T, Error>;

#[derive(Debug, thiserror::Error)]
pub enum Error {
    #[error(transparent)]
    Io(#[from] std::io::Error),
}

impl Serialize for Error {
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(self.to_string().as_ref())
    }
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Permissions {
    readonly: bool,
    #[cfg(unix)]
    mode: u32,
}

#[cfg(unix)]
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct UnixMetadata {
    dev: u64,
    ino: u64,
    mode: u32,
    nlink: u64,
    uid: u32,
    gid: u32,
    rdev: u64,
    blksize: u64,
    blocks: u64,
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Metadata {
    accessed_at_ms: u64,
    created_at_ms: u64,
    modified_at_ms: u64,
    is_dir: bool,
    is_file: bool,
    is_symlink: bool,
    size: u64,
    permissions: Permissions,
    #[cfg(unix)]
    #[serde(flatten)]
    unix: UnixMetadata,
    #[cfg(windows)]
    file_attributes: u32,
}

fn system_time_to_ms(time: std::io::Result<SystemTime>) -> u64 {
    time.map(|t| {
        let duration_since_epoch = t.duration_since(UNIX_EPOCH).unwrap();
        duration_since_epoch.as_millis() as u64
    })
    .unwrap_or_default()
}

#[command]
pub async fn metadata(path: PathBuf) -> Result<Metadata> {
    let metadata = std::fs::metadata(path)?;
    let file_type = metadata.file_type();
    let permissions = metadata.permissions();
    Ok(Metadata {
        accessed_at_ms: system_time_to_ms(metadata.accessed()),
        created_at_ms: system_time_to_ms(metadata.created()),
        modified_at_ms: system_time_to_ms(metadata.modified()),
        is_dir: file_type.is_dir(),
        is_file: file_type.is_file(),
        is_symlink: file_type.is_symlink(),
        size: metadata.len(),
        permissions: Permissions {
            readonly: permissions.readonly(),
            #[cfg(unix)]
            mode: permissions.mode(),
        },
        #[cfg(unix)]
        unix: UnixMetadata {
            dev: metadata.dev(),
            ino: metadata.ino(),
            mode: metadata.mode(),
            nlink: metadata.nlink(),
            uid: metadata.uid(),
            gid: metadata.gid(),
            rdev: metadata.rdev(),
            blksize: metadata.blksize(),
            blocks: metadata.blocks(),
        },
        #[cfg(windows)]
        file_attributes: metadata.file_attributes(),
    })
}

// #[command]
// pub async fn exists(path: PathBuf) -> bool {
//     path.exists()
// }
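For context (not part of the commit): a hedged sketch of consuming the new `metadata` command from TypeScript. The field names follow from the `camelCase` rename above; the Sync Prompts view below reads `accessedAtMs` the same way.

```ts
import { invoke } from '@tauri-apps/api';

// Sketch: read timestamps and size reported by the Rust `metadata` command.
async function fileInfo(path: string) {
  const meta: Record<string, any> = await invoke('metadata', { path });
  return {
    accessed: new Date(meta.accessedAtMs),
    modified: new Date(meta.modifiedAtMs),
    isFile: meta.isFile as boolean,
    size: meta.size as number,
  };
}
```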


@@ -1,4 +1,5 @@
 pub mod cmd;
+pub mod fs_extra;
 pub mod menu;
 pub mod setup;
 pub mod window;


@@ -62,8 +62,10 @@ function init() {
 async function cmdTip() {
   const chatModelJson = await invoke('get_chat_model') || {};
-  if (!chatModelJson.data && chatModelJson.data.length <= 0) return;
-  const data = chatModelJson.data || [];
+  const user_custom = chatModelJson.user_custom || [];
+  const sys_sync_prompts = chatModelJson.sys_sync_prompts || [];
+  const data = [...user_custom, ...sys_sync_prompts];
+  if (data.length <= 0) return;

   const modelDom = document.createElement('div');
   modelDom.classList.add('chat-model-cmd-list');
@@ -74,7 +76,7 @@ async function cmdTip() {
   }

   document.querySelector('form').appendChild(modelDom);
-  const itemDom = (v) => `<div class="cmd-item" data-prompt="${encodeURIComponent(v.prompt)}"><b>/${v.cmd}</b><i>${v.act}</i></div>`;
+  const itemDom = (v) => `<div class="cmd-item" title="${v.prompt}" data-prompt="${encodeURIComponent(v.prompt)}"><b title="${v.cmd}">/${v.cmd}</b><i>${v.act}</i></div>`;

   const searchInput = document.querySelector('form textarea');
   // Enter a command starting with `/` and press a space to automatically fill `chatgpt prompt`.


@@ -7,7 +7,7 @@ mod app;
 mod conf;
 mod utils;

-use app::{cmd, menu, setup};
+use app::{cmd, fs_extra, menu, setup};
 use conf::{ChatConfJson, ChatState};
 use tauri::api::path;
 use tauri_plugin_log::{fern::colors::ColoredLevelConfig, LogTarget, LoggerBuilder};
@@ -22,6 +22,7 @@ fn main() {
         // https://github.com/tauri-apps/tauri/pull/2736
         .plugin(
             LoggerBuilder::new()
+                // .level(log::LevelFilter::Error)
                 .with_colors(colors)
                 .targets([
                     // LogTarget::LogDir,
@@ -44,6 +45,8 @@ fn main() {
             cmd::form_msg,
             cmd::open_file,
             cmd::get_chat_model,
+            cmd::parse_prompt,
+            fs_extra::metadata,
         ])
         .setup(setup::init)
         .plugin(tauri_plugin_positioner::init())


@@ -7,11 +7,16 @@
   },
   "package": {
     "productName": "ChatGPT",
-    "version": "0.4.2"
+    "version": "0.5.0"
   },
   "tauri": {
     "allowlist": {
       "all": true,
+      "http": {
+        "scope": [
+          "https://raw.githubusercontent.com/*"
+        ]
+      },
       "fs": {
         "all": true,
         "scope": [
@@ -51,13 +56,19 @@
       "shortDescription": "ChatGPT",
       "targets": "all",
       "windows": {
-        "webviewInstallMode": {
-          "silent": true,
-          "type": "downloadBootstrapper"
-        },
         "certificateThumbprint": null,
         "digestAlgorithm": "sha256",
-        "timestampUrl": ""
+        "timestampUrl": "",
+        "webviewInstallMode": {
+          "silent": true,
+          "type": "embedBootstrapper"
+        },
+        "wix": {
+          "language": [
+            "zh-CN",
+            "en-US"
+          ]
+        }
       }
     },
     "security": {

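Context note (not part of the commit): the new `http` allowlist scope is what permits the webview to download the prompts CSV from raw.githubusercontent.com through Tauri's HTTP API, as the Sync Prompts view below does. A minimal sketch, with the URL constant imported from the utils added later in this diff:

```ts
import { fetch, ResponseType } from '@tauri-apps/api/http';
import { GITHUB_PROMPTS_CSV_URL } from '@/utils';

// Sketch: this request is allowed only because the allowlist scope covers
// https://raw.githubusercontent.com/* in tauri.conf.json.
async function downloadPromptsCsv(): Promise<string> {
  const res = await fetch(GITHUB_PROMPTS_CSV_URL, {
    method: 'GET',
    responseType: ResponseType.Text,
  });
  return (res.data || '') as string;
}
```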

@@ -4,20 +4,20 @@ import { clone } from 'lodash';
 import { CHAT_MODEL_JSON, readJSON, writeJSON } from '@/utils';
 import useInit from '@/hooks/useInit';

-export default function useChatModel() {
+export default function useChatModel(key: string) {
   const [modelJson, setModelJson] = useState<Record<string, any>>({});

   useInit(async () => {
-    const data = await readJSON(CHAT_MODEL_JSON, { name: 'ChatGPT Model', data: [] });
+    const data = await readJSON(CHAT_MODEL_JSON, { name: 'ChatGPT Model', [key]: [] });
     setModelJson(data);
   });

   const modelSet = async (data: Record<string, any>[]) => {
     const oData = clone(modelJson);
-    oData.data = data;
+    oData[key] = data;
     await writeJSON(CHAT_MODEL_JSON, oData);
     setModelJson(oData);
   }

-  return { modelJson, modelSet, modelData: modelJson?.data || [] }
+  return { modelJson, modelSet, modelData: modelJson?.[key] || [] }
 }
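Illustration only (not in the commit): with the new `key` parameter, each view owns one array inside `chat.model.json`. A hedged sketch of a composing hook; the hook name is made up for illustration.

```ts
import useChatModel from '@/hooks/useChatModel';

// Hypothetical hook: 'user_custom' backs the Language Model view and
// 'sys_sync_prompts' backs the Sync Prompts view (both appear later in this
// diff); the injected cmdTip script merges the two arrays.
export default function usePromptCount(): number {
  const { modelData: userPrompts } = useChatModel('user_custom');
  const { modelData: syncedPrompts } = useChatModel('sys_sync_prompts');
  return userPrompts.length + syncedPrompts.length;
}
```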


@@ -8,5 +8,5 @@ export default function useInit(callback: () => void) {
       callback();
       isInit.current = false;
     }
-  }, [])
+  })
 }
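For context, a hedged reconstruction of the whole hook; only the lines in the hunk above are confirmed by this diff, and the ref setup is an assumption.

```ts
import { useEffect, useRef } from 'react';

// Assumed shape of the full hook. Removing the `[]` dependency array makes the
// effect run after every render, but the ref guard still invokes `callback`
// only once per mount.
export default function useInit(callback: () => void) {
  const isInit = useRef(true); // assumed initial value
  useEffect(() => {
    if (isInit.current) {
      callback();
      isInit.current = false;
    }
  }); // previously `}, [])`
}
```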


@@ -8,6 +8,11 @@
   }
 }

+.ant-layout-sider-trigger {
+  user-select: none;
+  -webkit-user-select: none;
+}
+
 .chat-container {
   padding: 20px;
   overflow: hidden;

src/layout/index.tsx

@@ -17,13 +17,32 @@ const ChatLayout: FC<ChatLayoutProps> = ({ children }) => {
   const go = useNavigate();

   return (
-    <Layout style={{ minHeight: '100vh' }}>
-      <Sider theme="light" collapsible collapsed={collapsed} onCollapse={(value) => setCollapsed(value)}>
+    <Layout style={{ minHeight: '100vh' }} hasSider>
+      <Sider
+        theme="light"
+        collapsible
+        collapsed={collapsed}
+        onCollapse={(value) => setCollapsed(value)}
+        style={{
+          overflow: 'auto',
+          height: '100vh',
+          position: 'fixed',
+          left: 0,
+          top: 0,
+          bottom: 0,
+          zIndex: 999,
+        }}
+      >
         <div className="chat-logo"><img src="/logo.png" /></div>
         <Menu defaultSelectedKeys={[location.pathname]} mode="vertical" items={menuItems} onClick={(i) => go(i.key)} />
       </Sider>
-      <Layout className="chat-layout">
-        <Content className="chat-container">
+      <Layout className="chat-layout" style={{ marginLeft: collapsed ? 80 : 200, transition: 'margin-left 300ms ease-out' }}>
+        <Content
+          className="chat-container"
+          style={{
+            overflow: 'inherit'
+          }}
+        >
           <Routes />
         </Content>
         <Footer style={{ textAlign: 'center' }}>

src/routes.tsx

@@ -2,12 +2,14 @@ import { useRoutes } from 'react-router-dom';
 import {
   DesktopOutlined,
   BulbOutlined,
+  SyncOutlined,
 } from '@ant-design/icons';
 import type { RouteObject } from 'react-router-dom';
 import type { MenuProps } from 'antd';
 import General from '@view/General';
 import LanguageModel from '@/view/LanguageModel';
+import SyncPrompts from '@/view/SyncPrompts';

 export type ChatRouteObject = {
   label: string;
@@ -31,6 +33,14 @@ export const routes: Array<RouteObject & { meta: ChatRouteObject }> = [
       icon: <BulbOutlined />,
     },
   },
+  {
+    path: '/sync-prompts',
+    element: <SyncPrompts />,
+    meta: {
+      label: 'Sync Prompts',
+      icon: <SyncOutlined />,
+    },
+  },
 ];

 type MenuItem = Required<MenuProps>['items'][number];

src/utils.ts

@@ -1,7 +1,10 @@
 import { readTextFile, writeTextFile, exists } from '@tauri-apps/api/fs';
 import { homeDir, join } from '@tauri-apps/api/path';
+import dayjs from 'dayjs';

 export const CHAT_MODEL_JSON = 'chat.model.json';
+export const CHAT_PROMPTS_CSV = 'chat.prompts.csv';
+export const GITHUB_PROMPTS_CSV_URL = 'https://raw.githubusercontent.com/f/awesome-chatgpt-prompts/main/prompts.csv';
 export const DISABLE_AUTO_COMPLETE = {
   autoCapitalize: 'off',
   autoComplete: 'off',
@@ -12,10 +15,14 @@ export const chatRoot = async () => {
   return join(await homeDir(), '.chatgpt')
 }

-export const chatModelPath = async () => {
+export const chatModelPath = async (): Promise<string> => {
   return join(await chatRoot(), CHAT_MODEL_JSON);
 }

+export const chatPromptsPath = async (): Promise<string> => {
+  return join(await chatRoot(), CHAT_PROMPTS_CSV);
+}
+
 export const readJSON = async (path: string, defaultVal = {}) => {
   const root = await chatRoot();
   const file = await join(root, path);
@@ -24,7 +31,6 @@ export const readJSON = async (path: string, defaultVal = {}) => {
     writeTextFile(file, JSON.stringify({
       name: 'ChatGPT',
       link: 'https://github.com/lencx/ChatGPT/blob/main/chat.model.md',
-      data: null,
       ...defaultVal,
     }, null, 2))
   }
@@ -41,3 +47,5 @@ export const writeJSON = async (path: string, data: Record<string, any>) => {
   const file = await join(root, path);
   await writeTextFile(file, JSON.stringify(data, null, 2));
 }
+
+export const fmtDate = (date: any) => dayjs(date).format('YYYY-MM-DD HH:mm:ss');
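Brief usage sketch (not in the commit) tying the new helpers together; the function name is made up for illustration.

```ts
import { fmtDate, chatPromptsPath } from '@/utils';

// Sketch: resolve where the synced CSV is cached (~/.chatgpt/chat.prompts.csv)
// and format a timestamp the way the Sync Prompts view renders "Last updated".
async function describePromptsCache(): Promise<string> {
  const csvPath = await chatPromptsPath();
  return `${csvPath} (checked at ${fmtDate(Date.now())})`;
}
```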


@@ -14,7 +14,7 @@ import './index.scss';
 export default function LanguageModel() {
   const [isVisible, setVisible] = useState(false);
   const [modelPath, setChatModelPath] = useState('');
-  const { modelData, modelSet } = useChatModel();
+  const { modelData, modelSet } = useChatModel('user_custom');
   const { opData, opAdd, opRemove, opReplace, opSafeKey } = useData(modelData);
   const { columns, ...opInfo } = useColumns(modelColumns());
   const formRef = useRef<any>(null);

src/view/SyncPrompts/config.tsx (new file)

@@ -0,0 +1,45 @@
import { Tag } from 'antd';

export const genCmd = (act: string) => act.replace(/\s+|\/+/g, '_').replace(/[^\d\w]/g, '').toLocaleLowerCase();

export const modelColumns = () => [
  {
    title: '/{cmd}',
    dataIndex: 'cmd',
    fixed: 'left',
    // width: 120,
    key: 'cmd',
    render: (_: string, row: Record<string, string>) => (
      <Tag color="#2a2a2a">/{genCmd(row.act)}</Tag>
    ),
  },
  {
    title: 'Act',
    dataIndex: 'act',
    key: 'act',
    // width: 200,
  },
  {
    title: 'Tags',
    dataIndex: 'tags',
    key: 'tags',
    // width: 150,
    render: () => <Tag>chatgpt-prompts</Tag>,
  },
  // {
  //   title: 'Enable',
  //   dataIndex: 'enable',
  //   key: 'enable',
  //   width: 80,
  //   render: (v: boolean = false) => <Switch checked={v} disabled />,
  // },
  {
    title: 'Prompt',
    dataIndex: 'prompt',
    key: 'prompt',
    // width: 300,
    // render: (v: string) => (
    //   <Tooltip overlayInnerStyle={{ width: 350 }} title={v}><span className="chat-prompts-val">{v}</span></Tooltip>
    // ),
  },
];
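To illustrate the slash-command naming (example inputs are mine, not from the commit): `genCmd` turns an "act" title into a lowercase command by replacing whitespace and slashes with underscores and dropping any remaining non-word characters. The import path assumes the project's `@/` alias convention.

```ts
import { genCmd } from '@/view/SyncPrompts/config';

// Worked examples based on the two regex passes above:
genCmd('Linux Terminal');         // -> "linux_terminal"
genCmd('UX/UI Developer');        // -> "ux_ui_developer"      (slash -> underscore)
genCmd('`position` Interviewer'); // -> "position_interviewer" (backticks dropped)
```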

src/view/SyncPrompts/index.scss (new file)

@@ -0,0 +1,28 @@
.chat-prompts-tags {
  .ant-tag {
    margin: 2px;
  }
}

.add-btn {
  margin-bottom: 5px;
}

.chat-model-path {
  font-size: 12px;
  font-weight: bold;
  color: #888;
  margin-bottom: 5px;

  span {
    display: inline-block;
    // background-color: #d8d8d8;
    color: #4096ff;
    padding: 0 8px;
    height: 20px;
    line-height: 20px;
    border-radius: 4px;
    cursor: pointer;
    text-decoration: underline;
  }
}

src/view/SyncPrompts/index.tsx (new file)

@@ -0,0 +1,71 @@
import { useState } from 'react';
import { Table, Button, message } from 'antd';
import { invoke } from '@tauri-apps/api';
import { fetch, ResponseType } from '@tauri-apps/api/http';
import { writeTextFile, readTextFile } from '@tauri-apps/api/fs';

import useColumns from '@/hooks/useColumns';
import useChatModel from '@/hooks/useChatModel';
import { fmtDate, chatPromptsPath, GITHUB_PROMPTS_CSV_URL } from '@/utils';
import { modelColumns, genCmd } from './config';
import './index.scss';
import useInit from '@/hooks/useInit';

const promptsURL = 'https://github.com/f/awesome-chatgpt-prompts/blob/main/prompts.csv';

export default function LanguageModel() {
  const [loading, setLoading] = useState(false);
  const [lastUpdated, setLastUpdated] = useState();
  const { modelSet } = useChatModel('sys_sync_prompts');
  const [tableData, setTableData] = useState<Record<string, string>[]>([]);
  const { columns, ...opInfo } = useColumns(modelColumns());

  useInit(async () => {
    const filename = await chatPromptsPath();
    const data = await readTextFile(filename);
    const list: Record<string, string>[] = await invoke('parse_prompt', { data });
    const fileData: Record<string, any> = await invoke('metadata', { path: filename });
    setLastUpdated(fileData.accessedAtMs);
    setTableData(list);
  })

  const handleSync = async () => {
    setLoading(true);
    const res = await fetch(GITHUB_PROMPTS_CSV_URL, {
      method: 'GET',
      responseType: ResponseType.Text,
    });
    const data = (res.data || '') as string;
    // const content = data.replace(/"(\s+)?,(\s+)?"/g, '","');
    await writeTextFile(await chatPromptsPath(), data);
    const list: Record<string, string>[] = await invoke('parse_prompt', { data });
    setTableData(list);
    modelSet(list.map(i => ({ cmd: genCmd(i.act), enable: true, tags: ['chatgpt-prompts'], ...i })));
    setLoading(false);
    setLastUpdated(fmtDate(Date.now()) as any);
    message.success('ChatGPT Prompts data synchronization completed!');
  };

  return (
    <div>
      <Button type="primary" loading={loading} onClick={handleSync}>Sync</Button>
      {lastUpdated && <span style={{ marginLeft: 10, color: '#999' }}>Last updated on {fmtDate(lastUpdated)}</span>}
      <div className="chat-model-path">URL: <a href={promptsURL} target="_blank">{promptsURL}</a></div>
      <Table
        key={lastUpdated}
        rowKey="act"
        columns={columns}
        scroll={{ x: 'auto' }}
        dataSource={tableData}
        pagination={{
          hideOnSinglePage: true,
          showSizeChanger: true,
          showQuickJumper: true,
          defaultPageSize: 5,
          pageSizeOptions: [5, 10, 15, 20],
          showTotal: (total) => <span>Total {total} items</span>,
        }}
      />
    </div>
  )
}