From e32b96c71d01901da87a346842fcca3b53317159 Mon Sep 17 00:00:00 2001
From: hibobmaster <32976627+hibobmaster@users.noreply.github.com>
Date: Wed, 8 May 2024 21:30:50 +0800
Subject: [PATCH] :sparkles: feat: Support custom help message

---
 CHANGELOG.md            |  3 +++
 compose.yaml            |  1 +
 custom_help_message.txt | 11 +++++++++++
 src/bot.py              | 26 ++++++++++++++++----------
 src/main.py             | 18 ++++++++++++++++++
 5 files changed, 49 insertions(+), 10 deletions(-)
 create mode 100644 custom_help_message.txt

diff --git a/CHANGELOG.md b/CHANGELOG.md
index af7846d..a8cc23f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,8 @@
 # Changelog
 
+## 1.8.0
+- Support custom help message
+
 ## 1.7.2
 - Refactor gpt vision trigger method
 - !pic, !help, and gpt vision in thread chat
diff --git a/compose.yaml b/compose.yaml
index 6e31742..0952f91 100644
--- a/compose.yaml
+++ b/compose.yaml
@@ -16,6 +16,7 @@ services:
       - ./sync_db:/app/sync_db
       - ./context.db:/app/context.db
       # - ./manage_db:/app/manage_db
+      # - ./custom_help_message.txt:/app/custom_help_message.txt
       # import_keys path
       # - ./element-keys.txt:/app/element-keys.txt
     networks:
diff --git a/custom_help_message.txt b/custom_help_message.txt
new file mode 100644
index 0000000..51e96a0
--- /dev/null
+++ b/custom_help_message.txt
@@ -0,0 +1,11 @@
+Hi there, welcome to our chat room!
+Our bot is powered by opensource project [matrix_chatgpt_bot](https://github.com/hibobmaster/matrix_chatgpt_bot).
+Here are some commands you can use to interact with the bot.
+!gpt [prompt], generate a one time response without context conversation
+!chat [prompt], chat with context conversation
+!pic [prompt], Image generation by DALL-E-3 or LocalAI or stable-diffusion-webui
+!new + chat, start a new conversation
+!lc [prompt], chat using langchain api
+quote a image and @bot with prompt, gpt vision function
+@bot with prompt, create a thread level chatting
+!help, help message
diff --git a/src/bot.py b/src/bot.py
index 1731395..d12c53b 100644
--- a/src/bot.py
+++ b/src/bot.py
@@ -80,6 +80,7 @@ class Bot:
         gpt_vision_model: Optional[str] = None,
         gpt_vision_api_endpoint: Optional[str] = None,
         timeout: Union[float, None] = None,
+        custom_help_message: Optional[str] = None,
     ):
         if homeserver is None or user_id is None or device_id is None:
             logger.error("homeserver && user_id && device_id is required")
@@ -227,6 +228,8 @@ class Bot:
         self.help_prog = re.compile(r"\s*!help\s*.*$")
         self.new_prog = re.compile(r"\s*!new\s+(.+)$")
 
+        self.custom_help_message = custom_help_message
+
     async def close(self, task: asyncio.Task) -> None:
         self.chatbot.cursor.close()
         self.chatbot.conn.close()
@@ -1818,16 +1821,19 @@ class Bot:
         reply_in_thread=False,
         thread_root_id=None,
     ):
-        help_info = (
-            "!gpt [prompt], generate a one time response without context conversation\n"
-            + "!chat [prompt], chat with context conversation\n"
-            + "!pic [prompt], Image generation by DALL-E-3 or LocalAI or stable-diffusion-webui\n"  # noqa: E501
-            + "!new + chat, start a new conversation \n"
-            + "!lc [prompt], chat using langchain api\n"
-            + "quote a image and @bot with prompt, gpt vision function\n"
-            + "@bot with prompt, create a thread level chatting\n"
-            + "!help, help message"
-        )  # noqa: E501
+        if self.custom_help_message:
+            help_info = self.custom_help_message
+        else:
+            help_info = (
+                "!gpt [prompt], generate a one time response without context conversation\n"
+                + "!chat [prompt], chat with context conversation\n"
+                + "!pic [prompt], Image generation by DALL-E-3 or LocalAI or stable-diffusion-webui\n"  # noqa: E501
+                + "!new + chat, start a new conversation \n"
+                + "!lc [prompt], chat using langchain api\n"
+                + "quote a image and @bot with prompt, gpt vision function\n"
+                + "@bot with prompt, create a thread level chatting\n"
+                + "!help, help message"
+            )  # noqa: E501
 
         await send_room_message(
             self.client,
diff --git a/src/main.py b/src/main.py
index 9c78b22..1d40fed 100644
--- a/src/main.py
+++ b/src/main.py
@@ -14,6 +14,22 @@ logger = getlogger()
 async def main():
     need_import_keys = False
     config_path = Path(os.path.dirname(__file__)).parent / "config.json"
+    help_message_path = (
+        Path(os.path.dirname(__file__)).parent / "custom_help_message.txt"
+    )
+
+    if os.path.isfile(help_message_path):
+        try:
+            f = open(help_message_path, encoding="utf8")
+            custom_help_message = ""
+            for line in f.readlines():
+                custom_help_message += line
+        except Exception as e:
+            logger.error(e)
+            sys.exit(1)
+    else:
+        custom_help_message = None
+
     if os.path.isfile(config_path):
         try:
             fp = open(config_path, encoding="utf8")
@@ -52,6 +68,7 @@ async def main():
             gpt_vision_model=config.get("gpt_vision_model"),
             gpt_vision_api_endpoint=config.get("gpt_vision_api_endpoint"),
             timeout=config.get("timeout"),
+            custom_help_message=custom_help_message,
         )
         if (
             config.get("import_keys_path")
@@ -90,6 +107,7 @@ async def main():
             gpt_vision_model=os.environ.get("GPT_VISION_MODEL"),
             gpt_vision_api_endpoint=os.environ.get("GPT_VISION_API_ENDPOINT"),
             timeout=float(os.environ.get("TIMEOUT", 120.0)),
+            custom_help_message=custom_help_message,
         )
         if (
             os.environ.get("IMPORT_KEYS_PATH")