diff --git a/.env.example b/.env.example
index 2aba99e..1fb26c5 100644
--- a/.env.example
+++ b/.env.example
@@ -11,6 +11,8 @@ CHATGPT_API_MODEL=gpt-3.5-turbo
 #CHATGPT_PROMPT_PREFIX=Instructions:\nYou are ChatGPT, a large language model trained by OpenAI.
 # (Optional) Set to true if ChatGPT should ignore any messages which are not text
 #CHATGPT_IGNORE_MEDIA=false
+# (Optional) Change the API URL to use another (OpenAI-compatible) API endpoint
+#CHATGPT_REVERSE_PROXY=https://api.openai.com/v1/chat/completions
 
 # Set data store settings
 KEYV_BACKEND=file
diff --git a/README.md b/README.md
index e18e6fd..968a320 100644
--- a/README.md
+++ b/README.md
@@ -44,6 +44,7 @@ Adjust all required settings in the `.env` file before running. Optional setting
 - You need to have an account at [openai.com](https://openai.com/). Please note that the usage of the ChatGPT-API is not free.
 - Create a [API Key](https://platform.openai.com/account/api-keys). Then, set `OPENAI_API_KEY` in your `.env` file
 - You can change the chat-model by setting the `CHATGPT_API_MODEL` in your `.env` file. ChatGPT is the `gpt-3.5-turbo`-model which is the default. Please note that depending on the model your OpenAI account will be charged.
+- You can change the API URL to use a different base than the official OpenAI endpoint. To do so, set `CHATGPT_REVERSE_PROXY` in your `.env` file.
 
 ## Setup
 
diff --git a/src/env.ts b/src/env.ts
index 464971b..69360d3 100644
--- a/src/env.ts
+++ b/src/env.ts
@@ -35,6 +35,7 @@ export const {
   CHATGPT_API_MODEL,
   CHATGPT_PROMPT_PREFIX,
   CHATGPT_IGNORE_MEDIA,
+  CHATGPT_REVERSE_PROXY,
 } = parseEnv(process.env, {
   DATA_PATH: { schema: z.string().default("./storage"), description: "Set to /storage/ if using docker, ./storage if running without" },
   KEYV_BACKEND: { schema: z.enum(["file", "other"]).default("file"),description: "Set the Keyv backend to 'file' or 'other' if other set KEYV_URL" },
@@ -67,4 +68,5 @@
   CHATGPT_API_MODEL: { schema: z.string().default(""), description: "The model for the ChatGPT-API to use. Keep in mind that these models will charge your OpenAI account depending on their pricing." },
   CHATGPT_PROMPT_PREFIX: { schema: z.string().default('Instructions:\nYou are ChatGPT, a large language model trained by OpenAI.'), description: "Instructions to feed to ChatGPT on startup"},
   CHATGPT_IGNORE_MEDIA: { schema: z.boolean().default(false), description: "Wether or not the bot should react to non-text messages"},
+  CHATGPT_REVERSE_PROXY: { schema: z.string().default(""), description: "Change the API URL to use another (OpenAI-compatible) API endpoint" }
 });
diff --git a/src/index.ts b/src/index.ts
index c9d5375..87c480a 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -7,7 +7,12 @@ import {
 } from "matrix-bot-sdk";
 import * as path from "path";
 
-import { DATA_PATH, KEYV_URL, OPENAI_API_KEY, MATRIX_HOMESERVER_URL, MATRIX_ACCESS_TOKEN, MATRIX_AUTOJOIN, MATRIX_BOT_PASSWORD, MATRIX_BOT_USERNAME, MATRIX_ENCRYPTION, MATRIX_THREADS, CHATGPT_CONTEXT, CHATGPT_API_MODEL, KEYV_BOT_STORAGE, KEYV_BACKEND, CHATGPT_PROMPT_PREFIX, MATRIX_WELCOME } from './env.js'
+import {
+  DATA_PATH, KEYV_URL, OPENAI_API_KEY, MATRIX_HOMESERVER_URL, MATRIX_ACCESS_TOKEN, MATRIX_AUTOJOIN,
+  MATRIX_BOT_PASSWORD, MATRIX_BOT_USERNAME, MATRIX_ENCRYPTION, MATRIX_THREADS, CHATGPT_CONTEXT,
+  CHATGPT_API_MODEL, KEYV_BOT_STORAGE, KEYV_BACKEND, CHATGPT_PROMPT_PREFIX, MATRIX_WELCOME,
+  CHATGPT_REVERSE_PROXY
+} from './env.js'
 import CommandHandler from "./handlers.js"
 import { KeyvStorageProvider } from './storage.js'
 import { parseMatrixUsernamePretty, wrapPrompt } from './utils.js';
@@ -58,6 +63,9 @@ async function main() {
     },
     promptPrefix: wrapPrompt(CHATGPT_PROMPT_PREFIX),
     debug: false,
+    options: {
+      reverseProxyUrl: CHATGPT_REVERSE_PROXY
+    },
   };
 
   const chatgpt = new ChatGPTClient(OPENAI_API_KEY, clientOptions, cacheOptions);
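
Example usage after this change (a sketch, not part of the patch; the localhost address is only an illustrative placeholder for whatever OpenAI-compatible endpoint you actually run):

    # default suggestion from .env.example (official OpenAI endpoint)
    #CHATGPT_REVERSE_PROXY=https://api.openai.com/v1/chat/completions
    # example override: a self-hosted, OpenAI-compatible server (placeholder URL)
    CHATGPT_REVERSE_PROXY=http://localhost:8080/v1/chat/completions

The value flows from the environment variable into `clientOptions.options.reverseProxyUrl`, which is passed to the `ChatGPTClient` constructor as shown in the `src/index.ts` hunk above.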