Added chatgpt temperature environment variable (#162)

* added ChatGPT temperature environment variable

* accidentally used float instead of number

* fixed inline

* Updated the readme

* Updated the env example

---------

Co-authored-by: D <>
Derek 2023-05-10 06:07:35 -04:00 committed by GitHub
parent b11d566ecb
commit 8efd3f9491
4 changed files with 17 additions and 2 deletions


@@ -13,6 +13,8 @@ CHATGPT_API_MODEL=gpt-3.5-turbo
#CHATGPT_IGNORE_MEDIA=false
# (Optional) You can change the api url to use another (OpenAI-compatible) API endpoint
#CHATGPT_REVERSE_PROXY=https://api.openai.com/v1/chat/completions
# (Optional) Set the temperature of the model. 0.0 is deterministic, 1.0 is very creative.
#CHATGPT_TEMPERATURE=0.8
# Set data store settings
KEYV_BACKEND=file


@@ -162,6 +162,16 @@ Once the bot has started successfully, it will output the following information t
## I use Docker but I don't see any console output
You most likely need to view the logs by running `docker logs matrix-chatgpt-bot`
## How to set the temperature
Set the temperature by adding `CHATGPT_TEMPERATURE` to your `.env` file. The default is 0.8.
Here are some guidelines for choosing a value; an example setting follows the table:
| Temperature Values | Appropriate Tasks | Examples |
| --- | --- | --- |
| Below 0.5 (low) | Tasks requiring a single correct answer or predictable output | Programming |
| 0.5-0.9 (medium) | Tasks needing somewhat varied and creative content grounded in reality | E-mail response |
| Above 0.9 (high) | Tasks requiring more creative and unpredictable output | Story writing |
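For instance, a bot used mainly for coding assistance could be tuned lower in `.env` (the value below is only an illustration, not a project recommendation):

```
# Illustrative value; pick whatever suits your rooms
CHATGPT_TEMPERATURE=0.3
```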
# Reporting issues


@@ -36,6 +36,7 @@ export const {
CHATGPT_PROMPT_PREFIX,
CHATGPT_IGNORE_MEDIA,
CHATGPT_REVERSE_PROXY,
CHATGPT_TEMPERATURE,
} = parseEnv(process.env, {
DATA_PATH: { schema: z.string().default("./storage"), description: "Set to /storage/ if using docker, ./storage if running without" },
KEYV_BACKEND: { schema: z.enum(["file", "other"]).default("file"), description: "Set the Keyv backend to 'file' or 'other'; if 'other', set KEYV_URL" },
@@ -68,5 +69,6 @@ export const {
CHATGPT_API_MODEL: { schema: z.string().default(""), description: "The model for the ChatGPT-API to use. Keep in mind that these models will charge your OpenAI account depending on their pricing." },
CHATGPT_PROMPT_PREFIX: { schema: z.string().default('Instructions:\nYou are ChatGPT, a large language model trained by OpenAI.'), description: "Instructions to feed to ChatGPT on startup"},
CHATGPT_IGNORE_MEDIA: { schema: z.boolean().default(false), description: "Whether or not the bot should react to non-text messages"},
CHATGPT_REVERSE_PROXY: { schema: z.string().default(""), description: "Change the api url to use another (OpenAI-compatible) API endpoint" }
CHATGPT_REVERSE_PROXY: { schema: z.string().default(""), description: "Change the api url to use another (OpenAI-compatible) API endpoint" },
CHATGPT_TEMPERATURE: { schema: z.number().default(0.8), description: "Set the temperature for the model" }
});
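Since values in `process.env` are always strings, the numeric schema above presumably relies on znv coercing `CHATGPT_TEMPERATURE` to a number while parsing; here is a minimal sketch of the expected behaviour (assuming znv's usual string-to-number coercion, with an illustrative check added):

```typescript
// Sketch only: assumes znv coerces the CHATGPT_TEMPERATURE string through the
// z.number() schema, so an unset variable yields the 0.8 default and a value
// such as "0.4" in .env arrives here as the number 0.4.
import { CHATGPT_TEMPERATURE } from "./env.js";

if (typeof CHATGPT_TEMPERATURE !== "number") {
  throw new Error("CHATGPT_TEMPERATURE should have been parsed as a number");
}
console.log(`Using model temperature ${CHATGPT_TEMPERATURE}`); // 0.8 unless overridden
```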


@@ -11,7 +11,7 @@ import {
DATA_PATH, KEYV_URL, OPENAI_API_KEY, MATRIX_HOMESERVER_URL, MATRIX_ACCESS_TOKEN, MATRIX_AUTOJOIN,
MATRIX_BOT_PASSWORD, MATRIX_BOT_USERNAME, MATRIX_ENCRYPTION, MATRIX_THREADS, CHATGPT_CONTEXT,
CHATGPT_API_MODEL, KEYV_BOT_STORAGE, KEYV_BACKEND, CHATGPT_PROMPT_PREFIX, MATRIX_WELCOME,
CHATGPT_REVERSE_PROXY
CHATGPT_REVERSE_PROXY, CHATGPT_TEMPERATURE
} from './env.js'
import CommandHandler from "./handlers.js"
import { KeyvStorageProvider } from './storage.js'
@@ -60,6 +60,7 @@ async function main() {
const clientOptions = { // (Optional) Parameters as described in https://platform.openai.com/docs/api-reference/completions
modelOptions: {
model: CHATGPT_API_MODEL, // The model is set to gpt-3.5-turbo by default
temperature: CHATGPT_TEMPERATURE, // Defaults to 0.8; override via CHATGPT_TEMPERATURE in .env
},
promptPrefix: wrapPrompt(CHATGPT_PROMPT_PREFIX),
debug: false,
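The temperature passed through `modelOptions` ultimately corresponds to the `temperature` parameter of the OpenAI-compatible completion request that the ChatGPT client builds; a rough, illustrative sketch of that mapping (field names follow the public Chat Completions API and are not code from this repository):

```typescript
// Illustrative request shape only; the real request is assembled inside the
// ChatGPT client library, not in this file.
const exampleRequestBody = {
  model: "gpt-3.5-turbo",                          // CHATGPT_API_MODEL from .env
  temperature: 0.8,                                // CHATGPT_TEMPERATURE (default 0.8)
  messages: [{ role: "user", content: "Hello!" }], // conversation turns
};
console.log(JSON.stringify(exampleRequestBody, null, 2));
```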