Fix docker build

hibobmaster 2023-09-17 23:48:21 +08:00
parent dab64d5588
commit 7fe0ccea8e
No known key found for this signature in database
4 changed files with 18 additions and 97 deletions


@ -2,7 +2,7 @@ FROM python:3.11-alpine as base
FROM base as pybuilder
# RUN sed -i 's|v3\.\d*|edge|' /etc/apk/repositories
-RUN apk update && apk add --no-cache olm-dev gcc musl-dev libmagic libffi-dev
+RUN apk update && apk add --no-cache olm-dev gcc musl-dev libmagic libffi-dev cmake make g++ git python3-dev
COPY requirements.txt /requirements.txt
RUN pip install -U pip setuptools wheel && pip install --user -r /requirements.txt && rm /requirements.txt


@ -116,6 +116,8 @@ class Bot:
        self.base_path = Path(os.path.dirname(__file__)).parent
+        if lc_admin is not None:
+            lc_admin = list(filter(None, lc_admin.split(",")))
        self.lc_admin = lc_admin
        self.lc_cache = {}
        if self.lc_admin is not None:
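The two added lines move the comma-splitting of lc_admin into Bot.__init__, so callers can hand over the raw LC_ADMIN string (or None) and let the bot normalize it, which matches the simplified calls in main.py further down. A minimal standalone sketch of that normalization, with a hypothetical helper name:

# Hypothetical helper showing the normalization added above:
def parse_lc_admin(lc_admin):
    # "@a:example.com,@b:example.com" -> ["@a:example.com", "@b:example.com"]
    # filter(None, ...) drops empty entries left by stray commas; None passes through unchanged.
    if lc_admin is not None:
        lc_admin = list(filter(None, lc_admin.split(",")))
    return lc_admin

print(parse_lc_admin("@a:example.com,,@b:example.com"))  # ['@a:example.com', '@b:example.com']
print(parse_lc_admin(None))                               # None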
@ -1372,7 +1374,7 @@ class Bot:
        help_info = (
            "!gpt [prompt], generate a one time response without context conversation\n"
            + "!chat [prompt], chat with context conversation\n"
-            + "!pic [prompt], Image generation by Microsoft Bing\n"
+            + "!pic [prompt], Image generation by DALL·E or LocalAI or stable-diffusion-webui\n" # noqa: E501
            + "!new + chat, start a new conversation \n"
            + "!lc [prompt], chat using langchain api\n"
            + "!help, help message"


@ -1,81 +0,0 @@
import aiohttp

from log import getlogger

logger = getlogger()


class GPTBOT:
    def __init__(
        self,
        api_endpoint: str,
        session: aiohttp.ClientSession,
    ) -> None:
        self.api_endpoint = api_endpoint
        self.session = session

    async def queryBing(self, payload: dict) -> dict:
        resp = await self.session.post(url=self.api_endpoint, json=payload, timeout=300)
        status_code = resp.status
        if not status_code == 200:
            logger.warning(str(resp.reason))
            raise Exception(str(resp.reason))
        return await resp.json()

    async def queryChatGPT(self, payload: dict) -> dict:
        resp = await self.session.post(url=self.api_endpoint, json=payload, timeout=300)
        status_code = resp.status
        if not status_code == 200:
            logger.warning(str(resp.reason))
            raise Exception(str(resp.reason))
        return await resp.json()


async def test_chatgpt():
    session = aiohttp.ClientSession()
    gptbot = GPTBOT(api_endpoint="http://localhost:3000/conversation", session=session)
    payload = {}
    while True:
        prompt = input("Bob: ")
        payload["message"] = prompt
        payload.update(
            {
                "clientOptions": {
                    "clientToUse": "chatgpt",
                },
            },
        )
        resp = await gptbot.queryChatGPT(payload)
        content = resp["response"]
        payload["conversationId"] = resp["conversationId"]
        payload["parentMessageId"] = resp["messageId"]
        print("GPT: " + content)


async def test_bing():
    session = aiohttp.ClientSession()
    gptbot = GPTBOT(api_endpoint="http://localhost:3000/conversation", session=session)
    payload = {}
    while True:
        prompt = input("Bob: ")
        payload["message"] = prompt
        payload.update(
            {
                "clientOptions": {
                    "clientToUse": "bing",
                },
            },
        )
        resp = await gptbot.queryBing(payload)
        content = "".join(
            [body["text"] for body in resp["details"]["adaptiveCards"][0]["body"]],
        )
        payload["conversationSignature"] = resp["conversationSignature"]
        payload["conversationId"] = resp["conversationId"]
        payload["clientId"] = resp["clientId"]
        payload["invocationId"] = resp["invocationId"]
        print("Bing: " + content)


# if __name__ == "__main__":
#     asyncio.run(test_chatgpt())
#     asyncio.run(test_bing())
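The deleted askgpt.py wrapped a node-chatgpt-api style endpoint (default http://localhost:3000/conversation) in two identical aiohttp POST helpers plus interactive test loops. For reference only, the same request pattern written with an async context manager and explicit error handling could look like the sketch below; this is an illustration of the pattern, not the code that replaces the module in this commit:

import aiohttp

async def post_json(session: aiohttp.ClientSession, url: str, payload: dict) -> dict:
    # Roughly the behaviour of the removed queryBing/queryChatGPT helpers:
    # POST JSON, fail loudly on a non-200 status, return the parsed JSON body.
    async with session.post(url, json=payload, timeout=aiohttp.ClientTimeout(total=300)) as resp:
        if resp.status != 200:
            raise RuntimeError(f"{url} returned {resp.status}: {resp.reason}")
        return await resp.json()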


@ -33,17 +33,17 @@ async def main():
            openai_api_key=config.get("openai_api_key"),
            gpt_api_endpoint=config.get("gpt_api_endpoint"),
            gpt_model=config.get("gpt_model"),
-            max_tokens=int(config.get("max_tokens")),
-            top_p=float(config.get("top_p")),
-            presence_penalty=float(config.get("presence_penalty")),
-            frequency_penalty=float(config.get("frequency_penalty")),
-            reply_count=int(config.get("reply_count")),
+            max_tokens=config.get("max_tokens"),
+            top_p=config.get("top_p"),
+            presence_penalty=config.get("presence_penalty"),
+            frequency_penalty=config.get("frequency_penalty"),
+            reply_count=config.get("reply_count"),
            system_prompt=config.get("system_prompt"),
-            temperature=float(config.get("temperature")),
+            temperature=config.get("temperature"),
            lc_admin=config.get("lc_admin"),
            image_generation_endpoint=config.get("image_generation_endpoint"),
            image_generation_backend=config.get("image_generation_backend"),
-            timeout=float(config.get("timeout")),
+            timeout=config.get("timeout"),
        )
        if (
            config.get("import_keys_path")
@ -63,17 +63,17 @@ async def main():
            openai_api_key=os.environ.get("OPENAI_API_KEY"),
            gpt_api_endpoint=os.environ.get("GPT_API_ENDPOINT"),
            gpt_model=os.environ.get("GPT_MODEL"),
-            max_tokens=int(os.environ.get("MAX_TOKENS")),
-            top_p=float(os.environ.get("TOP_P")),
-            presence_penalty=float(os.environ.get("PRESENCE_PENALTY")),
-            frequency_penalty=float(os.environ.get("FREQUENCY_PENALTY")),
+            max_tokens=os.environ.get("MAX_TOKENS"),
+            top_p=os.environ.get("TOP_P"),
+            presence_penalty=os.environ.get("PRESENCE_PENALTY"),
+            frequency_penalty=os.environ.get("FREQUENCY_PENALTY"),
            reply_count=int(os.environ.get("REPLY_COUNT")),
            system_prompt=os.environ.get("SYSTEM_PROMPT"),
-            temperature=float(os.environ.get("TEMPERATURE")),
-            lc_admin=list(filter(None, os.environ.get("LC_ADMIN").split(","))),
+            temperature=os.environ.get("TEMPERATURE"),
+            lc_admin=os.environ.get("LC_ADMIN"),
            image_generation_endpoint=os.environ.get("IMAGE_GENERATION_ENDPOINT"),
            image_generation_backend=os.environ.get("IMAGE_GENERATION_BACKEND"),
-            timeout=float(os.environ.get("TIMEOUT")),
+            timeout=os.environ.get("TIMEOUT"),
        )
        if (
            os.environ.get("IMPORT_KEYS_PATH")