Move some stuff from server.py to modules

oobabooga 2023-02-23 13:42:23 -03:00
parent 1dacd34165
commit e46c43afa6
3 changed files with 24 additions and 25 deletions

modules/models.py

@@ -7,9 +7,12 @@ from pathlib import Path
import modules.shared as shared
import numpy as np
import torch
+import transformers
from transformers import AutoModelForCausalLM
from transformers import AutoTokenizer
+transformers.logging.set_verbosity_error()
local_rank = None
if shared.args.flexgen:

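The logging call that moved into this file only controls how chatty the transformers library is. A minimal sketch of the behaviour it governs, assuming an ordinary model load; the "gpt2" checkpoint name is a placeholder and not something this commit touches:

# Illustrative only: silencing the transformers logger before loading a model
# hides the library's info/warning output (e.g. weight-initialization notices).
import transformers
from transformers import AutoModelForCausalLM, AutoTokenizer

transformers.logging.set_verbosity_error()  # from here on, only errors are logged

tokenizer = AutoTokenizer.from_pretrained("gpt2")    # placeholder checkpoint
model = AutoModelForCausalLM.from_pretrained("gpt2")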
modules/shared.py

@ -6,7 +6,27 @@ model_name = ""
soft_prompt_tensor = None soft_prompt_tensor = None
soft_prompt = False soft_prompt = False
stop_everything = False stop_everything = False
settings = {}
settings = {
'max_new_tokens': 200,
'max_new_tokens_min': 1,
'max_new_tokens_max': 2000,
'preset': 'NovelAI-Sphinx Moth',
'name1': 'Person 1',
'name2': 'Person 2',
'context': 'This is a conversation between two people.',
'prompt': 'Common sense questions and answers\n\nQuestion: \nFactual answer:',
'prompt_gpt4chan': '-----\n--- 865467536\nInput text\n--- 865467537\n',
'stop_at_newline': True,
'chat_prompt_size': 2048,
'chat_prompt_size_min': 0,
'chat_prompt_size_max': 2048,
'preset_pygmalion': 'Pygmalion',
'name1_pygmalion': 'You',
'name2_pygmalion': 'Kawaii',
'context_pygmalion': "Kawaii's persona: Kawaii is a cheerful person who loves to make others smile. She is an optimist who loves to spread happiness and positivity wherever she goes.\n<START>",
'stop_at_newline_pygmalion': False,
}
parser = argparse.ArgumentParser(formatter_class=lambda prog: argparse.HelpFormatter(prog,max_help_position=54)) parser = argparse.ArgumentParser(formatter_class=lambda prog: argparse.HelpFormatter(prog,max_help_position=54))
parser.add_argument('--model', type=str, help='Name of the model to load by default.') parser.add_argument('--model', type=str, help='Name of the model to load by default.')

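With the defaults now defined at module level here, any other file can read them through the same import that models.py already uses. A small sketch, assuming the repository layout shown in this diff:

# Sketch: reading the shared defaults from another module in the same repo.
import modules.shared as shared

print(shared.settings['max_new_tokens'])   # 200
print(shared.settings['preset'])           # 'NovelAI-Sphinx Moth'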
server.py

@@ -9,7 +9,6 @@ from pathlib import Path
import gradio as gr
import torch
-import transformers
import modules.chat as chat
import modules.extensions as extensions_module
@@ -23,32 +22,9 @@ from modules.models import load_model
from modules.models import load_soft_prompt
from modules.text_generation import generate_reply
-transformers.logging.set_verbosity_error()
if (shared.args.chat or shared.args.cai_chat) and not shared.args.no_stream:
    print("Warning: chat mode currently becomes somewhat slower with text streaming on.\nConsider starting the web UI with the --no-stream option.\n")
-shared.settings = {
-    'max_new_tokens': 200,
-    'max_new_tokens_min': 1,
-    'max_new_tokens_max': 2000,
-    'preset': 'NovelAI-Sphinx Moth',
-    'name1': 'Person 1',
-    'name2': 'Person 2',
-    'context': 'This is a conversation between two people.',
-    'prompt': 'Common sense questions and answers\n\nQuestion: \nFactual answer:',
-    'prompt_gpt4chan': '-----\n--- 865467536\nInput text\n--- 865467537\n',
-    'stop_at_newline': True,
-    'chat_prompt_size': 2048,
-    'chat_prompt_size_min': 0,
-    'chat_prompt_size_max': 2048,
-    'preset_pygmalion': 'Pygmalion',
-    'name1_pygmalion': 'You',
-    'name2_pygmalion': 'Kawaii',
-    'context_pygmalion': "Kawaii's persona: Kawaii is a cheerful person who loves to make others smile. She is an optimist who loves to spread happiness and positivity wherever she goes.\n<START>",
-    'stop_at_newline_pygmalion': False,
-}
if shared.args.settings is not None and Path(shared.args.settings).exists():
    new_settings = json.loads(open(Path(shared.args.settings), 'r').read())
    for item in new_settings:
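The last visible lines of server.py read a JSON file passed via --settings and iterate over its keys; the loop body is cut off by the hunk, so the merge step below is an assumption, and the file name is made up for illustration:

# Sketch of the settings-override path, assuming the loop assigns each key
# into shared.settings (the loop body is not shown in the diff).
import json
from pathlib import Path

import modules.shared as shared

settings_path = Path('user-settings.json')  # hypothetical file passed via --settings
if settings_path.exists():
    new_settings = json.loads(settings_path.read_text())
    for item in new_settings:
        shared.settings[item] = new_settings[item]  # assumed merge behaviour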