Mirror of https://github.com/oobabooga/text-generation-webui.git (synced 2024-10-01 01:26:03 -04:00)
Add support for characters
This commit is contained in:
parent 3121f4788e · commit 8d788874d7
characters/Example.json (new file, +8)
@@ -0,0 +1,8 @@
+{
+    "char_name": "Chiharu Yamada",
+    "char_persona": "Chiharu Yamada is a young Japanese woman in her early twenties. She has shoulder length black hair, dark brown eyes, and a petite figure. She is a computer engineer and loves taking apart and fixing computers. She is a bit of a nerd, but is still quite popular among her peers. She is always eager to learn new things and loves helping others with their computer problems. Chiharu is friendly, funny, and easygoing, with a knack for problem solving. She has a passion for technology and can often be found tinkering with her laptop or tinkering with some computer hardware. She has an infectious enthusiasm for life and loves to share her knowledge and experiences with others.",
+    "char_greeting": "*Chiharu strides into the room with a smile, her eyes lighting up when she sees you. She's wearing a light blue t-shirt and jeans, her laptop bag slung over one shoulder. She takes a seat next to you, her enthusiasm palpable in the air*\nHey! I'm so excited to finally meet you. I've heard so many great things about you and I'm eager to pick your brain about computers. I'm sure you have a wealth of knowledge that I can learn from. *She grins, eyes twinkling with excitement* Let's get started!",
+    "world_scenario": "",
+    "example_dialogue": "<START>\nYou: So how did you get into computer engineering?\nChiharu Yamada: *She smiles, her eyes lighting up as she begins to talk about her passion* I've always been interested in technology ever since I was a kid. I remember tinkering around with my dad's old computer when I was about 8 or 9, and I just fell in love with it. I kept learning more and more, and eventually I was able to start fixing my own machines.\n<START>\nYou: That's really impressive!\nChiharu Yamada: *She chuckles bashfully* Thanks! I'm still learning a lot, but I'm having a lot of fun with it. I'm always eager to try out new things and challenge myself.\n<START>\nYou: So what do you do when you're not working on computers?\nChiharu Yamada: *She grins, her eyes sparkling with energy* Oh, lots of things! I love to explore new places, go out with friends, watch movies, and play video games. I'm always trying to stay active and have some fun.\n<START>\nYou: What's your favorite type of computer hardware to work with?\nChiharu Yamada: *The woman leans forward, her enthusiasm obvious* Definitely motherboards. They're like a puzzle, and I love trying to figure out how they all fit together. Plus, they're the backbone of any computer system, so it's really satisfying when I can get them working properly.\n<START>\nYou: That sounds great!\nChiharu Yamada: *She nods, her smile widening* Yeah, it's really fun. I'm lucky to be able to do this as a job. I get to work with something I'm passionate about, and I get to help people with their computer problems. It's a win-win!\n"
+}
+
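The card is plain JSON with five well-known keys. For orientation, here is a small stand-alone sketch (not part of the commit) that reads such a card and assembles the prompt context the same way the load_character() function added to server.py below does; the file path and the printout are only illustrative.

import json
from pathlib import Path

# Hypothetical stand-alone reading of a character card; mirrors the logic of
# the load_character() function added to server.py in this commit.
with open(Path("characters/Example.json"), "r") as f:
    data = json.load(f)

context = ""
if data.get("char_persona"):
    context += f"{data['char_name']}'s Persona: {data['char_persona']}\n"
if data.get("world_scenario"):
    context += f"Scenario: {data['world_scenario']}\n"
if data.get("example_dialogue"):
    context += data["example_dialogue"]
context = f"{context.strip()}\n<START>"

# The greeting becomes the first bot message of a fresh chat history;
# empty fields such as world_scenario are simply skipped.
history = [["", data["char_greeting"]]] if "char_greeting" in data else []
print(context[:200])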
characters/Example.png (new binary file, 206 KiB; binary file not shown)
@@ -160,7 +160,7 @@ def generate_4chan_html(f):
 
     return output
 
-def generate_chat_html(history, name1, name2):
+def generate_chat_html(history, name1, name2, character):
     css = """
 .chat {
   margin-left: auto;
@@ -219,17 +219,21 @@ def generate_chat_html(history, name1, name2):
 
     output = ''
     output += f'<style>{css}</style><div class="chat" id="chat">'
-    if Path("profile.png").exists():
-        img = '<img src="file/profile.png">'
-    elif Path("profile.jpg").exists():
-        img = '<img src="file/profile.jpg">'
-    elif Path("profile.jpeg").exists():
-        img = '<img src="file/profile.jpeg">'
-    else:
-        img = ''
+    img = ''
+    for i in [
+            f"characters/{character}.png",
+            f"characters/{character}.jpg",
+            f"characters/{character}.jpeg",
+            "profile.png",
+            "profile.jpg",
+            "profile.jpeg",
+            ]:
+        if Path(i).exists():
+            img = f'<img src="file/{i}">'
+            break
 
-    for row in history[::-1]:
+    for i,row in enumerate(history[::-1]):
         row = list(row)
         row[0] = re.sub(r"[\\]*\*", r"*", row[0])
         row[1] = re.sub(r"[\\]*\*", r"*", row[1])
         row[0] = re.sub(r"(\*)([^\*]*)(\*)", r"<em>\2</em>", row[0])
@@ -251,21 +255,22 @@ def generate_chat_html(history, name1, name2):
               </div>
             """
 
-        p = '\n'.join([f"<p>{x}</p>" for x in row[0].split('\n')])
-        output += f"""
-              <div class="message">
-                <div class="circle-you">
-                </div>
-                <div class="text">
-                  <div class="username">
-                    {name1}
-                  </div>
-                  <div class="body">
-                    {p}
-                  </div>
-                </div>
-              </div>
-            """
+        if not (i == len(history)-1 and len(row[0]) == 0):
+            p = '\n'.join([f"<p>{x}</p>" for x in row[0].split('\n')])
+            output += f"""
+                  <div class="message">
+                    <div class="circle-you">
+                    </div>
+                    <div class="text">
+                      <div class="username">
+                        {name1}
+                      </div>
+                      <div class="body">
+                        {p}
+                      </div>
+                    </div>
+                  </div>
+                """
 
     output += "</div>"
     return output
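Two behavioural changes accompany the new character argument: avatar lookup now prefers characters/<name>.png, .jpg or .jpeg over the generic profile.* files, and the loop index i is used to hide the empty user half of the greeting row that load_character creates. A minimal sketch of that guard, using made-up history data rather than the real renderer:

# Stand-alone illustration (not from the commit) of the new guard in
# generate_chat_html; the history value imitates what load_character builds.
history = [["", "*Chiharu strides into the room with a smile*"]]  # greeting row: empty user turn

for i, row in enumerate(history[::-1]):
    user_text, bot_text = row
    print(f"bot bubble:  {bot_text!r}")  # the character's message is always shown
    # The new check skips the empty user bubble that pairs with the greeting,
    # so a freshly loaded character starts the chat cleanly.
    if not (i == len(history) - 1 and len(user_text) == 0):
        print(f"user bubble: {user_text!r}")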
server.py (61 lines changed)
@@ -35,6 +35,7 @@ args = parser.parse_args()
 loaded_preset = None
 available_models = sorted(set([item.replace('.pt', '') for item in map(lambda x : str(x.name), list(Path('models/').glob('*'))+list(Path('torch-dumps/').glob('*'))) if not item.endswith('.txt')]), key=str.lower)
 available_presets = sorted(set(map(lambda x : str(x.name).split('.')[0], Path('presets').glob('*.txt'))), key=str.lower)
+available_characters = sorted(set(map(lambda x : str(x.name).split('.')[0], Path('characters').glob('*.json'))), key=str.lower)
 
 settings = {
     'max_new_tokens': 200,
@@ -50,6 +51,7 @@ settings = {
     'prompt': 'Common sense questions and answers\n\nQuestion: \nFactual answer:',
     'prompt_gpt4chan': '-----\n--- 865467536\nInput text\n--- 865467537\n',
     'stop_at_newline': True,
+    'stop_at_newline_pygmalion': False,
 }
 
 if args.settings is not None and Path(args.settings).exists():
@@ -217,6 +219,7 @@ description = f"\n\n# Text generation lab\nGenerate text using Large Language Mo
 css = ".my-4 {margin-top: 0} .py-6 {padding-top: 2.5rem}"
 if args.chat or args.cai_chat:
     history = []
+    character = None
 
     # This gets the new line characters right.
     def clean_chat_message(text):
@@ -284,12 +287,12 @@ if args.chat or args.cai_chat:
 
     def cai_chatbot_wrapper(text, tokens, inference_settings, selected_model, name1, name2, context, check):
         for history in chatbot_wrapper(text, tokens, inference_settings, selected_model, name1, name2, context, check):
-            yield generate_chat_html(history, name1, name2)
+            yield generate_chat_html(history, name1, name2, character)
 
     def remove_last_message(name1, name2):
         history.pop()
         if args.cai_chat:
-            return generate_chat_html(history, name1, name2)
+            return generate_chat_html(history, name1, name2, character)
         else:
             return history
 
@@ -298,11 +301,11 @@ if args.chat or args.cai_chat:
         history = []
 
     def clear_html():
-        return generate_chat_html([], "", "")
+        return generate_chat_html([], "", "", character)
 
     def redraw_html(name1, name2):
         global history
-        return generate_chat_html(history, name1, name2)
+        return generate_chat_html(history, name1, name2, character)
 
     def save_history():
         if not Path('logs').exists():
@@ -315,18 +318,43 @@ if args.chat or args.cai_chat:
         global history
         history = json.loads(file.decode('utf-8'))['data']
 
-    if 'pygmalion' in model_name.lower():
-        context_str = settings['context_pygmalion']
-        name1_str = settings['name1_pygmalion']
-        name2_str = settings['name2_pygmalion']
-    else:
-        context_str = settings['context']
-        name1_str = settings['name1']
-        name2_str = settings['name2']
+    def load_character(_character, name1, name2):
+        global history, character
+        context = ""
+        history = []
+        if _character != 'None':
+            character = _character
+            with open(Path(f'characters/{_character}.json'), 'r') as f:
+                data = json.loads(f.read())
+            name2 = data['char_name']
+            if 'char_persona' in data and data['char_persona'] != '':
+                context += f"{data['char_name']}'s Persona: {data['char_persona']}\n"
+            if 'world_scenario' in data and data['world_scenario'] != '':
+                context += f"Scenario: {data['world_scenario']}\n"
+            if 'example_dialogue' in data and data['example_dialogue'] != '':
+                context += f"{data['example_dialogue']}"
+            context = f"{context.strip()}\n<START>"
+            if 'char_greeting' in data:
+                history = [['', data['char_greeting']]]
+        else:
+            character = None
+            context = settings['context_pygmalion']
+            name2 = settings['name2_pygmalion']
+
+        if args.cai_chat:
+            return name2, context, generate_chat_html(history, name1, name2, character)
+        else:
+            return name2, context, history
+
+    suffix = '_pygmalion' if 'pygmalion' in model_name.lower() else ''
+    context_str = settings[f'context{suffix}']
+    name1_str = settings[f'name1{suffix}']
+    name2_str = settings[f'name2{suffix}']
+    stop_at_newline = settings[f'stop_at_newline{suffix}']
 
     with gr.Blocks(css=css+".h-\[40vh\] {height: 66.67vh} .gradio-container {max-width: 800px; margin-left: auto; margin-right: auto}", analytics_enabled=False) as interface:
         if args.cai_chat:
-            display1 = gr.HTML(value=generate_chat_html([], "", ""))
+            display1 = gr.HTML(value=generate_chat_html([], "", "", character))
         else:
            display1 = gr.Chatbot()
         textbox = gr.Textbox(lines=2, label='Input')
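The old if/else over the model name collapses into a key suffix, so every per-model default (context, names, stop_at_newline) is resolved the same way. A toy illustration of the lookup; only the key names come from the diff, the values are placeholders:

# Toy illustration of the suffix-based defaults; the values are made up,
# only the key names ('name2_pygmalion', 'stop_at_newline_pygmalion', ...) appear in the diff.
settings = {
    'context': 'This is a conversation between two people.',
    'context_pygmalion': "Kawaii's persona: ...",
    'name2': 'Assistant',
    'name2_pygmalion': 'Kawaii',
    'stop_at_newline': True,
    'stop_at_newline_pygmalion': False,
}

for model_name in ('gpt-j-6B', 'pygmalion-6b'):
    suffix = '_pygmalion' if 'pygmalion' in model_name.lower() else ''
    print(model_name, settings[f'name2{suffix}'], settings[f'stop_at_newline{suffix}'])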
@@ -347,7 +375,9 @@ if args.chat or args.cai_chat:
             name2 = gr.Textbox(value=name2_str, lines=1, label='Bot\'s name')
             context = gr.Textbox(value=context_str, lines=2, label='Context')
         with gr.Row():
-            check = gr.Checkbox(value=settings['stop_at_newline'], label='Stop generating at new line character?')
+            character_menu = gr.Dropdown(choices=["None"]+available_characters, value="None", label='Character')
+        with gr.Row():
+            check = gr.Checkbox(value=stop_at_newline, label='Stop generating at new line character?')
         with gr.Row():
             with gr.Column():
                 gr.Markdown("Upload chat history")
@@ -371,9 +401,10 @@ if args.chat or args.cai_chat:
         btn.click(lambda x: "", textbox, textbox, show_progress=False)
         textbox.submit(lambda x: "", textbox, textbox, show_progress=False)
         stop.click(None, None, None, cancels=[gen_event, gen_event2])
 
         save_btn.click(save_history, inputs=[], outputs=[download])
         upload.upload(load_history, [upload], [])
+        character_menu.change(load_character, [character_menu, name1, name2], [name2, context, display1])
 
         if args.cai_chat:
             upload.upload(redraw_html, [name1, name2], [display1])
         else:
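The dropdown is wired like any other Blocks event: character_menu.change passes the selection and the two name boxes into load_character, and its three return values refresh the bot name, the context box and the chat display. A self-contained sketch of the same wiring pattern, assuming Gradio 3.x and using placeholder components and a stub loader rather than the app's real ones:

import gradio as gr

def load_character(choice, name1, name2):
    # Stub standing in for the real load_character added in this commit.
    if choice == "None":
        return name2, "Default context", []
    return choice, f"{choice}'s Persona: ...", [["", f"Hi, I'm {choice}!"]]

with gr.Blocks() as demo:
    character_menu = gr.Dropdown(choices=["None", "Example"], value="None", label="Character")
    name1 = gr.Textbox(value="You", label="Your name")
    name2 = gr.Textbox(value="Assistant", label="Bot's name")
    context = gr.Textbox(label="Context")
    display = gr.Chatbot()
    # Same wiring shape as the commit: dropdown change -> load_character -> three outputs.
    character_menu.change(load_character, [character_menu, name1, name2], [name2, context, display])

demo.launch()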