From 0f1627eff1709b1efc70bccd088926ba9afaa277 Mon Sep 17 00:00:00 2001
From: oobabooga <112222186+oobabooga@users.noreply.github.com>
Date: Mon, 10 Apr 2023 15:48:07 -0300
Subject: [PATCH] Don't treat Intruct mode histories as regular histories

* They must now be saved/loaded manually
* Also improved browser caching of pfps
* Also changed the global default preset
---
 modules/chat.py           | 46 ++++++++++++++++++++++++---------------
 modules/html_generator.py |  7 +++---
 modules/shared.py         |  2 +-
 server.py                 | 17 ++++++++------
 settings-template.json    |  2 +-
 5 files changed, 42 insertions(+), 32 deletions(-)

diff --git a/modules/chat.py b/modules/chat.py
index df39a58b..5b7ad66a 100644
--- a/modules/chat.py
+++ b/modules/chat.py
@@ -286,7 +286,7 @@ def clear_chat_log(name1, name2, greeting, mode):
         shared.history['visible'] += [['', apply_extensions(greeting, "output")]]
 
     # Save cleared logs
-    save_history(timestamp=False)
+    save_history(mode)
 
     return chat_html_wrapper(shared.history['visible'], name1, name2, mode)
 
@@ -332,15 +332,23 @@ def tokenize_dialogue(dialogue, name1, name2, mode):
     return history
 
 
-def save_history(timestamp=True):
-    if timestamp:
-        fname = f"{shared.character}_{datetime.now().strftime('%Y%m%d-%H%M%S')}.json"
+def save_history(mode, timestamp=False):
+    # Instruct mode histories should not be saved as if
+    # Alpaca or Vicuna were characters
+    if mode == 'instruct':
+        if not timestamp:
+            return
+        fname = f"Instruct_{datetime.now().strftime('%Y%m%d-%H%M%S')}.json"
     else:
-        fname = f"{shared.character}_persistent.json"
+        if timestamp:
+            fname = f"{shared.character}_{datetime.now().strftime('%Y%m%d-%H%M%S')}.json"
+        else:
+            fname = f"{shared.character}_persistent.json"
     if not Path('logs').exists():
         Path('logs').mkdir()
     with open(Path(f'logs/{fname}'), 'w', encoding='utf-8') as f:
         f.write(json.dumps({'data': shared.history['internal'], 'data_visible': shared.history['visible']}, indent=2))
+
     return Path(f'logs/{fname}')
 
 
@@ -389,8 +397,6 @@ def generate_pfp_cache(character):
 
 def load_character(character, name1, name2, mode):
     shared.character = character
-    shared.history['internal'] = []
-    shared.history['visible'] = []
     context = greeting = end_of_turn = ""
     greeting_field = 'greeting'
     picture = None
@@ -435,18 +441,22 @@ def load_character(character, name1, name2, mode):
         greeting = shared.settings['greeting']
         end_of_turn = shared.settings['end_of_turn']
 
-    if Path(f'logs/{shared.character}_persistent.json').exists():
-        load_history(open(Path(f'logs/{shared.character}_persistent.json'), 'rb').read(), name1, name2)
-    else:
-        # Insert greeting if it exists
-        if greeting != "":
-            shared.history['internal'] += [['<|BEGIN-VISIBLE-CHAT|>', greeting]]
-            shared.history['visible'] += [['', apply_extensions(greeting, "output")]]
-
-        # Create .json log files since they don't already exist
-        save_history(timestamp=False)
+    if mode != 'instruct':
+        shared.history['internal'] = []
+        shared.history['visible'] = []
 
-    return name1, name2, picture, greeting, context, end_of_turn, chat_html_wrapper(shared.history['visible'], name1, name2, mode, reset_cache=True)
+        if Path(f'logs/{shared.character}_persistent.json').exists():
+            load_history(open(Path(f'logs/{shared.character}_persistent.json'), 'rb').read(), name1, name2)
+        else:
+            # Insert greeting if it exists
+            if greeting != "":
+                shared.history['internal'] += [['<|BEGIN-VISIBLE-CHAT|>', greeting]]
+                shared.history['visible'] += [['', apply_extensions(greeting, "output")]]
+
+            # Create .json log files since they don't already exist
+            save_history(mode)
+
+    return name1, name2, picture, greeting, context, end_of_turn, chat_html_wrapper(shared.history['visible'], name1, name2, mode)
 
 
 def load_default_history(name1, name2):
diff --git a/modules/html_generator.py b/modules/html_generator.py
index 6e20566c..7a1f2825 100644
--- a/modules/html_generator.py
+++ b/modules/html_generator.py
@@ -164,10 +164,9 @@ def generate_instruct_html(history):
 def generate_cai_chat_html(history, name1, name2, reset_cache=False):
     output = f''
 
-    # The time.time() is to prevent the brower from caching the image
-    suffix = f"?{time.time()}" if reset_cache else f"?{name2}"
-    img_bot = f'<img src="file/cache/pfp_character.png{suffix}">' if Path("cache/pfp_character.png").exists() else ''
-    img_me = f'<img src="file/cache/pfp_me.png{suffix}">' if Path("cache/pfp_me.png").exists() else ''
+    # We use ?name2 and ?time.time() to force the browser to reset caches
+    img_bot = f'<img src="file/cache/pfp_character.png?{name2}">' if Path("cache/pfp_character.png").exists() else ''
+    img_me = f'<img src="file/cache/pfp_me.png?{time.time()}">' if Path("cache/pfp_me.png").exists() else ''
 
     for i, _row in enumerate(history[::-1]):
         row = [convert_to_markdown(entry) for entry in _row]
diff --git a/modules/shared.py b/modules/shared.py
index 663ed498..e26489ee 100644
--- a/modules/shared.py
+++ b/modules/shared.py
@@ -44,7 +44,7 @@ settings = {
     'default_extensions': [],
     'chat_default_extensions': ["gallery"],
     'presets': {
-        'default': 'NovelAI-Sphinx Moth',
+        'default': 'Default',
         '.*(alpaca|llama)': "LLaMA-Precise",
         '.*pygmalion': 'NovelAI-Storywriter',
         '.*RWKV': 'Naive',
diff --git a/server.py b/server.py
index dc5b1c35..72c58d29 100644
--- a/server.py
+++ b/server.py
@@ -384,7 +384,7 @@ def create_interface():
                     shared.gradio['Clear history-cancel'] = gr.Button('Cancel', visible=False)
 
                 shared.gradio["Chat mode"] = gr.Radio(choices=["cai-chat", "chat", "instruct"], value="cai-chat", label="Mode")
-                shared.gradio["Instruction templates"] = gr.Dropdown(choices=get_available_instruction_templates(), label="Instruction template", value="None", visible=False)
+                shared.gradio["Instruction templates"] = gr.Dropdown(choices=get_available_instruction_templates(), label="Instruction template", value="None", visible=False, info="Change this according to the model/LoRA that you are using.")
 
             with gr.Tab("Character", elem_id="chat-settings"):
                 with gr.Row():
@@ -445,34 +445,34 @@ def create_interface():
             gen_events.append(shared.gradio['Generate'].click(
                 lambda x: (x, ''), shared.gradio['textbox'], [shared.gradio['Chat input'], shared.gradio['textbox']], show_progress=False).then(
                 chat.cai_chatbot_wrapper, shared.input_params, shared.gradio['display'], show_progress=shared.args.no_stream).then(
-                lambda: chat.save_history(timestamp=False), None, None, show_progress=False)
+                chat.save_history, shared.gradio['Chat mode'], None, show_progress=False)
             )
 
             gen_events.append(shared.gradio['textbox'].submit(
                 lambda x: (x, ''), shared.gradio['textbox'], [shared.gradio['Chat input'], shared.gradio['textbox']], show_progress=False).then(
                 chat.cai_chatbot_wrapper, shared.input_params, shared.gradio['display'], show_progress=shared.args.no_stream).then(
-                lambda: chat.save_history(timestamp=False), None, None, show_progress=False)
+                chat.save_history, shared.gradio['Chat mode'], None, show_progress=False)
             )
 
             gen_events.append(shared.gradio['Regenerate'].click(
                 chat.regenerate_wrapper, shared.input_params, shared.gradio['display'], show_progress=shared.args.no_stream).then(
-                lambda: chat.save_history(timestamp=False), None, None, show_progress=False)
+                chat.save_history, shared.gradio['Chat mode'], None, show_progress=False)
            )
 
             gen_events.append(shared.gradio['Continue'].click(
                 chat.continue_wrapper, shared.input_params, shared.gradio['display'], show_progress=shared.args.no_stream).then(
-                lambda: chat.save_history(timestamp=False), None, None, show_progress=False)
+                chat.save_history, shared.gradio['Chat mode'], None, show_progress=False)
             )
 
             shared.gradio['Replace last reply'].click(
                 chat.replace_last_reply, [shared.gradio[k] for k in ['textbox', 'name1', 'name2', 'Chat mode']], shared.gradio['display'], show_progress=shared.args.no_stream).then(
                 lambda x: '', shared.gradio['textbox'], shared.gradio['textbox'], show_progress=False).then(
-                lambda: chat.save_history(timestamp=False), None, None, show_progress=False)
+                chat.save_history, shared.gradio['Chat mode'], None, show_progress=False)
 
             shared.gradio['Clear history-confirm'].click(
                 lambda: [gr.update(visible=False), gr.update(visible=True), gr.update(visible=False)], None, clear_arr).then(
                 chat.clear_chat_log, [shared.gradio[k] for k in ['name1', 'name2', 'greeting', 'Chat mode']], shared.gradio['display']).then(
-                lambda: chat.save_history(timestamp=False), None, None, show_progress=False)
+                chat.save_history, shared.gradio['Chat mode'], None, show_progress=False)
 
             shared.gradio['Stop'].click(
                 stop_everything_event, None, None, queue=False, cancels=gen_events if shared.args.no_stream else None).then(
@@ -480,6 +480,7 @@ def create_interface():
 
             shared.gradio['Chat mode'].change(
                 lambda x: gr.update(visible=x == 'instruct'), shared.gradio['Chat mode'], shared.gradio['Instruction templates']).then(
+                lambda x: gr.update(interactive=x != 'instruct'), shared.gradio['Chat mode'], shared.gradio['character_menu']).then(
                 chat.redraw_html, reload_inputs, shared.gradio['display'])
 
             shared.gradio['Instruction templates'].change(
@@ -495,7 +496,7 @@ def create_interface():
             shared.gradio['Clear history'].click(lambda: [gr.update(visible=True), gr.update(visible=False), gr.update(visible=True)], None, clear_arr)
             shared.gradio['Clear history-cancel'].click(lambda: [gr.update(visible=False), gr.update(visible=True), gr.update(visible=False)], None, clear_arr)
             shared.gradio['Remove last'].click(chat.remove_last_message, [shared.gradio[k] for k in ['name1', 'name2', 'Chat mode']], [shared.gradio['display'], shared.gradio['textbox']], show_progress=False)
-            shared.gradio['download_button'].click(chat.save_history, inputs=None, outputs=[shared.gradio['download']])
+            shared.gradio['download_button'].click(lambda x: chat.save_history(x, timestamp=True), shared.gradio['Chat mode'], shared.gradio['download'])
             shared.gradio['Upload character'].click(chat.upload_character, [shared.gradio['upload_json'], shared.gradio['upload_img_bot']], [shared.gradio['character_menu']])
             shared.gradio['character_menu'].change(chat.load_character, [shared.gradio[k] for k in ['character_menu', 'name1', 'name2', 'Chat mode']], [shared.gradio[k] for k in ['name1', 'name2', 'character_picture', 'greeting', 'context', 'end_of_turn', 'display']])
             shared.gradio['upload_img_tavern'].upload(chat.upload_tavern_character, [shared.gradio['upload_img_tavern'], shared.gradio['name1'], shared.gradio['name2']], [shared.gradio['character_menu']])
diff --git a/settings-template.json b/settings-template.json
index ea456fd6..80a58538 100644
--- a/settings-template.json
+++ b/settings-template.json
@@ -19,7 +19,7 @@
         "gallery"
     ],
     "presets": {
-        "default": "NovelAI-Sphinx Moth",
+        "default": "Default",
         ".*pygmalion": "NovelAI-Storywriter",
         ".*RWKV": "Naive"
     },
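
Reviewer note (not part of the patch): a minimal sketch of how the reworked chat.save_history() is expected to behave after this change, assuming shared.character and shared.history are already initialized as in the repository:

    from modules import chat

    # Regular chat modes still autosave to the per-character persistent log
    path = chat.save_history('cai-chat')           # writes logs/<character>_persistent.json, returns its Path
    # Instruct mode histories are no longer autosaved as if Alpaca/Vicuna were characters
    chat.save_history('instruct')                  # returns None, nothing is written
    # Manual saves go through the download button, which now passes timestamp=True
    chat.save_history('instruct', timestamp=True)  # writes logs/Instruct_<YYYYmmdd-HHMMSS>.json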