Add --listen parameter

oobabooga 2023-01-09 19:05:36 -03:00
parent 86092d1879
commit 6c178b1c91
2 changed files with 11 additions and 8 deletions

README.md

@@ -84,13 +84,12 @@ Then browse to
 Optionally, you can use the following command-line flags:
-`--model model-name`: Load this model by default.
-`--notebook`: Launch the webui in notebook mode, where the output is written to the same text box as the input.
-`--chat`: Launch the webui in chat mode.
-`--cpu`: Use the CPU to generate text instead of the GPU.
+-h, --help show this help message and exit
+--model MODEL Name of the model to load by default.
+--notebook Launch the webui in notebook mode, where the output is written to the same text box as the input.
+--chat Launch the webui in chat mode.
+--cpu Use the CPU to generate text.
+--listen Makes the webui reachable from your local network.
 ## Presets

server.py

@@ -17,6 +17,7 @@ parser.add_argument('--model', type=str, help='Name of the model to load by defa
 parser.add_argument('--notebook', action='store_true', help='Launch the webui in notebook mode, where the output is written to the same text box as the input.')
 parser.add_argument('--chat', action='store_true', help='Launch the webui in chat mode.')
 parser.add_argument('--cpu', action='store_true', help='Use the CPU to generate text.')
+parser.add_argument('--listen', action='store_true', help='Makes the webui reachable from your local network.')
 args = parser.parse_args()
 loaded_preset = None
 available_models = sorted(set(map(lambda x : str(x.name).replace('.pt', ''), list(Path('models/').glob('*'))+list(Path('torch-dumps/').glob('*')))))
@@ -232,4 +233,7 @@ else:
 btn.click(generate_reply, [textbox, temp_slider, length_slider, preset_menu, model_menu], [output_textbox, markdown, html], show_progress=True)
 textbox.submit(generate_reply, [textbox, temp_slider, length_slider, preset_menu, model_menu], [output_textbox, markdown, html], show_progress=True)
-interface.launch(share=False, server_name="0.0.0.0")
+if args.listen:
+    interface.launch(share=False, server_name="0.0.0.0")
+else:
+    interface.launch(share=False)
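
For readers who want the mechanism in isolation, the following is a minimal sketch of the pattern this commit introduces: an argparse `--listen` flag that decides whether Gradio binds to all network interfaces or only to localhost. Only `argparse`, `gradio`, and the launch arguments come from the diff; the `echo` demo interface is a placeholder for illustration, not the actual webui code.

```python
# Minimal sketch (not the webui code itself) of the --listen pattern:
# a boolean flag that switches Gradio between binding to all interfaces
# and the default localhost-only binding.
import argparse

import gradio as gr

parser = argparse.ArgumentParser()
parser.add_argument('--listen', action='store_true', help='Makes the webui reachable from your local network.')
args = parser.parse_args()

def echo(text):
    # Placeholder function standing in for the real text-generation logic.
    return text

interface = gr.Interface(fn=echo, inputs='text', outputs='text')

if args.listen:
    # 0.0.0.0 binds every network interface, so other machines on the LAN can connect.
    interface.launch(share=False, server_name="0.0.0.0")
else:
    # Default Gradio behaviour: only reachable from the local machine (127.0.0.1).
    interface.launch(share=False)
```

With this in place, running the script with `--listen` (e.g. `python server.py --listen`, assuming the entry point keeps that name) serves the UI on the machine's LAN address instead of only 127.0.0.1.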