Mirror of https://github.com/oobabooga/text-generation-webui.git, synced 2024-10-01 01:26:03 -04:00
Conversion seems to work better this way
commit 193fb1660a
parent eef6fc3cbf
@@ -45,9 +45,9 @@ if __name__ == '__main__':
     model_name = path.name
 
     print(f"Loading {model_name}...")
-    disable_torch_init()
-    model = AutoModelForCausalLM.from_pretrained(path, torch_dtype=torch.float16, _fast_init=True)
-    restore_torch_init()
+    #disable_torch_init()
+    model = AutoModelForCausalLM.from_pretrained(path, torch_dtype=torch.float16, low_cpu_mem_usage=True)
+    #restore_torch_init()
 
     tokenizer = AutoTokenizer.from_pretrained(path)
 
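For context, below is a minimal sketch of how the changed lines might sit in the conversion script. Only the from_pretrained and tokenizer calls come from the diff above; the input path, argument handling, and save step are assumptions added so the sketch is self-contained, not the repository's actual code. The idea of the change is that low_cpu_mem_usage=True is the public transformers option for loading weights straight from the checkpoint without first materializing a randomly initialized model, which is roughly what the now-commented disable_torch_init()/restore_torch_init() pair and the private _fast_init flag were approximating.

from pathlib import Path

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

if __name__ == '__main__':
    # Hypothetical input location; the real script reads this from its CLI arguments.
    path = Path("models/some-model")
    model_name = path.name

    print(f"Loading {model_name}...")
    # low_cpu_mem_usage=True fills the model directly from the checkpoint instead of
    # allocating and randomly initializing all weights first, lowering peak RAM use.
    model = AutoModelForCausalLM.from_pretrained(
        path, torch_dtype=torch.float16, low_cpu_mem_usage=True
    )

    tokenizer = AutoTokenizer.from_pretrained(path)

    # Hypothetical save step, included only to make the sketch complete.
    out_dir = Path("torch-dumps") / model_name
    model.save_pretrained(out_dir)
    tokenizer.save_pretrained(out_dir)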