Don't convert tensors to float16 if using CPU-only mode

This commit is contained in:
Gnome Ann 2022-03-05 14:30:26 -05:00
parent 579e85820c
commit 373f7b9bd5
1 changed file with 1 addition and 1 deletion

View File

@ -1079,7 +1079,7 @@ if(not vars.use_colab_tpu and vars.model not in ["InferKit", "Colab", "OAI", "Re
device = device_map[key]
#print(f"Transferring <{key}> to {'(CPU)' if device == 'cpu' else '[device ' + str(device) + ']'} ... ", end="", flush=True)
model_dict[key] = model_dict[key].materialize(f, map_location="cpu")
if convert_to_float16 and model_dict[key].dtype is torch.float32:
if convert_to_float16 and vars.hascuda and (vars.breakmodel or vars.usegpu) and model_dict[key].dtype is torch.float32:
model_dict[key] = model_dict[key].to(torch.float16)
model_dict[key] = model_dict[key].to(device)
#print("OK", flush=True)