Fix Breakmodel RAM Regression

This commit is contained in:
henk717 2022-03-06 08:26:50 +01:00
parent 105d3831b5
commit daea4b8d15
3 changed files with 1 addition and 3 deletions

View File

@ -1081,7 +1081,7 @@ if(not vars.use_colab_tpu and vars.model not in ["InferKit", "Colab", "OAI", "Re
model_dict[key] = model_dict[key].materialize(f, map_location="cpu")
if convert_to_float16 and vars.hascuda and (vars.breakmodel or vars.usegpu) and model_dict[key].dtype is torch.float32:
model_dict[key] = model_dict[key].to(torch.float16)
if not vars.usegpu and model_dict[key].dtype is torch.float16:
if not vars.usegpu and not vars.breakmodel and model_dict[key].dtype is torch.float16:
model_dict[key] = model_dict[key].to(torch.float32)
model_dict[key] = model_dict[key].to(device)
#print("OK", flush=True)

View File

@ -1 +0,0 @@
play --lowmem %*

View File

@ -1 +0,0 @@
play --remote --lowmem %*