num_layers fixes

As requested by VE_FORBRYDERNE. (I may have applied this in too many places; it needs testing, but since the other version is already broken I am committing it first so I can test more easily.)
This commit is contained in:
henk717 2021-11-24 03:44:11 +01:00
parent d7a2424d2d
commit a2c82bbcc8
1 changed file with 2 additions and 2 deletions

View File

@ -212,7 +212,7 @@ def device_list(n_layers, primary=None, selected=None):
def device_config(model):
global breakmodel, generator
import breakmodel
n_layers = model.config.num_layers or model.config.n_layer
n_layers = model.config.num_layers if hasattr(model.config, "num_layers") else model.config.n_layer
if(args.breakmodel_gpulayers is not None):
try:
breakmodel.gpu_blocks = list(map(int, args.breakmodel_gpulayers.split(',')))
@ -278,7 +278,7 @@ def device_config(model):
# If all layers are on the same device, use the old GPU generation mode
while(len(breakmodel.gpu_blocks) and breakmodel.gpu_blocks[-1] == 0):
breakmodel.gpu_blocks.pop()
if(len(breakmodel.gpu_blocks) and breakmodel.gpu_blocks[-1] in (-1, model.config.num_layers or model.config.n_layer)):
if(len(breakmodel.gpu_blocks) and breakmodel.gpu_blocks[-1] in (-1, model.config.num_layers if hasattr(model.config, "num_layers") else model.config.n_layer)):
vars.breakmodel = False
vars.usegpu = True
model = model.half().to(len(breakmodel.gpu_blocks)-1)