Fix CPU layers not displaying correctly when using --layers

This commit is contained in:
Gnome Ann 2021-10-05 11:29:47 -04:00
parent 91352ea9f1
commit aa59f8b4b2
1 changed file with 5 additions and 2 deletions

View File

@ -207,13 +207,16 @@ def device_config(model):
if(args.layers is not None): if(args.layers is not None):
try: try:
breakmodel.gpu_blocks = list(map(int, args.layers.split(','))) breakmodel.gpu_blocks = list(map(int, args.layers.split(',')))
assert len(gpu_blocks) <= torch.cuda.device_count() assert len(breakmodel.gpu_blocks) <= torch.cuda.device_count()
assert sum(gpu_blocks) <= n_layers assert sum(breakmodel.gpu_blocks) <= n_layers
n_layers -= sum(breakmodel.gpu_blocks)
except: except:
print("WARNING: --layers is malformatted. Please use the --help option to see correct usage of --layers. Defaulting to all layers on device 0.", file=sys.stderr) print("WARNING: --layers is malformatted. Please use the --help option to see correct usage of --layers. Defaulting to all layers on device 0.", file=sys.stderr)
breakmodel.gpu_blocks = [n_layers] breakmodel.gpu_blocks = [n_layers]
n_layers = 0
elif(args.breakmodel_layers is not None): elif(args.breakmodel_layers is not None):
breakmodel.gpu_blocks = [n_layers - max(0, min(n_layers, args.breakmodel_layers))] breakmodel.gpu_blocks = [n_layers - max(0, min(n_layers, args.breakmodel_layers))]
n_layers -= sum(breakmodel.gpu_blocks)
else: else:
device_count = torch.cuda.device_count() device_count = torch.cuda.device_count()
if(device_count > 1): if(device_count > 1):