Fix for when breakmodel layers is 0

Gnome Ann 2021-08-28 02:19:51 -04:00
parent 8b175df26e
commit b5c28f4e07
1 changed file with 5 additions and 4 deletions


@@ -291,11 +291,12 @@ def new_forward(
         gc.collect()
         torch.cuda.empty_cache()
-        for param1,param2 in zip(self.h[0].parameters(),self.extrastorage[0].parameters()):
-            param1.data = param2.data.to(gpu_device, non_blocking=False).detach()
-        for param1,param2 in zip(self.h[ram_blocks-1].parameters(),self.extrastorage[ram_blocks-1].parameters()):
-            param1.data = param2.data.to(gpu_device, non_blocking=False).detach()
+        if ram_blocks:
+            for param1,param2 in zip(self.h[0].parameters(),self.extrastorage[0].parameters()):
+                param1.data = param2.data.to(gpu_device, non_blocking=False).detach()
+            for param1,param2 in zip(self.h[ram_blocks-1].parameters(),self.extrastorage[ram_blocks-1].parameters()):
+                param1.data = param2.data.to(gpu_device, non_blocking=False).detach()
         #END MODEL BREAK EDITS
         output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
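
Why the guard matters, as a minimal self-contained sketch (the names "layers" and "ram_blocks" below are hypothetical stand-ins for the self.h layer list and the RAM-resident block count in the diff): when ram_blocks is 0, self.h[ram_blocks-1] evaluates to self.h[-1], the last layer, so the unguarded copy would silently write into the wrong layer's parameters instead of raising an error.

    # Sketch of the off-by-one hazard the commit guards against.
    # "layers" and "ram_blocks" are hypothetical stand-ins for
    # self.h and the number of RAM-resident blocks in the diff above.
    layers = ["block0", "block1", "block2"]
    ram_blocks = 0  # breakmodel configured with zero RAM layers

    # Unguarded: Python's negative indexing turns index -1 into the
    # LAST element, so this reads the wrong block instead of failing.
    print(layers[ram_blocks - 1])      # -> "block2"

    # The committed fix: skip the copy when there are no RAM blocks.
    if ram_blocks:
        print(layers[ram_blocks - 1])  # runs only when ram_blocks >= 1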