Merge pull request #85 from VE-FORBRYDERNE/sp

Fix a bug with soft prompts when using transformers XGLM
commit f628929401
henk717 authored 2022-02-28 02:33:18 +01:00, committed by GitHub
1 changed file with 3 additions and 1 deletion

@@ -1048,7 +1048,7 @@ if(not vars.model in ["InferKit", "Colab", "OAI", "ReadOnly", "TPUMeshTransforme
                 if(hasattr(self, "transformer")):
                     inputs_embeds = self.transformer.wte(input_ids)
                 else:
-                    inputs_embeds = self.model.embed_tokens(input_ids) * self.model.embed_scale
+                    inputs_embeds = self.model.embed_tokens(input_ids)
                 if(vars.sp is not None):
                     vars.sp = vars.sp.to(inputs_embeds.dtype).to(inputs_embeds.device)
                     inputs_embeds = torch.where(
@@ -1056,6 +1056,8 @@ if(not vars.model in ["InferKit", "Colab", "OAI", "ReadOnly", "TPUMeshTransforme
                         vars.sp[shifted_input_ids.clamp(min=0)],
                         inputs_embeds,
                     )
+                if(not hasattr(self, "transformer")):
+                    inputs_embeds *= self.model.embed_scale
                 kwargs['inputs_embeds'] = inputs_embeds
                 return old_forward(self, *args, **kwargs)
             cls.forward = new_causallm_forward
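
For context on the bug: XGLM, unlike the GPT-style models handled by the `hasattr(self, "transformer")` branch, multiplies its token embeddings by `embed_scale`. Before this commit the scaling happened before the soft-prompt rows were substituted in via `torch.where`, so the soft prompt entered the model unscaled relative to the surrounding embeddings. The following minimal sketch reproduces the ordering issue in isolation; the shapes, the toy vocabulary, and the `soft_prompt` tensor are illustrative assumptions, not KoboldAI code:

    import math
    import torch

    # Illustrative sizes only -- not the real KoboldAI/XGLM values.
    d_model, vocab_size, sp_len = 8, 100, 4
    embed_scale = math.sqrt(d_model)             # XGLM-style embedding scale
    embed_tokens = torch.nn.Embedding(vocab_size, d_model)
    soft_prompt = torch.randn(sp_len, d_model)   # stored unscaled, like vars.sp

    # Token ids >= vocab_size address soft-prompt rows, as in the patched forward().
    input_ids = torch.tensor([[vocab_size + 0, vocab_size + 1, 5, 7]])
    shifted_input_ids = input_ids - vocab_size

    # Buggy order: scale first, then substitute -- soft-prompt rows stay unscaled.
    buggy = embed_tokens(input_ids.clamp(max=vocab_size - 1)) * embed_scale
    buggy = torch.where((shifted_input_ids >= 0)[..., None],
                        soft_prompt[shifted_input_ids.clamp(min=0)], buggy)

    # Fixed order (what the commit does): substitute first, then scale everything.
    fixed = embed_tokens(input_ids.clamp(max=vocab_size - 1))
    fixed = torch.where((shifted_input_ids >= 0)[..., None],
                        soft_prompt[shifted_input_ids.clamp(min=0)], fixed)
    fixed = fixed * embed_scale

    # The soft-prompt positions differ by exactly embed_scale.
    print(torch.allclose(buggy[0, :2] * embed_scale, fixed[0, :2]))  # True

After the fix, the soft-prompt rows pass through the same `embed_scale` multiplication as ordinary token embeddings, which is why the multiplication was moved below the `torch.where` substitution and guarded with `not hasattr(self, "transformer")` so that only the XGLM-style code path applies it.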