From bf4e7742acd49d2982198c15341805200df1a58e Mon Sep 17 00:00:00 2001
From: Gnome Ann <>
Date: Thu, 28 Oct 2021 17:18:28 -0400
Subject: [PATCH] Patch GPTJForCausalLM, if it exists, to support soft prompting

---
 aiserver.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/aiserver.py b/aiserver.py
index 3bd15e91..e5e3ca85 100644
--- a/aiserver.py
+++ b/aiserver.py
@@ -534,6 +534,11 @@ if(not vars.model in ["InferKit", "Colab", "OAI", "ReadOnly"]):
         cls.forward = new_causallm_forward
     for cls in (GPT2LMHeadModel, GPTNeoForCausalLM):
         patch_causallm(cls)
+    try:
+        from transformers import GPTJForCausalLM
+        patch_causallm(GPTJForCausalLM)
+    except:
+        pass
 
     # If custom GPT Neo model was chosen
     if(vars.model == "NeoCustom"):
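
Context for readers unfamiliar with the surrounding code: patch_causallm (defined just above the hunk in aiserver.py, ending with the visible context line "cls.forward = new_causallm_forward") monkey-patches a model class's forward method so that soft prompt embeddings can be injected, and the new try/except block extends the same patch to GPT-J, whose class only exists in newer transformers releases. The sketch below illustrates the general idea only; the module-level soft_prompt_embeds tensor and the body of new_causallm_forward here are illustrative assumptions, not the exact code from aiserver.py.

    import torch

    # Illustrative module-level soft prompt tensor; in aiserver.py the real
    # tensor comes from a user-loaded softprompt file.
    # Assumed shape: (num_virtual_tokens, embed_dim), or None when no soft
    # prompt is active.
    soft_prompt_embeds = None

    def patch_causallm(cls):
        """Replace cls.forward so that, when a soft prompt is loaded, its
        embeddings are prepended to the token embeddings before the original
        forward pass runs. Sketch only (inference path); the real
        new_causallm_forward in aiserver.py differs in its details."""
        old_forward = cls.forward

        def new_causallm_forward(self, input_ids=None, inputs_embeds=None, **kwargs):
            if soft_prompt_embeds is not None and input_ids is not None:
                # Convert token ids to embeddings and prepend the soft prompt.
                embeds = self.get_input_embeddings()(input_ids)
                prefix = soft_prompt_embeds.to(embeds.device, dtype=embeds.dtype)
                prefix = prefix.unsqueeze(0).expand(embeds.size(0), -1, -1)
                inputs_embeds = torch.cat([prefix, embeds], dim=1)
                input_ids = None
                # Extend the attention mask to cover the virtual tokens.
                attention_mask = kwargs.get("attention_mask")
                if attention_mask is not None:
                    pad = attention_mask.new_ones(attention_mask.size(0), prefix.size(1))
                    kwargs["attention_mask"] = torch.cat([pad, attention_mask], dim=1)
            return old_forward(self, input_ids=input_ids,
                               inputs_embeds=inputs_embeds, **kwargs)

        cls.forward = new_causallm_forward

On the design choice: GPT2LMHeadModel and GPTNeoForCausalLM exist in every transformers version KoboldAI supports, so they are patched unconditionally, while GPTJForCausalLM is imported inside a try block because older releases do not ship it; when the import fails, the patch is silently skipped. A narrower "except ImportError:" would express that intent more precisely than the bare "except:", with the same effect.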