From fe619d4677b67eecb1a706c12c242f10074434b4 Mon Sep 17 00:00:00 2001 From: Gnome Ann <> Date: Thu, 9 Jun 2022 17:42:46 -0400 Subject: [PATCH] Update list of versions with broken OPT again They released another version of transformers that still doesn't have the OPT patch, so I decided it would be safer to just mark all 4.19 transformers versions as needing the OPT patch. --- aiserver.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiserver.py b/aiserver.py index 4e33332b..fefab9b8 100644 --- a/aiserver.py +++ b/aiserver.py @@ -1359,7 +1359,7 @@ if(not vars.use_colab_tpu and vars.model not in ["InferKit", "Colab", "OAI", "Go # Fix a bug in OPTForCausalLM where self.lm_head is the wrong size - if(packaging.version.parse("4.19.0.dev0") <= packaging.version.parse(transformers_version) <= packaging.version.parse("4.19.2")): + if(packaging.version.parse("4.19.0.dev0") <= packaging.version.parse(transformers_version) < packaging.version.parse("4.20.0")): try: from transformers import OPTForCausalLM, OPTModel except ImportError: