From bbf4963d6e336f10c7fa2ec65ebeff8ae07c56f1 Mon Sep 17 00:00:00 2001
From: onesome
Date: Wed, 26 Apr 2023 16:18:45 -0500
Subject: [PATCH] Fix custmodpth stuff for hf loading

---
 modeling/inference_models/hf.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/modeling/inference_models/hf.py b/modeling/inference_models/hf.py
index c2ca525b..d7d187c2 100644
--- a/modeling/inference_models/hf.py
+++ b/modeling/inference_models/hf.py
@@ -24,6 +24,13 @@ class HFInferenceModel(InferenceModel):
 
         If ignore_existance is true, it will always return a path.
         """
+        if self.model_name in ["NeoCustom", "GPT2Custom", "TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX"]:
+            assert utils.koboldai_vars.custmodpth
+            assert os.path.exists(utils.koboldai_vars.custmodpth)
+
+            print("CUSTMODPATH")
+            return utils.koboldai_vars.custmodpth
+
         basename = utils.koboldai_vars.model.replace("/", "_")
         if legacy:
             ret = basename
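
The hunk above makes HFInferenceModel.get_local_model_path short-circuit for the legacy
custom-model identifiers: when self.model_name is one of them, the user-configured
utils.koboldai_vars.custmodpth is asserted to exist and returned directly, instead of a
path derived from the model name. Below is a minimal standalone sketch of that control
flow; the free-standing get_local_model_path function, the custmodpth parameter, and the
models_dir fallback are simplified stand-ins for illustration, not the project's actual API.

import os
from typing import Optional

# Model names that refer to a user-supplied local checkpoint rather than a
# downloadable Hugging Face model id (names taken from the hunk above).
CUSTOM_MODEL_NAMES = [
    "NeoCustom",
    "GPT2Custom",
    "TPUMeshTransformerGPTJ",
    "TPUMeshTransformerGPTNeoX",
]


def get_local_model_path(
    model_name: str,
    custmodpth: Optional[str] = None,
    models_dir: str = "models",
) -> Optional[str]:
    """Resolve a local model path, mirroring the branching added by the patch."""
    if model_name in CUSTOM_MODEL_NAMES:
        # Custom models must point at an existing, explicitly configured path;
        # that path is returned as-is rather than derived from the model name.
        assert custmodpth, "custmodpth must be set for custom model names"
        assert os.path.exists(custmodpth), f"custmodpth does not exist: {custmodpth}"
        return custmodpth

    # Fallback (simplified assumption): derive a directory name from the model
    # id and look for it under the local models directory.
    basename = model_name.replace("/", "_")
    candidate = os.path.join(models_dir, basename)
    return candidate if os.path.exists(candidate) else None


if __name__ == "__main__":
    # A custom placeholder name resolves to the explicit custmodpth...
    print(get_local_model_path("NeoCustom", custmodpth="."))
    # ...while an ordinary model id falls back to the models/ lookup.
    print(get_local_model_path("EleutherAI/gpt-neo-2.7B"))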