Model: Respect model lazyload over kaivars

koboldai_vars should only dictate model config when the setting comes from outside aiserver;
otherwise the model's own lazy_load setting takes precedence.
somebody
2023-03-09 20:29:12 -06:00
parent a472bdf6c3
commit 3646aa9e83
4 changed files with 13 additions and 9 deletions


@@ -12,7 +12,7 @@ from modeling.inference_models.hf_torch import HFTorchInferenceModel
 class CustomGPT2HFTorchInferenceModel(HFTorchInferenceModel):
     def _load(self, save_model: bool, initial_load: bool) -> None:
-        utils.koboldai_vars.lazy_load = False
+        self.lazy_load = False
         model_path = None
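
A minimal sketch of the pattern this change moves toward, assuming the base inference model exposes a lazy_load attribute that subclasses can override. Only the self.lazy_load attribute comes from the diff; the class and method names below are illustrative, not KoboldAI's actual implementation.

# Illustrative only: each model owns its lazy_load flag instead of mutating
# the global koboldai_vars. Names besides `lazy_load` are hypothetical.

class InferenceModel:
    def __init__(self, lazy_load: bool = True) -> None:
        # Per-instance preference; no global state is touched.
        self.lazy_load = lazy_load

    def load(self) -> None:
        if self.lazy_load:
            print("deferring weight materialization (lazy load)")
        else:
            print("loading all weights eagerly")


class CustomGPT2Model(InferenceModel):
    def _load(self) -> None:
        # Custom GPT-2 checkpoints can't be lazy-loaded here, so the model
        # flips its own flag rather than writing to a shared settings object.
        self.lazy_load = False
        self.load()


if __name__ == "__main__":
    CustomGPT2Model()._load()  # prints: loading all weights eagerly

Keeping the flag on the model instance means two models loaded in the same process no longer race on a single global setting, which is the point of preferring the model's lazy_load over kaivars.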