Mirror of https://github.com/KoboldAI/KoboldAI-Client.git (synced 2025-06-05 21:59:24 +02:00)
Model: Respect model lazyload over kaivars
kaivars dictates the model config unless it is set from outside aiserver.
@@ -12,7 +12,7 @@ from modeling.inference_models.hf_torch import HFTorchInferenceModel
 class CustomGPT2HFTorchInferenceModel(HFTorchInferenceModel):
     def _load(self, save_model: bool, initial_load: bool) -> None:
-        utils.koboldai_vars.lazy_load = False
+        self.lazy_load = False
 
         model_path = None
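The change keeps lazy loading a per-model decision instead of mutating the global kaivars state. Below is a minimal sketch of the precedence the commit title implies ("respect model lazyload over kaivars"); the class and function names other than `lazy_load` are hypothetical illustrations, not the actual KoboldAI API.

# Hypothetical sketch, not KoboldAI's real API: the model's own lazy_load
# flag takes precedence, and the global kaivars value is only a fallback.
class InferenceModelSketch:
    def __init__(self, lazy_load=None):
        # None means "no per-model preference; defer to kaivars".
        self.lazy_load = lazy_load

def effective_lazy_load(model, kaivars_lazy_load):
    # Respect the model-level setting over the global one.
    if model.lazy_load is not None:
        return model.lazy_load
    return kaivars_lazy_load

# Example: the custom GPT-2 model forces lazy loading off for itself only,
# leaving the global kaivars setting untouched for other models.
gpt2 = InferenceModelSketch(lazy_load=False)
assert effective_lazy_load(gpt2, kaivars_lazy_load=True) is False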