Fix multigpu loading without lazy-loader

This commit is contained in:
0cc4m
2023-05-08 22:57:09 +02:00
parent 4f94247910
commit 6121598142

View File

@@ -243,6 +243,11 @@ class GenericHFTorchInferenceModel(HFTorchInferenceModel):
)
shutil.rmtree("cache/")
if not self.lazy_load:
utils.layers_module_names = utils.get_layers_module_names(self.model)
utils.module_names = list(self.model.state_dict().keys())
utils.named_buffers = list(self.model.named_buffers(recurse=True))
self.patch_embedding()
if utils.koboldai_vars.hascuda: