From aca2b532d7f74e9babfcb561a10397fb8f38f808 Mon Sep 17 00:00:00 2001
From: somebody
Date: Wed, 21 Jun 2023 14:15:38 -0500
Subject: [PATCH] Remove debug

---
 modeling/inference_models/hf_torch.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/modeling/inference_models/hf_torch.py b/modeling/inference_models/hf_torch.py
index 10b0fa3c..9a941cf6 100644
--- a/modeling/inference_models/hf_torch.py
+++ b/modeling/inference_models/hf_torch.py
@@ -308,7 +308,6 @@ class HFTorchInferenceModel(HFInferenceModel):
         with lazy_loader.use_lazy_load(dematerialized_modules=True):
             metamodel = AutoModelForCausalLM.from_config(self.model_config)
             tf_kwargs["device_map"] = self.breakmodel_config.get_device_map(metamodel)
-        print("Rodger rodger", tf_kwargs)

         with lazy_loader.use_lazy_load(
             enable=self.lazy_load,