From 925cad2e2fa6c65b8ea37680d19fa69023cce9f5 Mon Sep 17 00:00:00 2001
From: ebolam
Date: Mon, 22 May 2023 14:50:13 -0400
Subject: [PATCH] Better compatibility with hf model backend

---
 modeling/inference_models/hf.py | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/modeling/inference_models/hf.py b/modeling/inference_models/hf.py
index 318423d5..b209d49f 100644
--- a/modeling/inference_models/hf.py
+++ b/modeling/inference_models/hf.py
@@ -1,4 +1,4 @@
-import os
+import os, sys
 from typing import Optional
 from transformers import AutoConfig
 import warnings
@@ -196,9 +196,10 @@ class HFInferenceModel(InferenceModel):
         except:
             pass
         if self.hf_torch:
-            breakmodel.breakmodel = True
-            breakmodel.gpu_blocks = []
-            breakmodel.disk_blocks = 0
+            if 'breakmodel' in sys.modules:
+                breakmodel.breakmodel = True
+                breakmodel.gpu_blocks = []
+                breakmodel.disk_blocks = 0
 
     def _post_load(self) -> None:
         # These are model specific tokenizer overrides if a model has bad defaults
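
Note: a minimal sketch of the sys.modules guard this patch introduces, shown outside the class for clarity. The helper name reset_breakmodel_state is illustrative only; in the actual code the reset happens inside HFInferenceModel and references the breakmodel module directly.

import sys

def reset_breakmodel_state():
    # Only reset breakmodel state when the module has actually been imported;
    # if breakmodel was never loaded, skip the reset instead of touching an
    # undefined module.
    if 'breakmodel' in sys.modules:
        breakmodel = sys.modules['breakmodel']
        breakmodel.breakmodel = True
        breakmodel.gpu_blocks = []
        breakmodel.disk_blocks = 0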