Better compatibility with hf model backend

This commit is contained in:
ebolam
2023-05-22 14:50:13 -04:00
parent 513b8575e7
commit 925cad2e2f

View File

@@ -1,4 +1,4 @@
-import os
+import os, sys
 from typing import Optional
 from transformers import AutoConfig
 import warnings
@@ -196,9 +196,10 @@ class HFInferenceModel(InferenceModel):
         except:
             pass
         if self.hf_torch:
-            breakmodel.breakmodel = True
-            breakmodel.gpu_blocks = []
-            breakmodel.disk_blocks = 0
+            if 'breakmodel' in sys.modules:
+                breakmodel.breakmodel = True
+                breakmodel.gpu_blocks = []
+                breakmodel.disk_blocks = 0
 
     def _post_load(self) -> None:
         # These are model specific tokenizer overrides if a model has bad defaults