Fall back to transformers if hf_bleeding_edge is not available

This commit is contained in:
0cc4m
2023-07-23 07:14:23 +02:00
parent 748e5ef318
commit 09bb1021dd
4 changed files with 17 additions and 5 deletions

View File

@@ -7,7 +7,10 @@ import shutil
from typing import Union
from transformers import GPTNeoForCausalLM, GPT2LMHeadModel, BitsAndBytesConfig
from hf_bleeding_edge import AutoModelForCausalLM
try:
from hf_bleeding_edge import AutoModelForCausalLM
except ImportError:
from transformers import AutoModelForCausalLM
from transformers.utils import WEIGHTS_NAME, WEIGHTS_INDEX_NAME, TF2_WEIGHTS_NAME, TF2_WEIGHTS_INDEX_NAME, TF_WEIGHTS_NAME, FLAX_WEIGHTS_NAME, FLAX_WEIGHTS_INDEX_NAME, SAFE_WEIGHTS_NAME, SAFE_WEIGHTS_INDEX_NAME

View File

@@ -10,8 +10,11 @@ import sys
from typing import Union
from transformers import GPTNeoForCausalLM, AutoTokenizer, LlamaTokenizer
import hf_bleeding_edge
from hf_bleeding_edge import AutoModelForCausalLM
try:
import hf_bleeding_edge
from hf_bleeding_edge import AutoModelForCausalLM
except ImportError:
from transformers import AutoModelForCausalLM
import utils
import modeling.lazy_loader as lazy_loader

View File

@@ -1,6 +1,9 @@
import os, sys
from typing import Optional
from hf_bleeding_edge import AutoConfig
try:
from hf_bleeding_edge import AutoConfig
except ImportError:
from transformers import AutoConfig
import warnings
import utils

View File

@@ -19,7 +19,10 @@ from transformers import (
GPT2LMHeadModel,
LogitsProcessorList,
)
from hf_bleeding_edge import AutoModelForCausalLM
try:
from hf_bleeding_edge import AutoModelForCausalLM
except ImportError:
from transformers import AutoModelForCausalLM
import utils
import modeling.lazy_loader as lazy_loader