Mirror of https://github.com/KoboldAI/KoboldAI-Client.git, synced 2025-06-05 21:59:24 +02:00.
Fallback to transformers if hf_bleeding_edge not available
This commit is contained in:
@@ -7,7 +7,10 @@ import shutil
 from typing import Union
 
 from transformers import GPTNeoForCausalLM, GPT2LMHeadModel, BitsAndBytesConfig
-from hf_bleeding_edge import AutoModelForCausalLM
+try:
+    from hf_bleeding_edge import AutoModelForCausalLM
+except ImportError:
+    from transformers import AutoModelForCausalLM
 
 from transformers.utils import WEIGHTS_NAME, WEIGHTS_INDEX_NAME, TF2_WEIGHTS_NAME, TF2_WEIGHTS_INDEX_NAME, TF_WEIGHTS_NAME, FLAX_WEIGHTS_NAME, FLAX_WEIGHTS_INDEX_NAME, SAFE_WEIGHTS_NAME, SAFE_WEIGHTS_INDEX_NAME
|
@@ -10,8 +10,11 @@ import sys
 from typing import Union
 
 from transformers import GPTNeoForCausalLM, AutoTokenizer, LlamaTokenizer
-import hf_bleeding_edge
-from hf_bleeding_edge import AutoModelForCausalLM
+try:
+    import hf_bleeding_edge
+    from hf_bleeding_edge import AutoModelForCausalLM
+except ImportError:
+    from transformers import AutoModelForCausalLM
 
 import utils
 import modeling.lazy_loader as lazy_loader
|
@@ -1,6 +1,9 @@
 import os, sys
 from typing import Optional
-from hf_bleeding_edge import AutoConfig
+try:
+    from hf_bleeding_edge import AutoConfig
+except ImportError:
+    from transformers import AutoConfig
 
 import warnings
 import utils
|
@@ -19,7 +19,10 @@ from transformers import (
|
||||
GPT2LMHeadModel,
|
||||
LogitsProcessorList,
|
||||
)
|
||||
from hf_bleeding_edge import AutoModelForCausalLM
|
||||
try:
|
||||
from hf_bleeding_edge import AutoModelForCausalLM
|
||||
except ImportError:
|
||||
from transformers import AutoModelForCausalLM
|
||||
|
||||
import utils
|
||||
import modeling.lazy_loader as lazy_loader
|
||||
|
Reference in New Issue
Block a user