diff --git a/aiserver.py b/aiserver.py
index 1c56cdb0..ca8b0490 100644
--- a/aiserver.py
+++ b/aiserver.py
@@ -626,14 +626,20 @@ from modeling.patches import patch_transformers
 import importlib
 model_backend_code = {}
 model_backends = {}
+model_backend_type_crosswalk = {}
 for module in os.listdir("./modeling/inference_models"):
     if not os.path.isfile(os.path.join("./modeling/inference_models",module)) and module != '__pycache__':
         try:
             model_backend_code[module] = importlib.import_module('modeling.inference_models.{}.class'.format(module))
             model_backends[model_backend_code[module].model_backend_name] = model_backend_code[module].model_backend()
-            if 'disable' in vars(model_backends[model_backend_code[module].model_backend_name]):
-                if model_backends[model_backend_code[module].model_backend_name].disable:
-                    del model_backends[model_backend_code[module].model_backend_name]
+            if 'disable' in vars(model_backends[model_backend_code[module].model_backend_name]) and model_backends[model_backend_code[module].model_backend_name].disable:
+                del model_backends[model_backend_code[module].model_backend_name]
+            else:
+                if model_backend_code[module].model_backend_type in model_backend_type_crosswalk:
+                    model_backend_type_crosswalk[model_backend_code[module].model_backend_type].append(model_backend_code[module].model_backend_name)
+                else:
+                    model_backend_type_crosswalk[model_backend_code[module].model_backend_type] = [model_backend_code[module].model_backend_name]
+
         except Exception:
             logger.error("Model Backend {} failed to load".format(module))
             logger.error(traceback.format_exc())
@@ -6211,6 +6217,7 @@ def UI_2_load_model_button(data):
 @socketio.on('select_model')
 @logger.catch
 def UI_2_select_model(data):
+    global model_backend_type_crosswalk #No idea why I have to make this a global where I don't for model_backends...
     logger.debug("Clicked on model entry: {}".format(data))
     if data["name"] in model_menu and data['ismenu'] == "true":
         emit("open_model_load_menu", {"items": [{**item.to_json(), **{"menu":data["name"]}} for item in model_menu[data["name"]] if item.should_show()]})
@@ -6220,8 +6227,9 @@ def UI_2_select_model(data):
     valid_loaders = {}
     if data['id'] in [item.name for sublist in model_menu for item in model_menu[sublist]]:
         #Here if we have a model id that's in our menu, we explicitly use that backend
-        for model_backend in set([item.model_backend for sublist in model_menu for item in model_menu[sublist] if item.name == data['id']]):
-            valid_loaders[model_backend] = model_backends[model_backend].get_requested_parameters(data["name"], data["path"] if 'path' in data else None, data["menu"])
+        for model_backend_type in set([item.model_backend for sublist in model_menu for item in model_menu[sublist] if item.name == data['id']]):
+            for model_backend in model_backend_type_crosswalk[model_backend_type]:
+                valid_loaders[model_backend] = model_backends[model_backend].get_requested_parameters(data["name"], data["path"] if 'path' in data else None, data["menu"])
         emit("selected_model_info", {"model_backends": valid_loaders})
     else:
         #Here we have a model that's not in our menu structure (either a custom model or a custom path
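
The crosswalk built in the first hunk is a plain dict that groups concrete model_backend_name values under their shared model_backend_type, so one menu entry can later fan out to every compatible loader. (In the hunks shown, both dicts are only read inside UI_2_select_model, and Python only requires a global statement when a function assigns to the name, which may explain the author's puzzlement in the comment.) A minimal standalone sketch of the grouping logic, using SimpleNamespace objects as hypothetical stand-ins for the real loaded backend modules:

# Standalone sketch of the crosswalk grouping above; the SimpleNamespace
# objects are stand-ins, not the real modeling.inference_models modules.
from types import SimpleNamespace

loaded = [
    SimpleNamespace(model_backend_name="Huggingface", model_backend_type="Huggingface"),
    SimpleNamespace(model_backend_name="Huggingface MTJ", model_backend_type="Huggingface"),
    SimpleNamespace(model_backend_name="Horde", model_backend_type="Horde"),
]

model_backend_type_crosswalk = {}
for backend in loaded:
    # setdefault collapses the patch's explicit membership test into one line
    model_backend_type_crosswalk.setdefault(backend.model_backend_type, []).append(
        backend.model_backend_name
    )

print(model_backend_type_crosswalk)
# {'Huggingface': ['Huggingface', 'Huggingface MTJ'], 'Horde': ['Horde']}
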
logger.debug("Clicked on model entry: {}".format(data)) if data["name"] in model_menu and data['ismenu'] == "true": emit("open_model_load_menu", {"items": [{**item.to_json(), **{"menu":data["name"]}} for item in model_menu[data["name"]] if item.should_show()]}) @@ -6220,8 +6227,9 @@ def UI_2_select_model(data): valid_loaders = {} if data['id'] in [item.name for sublist in model_menu for item in model_menu[sublist]]: #Here if we have a model id that's in our menu, we explicitly use that backend - for model_backend in set([item.model_backend for sublist in model_menu for item in model_menu[sublist] if item.name == data['id']]): - valid_loaders[model_backend] = model_backends[model_backend].get_requested_parameters(data["name"], data["path"] if 'path' in data else None, data["menu"]) + for model_backend_type in set([item.model_backend for sublist in model_menu for item in model_menu[sublist] if item.name == data['id']]): + for model_backend in model_backend_type_crosswalk[model_backend_type]: + valid_loaders[model_backend] = model_backends[model_backend].get_requested_parameters(data["name"], data["path"] if 'path' in data else None, data["menu"]) emit("selected_model_info", {"model_backends": valid_loaders}) else: #Here we have a model that's not in our menu structure (either a custom model or a custom path diff --git a/modeling/inference_models/api/class.py b/modeling/inference_models/api/class.py index 64cfd2ab..a27250ce 100644 --- a/modeling/inference_models/api/class.py +++ b/modeling/inference_models/api/class.py @@ -19,6 +19,7 @@ from modeling.inference_model import ( ) model_backend_name = "KoboldAI API" +model_backend_type = "KoboldAI API" #This should be a generic name in case multiple model backends are compatible (think Hugging Face Custom and Basic Hugging Face) class APIException(Exception): """To be used for errors when using the Kobold API as an interface.""" diff --git a/modeling/inference_models/basic_api/class.py b/modeling/inference_models/basic_api/class.py index b492c039..d77b3b09 100644 --- a/modeling/inference_models/basic_api/class.py +++ b/modeling/inference_models/basic_api/class.py @@ -17,6 +17,7 @@ from modeling.inference_model import ( model_backend_name = "KoboldAI Old Colab Method" +model_backend_type = "KoboldAI Old Colab Method" #This should be a generic name in case multiple model backends are compatible (think Hugging Face Custom and Basic Hugging Face) class BasicAPIException(Exception): """To be used for errors when using the Basic API as an interface.""" diff --git a/modeling/inference_models/generic_hf_torch/class.py b/modeling/inference_models/generic_hf_torch/class.py index 57bcd5ad..d900bb95 100644 --- a/modeling/inference_models/generic_hf_torch/class.py +++ b/modeling/inference_models/generic_hf_torch/class.py @@ -23,6 +23,7 @@ except ModuleNotFoundError as e: from modeling.inference_models.hf_torch import HFTorchInferenceModel model_backend_name = "Huggingface" +model_backend_type = "Huggingface" #This should be a generic name in case multiple model backends are compatible (think Hugging Face Custom and Basic Hugging Face) class model_backend(HFTorchInferenceModel): diff --git a/modeling/inference_models/gooseai/class.py b/modeling/inference_models/gooseai/class.py index 934f15dd..6d405877 100644 --- a/modeling/inference_models/gooseai/class.py +++ b/modeling/inference_models/gooseai/class.py @@ -15,6 +15,7 @@ from modeling.inference_model import ( from modeling.inference_models.openai_gooseai import model_backend as openai_gooseai_model_backend 
model_backend_name = "GooseAI" +model_backend_type = "GooseAI" #This should be a generic name in case multiple model backends are compatible (think Hugging Face Custom and Basic Hugging Face) class OpenAIAPIError(Exception): def __init__(self, error_type: str, error_message) -> None: diff --git a/modeling/inference_models/hf_mtj/class.py b/modeling/inference_models/hf_mtj/class.py index e029db9d..1b6b2cb8 100644 --- a/modeling/inference_models/hf_mtj/class.py +++ b/modeling/inference_models/hf_mtj/class.py @@ -20,6 +20,7 @@ from modeling.inference_models.hf import HFInferenceModel from modeling.tokenizer import GenericTokenizer model_backend_name = "Huggingface MTJ" +model_backend_type = "Huggingface" #This should be a generic name in case multiple model backends are compatible (think Hugging Face Custom and Basic Hugging Face) class model_backend(HFInferenceModel): diff --git a/modeling/inference_models/horde/class.py b/modeling/inference_models/horde/class.py index 3c19a0c1..97e84991 100644 --- a/modeling/inference_models/horde/class.py +++ b/modeling/inference_models/horde/class.py @@ -18,6 +18,7 @@ from modeling.inference_model import ( ) model_backend_name = "Horde" +model_backend_type = "Horde" #This should be a generic name in case multiple model backends are compatible (think Hugging Face Custom and Basic Hugging Face) class HordeException(Exception): """To be used for errors on server side of the Horde.""" diff --git a/modeling/inference_models/openai/class.py b/modeling/inference_models/openai/class.py index cea644ea..f2f37ff7 100644 --- a/modeling/inference_models/openai/class.py +++ b/modeling/inference_models/openai/class.py @@ -15,6 +15,7 @@ from modeling.inference_model import ( from modeling.inference_models.openai_gooseai import model_backend as openai_gooseai_model_backend model_backend_name = "OpenAI" +model_backend_type = "OpenAI" #This should be a generic name in case multiple model backends are compatible (think Hugging Face Custom and Basic Hugging Face) class OpenAIAPIError(Exception): def __init__(self, error_type: str, error_message) -> None: diff --git a/modeling/inference_models/readonly/class.py b/modeling/inference_models/readonly/class.py index 98573990..13c38baf 100644 --- a/modeling/inference_models/readonly/class.py +++ b/modeling/inference_models/readonly/class.py @@ -15,6 +15,7 @@ from modeling.inference_model import ( ) model_backend_name = "Read Only" +model_backend_type = "Read Only" #This should be a generic name in case multiple model backends are compatible (think Hugging Face Custom and Basic Hugging Face) class BasicAPIException(Exception): """To be used for errors when using the Basic API as an interface."""