From 81e4c8a80744af4e9541fbb63dcfd281d86ee127 Mon Sep 17 00:00:00 2001
From: somebody
Date: Mon, 24 Jul 2023 10:25:44 -0500
Subject: [PATCH 1/2] Backends: Fix GPTQ priority

---
 aiserver.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/aiserver.py b/aiserver.py
index fe499edc..b34eeebf 100644
--- a/aiserver.py
+++ b/aiserver.py
@@ -631,8 +631,8 @@ model_backend_module_names = {}
 model_backend_type_crosswalk = {}
 
 PRIORITIZED_BACKEND_MODULES = {
-    "gptq_hf_torch": 1,
-    "generic_hf_torch": 2
+    "gptq_hf_torch": 2,
+    "generic_hf_torch": 1
 }
 
 for module in os.listdir("./modeling/inference_models"):

From 2fb877db407224d502c33a9cdf88d95e46f3339e Mon Sep 17 00:00:00 2001
From: somebody
Date: Mon, 24 Jul 2023 10:28:22 -0500
Subject: [PATCH 2/2] Backends: Probably fix sorting

---
 aiserver.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/aiserver.py b/aiserver.py
index b34eeebf..2115c709 100644
--- a/aiserver.py
+++ b/aiserver.py
@@ -6292,7 +6292,7 @@ def UI_2_select_model(data):
     #so we'll just go through all the possible loaders
     for model_backend in sorted(
         model_backends,
-        key=lambda x: model_backend_module_names[x] in PRIORITIZED_BACKEND_MODULES,
+        key=lambda x: PRIORITIZED_BACKEND_MODULES.get(model_backend_module_names[x], 0),
         reverse=True,
     ):
         if model_backends[model_backend].is_valid(data["name"], data["path"] if 'path' in data else None, data["menu"]):