From 4dfbf809297f0e044de654ce040e40436242e95f Mon Sep 17 00:00:00 2001 From: ebolam Date: Thu, 1 Dec 2022 11:04:43 -0500 Subject: [PATCH] Disable redundant model.half().to(gpu_device) in load_model's CUDA path (cause unclear; this line appears unnecessary and removing it fixes the issue) --- aiserver.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiserver.py b/aiserver.py index 3f544f3c..9aeedfbd 100644 --- a/aiserver.py +++ b/aiserver.py @@ -3006,7 +3006,7 @@ def load_model(use_gpu=True, gpu_layers=None, disk_layers=None, initial_load=Fal if(koboldai_vars.hascuda): if(koboldai_vars.usegpu): koboldai_vars.modeldim = get_hidden_size_from_model(model) - model = model.half().to(koboldai_vars.gpu_device) + #model = model.half().to(koboldai_vars.gpu_device) generator = model.generate elif(koboldai_vars.breakmodel): # Use both RAM and VRAM (breakmodel) koboldai_vars.modeldim = get_hidden_size_from_model(model)