8-bit toggle; fix for broken toggle values

This commit is contained in:
Nick Perez
2023-07-18 23:29:38 -04:00
parent 22e7baec52
commit 0142913060
2 changed files with 22 additions and 3 deletions

View File

@@ -35,6 +35,17 @@ class model_backend(HFTorchInferenceModel):
temp = json.load(f)
else:
temp = {}
requested_parameters.append({
"uitype": "toggle",
"unit": "bool",
"label": "Use 8-bit",
"id": "use_8_bit",
"default": temp['use_8_bit'] if 'use_8_bit' in temp else False,
"tooltip": "Whether or not to use BnB's 8-bit mode",
"menu_path": "Layers",
"extra_classes": "",
"refresh_model_inputs": False
})
requested_parameters.append({
"uitype": "toggle",
"unit": "bool",
@@ -53,6 +64,7 @@ class model_backend(HFTorchInferenceModel):
def set_input_parameters(self, parameters):
    """Apply the user-selected model-loading settings.

    Delegates to the base backend for the common settings, then records
    the quantization toggles on the instance.

    Args:
        parameters: dict of UI settings; the 'use_4_bit' and 'use_8_bit'
            keys are optional and default to False when absent.
    """
    super().set_input_parameters(parameters)
    # dict.get with a default replaces the verbose
    # "x['k'] if 'k' in x else False" double-lookup pattern.
    self.use_4_bit = parameters.get('use_4_bit', False)
    self.use_8_bit = parameters.get('use_8_bit', False)
def _load(self, save_model: bool, initial_load: bool) -> None:
utils.koboldai_vars.allowsp = True
@@ -82,6 +94,14 @@ class model_backend(HFTorchInferenceModel):
"low_cpu_mem_usage": True,
}
if self.use_8_bit:
tf_kwargs.update({
"quantization_config":BitsAndBytesConfig(
load_in_8bit=True,
llm_int8_enable_fp32_cpu_offload=True
),
})
if self.use_4_bit or utils.koboldai_vars.colab_arg:
tf_kwargs.update({
"quantization_config":BitsAndBytesConfig(
@@ -298,6 +318,7 @@ class model_backend(HFTorchInferenceModel):
if "disk_layers" in vars(self)
else 0,
"use_4_bit": self.use_4_bit,
"use_8_bit": self.use_8_bit,
},
f,
indent="",

View File

@@ -2011,7 +2011,7 @@ function load_model() {
data = {}
if (settings_area) {
for (const element of settings_area.querySelectorAll(".model_settings_input:not(.hidden)")) {
var element_data = element.value;
var element_data = element.getAttribute("data_type") === "bool" ? element.checked : element.value;
if ((element.tagName == "SELECT") && (element.multiple)) {
element_data = [];
for (var i=0, iLen=element.options.length; i<iLen; i++) {
@@ -2024,8 +2024,6 @@ function load_model() {
element_data = parseInt(element_data);
} else if (element.getAttribute("data_type") == "float") {
element_data = parseFloat(element_data);
} else if (element.getAttribute("data_type") == "bool") {
element_data = (element_data == 'on');
}
}
data[element.id.split("|")[1].replace("_value", "")] = element_data;