Fix for attention bias

Author: ebolam
Date:   2023-05-23 08:35:15 -04:00
Parent: 5561cc1f22
Commit: 7a8e4c39da


@@ -3693,7 +3693,8 @@ def calcsubmit(txt):
     bias += [1] * (i - top_index)
 bias[i] = b["multiplier"]
-device = utils.get_auxilary_device()
+device = model.get_auxilary_device()
 attention_bias.attention_bias = torch.Tensor(bias).to(device)
 logger.info(f"Bias by {koboldai_vars.memory_attn_bias} -- {attention_bias.attention_bias}")
 logger.debug("Submit: experimental_features time {}s".format(time.time()-start_time))
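
The one-line change swaps where the target device comes from: the bias tensor is now placed on whatever device the model backend reports via `get_auxilary_device()` rather than the helper in the `utils` module. Below is a minimal, self-contained sketch of the bias construction this hunk sits inside. The `_Model` and `_AttentionBias` stubs, the `spans` argument layout, and the `build_attention_bias` wrapper are hypothetical scaffolding for illustration; only the `get_auxilary_device` name, the `bias` list padding, and the final `torch.Tensor(bias).to(device)` call come from the diff itself.

```python
import torch

# Hypothetical stand-ins for the real KoboldAI objects; only the method
# name get_auxilary_device and the attention_bias attribute appear in
# the diff, the rest is assumed for the sake of a runnable example.
class _Model:
    def get_auxilary_device(self):
        # Pick the device the bias tensor should live on: the first
        # CUDA device when available, otherwise the CPU.
        return "cuda:0" if torch.cuda.is_available() else "cpu"

class _AttentionBias:
    attention_bias = None

model = _Model()
attention_bias = _AttentionBias()

def build_attention_bias(prompt_len, spans):
    """Build a per-token multiplier list: 1 everywhere except the
    user-tagged positions, which get their configured multiplier.

    `spans` is a list of (index, multiplier) pairs; this layout is an
    assumption, the diff only shows the inner assignments.
    """
    bias = []
    top_index = 0
    for i, multiplier in spans:
        if i > top_index:
            # Pad the untouched region with neutral 1s, as in the diff.
            bias += [1] * (i - top_index)
        bias.append(multiplier)  # bias[i] = b["multiplier"] in the diff
        top_index = i + 1
    # Pad out to the full prompt length.
    bias += [1] * (prompt_len - len(bias))

    # The fix: ask the model backend (not the utils module) which
    # device the tensor belongs on, then move it there.
    device = model.get_auxilary_device()
    attention_bias.attention_bias = torch.Tensor(bias).to(device)
    return attention_bias.attention_bias

print(build_attention_bias(8, [(2, 1.5), (3, 1.5)]))
# tensor([1.0000, 1.0000, 1.5000, 1.5000, 1.0000, 1.0000, 1.0000, 1.0000])
```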