From cb6010d66698f7b7510886bca1b5dfbaa7901d01 Mon Sep 17 00:00:00 2001 From: somebody Date: Tue, 7 Mar 2023 21:13:20 -0600 Subject: [PATCH] Model: Respect sampler bounds in torch A rather embarrassing way to spend an hour debugging after I told myself "I'd better remember to add this important thing to the torch side". Samplers were being applied when in their "off values" causing boring mathematical operations to take place (i.e. anything x 0 is always 0) --- modeling/inference_models/hf_torch.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/modeling/inference_models/hf_torch.py b/modeling/inference_models/hf_torch.py index 1dd3e93c..bf339a24 100644 --- a/modeling/inference_models/hf_torch.py +++ b/modeling/inference_models/hf_torch.py @@ -97,6 +97,10 @@ class HFTorchInferenceModel(HFInferenceModel): for sid in utils.koboldai_vars.sampler_order: warper = Warper.from_id(sid) + + if not warper.value_is_valid(): + continue + if warper == warpers.RepetitionPenalty: # Rep pen needs more data than other samplers scores = warper.torch(scores, input_ids=input_ids)