Fix for token generation length error

This commit is contained in:
ebolam
2022-09-27 07:29:41 -04:00
parent 26ba52e2fa
commit 468297772c

View File

@@ -159,7 +159,7 @@ class koboldai_vars(object):
 else:
 method = 1
 context = []
-token_budget = self.max_length
+token_budget = self.max_length - self.genamt
 used_world_info = []
 if self.tokenizer is None:
 used_tokens = 99999999999999999999999