From 802bef8c37524cda74060785092f9a053e5c07f6 Mon Sep 17 00:00:00 2001
From: ebolam
Date: Fri, 16 Dec 2022 20:05:15 -0500
Subject: [PATCH] Bug fix for probabilities (fixes issue 344)

---
 koboldai_settings.py | 45 ++++++++++++++++++++++++---------------------
 1 file changed, 24 insertions(+), 21 deletions(-)

diff --git a/koboldai_settings.py b/koboldai_settings.py
index 8e4c6355..7be92ff1 100644
--- a/koboldai_settings.py
+++ b/koboldai_settings.py
@@ -1538,13 +1538,14 @@ class KoboldStoryRegister(object):
             self.actions[action_id]["Selected Text"] = text
             self.actions[action_id]["Time"] = self.actions[action_id].get("Time", int(time.time()))
             if 'Probabilities' in self.actions[action_id]:
-                tokens = self.koboldai_vars.tokenizer.encode(text)
-                for token_num in range(len(self.actions[action_id]["Probabilities"])):
-                    for token_option in range(len(self.actions[action_id]["Probabilities"][token_num])):
-                        if token_num < len(tokens):
-                            self.actions[action_id]["Probabilities"][token_num][token_option]["Used"] = tokens[token_num] == self.actions[action_id]["Probabilities"][token_num][token_option]["tokenId"]
-                        else:
-                            self.actions[action_id]["Probabilities"][token_num][token_option]["Used"] = False
+                if self.koboldai_vars.tokenizer is not None:
+                    tokens = self.koboldai_vars.tokenizer.encode(text)
+                    for token_num in range(len(self.actions[action_id]["Probabilities"])):
+                        for token_option in range(len(self.actions[action_id]["Probabilities"][token_num])):
+                            if token_num < len(tokens):
+                                self.actions[action_id]["Probabilities"][token_num][token_option]["Used"] = tokens[token_num] == self.actions[action_id]["Probabilities"][token_num][token_option]["tokenId"]
+                            else:
+                                self.actions[action_id]["Probabilities"][token_num][token_option]["Used"] = False
             selected_text_length = 0
             self.actions[action_id]["Selected Text Length"] = selected_text_length
             for item in self.actions[action_id]["Options"]:
@@ -1591,13 +1592,14 @@ class KoboldStoryRegister(object):
                         del item['stream_id']
                         found = True
                         if 'Probabilities' in item:
-                            tokens = self.koboldai_vars.tokenizer.encode(option)
-                            for token_num in range(len(item["Probabilities"])):
-                                for token_option in range(len(item["Probabilities"][token_num])):
-                                    if token_num < len(tokens):
-                                        item["Probabilities"][token_num][token_option]["Used"] = tokens[token_num] == item["Probabilities"][token_num][token_option]["tokenId"]
-                                    else:
-                                        item["Probabilities"][token_num][token_option]["Used"] = False
+                            if self.koboldai_vars.tokenizer is not None:
+                                tokens = self.koboldai_vars.tokenizer.encode(option)
+                                for token_num in range(len(item["Probabilities"])):
+                                    for token_option in range(len(item["Probabilities"][token_num])):
+                                        if token_num < len(tokens):
+                                            item["Probabilities"][token_num][token_option]["Used"] = tokens[token_num] == item["Probabilities"][token_num][token_option]["tokenId"]
+                                        else:
+                                            item["Probabilities"][token_num][token_option]["Used"] = False
                         break
                     elif item['text'] == option:
                         found = True
@@ -1605,13 +1607,14 @@ class KoboldStoryRegister(object):
                             del item['stream_id']
                         found = True
                         if 'Probabilities' in item:
-                            tokens = self.koboldai_vars.tokenizer.encode(option)
-                            for token_num in range(len(item["Probabilities"])):
-                                for token_option in range(len(item["Probabilities"][token_num])):
-                                    if token_num < len(tokens):
-                                        item["Probabilities"][token_num][token_option]["Used"] = tokens[token_num] == item["Probabilities"][token_num][token_option]["tokenId"]
-                                    else:
-                                        item["Probabilities"][token_num][token_option]["Used"] = False
+                            if self.koboldai_vars.tokenizer is not None:
+                                tokens = self.koboldai_vars.tokenizer.encode(option)
+                                for token_num in range(len(item["Probabilities"])):
+                                    for token_option in range(len(item["Probabilities"][token_num])):
+                                        if token_num < len(tokens):
+                                            item["Probabilities"][token_num][token_option]["Used"] = tokens[token_num] == item["Probabilities"][token_num][token_option]["tokenId"]
+                                        else:
+                                            item["Probabilities"][token_num][token_option]["Used"] = False
                         break
 
                 if not found:
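
The same "guard the tokenizer, then flag which candidate token was actually used" loop appears three times in this patch. Below is a minimal standalone sketch of that logic, not part of the patch itself: the helper name and signature are hypothetical, and the `tokenizer` argument stands in for self.koboldai_vars.tokenizer, which can be None before a model is loaded.

    # Illustrative sketch only, not part of the patch above.
    from typing import Any, Dict, List, Optional


    def mark_used_probabilities(probabilities: List[List[Dict[str, Any]]],
                                text: str,
                                tokenizer: Optional[Any]) -> None:
        """Flag which candidate token was actually chosen at each position.

        `probabilities` mirrors the "Probabilities" structure in the patch:
        one list of candidate dicts (each carrying a "tokenId") per token
        position of the generated text.
        """
        if tokenizer is None:
            # No tokenizer loaded yet, so there is nothing to compare against;
            # leave the existing "Used" flags untouched, as the patch does.
            return
        tokens = tokenizer.encode(text)
        for token_num, candidates in enumerate(probabilities):
            for candidate in candidates:
                # Positions past the end of the encoded text can never be "used".
                candidate["Used"] = (token_num < len(tokens)
                                     and tokens[token_num] == candidate["tokenId"])

With a helper along these lines, each of the three patched sites could reduce to a single call such as mark_used_probabilities(item["Probabilities"], option, self.koboldai_vars.tokenizer).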