From 339225e4000d0b930f607513e09f16f3ba7700be Mon Sep 17 00:00:00 2001
From: Divided by Zer0
Date: Wed, 31 Aug 2022 22:58:58 +0200
Subject: [PATCH] fix for multiple gens breaking API/CLUSTER

---
 aiserver.py | 18 ++++++++++++------
 1 file changed, 12 insertions(+), 6 deletions(-)

diff --git a/aiserver.py b/aiserver.py
index a09a0714..8e218415 100644
--- a/aiserver.py
+++ b/aiserver.py
@@ -5028,14 +5028,17 @@ def sendtoapi(txt, min, max):
         if(len(genout) == 1):
             genresult(genout[0])
         else:
+            adjusted_genout = []
+            for item in genout:
+                adjusted_genout.append({"generated_text": item})
             # Convert torch output format to transformers
             seqs = []
-            for seq in genout:
+            for seq in adjusted_genout:
                 seqs.append({"generated_text": seq})
             if(vars.lua_koboldbridge.restart_sequence is not None and vars.lua_koboldbridge.restart_sequence > 0):
-                genresult(genout[vars.lua_koboldbridge.restart_sequence-1]["generated_text"])
+                genresult(adjusted_genout[vars.lua_koboldbridge.restart_sequence-1]["generated_text"])
             else:
-                genselect(genout)
+                genselect(adjusted_genout)
         set_aibusy(0)
         return

@@ -5107,14 +5110,17 @@ def sendtocluster(txt, min, max):
         if(len(genout) == 1):
             genresult(genout[0])
         else:
+            adjusted_genout = []
+            for item in genout:
+                adjusted_genout.append({"generated_text": item})
             # Convert torch output format to transformers
             seqs = []
-            for seq in genout:
+            for seq in adjusted_genout:
                 seqs.append({"generated_text": seq})
             if(vars.lua_koboldbridge.restart_sequence is not None and vars.lua_koboldbridge.restart_sequence > 0):
-                genresult(genout[vars.lua_koboldbridge.restart_sequence-1]["generated_text"])
+                genresult(adjusted_genout[vars.lua_koboldbridge.restart_sequence-1]["generated_text"])
             else:
-                genselect(genout)
+                genselect(adjusted_genout)
         set_aibusy(0)
         return