Mirror of https://github.com/KoboldAI/KoboldAI-Client.git (synced 2025-06-05 21:59:24 +02:00)
Fixed Max Length limits not being enforced for transformers & InferKit
@@ -274,6 +274,7 @@ def calcsubmit(txt):
             else:
                 count = budget * -1
                 tokens = acttkns[count:] + tokens
+                break

     # Add mmory & prompt tokens to beginning of bundle
     tokens = memtokens + prompttkns + tokens
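The hunk above only shows the tail of the transformers token-budget loop. As a rough illustration of what the added break changes, here is a self-contained sketch of that loop: the names acttkns, budget, memtokens and prompttkns come from the diff context, while the wrapper function, the stand-in whitespace tokenizer and the demo values are assumptions made so the example runs on its own. Without the break, the else branch never reduces budget, so the loop would keep prepending older actions and the bundle could grow past the max length.

def build_token_bundle(actions, budget, memtokens, prompttkns):
    def encode(s):
        # Stand-in for the real tokenizer (assumption for this sketch).
        return s.split()

    tokens = []
    # Walk the actions newest-first, prepending each one until the budget is spent.
    for n in range(len(actions)):
        if budget <= 0:
            break
        acttkns = encode(actions[(-1 - n)])
        tknlen = len(acttkns)
        if tknlen < budget:
            tokens = acttkns + tokens
            budget -= tknlen
        else:
            # Only the trailing slice of this action still fits the budget.
            count = budget * -1
            tokens = acttkns[count:] + tokens
            break  # the line added by this commit: stop once the budget is exhausted

    # Prepend memory and prompt tokens to the bundle.
    return memtokens + prompttkns + tokens

if __name__ == "__main__":
    actions = ["one two three", "four five", "six seven eight nine"]
    bundle = build_token_bundle(actions, budget=6, memtokens=["<mem>"], prompttkns=["<prompt>"])
    print(bundle)  # exactly 6 action tokens plus memory/prompt; without the break it would be 8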
@@ -299,6 +300,7 @@ def calcsubmit(txt):
             else:
                 count = budget * -1
                 subtxt = vars.actions[(-1-n)][count:] + subtxt
+                break

     # Add mmory & prompt tokens to beginning of bundle
     if(vars.memory != ""):
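The second hunk patches the equivalent loop on the InferKit path, which assembles raw text under a character budget rather than tokens. The sketch below mirrors it under the same caveats: subtxt and the actions[(-1-n)][count:] slice come from the diff context, while the wrapper function and the budget handling around it are assumptions for illustration.

def build_text_bundle(actions, budget):
    subtxt = ""
    # Walk the actions newest-first, prepending raw text until the character budget is spent.
    for n in range(len(actions)):
        if budget <= 0:
            break
        actlen = len(actions[(-1 - n)])
        if actlen < budget:
            subtxt = actions[(-1 - n)] + subtxt
            budget -= actlen
        else:
            # Only the tail of this action still fits the remaining budget.
            count = budget * -1
            subtxt = actions[(-1 - n)][count:] + subtxt
            break  # the line added by this commit: stop once the budget is exhausted
    return subtxt

if __name__ == "__main__":
    actions = ["alpha ", "beta ", "gamma delta"]
    print(repr(build_text_bundle(actions, budget=15)))  # 'eta gamma delta', exactly 15 characters

As on the transformers path, the added break is what actually enforces the limit once the budget reaches zero, which is the behaviour the commit message describes.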