Mirror of https://github.com/KoboldAI/KoboldAI-Client.git
Use the correct generation min and max when using soft prompt
@@ -1189,7 +1189,7 @@ def calcsubmit(txt):
         if(actionlen == 0):
             # First/Prompt action
             subtxt = vars.memory + winfo + anotetxt + vars.prompt
-            lnsub = lnmem + lnwi + lnprompt + lnanote
+            lnsub = lnsp + lnmem + lnwi + lnprompt + lnanote
 
             if(not vars.model in ["Colab", "OAI"]):
                 generate(subtxt, lnsub+1, lnsub+vars.genamt)
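The change adds lnsp, the token length of the loaded soft prompt, to the running total lnsub, so that the minimum and maximum lengths passed to generate() account for the soft prompt's virtual tokens. Below is a minimal sketch of that bound calculation, assuming lnsp is 0 when no soft prompt is loaded and genamt is the configured number of tokens to generate; the helper function name is illustrative and not part of the repository.

def soft_prompt_bounds(lnsp, lnmem, lnwi, lnprompt, lnanote, genamt):
    # Tokens already consumed by the submission: soft prompt (virtual
    # tokens prepended in embedding space), memory, world info, the
    # prompt itself, and the author's note.
    lnsub = lnsp + lnmem + lnwi + lnprompt + lnanote
    # generate() receives (text, minimum, maximum): the model must emit
    # at least one token beyond the submission and at most genamt more.
    return lnsub + 1, lnsub + genamt

# Example: with a 20-token soft prompt loaded, the bounds shift up by 20.
min_len, max_len = soft_prompt_bounds(lnsp=20, lnmem=50, lnwi=30,
                                      lnprompt=100, lnanote=0, genamt=80)
# min_len == 201, max_len == 280

Without the lnsp term, both bounds would be 20 tokens too small in this example, which is the off-by-soft-prompt error the commit fixes.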