Reduced default max_length parameter to 512.

Added warning about VRAM usage to Max Tokens tooltip.
KoboldAI Dev 2021-05-07 19:04:51 -04:00
parent d632976fbf
commit ba1ba0fc8a
2 changed files with 6 additions and 6 deletions


@@ -48,7 +48,7 @@ class vars:
     model = ""
     noai = False # Runs the script without starting up the transformers pipeline
     aibusy = False # Stops submissions while the AI is working
-    max_length = 1024 # Maximum number of tokens to submit per action
+    max_length = 512 # Maximum number of tokens to submit per action
     ikmax = 3000 # Maximum number of characters to submit to InferKit
     genamt = 60 # Amount of text for each action to generate
     ikgen = 200 # Number of characters for InferKit to generate
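
For reference, a minimal sketch of how max_length and genamt typically interact when a submission is assembled: the context is trimmed so that the prompt plus the requested generation fits inside one token window, which is why the usable context budget shrinks as the generation amount grows (and why the new tooltip asks for Max Tokens to stay above Amount to Generate). The helper name and the GPT-2 tokenizer below are assumptions for illustration, not code from this commit.

# Sketch only: build_context and the GPT-2 tokenizer are assumptions,
# not part of the files touched by this commit.
from transformers import GPT2Tokenizer

tokenizer = GPT2Tokenizer.from_pretrained("gpt2")

def build_context(story_text, max_length=512, genamt=60):
    """Keep only the newest tokens that still leave room for generation."""
    budget = max_length - genamt          # tokens available for context
    tokens = tokenizer.encode(story_text)
    return tokens[-budget:]               # most recent tokens win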


@@ -29,15 +29,15 @@ gensettingstf = [{
 "max": 2.0,
 "step": 0.05,
 "default": 1.0,
-"tooltip": "Used to penalize words that were already generated or belong to the context"
+"tooltip": "Used to penalize words that were already generated or belong to the context."
 },
 {
 "uitype": "slider",
 "unit": "int",
 "label": "Amount to Generate",
 "id": "setoutput",
-"min": 10,
-"max": 500,
+"min": 16,
+"max": 512,
 "step": 2,
 "default": 60,
 "tooltip": "Number of tokens the AI should generate. Higher numbers will take longer to generate."
@@ -50,8 +50,8 @@ gensettingstf = [{
 "min": 512,
 "max": 2048,
 "step": 8,
-"default": 1024,
-"tooltip": "Number of tokens of context to submit to the AI for sampling."
+"default": 512,
+"tooltip": "Max number of tokens of context to submit to the AI for sampling. Make sure this is higher than Amount to Generate. Higher values increase VRAM/RAM usage."
 }]
 gensettingsik =[{
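
To illustrate how a gensettingstf entry drives the UI, here is a hedged sketch that turns the "Amount to Generate" slider definition shown above into an HTML range input. The render_slider helper and the template string are hypothetical, not something added by this commit; the dictionary values are the entry as it appears after the change.

# Sketch only: render_slider is a hypothetical helper for illustration.
def render_slider(setting):
    """Build an <input type="range"> element from a gensettings-style dict."""
    return (
        '<label for="{id}">{label}</label>'
        '<input type="range" id="{id}" min="{min}" max="{max}" '
        'step="{step}" value="{default}" title="{tooltip}">'
    ).format(**setting)

amount_to_generate = {
    "uitype": "slider",
    "unit": "int",
    "label": "Amount to Generate",
    "id": "setoutput",
    "min": 16,
    "max": 512,
    "step": 2,
    "default": 60,
    "tooltip": "Number of tokens the AI should generate. Higher numbers will take longer to generate."
}
print(render_slider(amount_to_generate))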