From 2fd544cad71353fd00f0236c6a6524c33420f6f1 Mon Sep 17 00:00:00 2001
From: henk717
Date: Sun, 22 Aug 2021 20:59:47 +0200
Subject: [PATCH] Change max tokens to 4096

It works smoothly on the TPU colab, so let's allow it. People should not
turn this all the way up unless they have the hardware, but we want to
allow this for those that do.
---
 gensettings.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/gensettings.py b/gensettings.py
index ac531ba2..e113b8ee 100644
--- a/gensettings.py
+++ b/gensettings.py
@@ -70,7 +70,7 @@ gensettingstf = [{
 	"label": "Max Tokens",
 	"id": "settknmax",
 	"min": 512,
-	"max": 2048,
+	"max": 4096,
 	"step": 8,
 	"default": 1024,
 	"tooltip": "Max number of tokens of context to submit to the AI for sampling. Make sure this is higher than Amount to Generate. Higher values increase VRAM/RAM usage."