[FEATURE_REQUEST] Can the unlocked max context size for OpenAI completion be increased from 102k to 200k for example? #1842
parent 3c2113a6e7
commit 2e00a1baaf
@@ -119,7 +119,7 @@ const scale_max = 8191;
 const claude_max = 9000; // We have a proper tokenizer, so theoretically could be larger (up to 9k)
 const claude_100k_max = 99000;
 let ai21_max = 9200; //can easily fit 9k gpt tokens because j2's tokenizer is efficient af
-const unlocked_max = 100 * 1024;
+const unlocked_max = max_200k;
 const oai_max_temp = 2.0;
 const claude_max_temp = 1.0; //same as j2
 const j2_max_topk = 10.0;
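The change raises the "unlocked" context ceiling from 100 * 1024 (102,400 tokens, the "102k" in the issue title) to a shared max_200k constant. The diff does not show where max_200k is defined or how unlocked_max is consumed, so the following is a minimal sketch only: the 200 * 1000 value for max_200k is an assumption, and getMaxContext is a hypothetical helper illustrating where such a cap would typically apply, not the project's actual function.

// Sketch only: assumed definition of the new constant and a hypothetical
// consumer of unlocked_max.
const max_200k = 200 * 1000;    // assumed value for the 200k ceiling
const unlocked_max = max_200k;  // was: 100 * 1024 (102,400 tokens)

// Hypothetical illustration: when the user enables unlocked context,
// the raised ceiling is used; otherwise the per-model maximum applies.
function getMaxContext(settings) {
    return settings.max_context_unlocked ? unlocked_max : settings.model_max;
}

Using a named constant instead of the inline 100 * 1024 keeps the unlocked cap consistent with the other per-model limits declared in this block, so future ceiling bumps only need to touch one definition.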