From e7d38d95d0df4b1bbf1a19d1a61533bb76288b00 Mon Sep 17 00:00:00 2001
From: Cohee <18619528+Cohee1207@users.noreply.github.com>
Date: Sat, 22 Feb 2025 14:37:53 +0200
Subject: [PATCH] Add max context size for llama-guard-3-8b model

---
 public/scripts/openai.js | 1 +
 1 file changed, 1 insertion(+)

diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index 07223c05b..dd86c8f51 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -4132,6 +4132,7 @@ function getGroqMaxContext(model, isUnlocked) {
         'llama-3.1-8b-instant': max_128k,
         'llama3-70b-8192': max_8k,
         'llama3-8b-8192': max_8k,
+        'llama-guard-3-8b': max_8k,
         'mixtral-8x7b-32768': max_32k,
         'deepseek-r1-distill-llama-70b': max_128k,
         'llama-3.3-70b-specdec': max_8k,
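
For context, a minimal sketch of how a model-to-context map like the one patched above is typically consumed. The surrounding variable names (`contextMap`, `max_4k`) and the fallback value are assumptions for illustration, not taken from the patch; only the hunk header confirms the `getGroqMaxContext(model, isUnlocked)` signature and the entries shown in the diff.

```js
// Sketch only, assuming a plain lookup table with an "unlocked" escape hatch.
function getGroqMaxContext(model, isUnlocked) {
    const max_4k = 4095;    // assumed conservative fallback
    const max_8k = 8191;
    const max_32k = 32767;
    const max_128k = 128 * 1000;

    // When the user unlocks the context size, skip the per-model cap.
    if (isUnlocked) {
        return max_128k;
    }

    const contextMap = {
        'llama3-8b-8192': max_8k,
        'llama-guard-3-8b': max_8k, // the entry added by this patch
        'mixtral-8x7b-32768': max_32k,
    };

    // Unknown model IDs fall back to a small default rather than failing.
    return contextMap[model] ?? max_4k;
}

// Example: getGroqMaxContext('llama-guard-3-8b', false) === 8191
```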