From 5929d5c0e45db9dcef65f8619e6abbb9fd9fece2 Mon Sep 17 00:00:00 2001
From: Cohee <18619528+Cohee1207@users.noreply.github.com>
Date: Sun, 6 Apr 2025 23:08:31 +0300
Subject: [PATCH] Groq: sync supported models

---
 public/index.html        | 17 +++++++++--------
 public/scripts/openai.js |  3 +++
 2 files changed, 12 insertions(+), 8 deletions(-)

diff --git a/public/index.html b/public/index.html
index c8b417072..58569fedc 100644
--- a/public/index.html
+++ b/public/index.html
@@ -3249,28 +3249,29 @@
 [The Groq model <option> entries in this hunk (8 removed, 9 added) were lost in extraction and are not reproduced here.]

diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index 925d7f213..eb6051ff3 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -4193,6 +4193,9 @@ function getGroqMaxContext(model, isUnlocked) {
         'qwen-2.5-32b': max_128k,
         'deepseek-r1-distill-qwen-32b': max_128k,
         'deepseek-r1-distill-llama-70b-specdec': max_128k,
+        'mistral-saba-24b': max_32k,
+        'meta-llama/llama-4-scout-17b-16e-instruct': max_128k,
+        'meta-llama/llama-4-maverick-17b-128e-instruct': max_128k,
     };

     // Return context size if model found, otherwise default to 128k
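
For context, the openai.js hunk extends a model-to-context-size lookup: the new Groq model IDs are mapped to their maximum context windows, and anything not listed falls back to 128k (per the comment visible at the end of the hunk). Below is a minimal sketch of that pattern, not the actual SillyTavern implementation: the numeric values of max_32k / max_128k, the elided earlier map entries, and the handling of the isUnlocked flag are assumptions for illustration only.

    // Minimal sketch of the lookup pattern this patch extends (assumptions noted below).
    const max_32k = 32 * 1024;   // assumed value
    const max_128k = 128 * 1024; // assumed value

    function getGroqMaxContext(model, isUnlocked) {
        // Hypothetical handling of the "unlocked" flag; the real function may differ.
        if (isUnlocked) {
            return max_128k;
        }

        const contextMap = {
            // ...existing Groq entries elided...
            'deepseek-r1-distill-llama-70b-specdec': max_128k,
            // Entries added by this patch:
            'mistral-saba-24b': max_32k,
            'meta-llama/llama-4-scout-17b-16e-instruct': max_128k,
            'meta-llama/llama-4-maverick-17b-128e-instruct': max_128k,
        };

        // Return context size if model found, otherwise default to 128k
        return contextMap[model] ?? max_128k;
    }

    // Example: getGroqMaxContext('mistral-saba-24b', false) -> 32768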