From 28d42e520035567223fb985304d6585a87cc99ab Mon Sep 17 00:00:00 2001
From: cloak1505 <170299980+cloak1505@users.noreply.github.com>
Date: Sat, 26 Apr 2025 11:39:44 -0500
Subject: [PATCH] Prune Google models
---
public/index.html | 62 +++++++------------
.../scripts/extensions/caption/settings.html | 27 +-------
public/scripts/openai.js | 16 ++---
src/endpoints/backends/chat-completions.js | 3 +-
4 files changed, 32 insertions(+), 76 deletions(-)
diff --git a/public/index.html b/public/index.html
index f708229d4..195c22165 100644
--- a/public/index.html
+++ b/public/index.html
@@ -3146,49 +3146,31 @@
Google Model
diff --git a/public/scripts/extensions/caption/settings.html b/public/scripts/extensions/caption/settings.html
index 539a737ef..dbab7c6ea 100644
--- a/public/scripts/extensions/caption/settings.html
+++ b/public/scripts/extensions/caption/settings.html
@@ -79,34 +79,13 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
-
-
+
+
diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index 2eb809a92..00a9ded80 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -4481,20 +4481,16 @@ async function onModelChange() {
if (oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE) {
if (oai_settings.max_context_unlocked) {
$('#openai_max_context').attr('max', max_2mil);
- } else if (value.includes('gemini-exp-1114') || value.includes('gemini-exp-1121') || value.includes('gemini-2.0-flash-thinking-exp-1219')) {
- $('#openai_max_context').attr('max', max_32k);
- } else if (value.includes('gemini-1.5-pro') || value.includes('gemini-exp-1206') || value.includes('gemini-2.0-pro')) {
+ } else if (value.includes('gemini-1.5-pro')) {
$('#openai_max_context').attr('max', max_2mil);
- } else if (value.includes('gemini-1.5-flash') || value.includes('gemini-2.0-flash') || value.includes('gemini-2.5-flash-preview-04-17') || value.includes('gemini-2.5-pro-exp-03-25') || value.includes('gemini-2.5-pro-preview-03-25')) {
+ } else if (value.includes('gemini-1.5-flash') || value.includes('gemini-2.0-flash') || value.includes('gemini-2.0-pro') || value.includes('gemini-2.5-flash') || value.includes('gemini-2.5-pro') || value.includes('learnlm-2.0-flash')) {
$('#openai_max_context').attr('max', max_1mil);
- } else if (value.includes('gemini-1.0-pro') || value === 'gemini-pro') {
- $('#openai_max_context').attr('max', max_32k);
- } else if (value.includes('gemini-1.0-ultra') || value === 'gemini-ultra') {
- $('#openai_max_context').attr('max', max_32k);
- } else if (value.includes('gemma-3')) {
+ } else if (value.includes('gemma-3-27b-it')) {
$('#openai_max_context').attr('max', max_128k);
+ } else if (value.includes('gemma-3') || value.includes('learnlm-1.5-pro-experimental')) {
+ $('#openai_max_context').attr('max', max_32k);
} else {
- $('#openai_max_context').attr('max', max_4k);
+ $('#openai_max_context').attr('max', max_32k);
}
let makersuite_max_temp = (value.includes('vision') || value.includes('ultra') || value.includes('gemma')) ? 1.0 : 2.0;
oai_settings.temp_openai = Math.min(makersuite_max_temp, oai_settings.temp_openai);
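
Below is a minimal standalone sketch of the context-window mapping that the hunk above produces, useful for eyeballing which limit a given model id resolves to. It is illustrative only: getGoogleMaxContext is a hypothetical helper name, the max_* values are assumptions standing in for the constants defined elsewhere in openai.js, and the real code applies the result via the jQuery attr() call shown in the diff.

    // Sketch only: mirrors the pruned branch order in the hunk above.
    // These numeric values are assumptions, not the constants from openai.js.
    const max_32k = 32 * 1024;
    const max_128k = 128 * 1024;
    const max_1mil = 1000 * 1000;
    const max_2mil = 2000 * 1000;

    // Hypothetical helper: returns the max context for a Google model id.
    function getGoogleMaxContext(value, maxContextUnlocked) {
        if (maxContextUnlocked) return max_2mil;
        if (value.includes('gemini-1.5-pro')) return max_2mil;
        if (value.includes('gemini-1.5-flash') || value.includes('gemini-2.0-flash') ||
            value.includes('gemini-2.0-pro') || value.includes('gemini-2.5-flash') ||
            value.includes('gemini-2.5-pro') || value.includes('learnlm-2.0-flash')) return max_1mil;
        // The 27B check must come before the generic gemma-3 check,
        // since 'gemma-3-27b-it' also contains 'gemma-3'.
        if (value.includes('gemma-3-27b-it')) return max_128k;
        if (value.includes('gemma-3') || value.includes('learnlm-1.5-pro-experimental')) return max_32k;
        return max_32k; // fallback raised from 4k to 32k by this patch
    }

    // Examples:
    // getGoogleMaxContext('gemini-2.5-pro-preview-03-25', false) === max_1mil
    // getGoogleMaxContext('gemma-3-27b-it', false) === max_128k
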
diff --git a/src/endpoints/backends/chat-completions.js b/src/endpoints/backends/chat-completions.js
index adc62e641..83b929192 100644
--- a/src/endpoints/backends/chat-completions.js
+++ b/src/endpoints/backends/chat-completions.js
@@ -372,9 +372,8 @@ async function sendMakerSuiteRequest(request, response) {
model.includes('gemini-2.5-flash') ||
model.includes('gemini-2.0-pro') ||
model.includes('gemini-2.0-flash') ||
- model.includes('gemini-2.0-flash-thinking-exp') ||
- model.includes('gemini-1.5-flash') ||
model.includes('gemini-1.5-pro') ||
+ model.includes('gemini-1.5-flash') ||
model.startsWith('gemini-exp')
) && request.body.use_makersuite_sysprompt;
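
For reference, a sketch of the system-prompt eligibility check that results from this hunk, written as a standalone predicate for readability. supportsSysprompt is a hypothetical name; the real code evaluates this expression inline in sendMakerSuiteRequest, and any checks sitting above the visible hunk context are elided here.

    // Sketch only: the post-patch eligibility check as a standalone predicate.
    function supportsSysprompt(model, useMakersuiteSysprompt) {
        return (
            model.includes('gemini-2.5-flash') ||
            model.includes('gemini-2.0-pro') ||
            model.includes('gemini-2.0-flash') || // also matches the former -thinking-exp ids
            model.includes('gemini-1.5-pro') ||
            model.includes('gemini-1.5-flash') ||
            model.startsWith('gemini-exp')
        ) && useMakersuiteSysprompt;
    }

Dropping the explicit 'gemini-2.0-flash-thinking-exp' clause does not change behavior, since includes('gemini-2.0-flash') already matches those ids; the remaining edit only reorders the 1.5-pro and 1.5-flash checks.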