diff --git a/public/index.html b/public/index.html
index 076c7553b..cc68a5c39 100644
--- a/public/index.html
+++ b/public/index.html
@@ -3025,34 +3025,30 @@
diff --git a/public/scripts/extensions/caption/settings.html b/public/scripts/extensions/caption/settings.html
index 6fdbcb5d6..5a92ba7d8 100644
--- a/public/scripts/extensions/caption/settings.html
+++ b/public/scripts/extensions/caption/settings.html
@@ -70,7 +70,6 @@
diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index 6f695ae3b..1f2fe73d5 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -4087,13 +4087,8 @@ async function onModelChange() {
         $('#openai_max_context').attr('max', max_2mil);
     } else if (value.includes('gemini-1.5-flash') || value.includes('gemini-2.0-flash-exp')) {
         $('#openai_max_context').attr('max', max_1mil);
-    } else if (value.includes('gemini-1.0-pro-vision') || value === 'gemini-pro-vision') {
-        $('#openai_max_context').attr('max', max_16k);
     } else if (value.includes('gemini-1.0-pro') || value === 'gemini-pro') {
         $('#openai_max_context').attr('max', max_32k);
-    } else if (value === 'text-bison-001') {
-        $('#openai_max_context').attr('max', max_8k);
-        // The ultra endpoints are possibly dead:
     } else if (value.includes('gemini-1.0-ultra') || value === 'gemini-ultra') {
         $('#openai_max_context').attr('max', max_32k);
     } else {
@@ -4776,7 +4771,6 @@ export function isImageInliningSupported() {
         'gemini-1.5-pro-002',
         'gemini-1.5-pro-exp-0801',
         'gemini-1.5-pro-exp-0827',
-        'gemini-pro-vision',
         'claude-3',
         'claude-3-5',
         'gpt-4-turbo',
diff --git a/src/constants.js b/src/constants.js
index d2c40daeb..7fb98eef9 100644
--- a/src/constants.js
+++ b/src/constants.js
@@ -159,33 +159,6 @@ export const GEMINI_SAFETY = [
     },
 ];
 
-export const BISON_SAFETY = [
-    {
-        category: 'HARM_CATEGORY_DEROGATORY',
-        threshold: 'BLOCK_NONE',
-    },
-    {
-        category: 'HARM_CATEGORY_TOXICITY',
-        threshold: 'BLOCK_NONE',
-    },
-    {
-        category: 'HARM_CATEGORY_VIOLENCE',
-        threshold: 'BLOCK_NONE',
-    },
-    {
-        category: 'HARM_CATEGORY_SEXUAL',
-        threshold: 'BLOCK_NONE',
-    },
-    {
-        category: 'HARM_CATEGORY_MEDICAL',
-        threshold: 'BLOCK_NONE',
-    },
-    {
-        category: 'HARM_CATEGORY_DANGEROUS',
-        threshold: 'BLOCK_NONE',
-    },
-];
-
 export const CHAT_COMPLETION_SOURCES = {
     OPENAI: 'openai',
     WINDOWAI: 'windowai',
diff --git a/src/endpoints/backends/chat-completions.js b/src/endpoints/backends/chat-completions.js
index f30a5163d..78e3f1f63 100644
--- a/src/endpoints/backends/chat-completions.js
+++ b/src/endpoints/backends/chat-completions.js
@@ -6,7 +6,6 @@ import { jsonParser } from '../../express-common.js';
 import {
     CHAT_COMPLETION_SOURCES,
     GEMINI_SAFETY,
-    BISON_SAFETY,
     OPENROUTER_HEADERS,
 } from '../../constants.js';
 import {
@@ -262,9 +261,7 @@ async function sendMakerSuiteRequest(request, response) {
     }
 
     const model = String(request.body.model);
-    const isGemini = model.includes('gemini');
-    const isText = model.includes('text');
-    const stream = Boolean(request.body.stream) && isGemini;
+    const stream = Boolean(request.body.stream);
 
     const generationConfig = {
         stopSequences: request.body.stop,
@@ -301,39 +298,7 @@
         return body;
     }
 
-    function getBisonBody() {
-        const prompt = isText
-            ? ({ text: convertTextCompletionPrompt(request.body.messages) })
-            : ({ messages: convertGooglePrompt(request.body.messages, model).contents });
-
-        /** @type {any} Shut the lint up */
-        const bisonBody = {
-            ...generationConfig,
-            safetySettings: BISON_SAFETY,
-            candidate_count: 1, // lewgacy spelling
-            prompt: prompt,
-        };
-
-        if (!isText) {
-            delete bisonBody.stopSequences;
-            delete bisonBody.maxOutputTokens;
-            delete bisonBody.safetySettings;
-
-            if (Array.isArray(prompt.messages)) {
-                for (const msg of prompt.messages) {
-                    msg.author = msg.role;
-                    msg.content = msg.parts[0].text;
-                    delete msg.parts;
-                    delete msg.role;
-                }
-            }
-        }
-
-        delete bisonBody.candidateCount;
-        return bisonBody;
-    }
-
-    const body = isGemini ? getGeminiBody() : getBisonBody();
+    const body = getGeminiBody();
     console.log('Google AI Studio request:', body);
 
     try {
@@ -343,10 +308,8 @@
             controller.abort();
         });
 
-        const apiVersion = isGemini ? 'v1beta' : 'v1beta2';
-        const responseType = isGemini
-            ? (stream ? 'streamGenerateContent' : 'generateContent')
-            : (isText ? 'generateText' : 'generateMessage');
+        const apiVersion = 'v1beta';
+        const responseType = (stream ? 'streamGenerateContent' : 'generateContent');
 
         const generateResponse = await fetch(`${apiUrl.toString().replace(/\/$/, '')}/${apiVersion}/models/${model}:${responseType}?key=${apiKey}${stream ? '&alt=sse' : ''}`, {
             body: JSON.stringify(body),
diff --git a/src/prompt-converters.js b/src/prompt-converters.js
index c23dd6a69..47f5989cc 100644
--- a/src/prompt-converters.js
+++ b/src/prompt-converters.js
@@ -333,8 +333,6 @@ export function convertCohereMessages(messages, charName = '', userName = '') {
  * @returns {{contents: *[], system_instruction: {parts: {text: string}}}} Prompt for Google MakerSuite models
  */
 export function convertGooglePrompt(messages, model, useSysPrompt = false, charName = '', userName = '') {
-    // This is a 1x1 transparent PNG
-    const PNG_PIXEL = 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mNkYAAAAAYAAjCB0C8AAAAASUVORK5CYII=';
 
     const visionSupportedModels = [
         'gemini-2.0-flash-exp',
@@ -355,13 +353,6 @@ export function convertGooglePrompt(messages, model, useSysPrompt = false, charN
         'gemini-1.5-pro-002',
         'gemini-1.5-pro-exp-0801',
         'gemini-1.5-pro-exp-0827',
-        'gemini-1.0-pro-vision-latest',
-        'gemini-pro-vision',
-    ];
-
-    const dummyRequiredModels = [
-        'gemini-1.0-pro-vision-latest',
-        'gemini-pro-vision',
     ];
 
     const isMultimodal = visionSupportedModels.includes(model);
@@ -452,16 +443,6 @@
         }
     });
 
-    // pro 1.5 doesn't require a dummy image to be attached, other vision models do
-    if (isMultimodal && dummyRequiredModels.includes(model) && !hasImage) {
-        contents[0].parts.push({
-            inlineData: {
-                mimeType: 'image/png',
-                data: PNG_PIXEL,
-            },
-        });
-    }
-
     return { contents: contents, system_instruction: system_instruction };
 }