Gemini thinking: Specify context size, system prompt and vision support

Cohee 2024-12-27 22:39:26 +02:00
parent 0d8977d688
commit a82c05a8ac
4 changed files with 5 additions and 1 deletion


@@ -54,6 +54,7 @@
<option data-type="anthropic" value="claude-3-sonnet-20240229">claude-3-sonnet-20240229</option>
<option data-type="anthropic" value="claude-3-haiku-20240307">claude-3-haiku-20240307</option>
<option data-type="google" value="gemini-2.0-flash-exp">gemini-2.0-flash-exp</option>
<option data-type="google" value="gemini-2.0-flash-thinking-exp-1219">gemini-2.0-flash-thinking-exp-1219</option>
<option data-type="google" value="gemini-1.5-flash">gemini-1.5-flash</option>
<option data-type="google" value="gemini-1.5-flash-latest">gemini-1.5-flash-latest</option>
<option data-type="google" value="gemini-1.5-flash-001">gemini-1.5-flash-001</option>


@@ -4144,7 +4144,7 @@ async function onModelChange() {
if (oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE) {
if (oai_settings.max_context_unlocked) {
$('#openai_max_context').attr('max', max_2mil);
-} else if (value.includes('gemini-exp-1114') || value.includes('gemini-exp-1121')) {
+} else if (value.includes('gemini-exp-1114') || value.includes('gemini-exp-1121') || value.includes('gemini-2.0-flash-thinking-exp-1219')) {
$('#openai_max_context').attr('max', max_32k);
} else if (value.includes('gemini-1.5-pro') || value.includes('gemini-exp-1206')) {
$('#openai_max_context').attr('max', max_2mil);
@@ -4812,6 +4812,7 @@ export function isImageInliningSupported() {
// gultra just isn't being offered as multimodal, thanks google.
const visionSupportedModels = [
'gpt-4-vision',
+'gemini-2.0-flash-thinking-exp-1219',
'gemini-2.0-flash-exp',
'gemini-1.5-flash',
'gemini-1.5-flash-latest',
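
The two hunks above cap the new thinking model's context at 32k and add it to the vision whitelist used by image inlining. As a rough illustration only, the sketch below restates the context-size branching as a standalone function; the numeric values behind max_32k and max_2mil and the fallback branch are assumptions, since the hunk only shows the first few cases.

```js
// Minimal sketch (not the project's actual code) of the branching in onModelChange.
const max_32k = 32 * 1024;        // assumed value
const max_2mil = 2 * 1000 * 1000; // assumed value

function getGeminiMaxContext(model, maxContextUnlocked) {
    if (maxContextUnlocked) {
        return max_2mil;
    }
    if (model.includes('gemini-exp-1114') || model.includes('gemini-exp-1121') || model.includes('gemini-2.0-flash-thinking-exp-1219')) {
        // The new thinking model is capped at 32k like the other experimental models.
        return max_32k;
    }
    if (model.includes('gemini-1.5-pro') || model.includes('gemini-exp-1206')) {
        return max_2mil;
    }
    return max_32k; // assumed fallback for models not shown in the hunk
}
```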


@@ -294,6 +294,7 @@ async function sendMakerSuiteRequest(request, response) {
}
const should_use_system_prompt = (
+model.includes('gemini-2.0-flash-thinking-exp') ||
model.includes('gemini-2.0-flash-exp') ||
model.includes('gemini-1.5-flash') ||
model.includes('gemini-1.5-pro') ||
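
This hunk adds the thinking model to the allow-list that decides whether the system prompt can be sent natively. The sketch below is one plausible way such a flag could feed into the request body, not the project's implementation; the helper name and the fallback behaviour are assumptions, and the payload shape follows Google's v1beta generateContent API (systemInstruction plus contents).

```js
// Hypothetical helper: consume should_use_system_prompt when building the request body.
function buildGoogleRequestBody(systemPrompt, contents, shouldUseSystemPrompt) {
    if (shouldUseSystemPrompt && systemPrompt) {
        // Models on the allow-list (now including gemini-2.0-flash-thinking-exp)
        // get the system prompt as a dedicated system instruction.
        return {
            systemInstruction: { parts: [{ text: systemPrompt }] },
            contents,
        };
    }
    // Assumed fallback: models without system prompt support get it prepended as a user turn.
    return {
        contents: [{ role: 'user', parts: [{ text: systemPrompt }] }, ...contents],
    };
}
```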


@@ -360,6 +360,7 @@ export function convertCohereMessages(messages, names) {
*/
export function convertGooglePrompt(messages, model, useSysPrompt, names) {
const visionSupportedModels = [
+'gemini-2.0-flash-thinking-exp-1219',
'gemini-2.0-flash-exp',
'gemini-1.5-flash',
'gemini-1.5-flash-latest',
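
Here the same model id is added to the vision whitelist inside convertGooglePrompt, so image attachments are only converted for models that accept them. The sketch below shows how such a whitelist can gate inline image parts during prompt conversion; the message shape, mime type, and helper name are assumptions, not taken from the project.

```js
// Hypothetical usage sketch of a vision whitelist during prompt conversion.
function toGoogleParts(message, model, visionSupportedModels) {
    const parts = [{ text: message.content }];
    const supportsVision = visionSupportedModels.some(id => model.includes(id));
    if (supportsVision && message.image) {
        // Gemini's generateContent API accepts inline images as base64 inlineData parts.
        parts.push({ inlineData: { mimeType: 'image/png', data: message.image } });
    }
    return parts;
}
```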