Mirror of https://github.com/SillyTavern/SillyTavern.git, synced 2025-02-18 21:20:39 +01:00
Add new Gemini thinking model and its alias, specify context size and vision support
Parent: 9f1d431c99
Commit: 11882827c7
```diff
@@ -3062,7 +3062,9 @@
                         <option value="gemini-1.0-ultra-latest">Gemini 1.0 Ultra</option>
                     </optgroup>
                     <optgroup label="Subversions">
-                        <option value="gemini-2.0-flash-thinking-exp-1219">Gemini 2.0 Flash Thinking Experimental</option>
+                        <option value="gemini-2.0-flash-thinking-exp">Gemini 2.0 Flash Thinking Experimental</option>
+                        <option value="gemini-2.0-flash-thinking-exp-01-21">Gemini 2.0 Flash Thinking Experimental 2025-01-21</option>
+                        <option value="gemini-2.0-flash-thinking-exp-1219">Gemini 2.0 Flash Thinking Experimental 2024-12-19</option>
                         <option value="gemini-2.0-flash-exp">Gemini 2.0 Flash Experimental</option>
                         <option value="gemini-exp-1114">Gemini Experimental 2024-11-14</option>
                         <option value="gemini-exp-1121">Gemini Experimental 2024-11-21</option>
```
```diff
@@ -54,6 +54,8 @@
     <option data-type="anthropic" value="claude-3-sonnet-20240229">claude-3-sonnet-20240229</option>
     <option data-type="anthropic" value="claude-3-haiku-20240307">claude-3-haiku-20240307</option>
     <option data-type="google" value="gemini-2.0-flash-exp">gemini-2.0-flash-exp</option>
+    <option data-type="google" value="gemini-2.0-flash-thinking-exp">gemini-2.0-flash-thinking-exp</option>
+    <option data-type="google" value="gemini-2.0-flash-thinking-exp-01-21">gemini-2.0-flash-thinking-exp-01-21</option>
     <option data-type="google" value="gemini-2.0-flash-thinking-exp-1219">gemini-2.0-flash-thinking-exp-1219</option>
     <option data-type="google" value="gemini-1.5-flash">gemini-1.5-flash</option>
     <option data-type="google" value="gemini-1.5-flash-latest">gemini-1.5-flash-latest</option>
```
```diff
@@ -4228,7 +4228,7 @@ async function onModelChange() {
             $('#openai_max_context').attr('max', max_32k);
         } else if (value.includes('gemini-1.5-pro') || value.includes('gemini-exp-1206')) {
             $('#openai_max_context').attr('max', max_2mil);
-        } else if (value.includes('gemini-1.5-flash') || value.includes('gemini-2.0-flash-exp')) {
+        } else if (value.includes('gemini-1.5-flash') || value.includes('gemini-2.0-flash-exp') || value.includes('gemini-2.0-flash-thinking-exp')) {
             $('#openai_max_context').attr('max', max_1mil);
         } else if (value.includes('gemini-1.0-pro') || value === 'gemini-pro') {
             $('#openai_max_context').attr('max', max_32k);
```
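To make the one-line change in onModelChange() concrete, here is a minimal, self-contained sketch of the substring-based context cap. Only the branch conditions come from the diff; getGeminiMaxContext, the token values of max_32k/max_1mil/max_2mil, and the fallback are assumptions for illustration. Because String.prototype.includes() matches substrings, the single 'gemini-2.0-flash-thinking-exp' check covers the alias and both dated snapshots; the extra clause is needed because 'gemini-2.0-flash-exp' is not a substring of the thinking model names, so they would otherwise fall through to a later branch.

```js
// Sketch only: the real handler sets $('#openai_max_context').attr('max', ...)
// instead of returning a value, and the exact token constants may differ.
const max_32k = 32 * 1024;      // assumed value
const max_1mil = 1000 * 1000;   // assumed value
const max_2mil = 2000 * 1000;   // assumed value

function getGeminiMaxContext(value) {
    if (value.includes('gemini-1.5-pro') || value.includes('gemini-exp-1206')) {
        return max_2mil;
    } else if (value.includes('gemini-1.5-flash') || value.includes('gemini-2.0-flash-exp') || value.includes('gemini-2.0-flash-thinking-exp')) {
        return max_1mil;
    } else if (value.includes('gemini-1.0-pro') || value === 'gemini-pro') {
        return max_32k;
    }
    return max_32k; // assumed fallback for this sketch
}

// The alias and both dated snapshots all land in the 1M-token branch:
console.log(getGeminiMaxContext('gemini-2.0-flash-thinking-exp'));       // 1000000
console.log(getGeminiMaxContext('gemini-2.0-flash-thinking-exp-01-21')); // 1000000
console.log(getGeminiMaxContext('gemini-2.0-flash-thinking-exp-1219'));  // 1000000
```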
```diff
@@ -360,6 +360,8 @@ export function convertCohereMessages(messages, names) {
  */
 export function convertGooglePrompt(messages, model, useSysPrompt, names) {
     const visionSupportedModels = [
+        'gemini-2.0-flash-thinking-exp',
+        'gemini-2.0-flash-thinking-exp-01-21',
         'gemini-2.0-flash-thinking-exp-1219',
         'gemini-2.0-flash-exp',
         'gemini-1.5-flash',
```
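The diff only shows the top of the vision whitelist, not how convertGooglePrompt consumes it. The sketch below is a hypothetical illustration of exact-match gating of inline image parts; buildImagePart, the mimeType handling, and the payload shape are assumptions, and only the listed array entries come from the source. An exact-match lookup (unlike the substring checks in onModelChange) would be why the alias and the dated 01-21 snapshot each need their own entry here.

```js
// Hypothetical usage sketch; not the actual convertGooglePrompt() body.
const visionSupportedModels = [
    'gemini-2.0-flash-thinking-exp',
    'gemini-2.0-flash-thinking-exp-01-21',
    'gemini-2.0-flash-thinking-exp-1219',
    'gemini-2.0-flash-exp',
    'gemini-1.5-flash',
    // ...remaining entries not shown in the diff
];

// Assumed helper: attach an inline image part only for whitelisted models.
function buildImagePart(model, base64Data, mimeType = 'image/png') {
    if (!visionSupportedModels.includes(model)) {
        return null; // model is not on the vision whitelist
    }
    return { inlineData: { mimeType, data: base64Data } };
}

console.log(buildImagePart('gemini-2.0-flash-thinking-exp-01-21', 'AAAA') !== null); // true
console.log(buildImagePart('gemini-1.0-pro', 'AAAA') !== null);                      // false
```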