Merge branch 'staging' into gemini_sys_prompt

This commit is contained in:
Cohee
2025-04-06 13:33:30 +03:00
5 changed files with 6 additions and 3 deletions

View File

@@ -3140,6 +3140,7 @@
<option value="gemma-3-27b-it">Gemma 3 27B</option>
</optgroup>
<optgroup label="Subversions">
<option value="gemini-2.5-pro-preview-03-25">Gemini 2.5 Pro Preview 2025-03-25</option>
<option value="gemini-2.5-pro-exp-03-25">Gemini 2.5 Pro Experimental 2025-03-25</option>
<option value="gemini-2.0-pro-exp">Gemini 2.0 Pro Experimental</option>
<option value="gemini-2.0-pro-exp-02-05">Gemini 2.0 Pro Experimental 2025-02-05</option>

View File

@@ -65,6 +65,7 @@
<option data-type="anthropic" value="claude-3-opus-20240229">claude-3-opus-20240229</option>
<option data-type="anthropic" value="claude-3-sonnet-20240229">claude-3-sonnet-20240229</option>
<option data-type="anthropic" value="claude-3-haiku-20240307">claude-3-haiku-20240307</option>
<option data-type="google" value="gemini-2.5-pro-preview-03-25">gemini-2.5-pro-preview-03-25</option>
<option data-type="google" value="gemini-2.5-pro-exp-03-25">gemini-2.5-pro-exp-03-25</option>
<option data-type="google" value="gemini-2.0-pro-exp">gemini-2.0-pro-exp</option>
<option data-type="google" value="gemini-2.0-pro-exp-02-05">gemini-2.0-pro-exp-02-05</option>

View File

@@ -4327,7 +4327,7 @@ async function onModelChange() {
$('#openai_max_context').attr('max', max_32k);
} else if (value.includes('gemini-1.5-pro') || value.includes('gemini-exp-1206') || value.includes('gemini-2.0-pro')) {
$('#openai_max_context').attr('max', max_2mil);
} else if (value.includes('gemini-1.5-flash') || value.includes('gemini-2.0-flash') || value.includes('gemini-2.5-pro-exp-03-25')) {
} else if (value.includes('gemini-1.5-flash') || value.includes('gemini-2.0-flash') || value.includes('gemini-2.5-pro-exp-03-25') || value.includes('gemini-2.5-pro-preview-03-25')) {
$('#openai_max_context').attr('max', max_1mil);
} else if (value.includes('gemini-1.0-pro') || value === 'gemini-pro') {
$('#openai_max_context').attr('max', max_32k);
@@ -4961,6 +4961,7 @@ export function isImageInliningSupported() {
const visionSupportedModels = [
'gpt-4-vision',
'gemini-2.5-pro-exp-03-25',
'gemini-2.5-pro-preview-03-25',
'gemini-2.0-pro-exp',
'gemini-2.0-pro-exp-02-05',
'gemini-2.0-flash-lite-preview',

View File

@@ -16,7 +16,7 @@ router.post('/caption-image', async (request, response) => {
const base64Data = request.body.image.split(',')[1];
const apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(request.user.directories, SECRET_KEYS.MAKERSUITE);
const apiUrl = new URL(request.body.reverse_proxy || API_MAKERSUITE);
const model = request.body.model || 'gemini-pro-vision';
const model = request.body.model || 'gemini-2.0-flash';
const url = `${apiUrl.origin}/v1beta/models/${model}:generateContent?key=${apiKey}`;
const body = {
contents: [{
@@ -30,7 +30,6 @@ router.post('/caption-image', async (request, response) => {
}],
}],
safetySettings: GEMINI_SAFETY,
generationConfig: { maxOutputTokens: 1000 },
};
console.debug('Multimodal captioning request', model, body);

View File

@@ -360,6 +360,7 @@ export function convertCohereMessages(messages, names) {
*/
export function convertGooglePrompt(messages, model, useSysPrompt, names) {
const visionSupportedModels = [
'gemini-2.5-pro-preview-03-25',
'gemini-2.5-pro-exp-03-25',
'gemini-2.0-pro-exp',
'gemini-2.0-pro-exp-02-05',