Mirror of https://github.com/SillyTavern/SillyTavern.git (synced 2025-06-05 21:59:27 +02:00)
Merge pull request #3190 from M0ch0/staging
Update for Latest Google AI Studio
@@ -3025,34 +3025,30 @@
 <optgroup label="Primary">
     <option value="gemini-1.5-pro">Gemini 1.5 Pro</option>
     <option value="gemini-1.5-flash">Gemini 1.5 Flash</option>
-    <option value="gemini-1.0-pro">Gemini 1.0 Pro</option>
+    <option value="gemini-1.0-pro">Gemini 1.0 Pro (Deprecated)</option>
-    <option value="gemini-pro">Gemini Pro (1.0)</option>
+    <option value="gemini-pro">Gemini Pro (1.0) (Deprecated)</option>
-    <option value="gemini-pro-vision">Gemini Pro Vision (1.0)</option>
     <option value="gemini-ultra">Gemini Ultra (1.0)</option>
     <option value="gemini-1.0-ultra-latest">Gemini 1.0 Ultra</option>
-    <option value="text-bison-001">PaLM 2 (Legacy)</option>
-    <option value="chat-bison-001">PaLM 2 Chat (Legacy)</option>
 </optgroup>
 <optgroup label="Subversions">
-    <option value="gemini-2.0-flash-exp">Gemini 2.0 Flash Experiment</option>
+    <option value="gemini-2.0-flash-exp">Gemini 2.0 Flash Experimental</option>
     <option value="gemini-exp-1114">Gemini Experimental 2024-11-14</option>
     <option value="gemini-exp-1121">Gemini Experimental 2024-11-21</option>
     <option value="gemini-exp-1206">Gemini Experimental 2024-12-06</option>
-    <option value="gemini-1.5-pro-exp-0801">Gemini 1.5 Pro Experiment 2024-08-01</option>
+    <option value="gemini-1.5-pro-exp-0801">Gemini 1.5 Pro Experimental 2024-08-01</option>
-    <option value="gemini-1.5-pro-exp-0827">Gemini 1.5 Pro Experiment 2024-08-27</option>
+    <option value="gemini-1.5-pro-exp-0827">Gemini 1.5 Pro Experimental 2024-08-27</option>
     <option value="gemini-1.5-pro-latest">Gemini 1.5 Pro [latest]</option>
     <option value="gemini-1.5-pro-001">Gemini 1.5 Pro [001]</option>
     <option value="gemini-1.5-pro-002">Gemini 1.5 Pro [002]</option>
     <option value="gemini-1.5-flash-8b">Gemini 1.5 Flash 8B</option>
-    <option value="gemini-1.5-flash-exp-0827">Gemini 1.5 Flash Experiment 2024-08-27</option>
+    <option value="gemini-1.5-flash-exp-0827">Gemini 1.5 Flash Experimental 2024-08-27</option>
-    <option value="gemini-1.5-flash-8b-exp-0827">Gemini 1.5 Flash 8B Experiment 2024-08-27</option>
+    <option value="gemini-1.5-flash-8b-exp-0827">Gemini 1.5 Flash 8B Experimental 2024-08-27</option>
-    <option value="gemini-1.5-flash-8b-exp-0924">Gemini 1.5 Flash 8B Experiment 2024-09-24</option>
+    <option value="gemini-1.5-flash-8b-exp-0924">Gemini 1.5 Flash 8B Experimental 2024-09-24</option>
     <option value="gemini-1.5-flash-latest">Gemini 1.5 Flash [latest]</option>
     <option value="gemini-1.5-flash-001">Gemini 1.5 Flash [001]</option>
     <option value="gemini-1.5-flash-002">Gemini 1.5 Flash [002]</option>
-    <option value="gemini-1.0-pro-latest">Gemini 1.0 Pro [latest]</option>
+    <option value="gemini-1.0-pro-latest">Gemini 1.0 Pro [latest] (Deprecated)</option>
-    <option value="gemini-1.0-pro-001">Gemini 1.0 Pro (Tuning) [001]</option>
+    <option value="gemini-1.0-pro-001">Gemini 1.0 Pro (Tuning) [001] (Deprecated)</option>
-    <option value="gemini-1.0-pro-vision-latest">Gemini 1.0 Pro Vision [latest]</option>
 </optgroup>
 </select>
 </div>
@@ -70,7 +70,6 @@
     <option data-type="google" value="gemini-1.5-pro-002">gemini-1.5-pro-002</option>
     <option data-type="google" value="gemini-1.5-pro-exp-0801">gemini-1.5-pro-exp-0801</option>
     <option data-type="google" value="gemini-1.5-pro-exp-0827">gemini-1.5-pro-exp-0827</option>
-    <option data-type="google" value="gemini-pro-vision">gemini-pro-vision</option>
     <option data-type="groq" value="llama-3.2-11b-vision-preview">llama-3.2-11b-vision-preview</option>
     <option data-type="groq" value="llama-3.2-90b-vision-preview">llama-3.2-90b-vision-preview</option>
     <option data-type="groq" value="llava-v1.5-7b-4096-preview">llava-v1.5-7b-4096-preview</option>
@@ -4087,13 +4087,8 @@ async function onModelChange() {
         $('#openai_max_context').attr('max', max_2mil);
     } else if (value.includes('gemini-1.5-flash') || value.includes('gemini-2.0-flash-exp')) {
         $('#openai_max_context').attr('max', max_1mil);
-    } else if (value.includes('gemini-1.0-pro-vision') || value === 'gemini-pro-vision') {
-        $('#openai_max_context').attr('max', max_16k);
     } else if (value.includes('gemini-1.0-pro') || value === 'gemini-pro') {
         $('#openai_max_context').attr('max', max_32k);
-    } else if (value === 'text-bison-001') {
-        $('#openai_max_context').attr('max', max_8k);
-        // The ultra endpoints are possibly dead:
     } else if (value.includes('gemini-1.0-ultra') || value === 'gemini-ultra') {
         $('#openai_max_context').attr('max', max_32k);
     } else {
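
Note: after this hunk, the Gemini branch of onModelChange() maps the selected model to a context-size cap roughly as sketched below. This is a hedged illustration only: the max_* constants and the gemini-1.5-pro branch live outside this hunk, and the numeric values shown are assumed from the advertised context windows, not taken from this diff.

```js
// Hedged sketch of the resulting model -> max context mapping (helper name and values assumed).
function getGeminiMaxContext(value) {
    const max_2mil = 2 * 1000 * 1000; // assumed value of max_2mil
    const max_1mil = 1000 * 1000;     // assumed value of max_1mil
    const max_32k = 32 * 1024;        // assumed value of max_32k

    if (value.includes('gemini-1.5-pro')) return max_2mil; // branch above this hunk (assumed)
    if (value.includes('gemini-1.5-flash') || value.includes('gemini-2.0-flash-exp')) return max_1mil;
    if (value.includes('gemini-1.0-pro') || value === 'gemini-pro') return max_32k;
    if (value.includes('gemini-1.0-ultra') || value === 'gemini-ultra') return max_32k;
    return max_32k; // placeholder; the real else branch is outside this hunk
}
```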
@@ -4776,7 +4771,6 @@ export function isImageInliningSupported() {
         'gemini-1.5-pro-002',
         'gemini-1.5-pro-exp-0801',
         'gemini-1.5-pro-exp-0827',
-        'gemini-pro-vision',
         'claude-3',
         'claude-3-5',
         'gpt-4-turbo',
@@ -159,33 +159,6 @@ export const GEMINI_SAFETY = [
     },
 ];
-
-export const BISON_SAFETY = [
-    {
-        category: 'HARM_CATEGORY_DEROGATORY',
-        threshold: 'BLOCK_NONE',
-    },
-    {
-        category: 'HARM_CATEGORY_TOXICITY',
-        threshold: 'BLOCK_NONE',
-    },
-    {
-        category: 'HARM_CATEGORY_VIOLENCE',
-        threshold: 'BLOCK_NONE',
-    },
-    {
-        category: 'HARM_CATEGORY_SEXUAL',
-        threshold: 'BLOCK_NONE',
-    },
-    {
-        category: 'HARM_CATEGORY_MEDICAL',
-        threshold: 'BLOCK_NONE',
-    },
-    {
-        category: 'HARM_CATEGORY_DANGEROUS',
-        threshold: 'BLOCK_NONE',
-    },
-];
 
 export const CHAT_COMPLETION_SOURCES = {
     OPENAI: 'openai',
     WINDOWAI: 'windowai',
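
Note: with BISON_SAFETY gone, GEMINI_SAFETY is the only Google safety preset left in constants.js. A minimal sketch of its shape is below; the exact entries sit above this hunk and are not shown in the diff, so the categories here are assumed to be the standard Gemini harm categories, each left unblocked.

```js
// Assumed shape of the remaining GEMINI_SAFETY export (categories not confirmed by this diff).
export const GEMINI_SAFETY = [
    { category: 'HARM_CATEGORY_HARASSMENT', threshold: 'BLOCK_NONE' },
    { category: 'HARM_CATEGORY_HATE_SPEECH', threshold: 'BLOCK_NONE' },
    { category: 'HARM_CATEGORY_SEXUALLY_EXPLICIT', threshold: 'BLOCK_NONE' },
    { category: 'HARM_CATEGORY_DANGEROUS_CONTENT', threshold: 'BLOCK_NONE' },
];
```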
@@ -6,7 +6,6 @@ import { jsonParser } from '../../express-common.js';
 import {
     CHAT_COMPLETION_SOURCES,
     GEMINI_SAFETY,
-    BISON_SAFETY,
     OPENROUTER_HEADERS,
 } from '../../constants.js';
 import {
@@ -262,9 +261,7 @@ async function sendMakerSuiteRequest(request, response) {
     }
 
     const model = String(request.body.model);
-    const isGemini = model.includes('gemini');
-    const isText = model.includes('text');
-    const stream = Boolean(request.body.stream) && isGemini;
+    const stream = Boolean(request.body.stream);
 
     const generationConfig = {
         stopSequences: request.body.stop,
@@ -301,39 +298,7 @@ async function sendMakerSuiteRequest(request, response) {
         return body;
     }
 
-    function getBisonBody() {
-        const prompt = isText
-            ? ({ text: convertTextCompletionPrompt(request.body.messages) })
-            : ({ messages: convertGooglePrompt(request.body.messages, model).contents });
-
-        /** @type {any} Shut the lint up */
-        const bisonBody = {
-            ...generationConfig,
-            safetySettings: BISON_SAFETY,
-            candidate_count: 1, // lewgacy spelling
-            prompt: prompt,
-        };
-
-        if (!isText) {
-            delete bisonBody.stopSequences;
-            delete bisonBody.maxOutputTokens;
-            delete bisonBody.safetySettings;
-
-            if (Array.isArray(prompt.messages)) {
-                for (const msg of prompt.messages) {
-                    msg.author = msg.role;
-                    msg.content = msg.parts[0].text;
-                    delete msg.parts;
-                    delete msg.role;
-                }
-            }
-        }
-
-        delete bisonBody.candidateCount;
-        return bisonBody;
-    }
-
-    const body = isGemini ? getGeminiBody() : getBisonBody();
+    const body = getGeminiBody();
     console.log('Google AI Studio request:', body);
 
     try {
@@ -343,10 +308,8 @@ async function sendMakerSuiteRequest(request, response) {
             controller.abort();
         });
 
-        const apiVersion = isGemini ? 'v1beta' : 'v1beta2';
-        const responseType = isGemini
-            ? (stream ? 'streamGenerateContent' : 'generateContent')
-            : (isText ? 'generateText' : 'generateMessage');
+        const apiVersion = 'v1beta';
+        const responseType = (stream ? 'streamGenerateContent' : 'generateContent');
 
         const generateResponse = await fetch(`${apiUrl.toString().replace(/\/$/, '')}/${apiVersion}/models/${model}:${responseType}?key=${apiKey}${stream ? '&alt=sse' : ''}`, {
             body: JSON.stringify(body),
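
Note: with the PaLM/Bison paths removed, every request goes to the v1beta Gemini endpoints. A hedged sketch of the URL the kept fetch line builds is below; the base URL, key, and model are placeholders for illustration, not values from this diff.

```js
// Illustration only: apiUrl, apiKey, and model are placeholder values.
const apiUrl = 'https://generativelanguage.googleapis.com';
const apiKey = 'API_KEY';
const model = 'gemini-1.5-pro';
const stream = true;

const apiVersion = 'v1beta';
const responseType = (stream ? 'streamGenerateContent' : 'generateContent');
const url = `${apiUrl.replace(/\/$/, '')}/${apiVersion}/models/${model}:${responseType}?key=${apiKey}${stream ? '&alt=sse' : ''}`;
// -> https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-pro:streamGenerateContent?key=API_KEY&alt=sse
```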
@@ -333,8 +333,6 @@ export function convertCohereMessages(messages, charName = '', userName = '') {
  * @returns {{contents: *[], system_instruction: {parts: {text: string}}}} Prompt for Google MakerSuite models
  */
 export function convertGooglePrompt(messages, model, useSysPrompt = false, charName = '', userName = '') {
-    // This is a 1x1 transparent PNG
-    const PNG_PIXEL = 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mNkYAAAAAYAAjCB0C8AAAAASUVORK5CYII=';
 
     const visionSupportedModels = [
         'gemini-2.0-flash-exp',
@@ -355,13 +353,6 @@ export function convertGooglePrompt(messages, model, useSysPrompt = false, charN
         'gemini-1.5-pro-002',
         'gemini-1.5-pro-exp-0801',
         'gemini-1.5-pro-exp-0827',
-        'gemini-1.0-pro-vision-latest',
-        'gemini-pro-vision',
-    ];
-
-    const dummyRequiredModels = [
-        'gemini-1.0-pro-vision-latest',
-        'gemini-pro-vision',
     ];
 
     const isMultimodal = visionSupportedModels.includes(model);
@@ -452,16 +443,6 @@ export function convertGooglePrompt(messages, model, useSysPrompt = false, charN
         }
     });
 
-    // pro 1.5 doesn't require a dummy image to be attached, other vision models do
-    if (isMultimodal && dummyRequiredModels.includes(model) && !hasImage) {
-        contents[0].parts.push({
-            inlineData: {
-                mimeType: 'image/png',
-                data: PNG_PIXEL,
-            },
-        });
-    }
-
     return { contents: contents, system_instruction: system_instruction };
 }
 
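Note: the dummy 1x1 PNG was only ever attached for the old gemini-pro-vision style models, which required at least one image part per request. With those models dropped, image parts are added only when the chat actually contains an image. The part shape stays the same as in the removed block; the helper below is a hypothetical illustration, not code from this PR.

```js
// Hypothetical helper showing the inlineData part format still used for real images.
function makeImagePart(base64Data, mimeType = 'image/png') {
    return {
        inlineData: {
            mimeType: mimeType, // e.g. 'image/png' or 'image/jpeg'
            data: base64Data,   // base64-encoded image bytes, no data: URI prefix
        },
    };
}
```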