Mirror of https://github.com/SillyTavern/SillyTavern.git (synced 2025-02-15 11:40:44 +01:00)

Commit 82d50e553d: Merge pull request #1973 from kingbased/goog
Update Makersuite models + proper prompt conversion
@@ -2604,11 +2604,20 @@
         <div>
             <h4 data-i18n="Google Model">Google Model</h4>
             <select id="model_google_select">
-                <option value="gemini-1.5-pro">Gemini 1.5 Pro</option>
+                <optgroup label="Latest">
+                    <!-- Points to 1.0, no default 1.5 endpoint -->
                 <option value="gemini-pro">Gemini Pro</option>
                 <option value="gemini-pro-vision">Gemini Pro Vision</option>
+                <option value="gemini-ultra">Gemini Ultra</option>
                 <option value="text-bison-001">Bison Text</option>
                 <option value="chat-bison-001">Bison Chat</option>
+                </optgroup>
+                <optgroup label="Sub-versions">
+                    <option value="gemini-1.5-pro-latest">Gemini 1.5 Pro</option>
+                    <option value="gemini-1.0-pro-latest">Gemini 1.0 Pro</option>
+                    <option value="gemini-1.0-pro-vision-latest">Gemini 1.0 Pro Vision</option>
+                    <option value="gemini-1.0-ultra-latest">Gemini 1.0 Ultra</option>
+                </optgroup>
             </select>
         </div>
     </form>
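The dropdown above now groups the Google endpoints into "Latest" aliases and pinned sub-versions. As a quick, hand-written sketch (not part of this commit), the resulting option values can be listed and checked against a stored google_model setting; the GOOGLE_MODEL_OPTIONS constant and the isKnownGoogleModel helper below are illustrative names only:

// Sketch only: the values offered by #model_google_select after this change,
// grouped the same way as the <optgroup> elements above.
const GOOGLE_MODEL_OPTIONS = {
    latest: ['gemini-pro', 'gemini-pro-vision', 'gemini-ultra', 'text-bison-001', 'chat-bison-001'],
    subVersions: [
        'gemini-1.5-pro-latest',
        'gemini-1.0-pro-latest',
        'gemini-1.0-pro-vision-latest',
        'gemini-1.0-ultra-latest',
    ],
};

// Hypothetical helper: does a saved google_model value still map to an option?
function isKnownGoogleModel(value) {
    return Object.values(GOOGLE_MODEL_OPTIONS).some((group) => group.includes(value));
}

console.log(isKnownGoogleModel('gemini-1.5-pro-latest')); // true
console.log(isKnownGoogleModel('gemini-1.5-pro'));        // false: the old value is no longer offered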
@@ -3507,11 +3507,11 @@ async function onModelChange() {
     if (oai_settings.chat_completion_source == chat_completion_sources.MAKERSUITE) {
         if (oai_settings.max_context_unlocked) {
             $('#openai_max_context').attr('max', unlocked_max);
-        } else if (value === 'gemini-1.5-pro') {
+        } else if (value === 'gemini-1.5-pro-latest') {
             $('#openai_max_context').attr('max', max_1mil);
-        } else if (value === 'gemini-pro') {
+        } else if (value === 'gemini-ultra' || value === 'gemini-1.0-pro-latest' || value === 'gemini-pro' || value === 'gemini-1.0-ultra-latest') {
             $('#openai_max_context').attr('max', max_32k);
-        } else if (value === 'gemini-pro-vision') {
+        } else if (value === 'gemini-1.0-pro-vision-latest' || value === 'gemini-pro-vision') {
             $('#openai_max_context').attr('max', max_16k);
         } else {
             $('#openai_max_context').attr('max', max_8k);
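The branch above maps each Makersuite model to a context-size cap. A minimal sketch of the same mapping as a pure function, assuming a limits object that stands in for the unlocked_max, max_1mil, max_32k, max_16k and max_8k constants defined elsewhere in openai.js (the function name and the example numbers are illustrative, not from the commit):

// Illustrative restructuring of the branch above; not the code in openai.js.
function getMakersuiteMaxContext(value, limits, maxContextUnlocked) {
    if (maxContextUnlocked) {
        return limits.unlocked_max;
    }
    if (value === 'gemini-1.5-pro-latest') {
        return limits.max_1mil;
    }
    if (['gemini-ultra', 'gemini-1.0-pro-latest', 'gemini-pro', 'gemini-1.0-ultra-latest'].includes(value)) {
        return limits.max_32k;
    }
    if (['gemini-1.0-pro-vision-latest', 'gemini-pro-vision'].includes(value)) {
        return limits.max_16k;
    }
    return limits.max_8k;
}

// Example numbers purely for demonstration:
const limits = { unlocked_max: 102400, max_1mil: 1000000, max_32k: 32767, max_16k: 16383, max_8k: 8191 };
console.log(getMakersuiteMaxContext('gemini-1.0-pro-latest', limits, false)); // 32767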
@@ -3939,21 +3939,26 @@ export function isImageInliningSupported() {
         return false;
     }
 
-    const gpt4v = 'gpt-4-vision';
-    const geminiProV = 'gemini-pro-vision';
-    const claude = 'claude-3';
-
     if (!oai_settings.image_inlining) {
         return false;
     }
 
+    // gultra just isn't being offered as multimodal, thanks google.
+    const visionSupportedModels = [
+        'gpt-4-vision',
+        'gemini-1.0-pro-vision-latest',
+        'gemini-1.5-pro-latest',
+        'gemini-pro-vision',
+        'claude-3'
+    ];
+
     switch (oai_settings.chat_completion_source) {
         case chat_completion_sources.OPENAI:
-            return oai_settings.openai_model.includes(gpt4v);
+            return visionSupportedModels.some(model => oai_settings.openai_model.includes(model));
         case chat_completion_sources.MAKERSUITE:
-            return oai_settings.google_model.includes(geminiProV);
+            return visionSupportedModels.some(model => oai_settings.google_model.includes(model));
         case chat_completion_sources.CLAUDE:
-            return oai_settings.claude_model.includes(claude);
+            return visionSupportedModels.some(model => oai_settings.claude_model.includes(model));
         case chat_completion_sources.OPENROUTER:
             return !oai_settings.openrouter_force_instruct;
         case chat_completion_sources.CUSTOM:
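isImageInliningSupported() no longer compares against a single hard-coded name per backend; it now reports support when the active model id contains any entry of visionSupportedModels. A standalone sketch of that matching behaviour (the model ids passed in below are examples, not values taken from this commit):

// Same substring check as the switch above: a model supports image inlining
// if its id contains one of the known vision-capable family names.
const visionSupportedModels = [
    'gpt-4-vision',
    'gemini-1.0-pro-vision-latest',
    'gemini-1.5-pro-latest',
    'gemini-pro-vision',
    'claude-3',
];

const supportsInlining = (modelId) =>
    visionSupportedModels.some((model) => modelId.includes(model));

console.log(supportsInlining('gpt-4-vision-preview'));    // true  (contains 'gpt-4-vision')
console.log(supportsInlining('claude-3-opus-20240229'));  // true  (contains 'claude-3')
console.log(supportsInlining('gemini-1.0-ultra-latest')); // false (Ultra is not offered as multimodal)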
@@ -192,53 +192,73 @@ function convertClaudeMessages(messages, prefillString, useSysPrompt, humanMsgFi
 function convertGooglePrompt(messages, model) {
     // This is a 1x1 transparent PNG
     const PNG_PIXEL = 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mNkYAAAAAYAAjCB0C8AAAAASUVORK5CYII=';
+
+    const visionSupportedModels = [
+        'gemini-1.0-pro-vision-latest',
+        'gemini-1.5-pro-latest',
+        'gemini-pro-vision',
+    ];
+
+    const isMultimodal = visionSupportedModels.includes(model);
+    let hasImage = false;
+
     const contents = [];
-    let lastRole = '';
-    let currentText = '';
-
-    const isMultimodal = model === 'gemini-pro-vision';
-
-    if (isMultimodal) {
-        const combinedText = messages.map((message) => {
-            const role = message.role === 'assistant' ? 'MODEL: ' : 'USER: ';
-            return role + message.content;
-        }).join('\n\n').trim();
-
-        const imageEntry = messages.find((message) => message.content?.[1]?.image_url);
-        const imageData = imageEntry?.content?.[1]?.image_url?.data ?? PNG_PIXEL;
-        contents.push({
-            parts: [
-                { text: combinedText },
-                {
-                    inlineData: {
-                        mimeType: 'image/png',
-                        data: imageData,
-                    },
-                },
-            ],
-            role: 'user',
-        });
-    } else {
-        messages.forEach((message, index) => {
-            const role = message.role === 'assistant' ? 'model' : 'user';
-            if (lastRole === role) {
-                currentText += '\n\n' + message.content;
-            } else {
-                if (currentText !== '') {
-                    contents.push({
-                        parts: [{ text: currentText.trim() }],
-                        role: lastRole,
-                    });
-                }
-                currentText = message.content;
-                lastRole = role;
-            }
-            if (index === messages.length - 1) {
-                contents.push({
-                    parts: [{ text: currentText.trim() }],
-                    role: lastRole,
-                });
-            }
-        });
-    }
+    messages.forEach((message, index) => {
+        // fix the roles
+        if (message.role === 'system') {
+            message.role = 'user';
+        } else if (message.role === 'assistant') {
+            message.role = 'model';
+        }
+
+        // similar story as claude
+        if (message.name) {
+            if (Array.isArray(message.content)) {
+                message.content[0].text = `${message.name}: ${message.content[0].text}`;
+            } else {
+                message.content = `${message.name}: ${message.content}`;
+            }
+            delete message.name;
+        }
+
+        //create the prompt parts
+        const parts = [];
+        if (typeof message.content === 'string') {
+            parts.push({ text: message.content });
+        } else if (Array.isArray(message.content)) {
+            message.content.forEach((part) => {
+                if (part.type === 'text') {
+                    parts.push({ text: part.text });
+                } else if (part.type === 'image_url' && isMultimodal) {
+                    parts.push({
+                        inlineData: {
+                            mimeType: 'image/png',
+                            data: part.image_url.url,
+                        },
+                    });
+                    hasImage = true;
+                }
+            });
+        }
+
+        // merge consecutive messages with the same role
+        if (index > 0 && message.role === contents[contents.length - 1].role) {
+            contents[contents.length - 1].parts[0].text += '\n\n' + parts[0].text;
+        } else {
+            contents.push({
+                role: message.role,
+                parts: parts,
+            });
+        }
+    });
+
+    // pro 1.5 doesn't require a dummy image to be attached, other vision models do
+    if (isMultimodal && model !== 'gemini-1.5-pro-latest' && !hasImage) {
+        contents[0].parts.push({
+            inlineData: {
+                mimeType: 'image/png',
+                data: PNG_PIXEL,
+            },
+        });
+    }