SillyTavern/src/endpoints/prompt-converters.js


/**
* Convert a prompt from the ChatML objects to the format used by Claude.
* @param {object[]} messages Array of messages
 * @param {boolean} addAssistantPostfix Whether to add an Assistant postfix at the end of the prompt.
 * @param {string} addAssistantPrefill Prefill text to add after the Assistant postfix.
 * @param {boolean} withSyspromptSupport Indicates if the Claude model supports the system prompt format.
 * @param {boolean} useSystemPrompt Indicates if the system prompt format should be used.
 * @param {string} addSysHumanMsg Human message to insert between the system prompt and the first Assistant message when the chat has none.
* @returns {string} Prompt for Claude
* @copyright Prompt Conversion script taken from RisuAI by kwaroran (GPLv3).
*/
function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, withSyspromptSupport, useSystemPrompt, addSysHumanMsg) {
    // Declared at function scope: it is also needed below when mapping messages to prompt prefixes.
    let firstAssistantIndex = -1;
if (messages.length > 0) {
        if (addAssistantPostfix) {
            // If the chat already ends with an Assistant turn (or a message containing "Assistant:"),
            // push the postfix with the 'system' role so the prefill continues that turn instead of
            // opening a second "Assistant:" one; otherwise push it as a regular assistant message.
            const lastMessage = messages[messages.length - 1];
            const role = lastMessage.role === 'assistant' || lastMessage.content.includes('Assistant:')
                ? 'system'
                : 'assistant';
            messages.push({
                role: role,
                content: addAssistantPrefill || '',
            });
        }
        // Find the index of the first message with an assistant role and check for a 'user' role / "Human:" message before it.
        let hasUser = false;
        firstAssistantIndex = messages.findIndex((message, i) => {
            if (i > 0 && (message.role === 'user' || message.content.includes('Human:'))) {
                hasUser = true;
            }
            return message.role === 'assistant' && i > 0;
        });
        if (withSyspromptSupport && useSystemPrompt) {
            // System prompt format: the first message stays the system prompt; make sure a Human
            // message exists before the first Assistant one.
            messages[0].role = 'system';
            if (firstAssistantIndex > 0 && !hasUser) {
                messages.splice(firstAssistantIndex, 0, {
                    role: 'user',
                    content: addSysHumanMsg || 'Let\'s get started.',
                });
            }
        } else {
            // Default format: the first message becomes a Human message (compatible with all Claude models).
            messages[0].role = 'user';
            if (firstAssistantIndex > 1 && messages[firstAssistantIndex - 1].role === 'user') {
                // Mark the user message right before the first Assistant turn so it gets the "First message: " prefix.
                messages[firstAssistantIndex - 1].role = 'FirstMsg';
            }
        }
}
let requestPrompt = messages.map((v, i) => {
// Claude doesn't support message names, so we'll just add them to the message content.
if (v.name && v.role !== 'system') {
v.content = `${v.name}: ${v.content}`;
delete v.name;
        }
        let prefix = '';
        // When "use system prompt" is checked and the model supports it, the first message keeps the
        // 'system' role and an empty prefix (system prompt format). Otherwise it was switched to the
        // 'user' role above and gets the "Human: " prefix (compatible with all Claude models, including 2.1).
        // In the default format, the user message right before the first Assistant turn is prefixed with
        // "First message: " instead of "Human: ".
        if (i > 0 && i === firstAssistantIndex - 1 && v.role === 'user' && !(withSyspromptSupport && useSystemPrompt)) {
            prefix = '\n\nFirst message: ';
        } else {
            // Pick the prefix according to the role/name (role : prefix).
            prefix = {
                'assistant': '\n\nAssistant: ',
                'user': '\n\nHuman: ',
                'FirstMsg': '\n\nFirst message: ',
                'system': i === 0 ? '' : v.name === 'example_assistant' ? '\n\nA: ' : v.name === 'example_user' ? '\n\nH: ' : i === messages.length - 1 ? '\n' : '\n\n',
            }[v.role] ?? '\n\n';
        }
return prefix + v.content;
}).join('');
return requestPrompt;
}
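
// Example (an illustrative sketch, not taken from upstream tests; flag values depend on the caller's
// settings and the Claude model in use). Note that the function mutates the passed array in place.
//
//   convertClaudePrompt(
//       [
//           { role: 'system', content: 'You are a helpful roleplay assistant.' },
//           { role: 'user', content: 'Hello there!' },
//       ],
//       true,  // addAssistantPostfix
//       '',    // addAssistantPrefill
//       true,  // withSyspromptSupport
//       true,  // useSystemPrompt
//       '',    // addSysHumanMsg
//   );
//
// returns the system prompt format:
//
//   'You are a helpful roleplay assistant.\n\nHuman: Hello there!\n\nAssistant: '
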
/**
* Convert a prompt from the ChatML objects to the format used by Google MakerSuite models.
* @param {object[]} messages Array of messages
* @param {string} model Model name
* @returns {object[]} Prompt for Google MakerSuite models
*/
function convertGooglePrompt(messages, model) {
// This is a 1x1 transparent PNG
const PNG_PIXEL = 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mNkYAAAAAYAAjCB0C8AAAAASUVORK5CYII=';
const contents = [];
let lastRole = '';
let currentText = '';
const isMultimodal = model === 'gemini-pro-vision';
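    // The vision model is handled as a single user turn: the whole chat is flattened into one text part
    // plus one inline image, falling back to the transparent pixel above when no image is attached.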
if (isMultimodal) {
const combinedText = messages.map((message) => {
const role = message.role === 'assistant' ? 'MODEL: ' : 'USER: ';
return role + message.content;
}).join('\n\n').trim();
const imageEntry = messages.find((message) => message.content?.[1]?.image_url);
const imageData = imageEntry?.content?.[1]?.image_url?.data ?? PNG_PIXEL;
contents.push({
parts: [
{ text: combinedText },
{
inlineData: {
mimeType: 'image/png',
data: imageData,
},
},
],
role: 'user',
});
} else {
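        // Text-only models: merge consecutive messages that map to the same Google role
        // ('user' or 'model') into a single turn, so the resulting contents alternate.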
messages.forEach((message, index) => {
const role = message.role === 'assistant' ? 'model' : 'user';
if (lastRole === role) {
currentText += '\n\n' + message.content;
} else {
if (currentText !== '') {
contents.push({
parts: [{ text: currentText.trim() }],
role: lastRole,
});
}
currentText = message.content;
lastRole = role;
}
if (index === messages.length - 1) {
contents.push({
parts: [{ text: currentText.trim() }],
role: lastRole,
});
}
});
}
return contents;
}
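
// Example (an illustrative sketch for a text-only model, not taken from upstream tests):
//
//   convertGooglePrompt([
//       { role: 'system', content: 'Write like a pirate.' },
//       { role: 'user', content: 'Hi!' },
//       { role: 'assistant', content: 'Ahoy!' },
//   ], 'gemini-pro');
//
// returns consecutive same-role messages merged into alternating turns:
//
//   [
//       { parts: [{ text: 'Write like a pirate.\n\nHi!' }], role: 'user' },
//       { parts: [{ text: 'Ahoy!' }], role: 'model' },
//   ]
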
/**
* Convert a prompt from the ChatML objects to the format used by Text Completion API.
* @param {object[]} messages Array of messages
* @returns {string} Prompt for Text Completion API
*/
function convertTextCompletionPrompt(messages) {
if (typeof messages === 'string') {
return messages;
}
const messageStrings = [];
messages.forEach(m => {
if (m.role === 'system' && m.name === undefined) {
messageStrings.push('System: ' + m.content);
}
else if (m.role === 'system' && m.name !== undefined) {
messageStrings.push(m.name + ': ' + m.content);
}
else {
messageStrings.push(m.role + ': ' + m.content);
}
});
return messageStrings.join('\n') + '\nassistant:';
}
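
// Example (an illustrative sketch, not taken from upstream tests):
//
//   convertTextCompletionPrompt([
//       { role: 'system', content: 'Be concise.' },
//       { role: 'user', content: 'Hi' },
//   ]);
//
// returns:
//
//   'System: Be concise.\nuser: Hi\nassistant:'
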
module.exports = {
convertClaudePrompt,
convertGooglePrompt,
convertTextCompletionPrompt,
};