/**
 * Convert a prompt from the ChatML objects to the format used by Claude.
 * @param {object[]} messages Array of messages
 * @param {boolean} addAssistantPostfix Add Assistant postfix.
 * @param {string} addAssistantPrefill Add Assistant prefill after the assistant postfix.
 * @param {boolean} withSyspromptSupport Indicates if the Claude model supports the system prompt format.
 * @param {boolean} useSystemPrompt Indicates if the system prompt format should be used.
 * @param {string} addSysHumanMsg Add Human message between system prompt and assistant.
 * @returns {string} Prompt for Claude
 * @copyright Prompt Conversion script taken from RisuAI by kwaroran (GPLv3).
 */
function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, withSyspromptSupport, useSystemPrompt, addSysHumanMsg) {

    // Find the index of the first message with an assistant role and check for a "'user' role/Human:" before it.
    // Both are used again below when building the role prefixes.
    let hasUser = false;
    let firstAssistantIndex = -1;

    if (messages.length > 0) {
        // Append the assistant postfix (with the optional prefill as its content) to the end of the prompt.
        // If the conversation already ends on an assistant turn, push it as 'system' so a second "Assistant:" prefix isn't emitted.
        if (addAssistantPostfix) {
            let role = 'assistant';
            if (messages[messages.length - 1].role === 'assistant' || messages[messages.length - 1].content.includes('Assistant:')) {
                role = 'system';
            }
            messages.push({
                role: role,
                content: addAssistantPrefill || '',
            });
        }

        firstAssistantIndex = messages.findIndex((message, i) => {
            if (i > 0 && (message.role === 'user' || message.content.includes('Human:'))) {
                hasUser = true;
            }
            return message.role === 'assistant' && i > 0;
        });

        if (withSyspromptSupport && useSystemPrompt) {
            // System prompt format: the first message becomes the system prompt.
            // If there is no human turn before the first assistant message, insert one there.
            messages[0].role = 'system';
            if (!hasUser && firstAssistantIndex > 0) {
                messages.splice(firstAssistantIndex, 0, {
                    role: 'user',
                    content: addSysHumanMsg || 'Let\'s get started.',
                });
            }
        } else {
            // Default format, compatible with all Claude models including 2.1: the first message gets the "Human: " prefix.
            // Merge two messages with "\n\nHuman: " prefixes into one before the first assistant message
            // (fixes the message order for the default format when the history exceeds the context size).
            messages[0].role = 'user';
            if (firstAssistantIndex > 0) {
                messages[firstAssistantIndex - 1].role = firstAssistantIndex - 1 !== 0 && messages[firstAssistantIndex - 1].role === 'user' ? 'FirstMsg' : messages[firstAssistantIndex - 1].role;
            }
        }
    }

    // Convert messages to the Claude prompt string.
    let requestPrompt = messages.map((v, i) => {
        // Claude doesn't support message names, so we'll just add them to the message content.
        if (v.name && v.role !== 'system') {
            v.content = `${v.name}: ${v.content}`;
            delete v.name;
        }

        let prefix = '';
        // Merge two messages with "\n\nHuman: " prefixes into one before the first assistant message
        // (fixes the message order for the default Claude format when the history exceeds the context size).
        if (i > 0 && i === firstAssistantIndex - 1 && v.role === 'user' && !(withSyspromptSupport && useSystemPrompt)) {
            prefix = '\n\nFirst message: ';
        } else {
            // Role prefixes (role: prefix). Set the correct prefix according to the role/name.
            prefix = {
                'assistant': '\n\nAssistant: ',
                'user': '\n\nHuman: ',
                'FirstMsg': '\n\nFirst message: ',
                'system': i === 0 ? '' : v.name === 'example_assistant' ? '\n\nA: ' : v.name === 'example_user' ? '\n\nH: ' : i === messages.length - 1 ? '\n' : '\n\n',
            }[v.role] ?? '\n\n';
        }
        return prefix + v.content;
    }).join('');

    return requestPrompt;
}
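
/*
 * Usage sketch (illustrative only, not executed; the sample history and option values are hypothetical):
 *
 *   convertClaudePrompt(
 *       [
 *           { role: 'system', content: 'You are a concise assistant.' },
 *           { role: 'user', content: 'Hello!' },
 *       ],
 *       true,  // addAssistantPostfix
 *       '',    // addAssistantPrefill
 *       true,  // withSyspromptSupport
 *       true,  // useSystemPrompt
 *       '',    // addSysHumanMsg
 *   );
 *   // => 'You are a concise assistant.\n\nHuman: Hello!\n\nAssistant: '
 *
 * Note that the converter mutates the array passed to it (roles are rewritten and the postfix message is appended).
 */
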
/**
 * Convert a prompt from the ChatML objects to the format used by Google MakerSuite models.
 * @param {object[]} messages Array of messages
 * @param {string} model Model name
 * @returns {object[]} Prompt for Google MakerSuite models
 */
function convertGooglePrompt(messages, model) {
    // This is a 1x1 transparent PNG
    const PNG_PIXEL = 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mNkYAAAAAYAAjCB0C8AAAAASUVORK5CYII=';

    const contents = [];
    let lastRole = '';
    let currentText = '';

    const isMultimodal = model === 'gemini-pro-vision';

    if (isMultimodal) {
        // The vision model gets a single user turn: the whole history flattened into one text part plus one inline image.
        const combinedText = messages.map((message) => {
            const role = message.role === 'assistant' ? 'MODEL: ' : 'USER: ';
            return role + message.content;
        }).join('\n\n').trim();

        // Use the first attached image, or fall back to the transparent pixel so an image part is always present.
        const imageEntry = messages.find((message) => message.content?.[1]?.image_url);
        const imageData = imageEntry?.content?.[1]?.image_url?.data ?? PNG_PIXEL;

        contents.push({
            parts: [
                { text: combinedText },
                {
                    inlineData: {
                        mimeType: 'image/png',
                        data: imageData,
                    },
                },
            ],
            role: 'user',
        });
    } else {
        // Text-only models: merge consecutive messages of the same role into a single turn,
        // alternating between 'user' and 'model'.
        messages.forEach((message, index) => {
            const role = message.role === 'assistant' ? 'model' : 'user';
            if (lastRole === role) {
                currentText += '\n\n' + message.content;
            } else {
                if (currentText !== '') {
                    contents.push({
                        parts: [{ text: currentText.trim() }],
                        role: lastRole,
                    });
                }
                currentText = message.content;
                lastRole = role;
            }
            if (index === messages.length - 1) {
                contents.push({
                    parts: [{ text: currentText.trim() }],
                    role: lastRole,
                });
            }
        });
    }

    return contents;
}
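
/*
 * Usage sketch (illustrative only, not executed; the sample history is hypothetical):
 *
 *   convertGooglePrompt([
 *       { role: 'system', content: 'Be brief.' },
 *       { role: 'user', content: 'Hi!' },
 *       { role: 'assistant', content: 'Hello.' },
 *   ], 'gemini-pro');
 *   // => [
 *   //   { parts: [{ text: 'Be brief.\n\nHi!' }], role: 'user' },
 *   //   { parts: [{ text: 'Hello.' }], role: 'model' },
 *   // ]
 */
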
/**
 * Convert a prompt from the ChatML objects to the format used by Text Completion API.
 * @param {object[]} messages Array of messages
 * @returns {string} Prompt for Text Completion API
 */
function convertTextCompletionPrompt(messages) {
    if (typeof messages === 'string') {
        return messages;
    }
    const messageStrings = [];
    messages.forEach(m => {
        if (m.role === 'system' && m.name === undefined) {
            messageStrings.push('System: ' + m.content);
        }
        else if (m.role === 'system' && m.name !== undefined) {
            messageStrings.push(m.name + ': ' + m.content);
        }
        else {
            messageStrings.push(m.role + ': ' + m.content);
        }
    });
    return messageStrings.join('\n') + '\nassistant:';
}
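
/*
 * Usage sketch (illustrative only, not executed; the sample messages are hypothetical):
 *
 *   convertTextCompletionPrompt([
 *       { role: 'system', content: 'Be brief.' },
 *       { role: 'user', content: 'Hi!' },
 *   ]);
 *   // => 'System: Be brief.\nuser: Hi!\nassistant:'
 */
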
module.exports = {
    convertClaudePrompt,
    convertGooglePrompt,
    convertTextCompletionPrompt,
};