2024-04-01 23:20:17 +02:00
require ( './polyfill.js' ) ;
2023-09-16 17:48:06 +02:00
/**
 * Convert a prompt from the ChatML objects to the format used by Claude.
 * @param {object[]} messages Array of messages
 * @param {boolean} addAssistantPostfix Add Assistant postfix.
 * @param {string} addAssistantPrefill Add Assistant prefill after the assistant postfix.
 * @param {boolean} withSysPromptSupport Indicates if the Claude model supports the system prompt format.
 * @param {boolean} useSystemPrompt Indicates if the system prompt format should be used.
 * @param {string} addSysHumanMsg Add Human message between system prompt and assistant.
 * @param {boolean} excludePrefixes Exclude Human/Assistant prefixes.
 * @returns {string} Prompt for Claude
 * @copyright Prompt Conversion script taken from RisuAI by kwaroran (GPLv3).
 */
function convertClaudePrompt(messages, addAssistantPostfix, addAssistantPrefill, withSysPromptSupport, useSystemPrompt, addSysHumanMsg, excludePrefixes) {
    // Prepare messages for claude. NOTE: mutates the `messages` array in place.
    // When 'Exclude Human/Assistant prefixes' checked, set message roles to 'system' (last message is the exception).
    if (messages.length > 0) {
        if (excludePrefixes) {
            messages.slice(0, -1).forEach(message => message.role = 'system');
        } else {
            messages[0].role = 'system';
        }
        // Add the assistant's message (optionally prefilled) to the end of messages.
        if (addAssistantPostfix) {
            messages.push({
                role: 'assistant',
                content: addAssistantPrefill || '',
            });
        }
        // Find the index of the first message (beyond index 0) with an assistant role,
        // checking for a "'user' role/Human:" occurrence along the way.
        // hasUser is updated as a side effect; the scan stops at the first matching
        // assistant message, so only messages up to that point are inspected.
        let hasUser = false;
        const firstAssistantIndex = messages.findIndex((message, i) => {
            // Fixed: removed the tautological `i >= 0` guard (always true inside findIndex).
            if (message.role === 'user' || message.content.includes('\n\nHuman: ')) {
                hasUser = true;
            }
            return message.role === 'assistant' && i > 0;
        });
        // When 2.1+ and 'Use system prompt' checked, switch to the system prompt format
        // by setting the first message's role to 'system'. Insert the human's message
        // before the first assistant one if no such message or prefix was found.
        if (withSysPromptSupport && useSystemPrompt) {
            messages[0].role = 'system';
            if (firstAssistantIndex > 0 && addSysHumanMsg && !hasUser) {
                messages.splice(firstAssistantIndex, 0, {
                    role: 'user',
                    content: addSysHumanMsg,
                });
            }
        } else {
            // Otherwise, use the default message format by setting the first message's role
            // to 'user' (compatible with all claude models including 2.1).
            messages[0].role = 'user';
            // Fix message order for the default format when (messages > Context Size) by
            // retagging the 'user' message directly before the first Assistant message,
            // so two "\n\nHuman: " prefixes don't appear back to back.
            // Fixed: replaced the self-assigning ternary with a plain guarded assignment.
            if (firstAssistantIndex > 0 && !excludePrefixes
                && firstAssistantIndex - 1 !== 0
                && messages[firstAssistantIndex - 1].role === 'user') {
                messages[firstAssistantIndex - 1].role = 'FixHumMsg';
            }
        }
    }

    // Convert messages to the prompt.
    let requestPrompt = messages.map((v, i) => {
        // Set prefix according to the role. Also, when "Exclude Human/Assistant prefixes"
        // is checked, names are added via the system prefix.
        let prefix = {
            'assistant': '\n\nAssistant: ',
            'user': '\n\nHuman: ',
            'system': i === 0 ? '' : v.name === 'example_assistant' ? '\n\nA: ' : v.name === 'example_user' ? '\n\nH: ' : excludePrefixes && v.name ? `\n\n${v.name}: ` : '\n\n',
            'FixHumMsg': '\n\nFirst message: ',
        }[v.role] ?? '';
        // Claude doesn't support message names, so we'll just add them to the message content.
        return `${prefix}${v.name && v.role !== 'system' ? `${v.name}: ` : ''}${v.content}`;
    }).join('');
    return requestPrompt;
}
2023-12-30 20:04:37 +01:00
/**
 * Convert ChatML objects into working with Anthropic's new Messaging API.
 * NOTE: mutates the `messages` array and its message objects in place.
 * @param {object[]} messages Array of messages
 * @param {string} prefillString User determined prefill string
 * @param {boolean} useSysPrompt See if we want to use a system prompt
 * @param {string} humanMsgFix Add Human message between system prompt and assistant.
 * @param {string} charName Character name
 * @param {string} userName User name
 * @returns {{messages: object[], systemPrompt: string}} Merged messages plus the extracted system prompt text.
 */
function convertClaudeMessages(messages, prefillString, useSysPrompt, humanMsgFix, charName = '', userName = '') {
    let systemPrompt = '';
    if (useSysPrompt) {
        // Collect all the system messages up until the first instance of a non-system message,
        // and then remove them from the messages array.
        let i;
        for (i = 0; i < messages.length; i++) {
            if (messages[i].role !== 'system') {
                break;
            }
            // Append example names if not already done by the frontend (e.g. for group chats).
            if (userName && messages[i].name === 'example_user') {
                if (!messages[i].content.startsWith(`${userName}: `)) {
                    messages[i].content = `${userName}: ${messages[i].content}`;
                }
            }
            if (charName && messages[i].name === 'example_assistant') {
                if (!messages[i].content.startsWith(`${charName}: `)) {
                    messages[i].content = `${charName}: ${messages[i].content}`;
                }
            }
            systemPrompt += `${messages[i].content}\n\n`;
        }
        // Drop the consumed leading system messages.
        messages.splice(0, i);

        // Check if the first message in the array is of type user, if not, interject with
        // humanMsgFix or a blank message. Also prevents erroring out if the messages array is empty.
        if (messages.length === 0 || (messages.length > 0 && messages[0].role !== 'user')) {
            messages.unshift({
                role: 'user',
                content: humanMsgFix || '[Start a new chat]',
            });
        }
    }
    // Now replace all further messages that have the role 'system' with the role 'user'.
    // (or all of them if we're not using a system prompt)
    // NOTE: unlike the loop above, this path prepends example names unconditionally
    // (no startsWith duplicate check).
    messages.forEach((message) => {
        if (message.role === 'system') {
            if (userName && message.name === 'example_user') {
                message.content = `${userName}: ${message.content}`;
            }
            if (charName && message.name === 'example_assistant') {
                message.content = `${charName}: ${message.content}`;
            }
            message.role = 'user';
        }
    });

    // Shouldn't be conditional anymore, messages api expects the last role to be user
    // unless we're explicitly prefilling.
    if (prefillString) {
        messages.push({
            role: 'assistant',
            // Claude rejects assistant prefills with trailing whitespace.
            content: prefillString.trimEnd(),
        });
    }

    // Since the messaging endpoint only supports user/assistant roles in turns, we have to
    // merge messages with the same role if they follow each other.
    // Also handle multi-modality, holy slop.
    let mergedMessages = [];
    messages.forEach((message) => {
        // Multimodal messages are assumed to be [textPart, imagePart] with the image at
        // index 1 as an OpenAI-style image_url entry holding a data: URL — TODO confirm against caller.
        const imageEntry = message.content?.[1]?.image_url;
        const imageData = imageEntry?.url;
        // Parse "data:<mime>;base64,<payload>" into its mime type and base64 payload.
        const mimeType = imageData?.split(';')?.[0].split(':')?.[1];
        const base64Data = imageData?.split(',')?.[1];
        // Take care of name properties since claude messages don't support them.
        if (message.name) {
            if (Array.isArray(message.content)) {
                message.content[0].text = `${message.name}: ${message.content[0].text}`;
            } else {
                message.content = `${message.name}: ${message.content}`;
            }
            delete message.name;
        }

        // Merge into the previous message when roles repeat; four cases cover
        // string/array content on either side.
        if (mergedMessages.length > 0 && mergedMessages[mergedMessages.length - 1].role === message.role) {
            if (Array.isArray(message.content)) {
                if (Array.isArray(mergedMessages[mergedMessages.length - 1].content)) {
                    mergedMessages[mergedMessages.length - 1].content[0].text += '\n\n' + message.content[0].text;
                } else {
                    mergedMessages[mergedMessages.length - 1].content += '\n\n' + message.content[0].text;
                }
            } else {
                if (Array.isArray(mergedMessages[mergedMessages.length - 1].content)) {
                    mergedMessages[mergedMessages.length - 1].content[0].text += '\n\n' + message.content;
                } else {
                    mergedMessages[mergedMessages.length - 1].content += '\n\n' + message.content;
                }
            }
        } else {
            mergedMessages.push(message);
        }
        // If this message carried an image, rebuild the (possibly merged) tail message
        // into Anthropic's [text, image] content-block format.
        if (imageData) {
            mergedMessages[mergedMessages.length - 1].content = [
                // content[0]?.text covers array content; the || falls back to plain string content.
                { type: 'text', text: mergedMessages[mergedMessages.length - 1].content[0]?.text || mergedMessages[mergedMessages.length - 1].content },
                {
                    type: 'image', source: {
                        type: 'base64',
                        media_type: mimeType,
                        data: base64Data,
                    },
                },
            ];
        }
    });

    return { messages: mergedMessages, systemPrompt: systemPrompt.trim() };
}
2024-04-01 23:20:17 +02:00
/**
 * Convert a prompt from the ChatML objects to the format used by Cohere.
 * NOTE: mutates the `messages` array in place (leading system messages are removed).
 * @param {object[]} messages Array of messages
 * @param {string} charName Character name
 * @param {string} userName User name
 * @returns {{systemPrompt: string, chatHistory: object[], userPrompt: string}} Prompt for Cohere
 */
function convertCohereMessages(messages, charName = '', userName = '') {
    const roleMap = {
        'system': 'SYSTEM',
        'user': 'USER',
        'assistant': 'CHATBOT',
    };
    const placeholder = '[Start a new chat]';

    // Fold every leading system message into a single system prompt string,
    // then drop those messages from the array.
    let systemPrompt = '';
    let leadingSystemCount = 0;
    for (const message of messages) {
        if (message.role !== 'system') {
            break;
        }
        // Prepend example speaker names unless the frontend already did (e.g. for group chats).
        if (userName && message.name === 'example_user' && !message.content.startsWith(`${userName}: `)) {
            message.content = `${userName}: ${message.content}`;
        }
        if (charName && message.name === 'example_assistant' && !message.content.startsWith(`${charName}: `)) {
            message.content = `${charName}: ${message.content}`;
        }
        systemPrompt += `${message.content}\n\n`;
        leadingSystemCount++;
    }
    messages.splice(0, leadingSystemCount);

    // Guarantee at least one chat turn exists.
    if (messages.length === 0) {
        messages.unshift({
            role: 'user',
            content: placeholder,
        });
    }

    // Walk backwards to locate the last user/assistant turn; everything from there
    // onward is folded into the user prompt, everything before it becomes history.
    let lastTurnIndex = messages.length - 1;
    while (lastTurnIndex >= 0 && messages[lastTurnIndex].role !== 'user' && messages[lastTurnIndex].role !== 'assistant') {
        lastTurnIndex--;
    }

    const userPrompt = messages
        .slice(lastTurnIndex)
        .map((message) => message.content)
        .join('\n\n') || placeholder;

    const chatHistory = messages.slice(0, lastTurnIndex).map((message) => ({
        role: roleMap[message.role] || 'USER',
        message: message.content,
    }));

    return { systemPrompt: systemPrompt.trim(), chatHistory, userPrompt };
}
2023-12-14 19:05:27 +01:00
/**
 * Convert a prompt from the ChatML objects to the format used by Google MakerSuite models.
 * NOTE: mutates the `messages` array and its message objects in place.
 * @param {object[]} messages Array of messages
 * @param {string} model Model name
 * @param {boolean} useSysPrompt Use system prompt
 * @returns {{contents: *[], system_instruction: {parts: {text: string}}}} Prompt for Google MakerSuite models
 */
function convertGooglePrompt(messages, model, useSysPrompt = false) {
    // This is a 1x1 transparent PNG, base64-encoded.
    const PNG_PIXEL = 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mNkYAAAAAYAAjCB0C8AAAAASUVORK5CYII=';

    const visionSupportedModels = [
        'gemini-1.0-pro-vision-latest',
        'gemini-1.5-pro-latest',
        'gemini-pro-vision',
    ];
    const isMultimodal = visionSupportedModels.includes(model);
    let hasImage = false;

    // Collect leading system messages into the system instruction; the `length > 1`
    // guard intentionally leaves at least one message in the array.
    let sys_prompt = '';
    if (useSysPrompt) {
        while (messages.length > 1 && messages[0].role === 'system') {
            sys_prompt += `${messages[0].content}\n\n`;
            messages.shift();
        }
    }
    const system_instruction = { parts: { text: sys_prompt.trim() } };

    const contents = [];
    messages.forEach((message, index) => {
        // Fix the roles: Google only knows 'user' and 'model'.
        if (message.role === 'system') {
            message.role = 'user';
        } else if (message.role === 'assistant') {
            message.role = 'model';
        }

        // Similar story as claude: names are unsupported, so fold them into the content.
        if (message.name) {
            if (Array.isArray(message.content)) {
                message.content[0].text = `${message.name}: ${message.content[0].text}`;
            } else {
                message.content = `${message.name}: ${message.content}`;
            }
            delete message.name;
        }
        // Create the prompt parts. Non-multimodal models silently drop image parts.
        const parts = [];
        if (typeof message.content === 'string') {
            parts.push({ text: message.content });
        } else if (Array.isArray(message.content)) {
            message.content.forEach((part) => {
                if (part.type === 'text') {
                    parts.push({ text: part.text });
                } else if (part.type === 'image_url' && isMultimodal) {
                    parts.push({
                        inlineData: {
                            mimeType: 'image/png',
                            data: part.image_url.url,
                        },
                    });
                    hasImage = true;
                }
            });
        }
        // Merge consecutive messages with the same role.
        // NOTE(review): assumes both parts[0] and the previous message's parts[0] are
        // text parts; an image-first or empty parts array would misbehave here — confirm upstream ordering.
        if (index > 0 && message.role === contents[contents.length - 1].role) {
            contents[contents.length - 1].parts[0].text += '\n\n' + parts[0].text;
        } else {
            contents.push({
                role: message.role,
                parts: parts,
            });
        }
    });
    // pro 1.5 doesn't require a dummy image to be attached, other vision models do.
    if (isMultimodal && model !== 'gemini-1.5-pro-latest' && !hasImage) {
        contents[0].parts.push({
            inlineData: {
                mimeType: 'image/png',
                data: PNG_PIXEL,
            },
        });
    }

    return { contents: contents, system_instruction: system_instruction };
}
2023-12-14 15:00:17 +01:00
/**
 * Convert a prompt from the ChatML objects to the format used by Text Completion API.
 * @param {object[]|string} messages Array of messages (or an already-flat prompt string)
 * @returns {string} Prompt for Text Completion API
 */
function convertTextCompletionPrompt(messages) {
    // A plain string is already a text-completion prompt; pass it through untouched.
    if (typeof messages === 'string') {
        return messages;
    }
    // Each message becomes "<speaker>: <content>"; named system messages use
    // their name as the speaker, anonymous ones use "System".
    const lines = messages.map((message) => {
        const speaker = message.role === 'system'
            ? (message.name === undefined ? 'System' : message.name)
            : message.role;
        return `${speaker}: ${message.content}`;
    });
    return `${lines.join('\n')}\nassistant:`;
}
2023-09-16 17:48:06 +02:00
// Public API: ChatML-to-provider prompt converters.
module.exports = {
    convertClaudePrompt,
    convertClaudeMessages,
    convertGooglePrompt,
    convertTextCompletionPrompt,
    convertCohereMessages,
};