Mirror of https://github.com/SillyTavern/SillyTavern.git (synced 2025-02-22 06:57:41 +01:00)
Claude: remove user filler from prompt converter
commit 9382845dee
parent 626db7f8f2
@@ -230,7 +230,6 @@
     "show_external_models": false,
     "assistant_prefill": "",
     "assistant_impersonation": "",
-    "human_sysprompt_message": "Let's get started. Please generate your response based on the information and instructions provided above.",
     "claude_use_sysprompt": false,
     "use_alt_scale": false,
     "squash_system_messages": false,
@@ -1951,15 +1951,6 @@
                     Send the system prompt for supported models. If disabled, the user message is added to the beginning of the prompt.
                 </span>
             </div>
-            <div id="claude_human_sysprompt_message_block" class="wide100p">
-                <div class="range-block-title openai_restorable">
-                    <span data-i18n="User first message">User first message</span>
-                    <div id="claude_human_sysprompt_message_restore" title="Restore User first message" data-i18n="[title]Restore User first message" class="right_menu_button">
-                        <div class="fa-solid fa-clock-rotate-left"></div>
-                    </div>
-                </div>
-                <textarea id="claude_human_sysprompt_textarea" class="text_pole textarea_compact autoSetHeight" rows="2" data-i18n="[placeholder]Human message" placeholder="Human message, instruction, etc. Adds nothing when empty, i.e. requires a new prompt with the role 'user'."></textarea>
-            </div>
         </div>
     </div>
     <div class="range-block m-t-1" data-source="openai,openrouter,scale,custom">
@@ -99,7 +99,6 @@ const default_wi_format = '{0}';
 const default_new_chat_prompt = '[Start a new Chat]';
 const default_new_group_chat_prompt = '[Start a new group chat. Group members: {{group}}]';
 const default_new_example_chat_prompt = '[Example Chat]';
-const default_claude_human_sysprompt_message = 'Let\'s get started. Please generate your response based on the information and instructions provided above.';
 const default_continue_nudge_prompt = '[Continue the following message. Do not include ANY parts of the original message. Use capitalization and punctuation as if your reply is a part of the original message: {{lastChatMessage}}]';
 const default_bias = 'Default (none)';
 const default_personality_format = '[{{char}}\'s personality: {{personality}}]';
@@ -276,7 +275,6 @@ const default_settings = {
     proxy_password: '',
     assistant_prefill: '',
     assistant_impersonation: '',
-    human_sysprompt_message: default_claude_human_sysprompt_message,
     claude_use_sysprompt: false,
     use_makersuite_sysprompt: true,
     use_alt_scale: false,
@@ -353,7 +351,6 @@ const oai_settings = {
     proxy_password: '',
     assistant_prefill: '',
     assistant_impersonation: '',
-    human_sysprompt_message: default_claude_human_sysprompt_message,
     claude_use_sysprompt: false,
     use_makersuite_sysprompt: true,
     use_alt_scale: false,
@@ -1892,7 +1889,6 @@ async function sendOpenAIRequest(type, messages, signal) {
         generate_data['top_k'] = Number(oai_settings.top_k_openai);
         generate_data['claude_use_sysprompt'] = oai_settings.claude_use_sysprompt;
         generate_data['stop'] = getCustomStoppingStrings(); // Claude shouldn't have limits on stop strings.
-        generate_data['human_sysprompt_message'] = substituteParams(oai_settings.human_sysprompt_message);
         // Don't add a prefill on quiet gens (summarization) and when using continue prefill.
         if (!isQuiet && !(isContinue && oai_settings.continue_prefill)) {
             generate_data['assistant_prefill'] = isImpersonate ? substituteParams(oai_settings.assistant_impersonation) : substituteParams(oai_settings.assistant_prefill);
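For reference, a minimal sketch of what the Claude branch of sendOpenAIRequest assembles after this hunk. The helper name buildClaudePayload and the stubbed oai_settings, substituteParams, and getCustomStoppingStrings below are stand-ins for illustration only; in openai.js these come from the surrounding module.

    // Sketch only: stand-ins for the real module-level state and helpers in openai.js.
    const oai_settings = { top_k_openai: 0, claude_use_sysprompt: false, continue_prefill: false, assistant_prefill: '', assistant_impersonation: '' };
    const substituteParams = (text) => text;
    const getCustomStoppingStrings = () => [];

    // Hypothetical helper showing the Claude payload fields after this commit.
    function buildClaudePayload({ isQuiet = false, isContinue = false, isImpersonate = false } = {}) {
        const generate_data = {};
        generate_data['top_k'] = Number(oai_settings.top_k_openai);
        generate_data['claude_use_sysprompt'] = oai_settings.claude_use_sysprompt;
        generate_data['stop'] = getCustomStoppingStrings(); // Claude shouldn't have limits on stop strings.
        // Note: human_sysprompt_message is no longer part of the payload.
        if (!isQuiet && !(isContinue && oai_settings.continue_prefill)) {
            generate_data['assistant_prefill'] = isImpersonate
                ? substituteParams(oai_settings.assistant_impersonation)
                : substituteParams(oai_settings.assistant_prefill);
        }
        return generate_data;
    }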
@@ -3030,7 +3026,6 @@ function loadOpenAISettings(data, settings) {
     oai_settings.proxy_password = settings.proxy_password ?? default_settings.proxy_password;
     oai_settings.assistant_prefill = settings.assistant_prefill ?? default_settings.assistant_prefill;
     oai_settings.assistant_impersonation = settings.assistant_impersonation ?? default_settings.assistant_impersonation;
-    oai_settings.human_sysprompt_message = settings.human_sysprompt_message ?? default_settings.human_sysprompt_message;
     oai_settings.image_inlining = settings.image_inlining ?? default_settings.image_inlining;
     oai_settings.inline_image_quality = settings.inline_image_quality ?? default_settings.inline_image_quality;
     oai_settings.bypass_status_check = settings.bypass_status_check ?? default_settings.bypass_status_check;
@@ -3070,7 +3065,6 @@ function loadOpenAISettings(data, settings) {
     $('#openai_proxy_password').val(oai_settings.proxy_password);
     $('#claude_assistant_prefill').val(oai_settings.assistant_prefill);
     $('#claude_assistant_impersonation').val(oai_settings.assistant_impersonation);
-    $('#claude_human_sysprompt_textarea').val(oai_settings.human_sysprompt_message);
     $('#openai_image_inlining').prop('checked', oai_settings.image_inlining);
     $('#openai_bypass_status_check').prop('checked', oai_settings.bypass_status_check);
 
@@ -3400,7 +3394,6 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
         show_external_models: settings.show_external_models,
         assistant_prefill: settings.assistant_prefill,
         assistant_impersonation: settings.assistant_impersonation,
-        human_sysprompt_message: settings.human_sysprompt_message,
         claude_use_sysprompt: settings.claude_use_sysprompt,
         use_makersuite_sysprompt: settings.use_makersuite_sysprompt,
         use_alt_scale: settings.use_alt_scale,
@@ -3825,7 +3818,6 @@ function onSettingsPresetChange() {
         proxy_password: ['#openai_proxy_password', 'proxy_password', false],
         assistant_prefill: ['#claude_assistant_prefill', 'assistant_prefill', false],
         assistant_impersonation: ['#claude_assistant_impersonation', 'assistant_impersonation', false],
-        human_sysprompt_message: ['#claude_human_sysprompt_textarea', 'human_sysprompt_message', false],
         claude_use_sysprompt: ['#claude_use_sysprompt', 'claude_use_sysprompt', true],
         use_makersuite_sysprompt: ['#use_makersuite_sysprompt', 'use_makersuite_sysprompt', true],
         use_alt_scale: ['#use_alt_scale', 'use_alt_scale', true],
@@ -4677,10 +4669,6 @@ function toggleChatCompletionForms() {
         const validSources = $(this).data('source').split(',');
         $(this).toggle(validSources.includes(oai_settings.chat_completion_source));
     });
-
-    if (chat_completion_sources.CLAUDE == oai_settings.chat_completion_source) {
-        $('#claude_human_sysprompt_message_block').toggle(oai_settings.claude_use_sysprompt);
-    }
 }
 
 async function testApiConnection() {
@@ -5036,7 +5024,6 @@ export function initOpenAI() {
 
     $('#claude_use_sysprompt').on('change', function () {
         oai_settings.claude_use_sysprompt = !!$('#claude_use_sysprompt').prop('checked');
-        $('#claude_human_sysprompt_message_block').toggle(oai_settings.claude_use_sysprompt);
         saveSettingsDebounced();
     });
 
@@ -5113,12 +5100,6 @@ export function initOpenAI() {
         saveSettingsDebounced();
     });
 
-    $('#claude_human_sysprompt_message_restore').on('click', function () {
-        oai_settings.human_sysprompt_message = default_claude_human_sysprompt_message;
-        $('#claude_human_sysprompt_textarea').val(oai_settings.human_sysprompt_message);
-        saveSettingsDebounced();
-    });
-
     $('#newgroupchat_prompt_restore').on('click', function () {
         oai_settings.new_group_chat_prompt = default_new_group_chat_prompt;
         $('#newgroupchat_prompt_textarea').val(oai_settings.new_group_chat_prompt);
@@ -5210,11 +5191,6 @@ export function initOpenAI() {
         saveSettingsDebounced();
     });
 
-    $('#claude_human_sysprompt_textarea').on('input', function () {
-        oai_settings.human_sysprompt_message = String($('#claude_human_sysprompt_textarea').val());
-        saveSettingsDebounced();
-    });
-
     $('#openrouter_use_fallback').on('input', function () {
         oai_settings.openrouter_use_fallback = !!$(this).prop('checked');
         saveSettingsDebounced();
@@ -102,7 +102,7 @@ async function sendClaudeRequest(request, response) {
         const additionalHeaders = {};
         const useTools = request.body.model.startsWith('claude-3') && Array.isArray(request.body.tools) && request.body.tools.length > 0;
         const useSystemPrompt = (request.body.model.startsWith('claude-2') || request.body.model.startsWith('claude-3')) && request.body.claude_use_sysprompt;
-        const convertedPrompt = convertClaudeMessages(request.body.messages, request.body.assistant_prefill, useSystemPrompt, useTools, request.body.human_sysprompt_message, request.body.char_name, request.body.user_name);
+        const convertedPrompt = convertClaudeMessages(request.body.messages, request.body.assistant_prefill, useSystemPrompt, useTools, request.body.char_name, request.body.user_name);
         // Add custom stop sequences
         const stopSequences = [];
         if (Array.isArray(request.body.stop)) {
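With the humanMsgFix argument gone, charName and userName now follow useTools directly. A hedged sketch of a standalone call to the updated converter; the import path, message array, prefill, and names below are example values and not part of the commit.

    // Sketch only: the import path and example values are illustrative.
    import { convertClaudeMessages } from './src/prompt-converters.js';

    const messages = [
        { role: 'system', content: 'You are a helpful roleplay assistant.' },
        { role: 'assistant', content: 'Hello! How can I help?' },
    ];

    // New argument order after this commit: no human_sysprompt_message between useTools and charName.
    const convertedPrompt = convertClaudeMessages(
        messages,
        '',       // prefillString
        true,     // useSysPrompt
        false,    // useTools
        'Alice',  // charName
        'Bob',    // userName
    );

    // The converter's result (converted messages plus any collected system prompt) is then forwarded to the Claude API.
    console.log(convertedPrompt);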
@@ -91,11 +91,10 @@ export function convertClaudePrompt(messages, addAssistantPostfix, addAssistantP
  * @param {string} prefillString User determined prefill string
  * @param {boolean} useSysPrompt See if we want to use a system prompt
  * @param {boolean} useTools See if we want to use tools
- * @param {string} humanMsgFix Add Human message between system prompt and assistant.
  * @param {string} charName Character name
  * @param {string} userName User name
  */
-export function convertClaudeMessages(messages, prefillString, useSysPrompt, useTools, humanMsgFix, charName = '', userName = '') {
+export function convertClaudeMessages(messages, prefillString, useSysPrompt, useTools, charName, userName) {
     let systemPrompt = [];
     if (useSysPrompt) {
         // Collect all the system messages up until the first instance of a non-system message, and then remove them from the messages array.
@@ -122,10 +121,10 @@ export function convertClaudeMessages(messages, prefillString, useSysPrompt, use
 
         // Check if the first message in the array is of type user, if not, interject with humanMsgFix or a blank message.
         // Also prevents erroring out if the messages array is empty.
-        if (messages.length === 0 || (messages.length > 0 && messages[0].role !== 'user')) {
+        if (messages.length === 0) {
             messages.unshift({
                 role: 'user',
-                content: humanMsgFix || PROMPT_PLACEHOLDER,
+                content: PROMPT_PLACEHOLDER,
             });
         }
     }
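The net behavioral change of this last hunk, sketched below: only an empty message array receives a synthetic user turn, while a conversation that merely starts with a non-user message is no longer padded with the old user filler. The helper name ensureLeadingUserMessage is invented for illustration, and the PROMPT_PLACEHOLDER value is an assumption standing in for the module's existing constant.

    // Assumed value; the real constant lives in prompt-converters.js.
    const PROMPT_PLACEHOLDER = '[Start a new chat]';

    // Hypothetical helper illustrating the new fallback inside convertClaudeMessages.
    function ensureLeadingUserMessage(messages) {
        if (messages.length === 0) {
            messages.unshift({ role: 'user', content: PROMPT_PLACEHOLDER });
        }
        return messages;
    }

    // An empty prompt still gets the placeholder user turn...
    console.log(ensureLeadingUserMessage([]));
    // ...but a prompt that starts with an assistant message is left untouched,
    // where it previously would have been prefixed with human_sysprompt_message.
    console.log(ensureLeadingUserMessage([{ role: 'assistant', content: 'Hi there.' }]));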