Merge pull request #2868 from SillyTavern/sysprompt-divorce

Decouple system prompts from instruct mode
Cohee authored 2024-09-22 23:56:02 +03:00, committed by GitHub
commit 733fff4e22
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
64 changed files with 1074 additions and 365 deletions
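
The change in a nutshell: the system prompt is split out of Instruct Mode into its own setting block and preset type ("sysprompt"), so it can be edited, toggled, and preset-managed independently of instruct templates. A rough before/after sketch of the affected power_user fields, condensed from the default-settings and power-user.js hunks below (sequence fields abbreviated, not the full objects):

// Before: the prompt text lived inside the instruct settings,
// alongside the "default template" pins that this PR removes.
const before = {
    default_instruct: '',
    default_context: 'Default',
    instruct: {
        enabled: false,
        preset: 'Alpaca',
        system_prompt: 'Below is an instruction that describes a task. ...',
        input_sequence: '### Instruction:',
        output_sequence: '### Response:',
        // ...other sequences
    },
};

// After: instruct templates carry only sequences; the prompt text moves
// to a separate, independently toggled sysprompt block.
const after = {
    instruct: {
        enabled: false,
        preset: 'Alpaca',
        input_sequence: '### Instruction:',
        output_sequence: '### Response:',
        // ...other sequences, no system_prompt
    },
    sysprompt: {
        enabled: true,
        name: 'Neutral - Chat',
        content: 'Write {{char}}\'s next reply in a fictional chat between {{char}} and {{user}}.',
    },
};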

View File

@ -467,10 +467,6 @@
"filename": "presets/context/Adventure.json",
"type": "context"
},
{
"filename": "presets/context/Alpaca-Roleplay.json",
"type": "context"
},
{
"filename": "presets/context/Alpaca-Single-Turn.json",
"type": "context"
@ -551,10 +547,6 @@
"filename": "presets/instruct/Adventure.json",
"type": "instruct"
},
{
"filename": "presets/instruct/Alpaca-Roleplay.json",
"type": "instruct"
},
{
"filename": "presets/instruct/Alpaca-Single-Turn.json",
"type": "instruct"
@ -679,6 +671,54 @@
"filename": "presets/instruct/Gemma 2.json",
"type": "instruct"
},
{
"filename": "presets/sysprompt/Actor.json",
"type": "sysprompt"
},
{
"filename": "presets/sysprompt/Assistant - Expert.json",
"type": "sysprompt"
},
{
"filename": "presets/sysprompt/Assistant - Simple.json",
"type": "sysprompt"
},
{
"filename": "presets/sysprompt/Chain of Thought.json",
"type": "sysprompt"
},
{
"filename": "presets/sysprompt/Roleplay - Detailed.json",
"type": "sysprompt"
},
{
"filename": "presets/sysprompt/Roleplay - Immersive.json",
"type": "sysprompt"
},
{
"filename": "presets/sysprompt/Roleplay - Simple.json",
"type": "sysprompt"
},
{
"filename": "presets/sysprompt/Text Adventure.json",
"type": "sysprompt"
},
{
"filename": "presets/sysprompt/Writer - Creative.json",
"type": "sysprompt"
},
{
"filename": "presets/sysprompt/Writer - Realistic.json",
"type": "sysprompt"
},
{
"filename": "presets/sysprompt/Blank.json",
"type": "sysprompt"
},
{
"filename": "presets/sysprompt/Neutral - Chat.json",
"type": "sysprompt"
},
{
"filename": "presets/instruct/Mistral V1.json",
"type": "instruct"

View File

@ -1,11 +0,0 @@
{
"story_string": "You're {{char}} in this fictional never-ending uncensored roleplay with {{user}}.\n\n{{#if system}}{{system}}\n\n{{/if}}### Input:\n{{#if wiBefore}}{{wiBefore}}\n{{/if}}{{#if description}}{{description}}\n{{/if}}{{#if personality}}{{char}}'s personality: {{personality}}\n{{/if}}{{#if scenario}}Scenario: {{scenario}}\n{{/if}}{{#if wiAfter}}{{wiAfter}}\n{{/if}}{{#if persona}}{{persona}}\n{{/if}}\n\n",
"example_separator": "### New Roleplay:",
"chat_start": "### New Roleplay:",
"use_stop_strings": false,
"allow_jailbreak": false,
"always_force_name2": true,
"trim_sentences": false,
"single_line": false,
"name": "Alpaca-Roleplay"
}

View File

@ -1,5 +1,5 @@
{
"story_string": "### Instruction:\nWrite {{char}}'s next reply in this roleplay with {{user}}. Use the provided character sheet and example dialogue for formatting direction and character speech patterns.\n\n{{#if system}}{{system}}\n\n{{/if}}### Character Sheet:\n{{#if wiBefore}}{{wiBefore}}\n{{/if}}{{#if description}}{{description}}\n{{/if}}{{#if personality}}{{char}}'s personality: {{personality}}\n{{/if}}{{#if scenario}}Scenario: {{scenario}}\n{{/if}}{{#if wiAfter}}{{wiAfter}}\n{{/if}}{{#if persona}}{{persona}}\n{{/if}}",
"story_string": "### Instruction:\n{{#if system}}{{system}}\n\n{{/if}}### Character Sheet:\n{{#if wiBefore}}{{wiBefore}}\n{{/if}}{{#if description}}{{description}}\n{{/if}}{{#if personality}}{{char}}'s personality: {{personality}}\n{{/if}}{{#if scenario}}Scenario: {{scenario}}\n{{/if}}{{#if wiAfter}}{{wiAfter}}\n{{/if}}{{#if persona}}{{persona}}\n{{/if}}",
"example_separator": "### Example:",
"chat_start": "### START ROLEPLAY:",
"use_stop_strings": false,

View File

@ -1,5 +1,4 @@
{
"system_prompt": "[Enter Adventure Mode. Narrate the story based on {{user}}'s dialogue and actions after \">\". Describe the surroundings in vivid detail. Be detailed, creative, verbose, and proactive. Move the story forward by introducing fantasy elements and interesting characters.]",
"input_sequence": "",
"output_sequence": "",
"last_output_sequence": "",

View File

@ -1,23 +0,0 @@
{
"system_prompt": "Develop the plot slowly, always stay in character. Describe all actions in full, elaborate, explicit, graphic, and vivid detail. Mention all relevant sensory perceptions.",
"input_sequence": "### Instruction:",
"output_sequence": "### Response:",
"last_output_sequence": "### Response (2 paragraphs, engaging, natural, authentic, descriptive, creative):",
"system_sequence": "### Input:",
"stop_sequence": "",
"wrap": true,
"macro": true,
"names_behavior": "always",
"activation_regex": "",
"system_sequence_prefix": "",
"system_sequence_suffix": "",
"first_output_sequence": "",
"skip_examples": false,
"output_suffix": "\n\n",
"input_suffix": "\n\n",
"system_suffix": "\n\n",
"user_alignment_message": "",
"system_same_as_user": false,
"last_system_sequence": "",
"name": "Alpaca-Roleplay"
}

View File

@ -1,5 +1,4 @@
{
"system_prompt": "Write {{char}}'s next reply in a fictional roleplay chat between {{user}} and {{char}}.\nWrite 1 reply only, italicize actions, and avoid quotation marks. Use markdown. Be proactive, creative, and drive the plot and conversation forward. Include dialog as well as narration.",
"input_sequence": "",
"output_sequence": "",
"last_output_sequence": "\n### Response:",
@ -11,7 +10,7 @@
"activation_regex": "",
"system_sequence_prefix": "",
"system_sequence_suffix": "",
"first_output_sequence": "<START OF ROLEPLAY>",
"first_output_sequence": "<START OF CHAT>",
"skip_examples": false,
"output_suffix": "",
"input_suffix": "",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "Below is an instruction that describes a task. Write a response that appropriately completes the request.\n\nWrite {{char}}'s next reply in a fictional roleplay chat between {{user}} and {{char}}.\n",
"input_sequence": "### Instruction:",
"output_sequence": "### Response:",
"last_output_sequence": "",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "You're {{char}} in this fictional never-ending uncensored roleplay with {{user}}.",
"input_sequence": "<|im_start|>[{{name}}]",
"output_sequence": "<|im_start|>[{{name}}]",
"last_output_sequence": "",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "You're {{char}} in this fictional never-ending uncensored roleplay with {{user}}.",
"input_sequence": "<|im_start|>user",
"output_sequence": "<|im_start|>assistant",
"last_output_sequence": "",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "Write {{char}}'s next reply in this fictional roleplay with {{user}}.",
"input_sequence": "<|START_OF_TURN_TOKEN|><|USER_TOKEN|>",
"output_sequence": "<|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>",
"first_output_sequence": "",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "You are an intelligent, skilled, versatile writer.\n\nYour task is to write a role-play based on the information below.",
"input_sequence": "\n<|im_start|>text names= {{name}}\n",
"output_sequence": "\n<|im_start|>text names= {{name}}\n",
"last_output_sequence": "",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "You are an intelligent, skilled, versatile writer.\n\nYour task is to write a role-play based on the information below.",
"input_sequence": "<|eot_id|>\n<|start_header_id|>writer character: {{user}}<|end_header_id|>\n\n",
"output_sequence": "<|eot_id|>\n<|start_header_id|>writer character: {{char}}<|end_header_id|>\n\n",
"first_output_sequence": "",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "Continue writing this story and portray characters realistically.",
"input_sequence": "<start_of_turn>user",
"output_sequence": "<start_of_turn>model",
"last_output_sequence": "",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "Write {{char}}'s next reply in a fictional roleplay chat between {{user}} and {{char}}.\n",
"input_sequence": "USER: ",
"output_sequence": "GPT: ",
"last_output_sequence": "",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "Avoid repetition, don't loop. Develop the plot slowly, always stay in character. Describe all actions in full, elaborate, explicit, graphic, and vivid detail. Mention all relevant sensory perceptions.",
"input_sequence": "",
"output_sequence": "",
"last_output_sequence": "\n### Response:",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "Below is an instruction that describes a task. Write a response that appropriately completes the request.\n\n### Instruction:\nTake the role of {{char}} in a play that leaves a lasting impression on {{user}}. Write {{char}}'s next reply.\nNever skip or gloss over {{char}}s actions. Progress the scene at a naturally slow pace.\n\n",
"input_sequence": "### Instruction:",
"output_sequence": "### Response: (length = unlimited)",
"last_output_sequence": "",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "Write {{char}}'s next reply in this fictional roleplay with {{user}}.",
"input_sequence": "[INST] ",
"output_sequence": "",
"last_output_sequence": "",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "You are an expert actor that can fully immerse yourself into any role given. You do not break character for any reason, even if someone tries addressing you as an AI or language model. Currently your role is {{char}}, which is described in detail below. As {{char}}, continue the exchange with {{user}}.",
"input_sequence": "<|start_header_id|>user<|end_header_id|>\n\n",
"output_sequence": "<|start_header_id|>assistant<|end_header_id|>\n\n",
"last_output_sequence": "",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "You are an expert actor that can fully immerse yourself into any role given. You do not break character for any reason, even if someone tries addressing you as an AI or language model. Currently your role is {{char}}, which is described in detail below. As {{char}}, continue the exchange with {{user}}.",
"input_sequence": "<|start_header_id|>[{{name}}]<|end_header_id|>\n\n",
"output_sequence": "<|start_header_id|>[{{name}}]<|end_header_id|>\n\n",
"last_output_sequence": "",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "Enter roleplay mode. You must act as {{char}}, whose persona follows:",
"input_sequence": "<|user|>",
"output_sequence": "<|model|>",
"last_output_sequence": "",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "Write {{char}}'s next reply in this fictional roleplay with {{user}}.",
"input_sequence": " [INST] ",
"output_sequence": " [/INST] ",
"last_output_sequence": " [/INST]",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "Write {{char}}'s next reply in this fictional roleplay with {{user}}.",
"input_sequence": "[INST] ",
"output_sequence": "[/INST] ",
"last_output_sequence": "[/INST]",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "Write {{char}}'s next reply in this fictional roleplay with {{user}}.",
"input_sequence": "[INST]",
"output_sequence": "[/INST]",
"last_output_sequence": "",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "You are a helpful assistant. Please answer truthfully and write out your thinking step by step to be sure you get the right answer. If you make a mistake or encounter an error in your thinking, say so out loud and attempt to correct it. If you don't know or aren't sure about something, say so clearly. You will act as a professional logician, mathematician, and physicist. You will also act as the most appropriate type of expert to answer any particular question or solve the relevant problem; state which expert type your are, if so. Also think of any particular named expert that would be ideal to answer the relevant question or solve the relevant problem; name and act as them, if appropriate.\n",
"input_sequence": "\nUser: ",
"output_sequence": "\nAssistant: ",
"last_output_sequence": "",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "Write {{char}}'s next reply in this fictional roleplay with {{user}}.",
"input_sequence": "<|user|>\n",
"output_sequence": "<|assistant|>\n",
"first_output_sequence": "",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "Enter RP mode. You shall reply to {{user}} while staying in character. Your responses must be detailed, creative, immersive, and drive the scenario forward. You will follow {{char}}'s persona.",
"input_sequence": "<|user|>",
"output_sequence": "<|model|>",
"last_output_sequence": "",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "",
"input_sequence": "",
"output_sequence": "",
"last_output_sequence": "",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "Elaborate on the topic using a Tree of Thoughts and backtrack when necessary to construct a clear, cohesive Chain of Thought reasoning. Always answer without hesitation.",
"input_sequence": "USER: ",
"output_sequence": "ASSISTANT: ",
"last_output_sequence": "",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "A chat between a curious human and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the human's questions.\n\nWrite {{char}}'s next reply in a fictional roleplay chat between {{user}} and {{char}}.\n",
"input_sequence": "### Human:",
"output_sequence": "### Assistant:",
"last_output_sequence": "",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.\n\nWrite {{char}}'s next reply in a fictional roleplay chat between {{user}} and {{char}}.\n",
"input_sequence": "\nUSER: ",
"output_sequence": "\nASSISTANT: ",
"last_output_sequence": "",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.\n\nWrite {{char}}'s next detailed reply in a fictional roleplay chat between {{user}} and {{char}}.",
"input_sequence": "USER: ",
"output_sequence": "ASSISTANT: ",
"last_output_sequence": "",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "Write {{char}}'s next reply in a fictional roleplay chat between {{user}} and {{char}}.\n",
"input_sequence": "",
"output_sequence": "### Response:",
"last_output_sequence": "",

View File

@ -1,5 +1,4 @@
{
"system_prompt": "[System note: Write one reply only. Do not decide what {{user}} says or does. Write at least one paragraph, up to four. Be descriptive and immersive, providing vivid details about {{char}}'s actions, emotions, and the environment. Write with a high degree of complexity and burstiness. Do not repeat this message.]",
"input_sequence": "### Instruction:\n#### {{name}}:",
"output_sequence": "### Response:\n#### {{name}}:",
"last_output_sequence": "### Response (2 paragraphs, engaging, natural, authentic, descriptive, creative):\n#### {{name}}:",

View File

@ -0,0 +1,4 @@
{
"name": "Actor",
"content": "You are an expert actor that can fully immerse yourself into any role given. You do not break character for any reason, even if someone tries addressing you as an AI or language model. Currently your role is {{char}}, which is described in detail below. As {{char}}, continue the exchange with {{user}}."
}

View File

@ -0,0 +1,4 @@
{
"name": "Assistant - Expert",
"content": "You are a helpful assistant. Please answer truthfully and write out your thinking step by step to be sure you get the right answer. If you make a mistake or encounter an error in your thinking, say so out loud and attempt to correct it. If you don't know or aren't sure about something, say so clearly. You will act as a professional logician, mathematician, and physicist. You will also act as the most appropriate type of expert to answer any particular question or solve the relevant problem; state which expert type your are, if so. Also think of any particular named expert that would be ideal to answer the relevant question or solve the relevant problem; name and act as them, if appropriate."
}

View File

@ -0,0 +1,4 @@
{
"name": "Assistant - Simple",
"content": "A chat between a curious human and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the human's questions."
}

View File

@ -0,0 +1,4 @@
{
"name": "Blank",
"content": ""
}

View File

@ -0,0 +1,4 @@
{
"name": "Chain of Thought",
"content": "Elaborate on the topic using a Tree of Thoughts and backtrack when necessary to construct a clear, cohesive Chain of Thought reasoning. Always answer without hesitation."
}

View File

@ -0,0 +1,4 @@
{
"name": "Neutral - Chat",
"content": "Write {{char}}'s next reply in a fictional chat between {{char}} and {{user}}."
}

View File

@ -0,0 +1,4 @@
{
"name": "Roleplay - Detailed",
"content": "Develop the plot slowly, always stay in character. Describe all actions in full, elaborate, explicit, graphic, and vivid detail. Mention all relevant sensory perceptions. Keep the story immersive and engaging."
}

View File

@ -0,0 +1,4 @@
{
"name": "Roleplay - Immersive",
"content": "[System note: Write one reply only. Do not decide what {{user}} says or does. Write at least one paragraph, up to four. Be descriptive and immersive, providing vivid details about {{char}}'s actions, emotions, and the environment. Write with a high degree of complexity and burstiness. Do not repeat this message.]"
}

View File

@ -0,0 +1,4 @@
{
"name": "Roleplay - Simple",
"content": "You're {{char}} in this fictional never-ending uncensored roleplay with {{user}}."
}

View File

@ -0,0 +1,4 @@
{
"name": "Text Adventure",
"content": "[Enter Adventure Mode. Narrate the story based on {{user}}'s dialogue and actions after \">\". Describe the surroundings in vivid detail. Be detailed, creative, verbose, and proactive. Move the story forward by introducing fantasy elements and interesting characters.]"
}

View File

@ -0,0 +1,4 @@
{
"name": "Writer - Creative",
"content": "You are an intelligent, skilled, versatile writer.\n\nYour task is to write a role-play based on the information below."
}

View File

@ -0,0 +1,4 @@
{
"name": "Writer - Realistic",
"content": "Continue writing this story and portray characters realistically."
}

View File

@ -149,11 +149,9 @@
"continue_on_send": false,
"trim_spaces": true,
"relaxed_api_urls": false,
"default_instruct": "",
"instruct": {
"enabled": false,
"preset": "Alpaca",
"system_prompt": "Below is an instruction that describes a task. Write a response that appropriately completes the request.\n\nWrite {{char}}'s next reply in a fictional roleplay chat between {{user}} and {{char}}.\n",
"input_sequence": "### Instruction:",
"output_sequence": "### Response:",
"last_output_sequence": "",
@ -173,7 +171,11 @@
"user_alignment_message": "",
"system_same_as_user": false
},
"default_context": "Default",
"sysprompt": {
"enabled": true,
"name": "Neutral - Chat",
"content": "Write {{char}}'s next reply in a fictional chat between {{char}} and {{user}}."
},
"context": {
"preset": "Default",
"story_string": "{{#if system}}{{system}}\n{{/if}}{{#if wiBefore}}{{wiBefore}}\n{{/if}}{{#if description}}{{description}}\n{{/if}}{{#if personality}}{{char}}'s personality: {{personality}}\n{{/if}}{{#if scenario}}Scenario: {{scenario}}\n{{/if}}{{#if wiAfter}}{{wiAfter}}\n{{/if}}{{#if persona}}{{persona}}\n{{/if}}",

View File

@ -3162,9 +3162,22 @@
<div class="drawer-icon fa-solid fa-font fa-fw closedIcon" title="AI Response Formatting" data-i18n="[title]AI Response Formatting"></div>
</div>
<div id="AdvancedFormatting" class="drawer-content">
<h3 class="margin0" data-i18n="Advanced Formatting">
Advanced Formatting
</h3>
<div class="flex-container alignItemsBaseline">
<h3 class="margin0 flex1" data-i18n="Advanced Formatting">
Advanced Formatting
</h3>
<div class="flex-container">
<input id="af_master_import_file" type="file" hidden accept=".json" class="displayNone">
<div id="af_master_import" class="menu_button menu_button_icon" title="Import Advanced Formatting settings" data-i18n="[title]Import Advanced Formatting settings">
<i class="fa-solid fa-file-import"></i>
<span data-i18n="Master Import">Master Import</span>
</div>
<div id="af_master_export" class="menu_button menu_button_icon" title="Export Advanced Formatting settings" data-i18n="[title]Export Advanced Formatting settings">
<i class="fa-solid fa-file-export"></i>
<span data-i18n="Master Export">Master Export</span>
</div>
</div>
</div>
<div class="flex-container spaceEvenly">
<div id="ContextSettings" class="flex-container flexNoGap flexFlowColumn flex1">
<div>
@ -3176,18 +3189,17 @@
</a>
</div>
</h4>
<div class="flex-container justifyCenter">
<input type="file" hidden data-preset-manager-file="context" accept=".json, .settings">
<i id="context_set_default" class="menu_button fa-solid fa-heart" title="Auto-select this template for Instruct Mode." data-i18n="[title]Auto-select this template for Instruct Mode"></i>
<i data-preset-manager-update="context" class="menu_button fa-solid fa-save" title="Update current template" data-i18n="[title]Update current template"></i>
<i data-preset-manager-new="context" class="menu_button fa-solid fa-file-circle-plus" title="Save template as" data-i18n="[title]Save template as"></i>
<i data-preset-manager-import="context" class="menu_button fa-solid fa-file-import" title="Import template" data-i18n="[title]Import template"></i>
<i data-preset-manager-export="context" class="menu_button fa-solid fa-file-export" title="Export template" data-i18n="[title]Export template"></i>
<i data-preset-manager-restore="context" class="menu_button fa-solid fa-recycle" title="Restore current template" data-i18n="[title]Restore current template"></i>
<i id="context_delete_preset" data-preset-manager-delete="context" class="menu_button fa-solid fa-trash-can" title="Delete the template" data-i18n="[title]Delete the template"></i>
</div>
<div class="flex-container" title="Select your current Context Template" data-i18n="[title]Select your current Context Template">
<select id="context_presets" data-preset-manager-for="context" class="flex1 text_pole"></select>
<div class="flex-container justifyCenter">
<input type="file" hidden data-preset-manager-file="context" accept=".json, .settings">
<i data-preset-manager-update="context" class="menu_button fa-solid fa-save" title="Update current template" data-i18n="[title]Update current template"></i>
<i data-preset-manager-new="context" class="menu_button fa-solid fa-file-circle-plus" title="Save template as" data-i18n="[title]Save template as"></i>
<i data-preset-manager-import="context" class="displayNone menu_button fa-solid fa-file-import" title="Import template" data-i18n="[title]Import template"></i>
<i data-preset-manager-export="context" class="displayNone menu_button fa-solid fa-file-export" title="Export template" data-i18n="[title]Export template"></i>
<i data-preset-manager-restore="context" class="menu_button fa-solid fa-recycle" title="Restore current template" data-i18n="[title]Restore current template"></i>
<i id="context_delete_preset" data-preset-manager-delete="context" class="menu_button fa-solid fa-trash-can" title="Delete the template" data-i18n="[title]Delete the template"></i>
</div>
</div>
<div>
<label for="context_story_string" class="flex-container">
@ -3218,9 +3230,6 @@
<span data-i18n="Context Formatting">
Context Formatting
</span>
<small data-i18n="(Saves to Template)">
(Saves to Template)
</small>
</h4>
<label class="checkbox_label" for="always-force-name2-checkbox">
@ -3266,31 +3275,6 @@
<small data-i18n="Allow Post-History Instructions">Allow Post-History Instructions</small>
</label>
</div>
<div>
<h4 class="range-block-title justifyLeft standoutHeader">
<span data-i18n="Custom Stopping Strings">
Custom Stopping Strings
</span>
<a href="https://docs.sillytavern.app/usage/core-concepts/advancedformatting/#custom-stopping-strings" class="notes-link" target="_blank">
<span class="fa-solid fa-circle-question note-link-span"></span>
</a>
</h4>
<div>
<small>
<span data-i18n="JSON serialized array of strings">JSON serialized array of strings</span>
<i class="fa-solid fa-question-circle opacity50p" title="e.g: [&quot;Ford&quot;, &quot;BMW&quot;, &quot;Fiat&quot;]"></i>
</small>
</div>
<div>
<textarea id="custom_stopping_strings" class="text_pole textarea_compact monospace autoSetHeight"></textarea>
</div>
<label class="checkbox_label" for="custom_stopping_strings_macro">
<input id="custom_stopping_strings_macro" type="checkbox" checked>
<small data-i18n="Replace Macro in Custom Stopping Strings">
Replace Macro in Custom Stopping Strings
</small>
</label>
</div>
</div>
</div>
<div id="InstructSettingsColumn" class="flex-container flexNoGap flexFlowColumn flex1">
@ -3303,29 +3287,28 @@
</a>
</div>
<div class="flex-container">
<label for="instruct_enabled" class="checkbox_label flex1" title="Enable Instruct Mode" data-i18n="[title]instruct_enabled">
<input id="instruct_enabled" type="checkbox" style="display:none;" />
<small><i class="fa-solid fa-power-off menu_button margin0"></i></small>
</label>
<label for="instruct_bind_to_context" class="checkbox_label flex1" title="Bind to Context&#10If enabled, Context templates will be automatically selected based on selected Instruct template name or by preference." data-i18n="[title]instruct_bind_to_context">
<input id="instruct_bind_to_context" type="checkbox" style="display:none;" />
<small><i class="fa-solid fa-link menu_button margin0"></i></small>
</label>
<label for="instruct_enabled" class="checkbox_label flex1" title="Enable Instruct Mode" data-i18n="[title]instruct_enabled">
<input id="instruct_enabled" type="checkbox" style="display:none;" />
<small><i class="fa-solid fa-power-off menu_button margin0"></i></small>
</label>
</div>
</h4>
<div class="flex-container margin0 justifyCenter">
<input type="file" hidden data-preset-manager-file="instruct" accept=".json, .settings">
<i id="instruct_set_default" class="menu_button fa-solid fa-heart" title="Auto-select this template on API connection." data-i18n="[title]Auto-select this template on API connection"></i>
<i data-preset-manager-update="instruct" class="menu_button fa-solid fa-save" title="Update current template" data-i18n="[title]Update current template"></i>
<i data-preset-manager-new="instruct" class="menu_button fa-solid fa-file-circle-plus" title="Save template as" data-i18n="[title]Save template as"></i>
<i data-preset-manager-import="instruct" class=" menu_button fa-solid fa-file-import" title="Import template" data-i18n="[title]Import template"></i>
<i data-preset-manager-export="instruct" class=" menu_button fa-solid fa-file-export" title="Export template" data-i18n="[title]Export template"></i>
<i data-preset-manager-restore="instruct" class=" menu_button fa-solid fa-recycle" title="Restore current template" data-i18n="[title]Restore current template"></i>
<i data-preset-manager-delete="instruct" class=" menu_button fa-solid fa-trash-can" title="Delete template" data-i18n="[title]Delete template"></i>
</div>
<div class="flex-container" title="Select your current Instruct Template" data-i18n="[title]Select your current Instruct Template">
<select id="instruct_presets" data-preset-manager-for="instruct" class="flex1 text_pole"></select>
<div class="flex-container margin0 justifyCenter">
<input type="file" hidden data-preset-manager-file="instruct" accept=".json, .settings">
<i data-preset-manager-update="instruct" class="menu_button fa-solid fa-save" title="Update current template" data-i18n="[title]Update current template"></i>
<i data-preset-manager-new="instruct" class="menu_button fa-solid fa-file-circle-plus" title="Save template as" data-i18n="[title]Save template as"></i>
<i data-preset-manager-import="instruct" class="displayNone menu_button fa-solid fa-file-import" title="Import template" data-i18n="[title]Import template"></i>
<i data-preset-manager-export="instruct" class="displayNone menu_button fa-solid fa-file-export" title="Export template" data-i18n="[title]Export template"></i>
<i data-preset-manager-restore="instruct" class="menu_button fa-solid fa-recycle" title="Restore current template" data-i18n="[title]Restore current template"></i>
<i data-preset-manager-delete="instruct" class="menu_button fa-solid fa-trash-can" title="Delete template" data-i18n="[title]Delete template"></i>
</div>
</div>
<label>
<small>
@ -3336,14 +3319,6 @@
<div>
<input type="text" id="instruct_activation_regex" class="text_pole textarea_compact" placeholder="e.g. /llama(-)?[3|3.1]/i">
</div>
<div>
<label for="instruct_system_prompt" class="flex-container">
<small data-i18n="System Prompt">System Prompt</small>
<i class="editor_maximize fa-solid fa-maximize right_menu_button" data-for="instruct_system_prompt" title="Expand the editor" data-i18n="[title]Expand the editor"></i>
</label>
<textarea id="instruct_system_prompt" class="text_pole textarea_compact autoSetHeight"></textarea>
</div>
<div>
<label for="instruct_wrap" class="checkbox_label">
<input id="instruct_wrap" type="checkbox" />
@ -3357,25 +3332,203 @@
<input id="instruct_skip_examples" type="checkbox" />
<small data-i18n="Skip Example Dialogues Formatting">Skip Example Dialogues Formatting</small>
</label>
<div id="instruct_names_behavior">
<div>
<small data-i18n="Include Names">
Include Names
</small>
<label for="instruct_names_behavior_none" class="checkbox_label" title="Do not add name prefixes before the message contents.">
<input id="instruct_names_behavior_none" name="names_behavior" value="none" type="radio" />
<small data-i18n="Never">Never</small>
</label>
<label for="instruct_names_behavior_force" class="checkbox_label" title="Only add name prefixes to messages from group characters and past personas.">
<input id="instruct_names_behavior_force" name="names_behavior" value="force" type="radio" />
<small data-i18n="Groups and Past Personas">Groups and Past Personas</small>
</label>
<label for="instruct_names_behavior_always" class="checkbox_label" title="Always add name prefixes to messages.">
<input id="instruct_names_behavior_always" name="names_behavior" value="always" type="radio" />
<small data-i18n="Always">Always</small>
</label>
<select id="instruct_names_behavior">
<option value="none" data-i18n="Never">Never</option>
<option value="force" data-i18n="Groups and Past Personas">Groups and Past Personas</option>
<option value="always" data-i18n="Always">Always</option>
</select>
</div>
</div>
</div>
<div id="InstructSequencesColumn" class="wide100p flexFlowColumn">
<h4 class="standoutHeader title_restorable">
<b>
<span data-i18n="Instruct Sequences">
Instruct Sequences
</span>
</b>
</h4>
<!-- We keep one auto-open so the user would know what is going on in the picked template -->
<details open>
<summary>User Message Sequences</summary>
<div class="flex-container">
<div class="flexAuto" title="Inserted before a User message and as a last prompt line when impersonating." data-i18n="[title]Inserted before a User message and as a last prompt line when impersonating.">
<small data-i18n="User Prefix">User Message Prefix</small>
<textarea id="instruct_input_sequence" class="text_pole textarea_compact autoSetHeight"></textarea>
</div>
<div class="flexAuto" title="Inserted after a User message." data-i18n="[title]Inserted after a User message.">
<small data-i18n="User Suffix">User Message Suffix</small>
<textarea id="instruct_input_suffix" class="text_pole wide100p textarea_compact autoSetHeight"></textarea>
</div>
</div>
</details>
<details open>
<summary>Assistant Message Sequences</summary>
<div class="flex-container">
<div class="flexAuto" title="Inserted before an Assistant message and as a last prompt line when generating an AI reply." data-i18n="[title]Inserted before an Assistant message and as a last prompt line when generating an AI reply.">
<small data-i18n="Assistant Prefix">Assistant Message Prefix</small>
<textarea id="instruct_output_sequence" class="text_pole wide100p textarea_compact autoSetHeight"></textarea>
</div>
<div class="flexAuto" title="Inserted after an Assistant message." data-i18n="[title]Inserted after an Assistant message.">
<small data-i18n="Assistant Suffix">Assistant Message Suffix</small>
<textarea id="instruct_output_suffix" class="text_pole wide100p textarea_compact autoSetHeight"></textarea>
</div>
</div>
</details>
<details>
<summary>System Message Sequences</summary>
<div class="flex-container">
<div class="flexAuto" id="instruct_system_sequence_block" title="Inserted before a System (added by slash commands or extensions) message." data-i18n="[title]Inserted before a System (added by slash commands or extensions) message.">
<small data-i18n="System Prefix">System Message Prefix</small>
<textarea id="instruct_system_sequence" class="text_pole textarea_compact autoSetHeight"></textarea>
</div>
<div class="flexAuto" id="instruct_system_suffix_block" title="Inserted after a System message." data-i18n="[title]Inserted after a System message.">
<small data-i18n="System Suffix">System Message Suffix</small>
<textarea id="instruct_system_suffix" class="text_pole wide100p textarea_compact autoSetHeight"></textarea>
</div>
</div>
<div class="flexBasis100p" title="If enabled, System Sequences will be the same as User Sequences." data-i18n="[title]If enabled, System Sequences will be the same as User Sequences.">
<label class="checkbox_label" for="instruct_system_same_as_user">
<input id="instruct_system_same_as_user" type="checkbox" />
<small data-i18n="System same as User">System same as User</small>
</label>
</div>
</details>
<details>
<summary>System Prompt Sequences</summary>
<div class="flex-container">
<div class="flexAuto" title="Inserted before a System prompt." data-i18n="[title]Inserted before a System prompt.">
<label for="instruct_system_sequence_prefix">
<small data-i18n="System Prompt Prefix">System Prompt Prefix</small>
</label>
<div>
<textarea id="instruct_system_sequence_prefix" class="text_pole textarea_compact autoSetHeight"></textarea>
</div>
</div>
<div class="flexAuto" title="Inserted after a System prompt." data-i18n="[title]Inserted after a System prompt.">
<label for="instruct_system_sequence_suffix">
<small data-i18n="System Prompt Suffix">System Prompt Suffix</small>
</label>
<div>
<textarea id="instruct_system_sequence_suffix" class="text_pole wide100p textarea_compact autoSetHeight"></textarea>
</div>
</div>
</div>
</details>
<details>
<summary data-i18n="Misc. Sequences">Misc. Sequences</summary>
<div class="flex-container">
<div class="flexAuto" title="Inserted before the first Assistant's message." data-i18n="[title]Inserted before the first Assistant's message.">
<small data-i18n="First Assistant Prefix">First Assistant Prefix</small>
<textarea id="instruct_first_output_sequence" class="text_pole textarea_compact autoSetHeight"></textarea>
</div>
<div class="flexAuto" title="Inserted before the last Assistant's message or as a last prompt line when generating an AI reply (except a neutral/system role)." data-i18n="[title]instruct_last_output_sequence">
<small data-i18n="Last Assistant Prefix">Last Assistant Prefix</small>
<textarea id="instruct_last_output_sequence" class="text_pole wide100p textarea_compact autoSetHeight"></textarea>
</div>
</div>
<div class="flex-container">
<div class="flexAuto" title="Inserted before the first User's message." data-i18n="[title]Inserted before the first User's message.">
<small data-i18n="First User Prefix">First User Prefix</small>
<textarea id="instruct_first_input_sequence" class="text_pole textarea_compact autoSetHeight"></textarea>
</div>
<div class="flexAuto" title="Inserted before the last User's message." data-i18n="[title]instruct_last_input_sequence">
<small data-i18n="Last User Prefix">Last User Prefix</small>
<textarea id="instruct_last_input_sequence" class="text_pole wide100p textarea_compact autoSetHeight"></textarea>
</div>
</div>
<div class="flex-container">
<div class="flexAuto" title="Will be inserted as a last prompt line when using system/neutral generation." data-i18n="[title]Will be inserted as a last prompt line when using system/neutral generation.">
<small data-i18n="System Instruction Prefix">System Instruction Prefix</small>
<textarea id="instruct_last_system_sequence" class="text_pole textarea_compact autoSetHeight"></textarea>
</div>
<div class="flexAuto" title="If a stop sequence is generated, everything past it will be removed from the output (inclusive)." data-i18n="[title]If a stop sequence is generated, everything past it will be removed from the output (inclusive).">
<small data-i18n="Stop Sequence">Stop Sequence</small>
<textarea id="instruct_stop_sequence" class="text_pole textarea_compact autoSetHeight"></textarea>
</div>
</div>
<div class="flex-container">
<div class="flexAuto" title="Will be inserted at the start of the chat history if it doesn't start with a User message." data-i18n="[title]Will be inserted at the start of the chat history if it doesn't start with a User message.">
<small data-i18n="User Filler Message">User Filler Message</small>
<textarea id="instruct_user_alignment_message" class="text_pole textarea_compact autoSetHeight"></textarea>
</div>
</div>
</details>
</div>
</div>
<div id="SystemPromptColumn" class="flex-container flexNoGap flexFlowColumn flex1">
<div id="SystemPromptBlock">
<h4 class="standoutHeader title_restorable justifySpaceBetween">
<div class="flex-container">
<span data-i18n="System Prompt">System Prompt</span>
<a href="https://docs.sillytavern.app/usage/core-concepts/advancedformatting/#system-prompt" class="notes-link" target="_blank">
<span class="fa-solid fa-circle-question note-link-span"></span>
</a>
</div>
<div class="flex-container">
<label for="sysprompt_enabled" class="checkbox_label flex1" title="Enable System Prompt" data-i18n="[title]sysprompt_enabled">
<input id="sysprompt_enabled" type="checkbox" style="display:none;" />
<small><i class="fa-solid fa-power-off menu_button margin0"></i></small>
</label>
</div>
</h4>
<div class="flex-container" title="Select your current System Prompt" data-i18n="[title]Select your current System Prompt">
<select id="sysprompt_select" data-preset-manager-for="sysprompt" class="flex1 text_pole"></select>
<div class="flex-container margin0 justifyCenter">
<input type="file" hidden data-preset-manager-file="sysprompt" accept=".json, .settings">
<i data-preset-manager-update="sysprompt" class="menu_button fa-solid fa-save" title="Update current prompt" data-i18n="[title]Update current prompt"></i>
<i data-preset-manager-new="sysprompt" class="menu_button fa-solid fa-file-circle-plus" title="Save prompt as" data-i18n="[title]Save prompt as"></i>
<i data-preset-manager-import="sysprompt" class="displayNone menu_button fa-solid fa-file-import" title="Import template" data-i18n="[title]Import template"></i>
<i data-preset-manager-export="sysprompt" class="displayNone menu_button fa-solid fa-file-export" title="Export template" data-i18n="[title]Export template"></i>
<i data-preset-manager-restore="sysprompt" class="menu_button fa-solid fa-recycle" title="Restore current prompt" data-i18n="[title]Restore current prompt"></i>
<i data-preset-manager-delete="sysprompt" class="menu_button fa-solid fa-trash-can" title="Delete prompt" data-i18n="[title]Delete prompt"></i>
</div>
</div>
<div>
<label for="sysprompt_content" class="flex-container">
<small data-i18n="Prompt Content">Prompt Content</small>
<i class="editor_maximize fa-solid fa-maximize right_menu_button" data-for="sysprompt_content" title="Expand the editor" data-i18n="[title]Expand the editor"></i>
</label>
<textarea id="sysprompt_content" class="text_pole textarea_compact autoSetHeight"></textarea>
</div>
</div>
<div>
&nbsp;
</div>
<div>
<h4 class="range-block-title justifyLeft standoutHeader">
<span data-i18n="Custom Stopping Strings">
Custom Stopping Strings
</span>
<a href="https://docs.sillytavern.app/usage/core-concepts/advancedformatting/#custom-stopping-strings" class="notes-link" target="_blank">
<span class="fa-solid fa-circle-question note-link-span"></span>
</a>
</h4>
<div>
<small>
<span data-i18n="JSON serialized array of strings">JSON serialized array of strings</span>
<i class="fa-solid fa-question-circle opacity50p" title="e.g: [&quot;Ford&quot;, &quot;BMW&quot;, &quot;Fiat&quot;]"></i>
</small>
</div>
<div>
<textarea id="custom_stopping_strings" class="text_pole textarea_compact monospace autoSetHeight"></textarea>
</div>
<label class="checkbox_label" for="custom_stopping_strings_macro">
<input id="custom_stopping_strings_macro" type="checkbox" checked>
<small data-i18n="Replace Macro in Custom Stopping Strings">
Replace Macro in Custom Stopping Strings
</small>
</label>
</div>
<div name="tokenizerSettingsBlock">
<div name="tokenizerSelectorBlock">
<h4 class="standoutHeader"><span data-i18n="Tokenizer">Tokenizer</span>
@ -3447,114 +3600,6 @@
</div>
</div>
</div>
<div id="InstructSequencesColumn" class="flex-container flexNoGap flexFlowColumn flex1">
<div class="wide100p flexFlowColumn">
<h4 class="standoutHeader title_restorable">
<b>
<span data-i18n="Instruct Sequences">
Instruct Sequences
</span>
<small data-i18n="(Saves to Template)">
(Saves to Template)
</small>
</b>
</h4>
<div class="flex-container">
<div class="flexAuto" title="Inserted before a System prompt." data-i18n="[title]Inserted before a System prompt.">
<label for="instruct_system_sequence_prefix">
<small data-i18n="System Prompt Prefix">System Prompt Prefix</small>
</label>
<div>
<textarea id="instruct_system_sequence_prefix" class="text_pole textarea_compact autoSetHeight"></textarea>
</div>
</div>
<div class="flexAuto" title="Inserted after a System prompt." data-i18n="[title]Inserted after a System prompt.">
<label for="instruct_system_sequence_suffix">
<small data-i18n="System Prompt Suffix">System Prompt Suffix</small>
</label>
<div>
<textarea id="instruct_system_sequence_suffix" class="text_pole wide100p textarea_compact autoSetHeight"></textarea>
</div>
</div>
</div>
<div class="flex-container">
<div class="flexAuto" title="Inserted before a User message and as a last prompt line when impersonating." data-i18n="[title]Inserted before a User message and as a last prompt line when impersonating.">
<small data-i18n="User Prefix">User Message Prefix</small>
<textarea id="instruct_input_sequence" class="text_pole textarea_compact autoSetHeight"></textarea>
</div>
<div class="flexAuto" title="Inserted after a User message." data-i18n="[title]Inserted after a User message.">
<small data-i18n="User Suffix">User Message Suffix</small>
<textarea id="instruct_input_suffix" class="text_pole wide100p textarea_compact autoSetHeight"></textarea>
</div>
</div>
<div class="flex-container">
<div class="flexAuto" title="Inserted before an Assistant message and as a last prompt line when generating an AI reply." data-i18n="[title]Inserted before an Assistant message and as a last prompt line when generating an AI reply.">
<small data-i18n="Assistant Prefix">Assistant Message Prefix</small>
<textarea id="instruct_output_sequence" class="text_pole wide100p textarea_compact autoSetHeight"></textarea>
</div>
<div class="flexAuto" title="Inserted after an Assistant message." data-i18n="[title]Inserted after an Assistant message.">
<small data-i18n="Assistant Suffix">Assistant Message Suffix</small>
<textarea id="instruct_output_suffix" class="text_pole wide100p textarea_compact autoSetHeight"></textarea>
</div>
</div>
<div class="flex-container">
<div class="flexAuto" id="instruct_system_sequence_block" title="Inserted before a System (added by slash commands or extensions) message." data-i18n="[title]Inserted before a System (added by slash commands or extensions) message.">
<small data-i18n="System Prefix">System Message Prefix</small>
<textarea id="instruct_system_sequence" class="text_pole textarea_compact autoSetHeight"></textarea>
</div>
<div class="flexAuto" id="instruct_system_suffix_block" title="Inserted after a System message." data-i18n="[title]Inserted after a System message.">
<small data-i18n="System Suffix">System Message Suffix</small>
<textarea id="instruct_system_suffix" class="text_pole wide100p textarea_compact autoSetHeight"></textarea>
</div>
</div>
<div class="flexBasis100p" title="If enabled, System Sequences will be the same as User Sequences." data-i18n="[title]If enabled, System Sequences will be the same as User Sequences.">
<label class="checkbox_label" for="instruct_system_same_as_user">
<input id="instruct_system_same_as_user" type="checkbox" />
<small data-i18n="System same as User">System same as User</small>
</label>
</div>
<h5 class="textAlignCenter" data-i18n="Misc. Sequences">
Misc. Sequences
</h5>
<div class="flex-container">
<div class="flexAuto" title="Inserted before the first Assistant's message." data-i18n="[title]Inserted before the first Assistant's message.">
<small data-i18n="First Assistant Prefix">First Assistant Prefix</small>
<textarea id="instruct_first_output_sequence" class="text_pole textarea_compact autoSetHeight"></textarea>
</div>
<div class="flexAuto" title="Inserted before the last Assistant's message or as a last prompt line when generating an AI reply (except a neutral/system role)." data-i18n="[title]instruct_last_output_sequence">
<small data-i18n="Last Assistant Prefix">Last Assistant Prefix</small>
<textarea id="instruct_last_output_sequence" class="text_pole wide100p textarea_compact autoSetHeight"></textarea>
</div>
</div>
<div class="flex-container">
<div class="flexAuto" title="Inserted before the first User's message." data-i18n="[title]Inserted before the first User's message.">
<small data-i18n="First User Prefix">First User Prefix</small>
<textarea id="instruct_first_input_sequence" class="text_pole textarea_compact autoSetHeight"></textarea>
</div>
<div class="flexAuto" title="Inserted before the last User's message." data-i18n="[title]instruct_last_input_sequence">
<small data-i18n="Last User Prefix">Last User Prefix</small>
<textarea id="instruct_last_input_sequence" class="text_pole wide100p textarea_compact autoSetHeight"></textarea>
</div>
</div>
<div class="flex-container">
<div class="flexAuto" title="Will be inserted as a last prompt line when using system/neutral generation." data-i18n="[title]Will be inserted as a last prompt line when using system/neutral generation.">
<small data-i18n="System Instruction Prefix">System Instruction Prefix</small>
<textarea id="instruct_last_system_sequence" class="text_pole textarea_compact autoSetHeight"></textarea>
</div>
<div class="flexAuto" title="If a stop sequence is generated, everything past it will be removed from the output (inclusive)." data-i18n="[title]If a stop sequence is generated, everything past it will be removed from the output (inclusive).">
<small data-i18n="Stop Sequence">Stop Sequence</small>
<textarea id="instruct_stop_sequence" class="text_pole textarea_compact autoSetHeight"></textarea>
</div>
</div>
<div class="flex-container">
<div class="flexAuto" title="Will be inserted at the start of the chat history if it doesn't start with a User message." data-i18n="[title]Will be inserted at the start of the chat history if it doesn't start with a User message.">
<small data-i18n="User Filler Message">User Filler Message</small>
<textarea id="instruct_user_alignment_message" class="text_pole textarea_compact autoSetHeight"></textarea>
</div>
</div>
</div>
</div>
</div>
</div>
</div>

View File

@ -244,6 +244,7 @@ import { SlashCommandEnumValue, enumTypes } from './scripts/slash-commands/Slash
import { commonEnumProviders, enumIcons } from './scripts/slash-commands/SlashCommandCommonEnumsProvider.js';
import { initInputMarkdown } from './scripts/input-md-formatting.js';
import { AbortReason } from './scripts/util/AbortReason.js';
import { initSystemPrompts } from './scripts/sysprompt.js';
//exporting functions and vars for mods
export {
@ -934,6 +935,8 @@ async function firstLoadInit() {
initDefaultSlashCommands();
initTextGenModels();
initOpenAI();
initSystemPrompts();
await initPresetManager();
await getSystemMessages();
sendSystemMessage(system_message_types.WELCOME);
sendSystemMessage(system_message_types.WELCOME_PROMPT);
@ -946,7 +949,6 @@ async function firstLoadInit() {
await getCharacters();
await getBackgrounds();
await initTokenizers();
await initPresetManager();
initBackgrounds();
initAuthorsNote();
initPersonas();
@ -3525,9 +3527,12 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
jailbreak,
} = getCharacterCardFields();
if (isInstruct) {
system = power_user.prefer_character_prompt && system ? system : baseChatReplace(power_user.instruct.system_prompt, name1, name2);
system = formatInstructModeSystemPrompt(substituteParams(system, name1, name2, power_user.instruct.system_prompt));
if (power_user.sysprompt.enabled) {
system = power_user.prefer_character_prompt && system ? system : baseChatReplace(power_user.sysprompt.content, name1, name2);
system = isInstruct ? formatInstructModeSystemPrompt(substituteParams(system, name1, name2, power_user.sysprompt.content)) : system;
} else {
// Nullify if it's not enabled
system = '';
}
// Depth prompt (character-specific A/N)
@ -3780,7 +3785,7 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
personality: personality,
persona: power_user.persona_description_position == persona_description_positions.IN_PROMPT ? persona : '',
scenario: scenario,
system: isInstruct ? system : '',
system: system,
char: name2,
user: name1,
wiBefore: worldInfoBefore,
@ -4354,7 +4359,7 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
this_max_context: this_max_context,
padding: power_user.token_padding,
main_api: main_api,
instruction: isInstruct ? substituteParams(power_user.prefer_character_prompt && system ? system : power_user.instruct.system_prompt) : '',
instruction: power_user.sysprompt.enabled ? substituteParams(power_user.prefer_character_prompt && system ? system : power_user.sysprompt.content) : '',
userPersona: (power_user.persona_description_position == persona_description_positions.IN_PROMPT ? (persona || '') : ''),
tokenizer: getFriendlyTokenizerName(main_api).tokenizerName || '',
};
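
The hunks above show how Generate() now resolves the system prompt: the sysprompt toggle decides whether a prompt is used at all, the character card can still override it when "Prefer Character Card Prompt" is on, and Instruct Mode only contributes its wrapping sequences. A simplified, self-contained restatement of that branch, not the full Generate() pipeline; power_user, baseChatReplace, formatInstructModeSystemPrompt and substituteParams are the same helpers referenced in the diff, and charPrompt stands in for the card's prompt override:

function resolveSystemPrompt(charPrompt, isInstruct, name1, name2) {
    // Feature switched off entirely: no system prompt, instruct or not.
    if (!power_user.sysprompt.enabled) {
        return '';
    }

    // Prefer the character card's own prompt when the user opted into that,
    // otherwise fall back to the selected System Prompt preset content.
    let system = power_user.prefer_character_prompt && charPrompt
        ? charPrompt
        : baseChatReplace(power_user.sysprompt.content, name1, name2);

    // Instruct Mode only wraps the prompt in its sequences now;
    // it no longer owns the prompt text itself.
    return isInstruct
        ? formatInstructModeSystemPrompt(substituteParams(system, name1, name2, power_user.sysprompt.content))
        : system;
}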

View File

@ -34,6 +34,8 @@ const TC_COMMANDS = [
'preset',
'api-url',
'model',
'sysprompt',
'sysprompt-state',
'instruct',
'context',
'instruct-state',
@ -46,6 +48,8 @@ const FANCY_NAMES = {
'preset': 'Settings Preset',
'model': 'Model',
'proxy': 'Proxy Preset',
'sysprompt-state': 'Use System Prompt',
'sysprompt': 'System Prompt Name',
'instruct-state': 'Instruct Mode',
'instruct': 'Instruct Template',
'context': 'Context Template',
@ -181,6 +185,11 @@ async function readProfileFromCommands(mode, profile, cleanUp = false) {
}
if (cleanUp) {
for (const command of commands) {
if (command.endsWith('-state') && profile[command] === 'false') {
delete profile[command.replace('-state', '')];
}
}
for (const command of opposingCommands) {
if (commands.includes(command)) {
continue;
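
The new 'sysprompt' and 'sysprompt-state' commands get the same cleanup treatment as the other toggled settings: when a captured profile records a "-state" value of 'false', the matching value entry is dropped so the profile does not force a template that was switched off. A self-contained illustration of that rule with a made-up profile object (the real code iterates the command list rather than the profile keys):

const profile = {
    'sysprompt-state': 'false',
    'sysprompt': 'Neutral - Chat',
    'instruct-state': 'true',
    'instruct': 'Alpaca',
};

for (const key of Object.keys(profile)) {
    if (key.endsWith('-state') && profile[key] === 'false') {
        delete profile[key.replace('-state', '')];
    }
}

console.log(profile);
// -> { 'sysprompt-state': 'false', 'instruct-state': 'true', 'instruct': 'Alpaca' }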

View File

@ -23,7 +23,6 @@ export const names_behavior_types = {
const controls = [
{ id: 'instruct_enabled', property: 'enabled', isCheckbox: true },
{ id: 'instruct_wrap', property: 'wrap', isCheckbox: true },
{ id: 'instruct_system_prompt', property: 'system_prompt', isCheckbox: false },
{ id: 'instruct_system_sequence_prefix', property: 'system_sequence_prefix', isCheckbox: false },
{ id: 'instruct_system_sequence_suffix', property: 'system_sequence_suffix', isCheckbox: false },
{ id: 'instruct_input_sequence', property: 'input_sequence', isCheckbox: false },
@ -35,7 +34,6 @@ const controls = [
{ id: 'instruct_last_system_sequence', property: 'last_system_sequence', isCheckbox: false },
{ id: 'instruct_user_alignment_message', property: 'user_alignment_message', isCheckbox: false },
{ id: 'instruct_stop_sequence', property: 'stop_sequence', isCheckbox: false },
{ id: 'instruct_names', property: 'names', isCheckbox: true },
{ id: 'instruct_first_output_sequence', property: 'first_output_sequence', isCheckbox: false },
{ id: 'instruct_last_output_sequence', property: 'last_output_sequence', isCheckbox: false },
{ id: 'instruct_first_input_sequence', property: 'first_input_sequence', isCheckbox: false },
@ -43,7 +41,7 @@ const controls = [
{ id: 'instruct_activation_regex', property: 'activation_regex', isCheckbox: false },
{ id: 'instruct_bind_to_context', property: 'bind_to_context', isCheckbox: true },
{ id: 'instruct_skip_examples', property: 'skip_examples', isCheckbox: true },
{ id: 'instruct_names_behavior input[name="names_behavior"]', property: 'names_behavior', isCheckbox: false },
{ id: 'instruct_names_behavior', property: 'names_behavior', isCheckbox: false },
{ id: 'instruct_system_same_as_user', property: 'system_same_as_user', isCheckbox: true, trigger: true },
];
@ -109,9 +107,10 @@ export async function loadInstructMode(data) {
if (control.isCheckbox) {
$element.prop('checked', power_user.instruct[control.property]);
} else if (control.property === 'names_behavior') {
const behavior = power_user.instruct[control.property];
$element.filter(`[value="${behavior}"]`).prop('checked', true);
} else if ($element.is('select')) {
const value = power_user.instruct[control.property];
$element.val(value);
$element.filter(`[value="${value}"]`).prop('checked', true);
} else {
$element.val(power_user.instruct[control.property]);
}
@ -137,12 +136,6 @@ export async function loadInstructMode(data) {
option.selected = name === power_user.instruct.preset;
$('#instruct_presets').append(option);
});
highlightDefaultPreset();
}
function highlightDefaultPreset() {
$('#instruct_set_default').toggleClass('default', power_user.default_instruct === power_user.instruct.preset);
}
/**
@ -159,13 +152,6 @@ export function selectContextPreset(preset, { quiet = false, isAuto = false } =
!quiet && toastr.info(`Context Template: "${preset}" ${isAuto ? 'auto-' : ''}selected`);
}
// If instruct mode is disabled, enable it, except for default context template
if (!power_user.instruct.enabled && preset !== power_user.default_context) {
power_user.instruct.enabled = true;
$('#instruct_enabled').prop('checked', true).trigger('change');
!quiet && toastr.info('Instruct Mode enabled');
}
saveSettingsDebounced();
}
@ -235,13 +221,6 @@ export function autoSelectInstructPreset(modelId) {
}
}
}
if (power_user.instruct.bind_to_context && power_user.default_instruct && power_user.instruct.preset !== power_user.default_instruct) {
if (instruct_presets.some(p => p.name === power_user.default_instruct)) {
console.log(`Instruct mode: default preset "${power_user.default_instruct}" selected`);
$('#instruct_presets').val(power_user.default_instruct).trigger('change');
}
}
}
return false;
@ -580,10 +559,6 @@ function selectMatchingContextTemplate(name) {
break;
}
}
if (!foundMatch) {
// If no match was found, select default context preset
selectContextPreset(power_user.default_context, { isAuto: true });
}
}
/**
@ -597,9 +572,13 @@ export function replaceInstructMacros(input, env) {
if (!input) {
return '';
}
const syspromptMacros = {
'systemPrompt': (power_user.prefer_character_prompt && env.charPrompt ? env.charPrompt : power_user.sysprompt.content),
'defaultSystemPrompt|instructSystem|instructSystemPrompt': power_user.sysprompt.content,
};
const instructMacros = {
'systemPrompt': (power_user.prefer_character_prompt && env.charPrompt ? env.charPrompt : power_user.instruct.system_prompt),
'instructSystem|instructSystemPrompt': power_user.instruct.system_prompt,
'instructSystemPromptPrefix': power_user.instruct.system_sequence_prefix,
'instructSystemPromptSuffix': power_user.instruct.system_sequence_suffix,
'instructInput|instructUserPrefix': power_user.instruct.input_sequence,
@ -622,6 +601,11 @@ export function replaceInstructMacros(input, env) {
input = input.replace(regex, power_user.instruct.enabled ? value : '');
}
for (const [placeholder, value] of Object.entries(syspromptMacros)) {
const regex = new RegExp(`{{(${placeholder})}}`, 'gi');
input = input.replace(regex, power_user.sysprompt.enabled ? value : '');
}
input = input.replace(/{{exampleSeparator}}/gi, power_user.context.example_separator);
input = input.replace(/{{chatStart}}/gi, power_user.context.chat_start);
@ -629,20 +613,6 @@ export function replaceInstructMacros(input, env) {
}
jQuery(() => {
$('#instruct_set_default').on('click', function () {
if (power_user.instruct.preset === power_user.default_instruct) {
power_user.default_instruct = null;
$(this).removeClass('default');
toastr.info('Default instruct template cleared');
} else {
power_user.default_instruct = power_user.instruct.preset;
$(this).addClass('default');
toastr.info(`Default instruct template set to ${power_user.default_instruct}`);
}
saveSettingsDebounced();
});
$('#instruct_system_same_as_user').on('input', function () {
const state = !!$(this).prop('checked');
if (state) {
@ -671,9 +641,6 @@ jQuery(() => {
// When instruct mode gets enabled, select context template matching selected instruct preset
if (power_user.instruct.enabled) {
selectMatchingContextTemplate(power_user.instruct.preset);
} else {
// When instruct mode gets disabled, select default context preset
selectContextPreset(power_user.default_context);
}
});
@ -699,9 +666,10 @@ jQuery(() => {
if (control.isCheckbox) {
$element.prop('checked', power_user.instruct[control.property]).trigger('input');
} else if (control.property === 'names_behavior') {
const behavior = power_user.instruct[control.property];
$element.filter(`[value="${behavior}"]`).prop('checked', true).trigger('input');
} else if ($element.is('select')) {
const value = power_user.instruct[control.property];
$element.val(value);
$element.filter(`[value="${value}"]`).prop('checked', true).trigger('input');
} else {
$element.val(power_user.instruct[control.property]);
$element.trigger('input');
@ -713,7 +681,13 @@ jQuery(() => {
// Select matching context template
selectMatchingContextTemplate(name);
}
highlightDefaultPreset();
});
if (!CSS.supports('field-sizing', 'content')) {
$('#InstructSequencesColumn details').on('toggle', function () {
if ($(this).prop('open')) {
resetScrollHeight($(this).find('textarea'));
}
});
}
});

View File

@ -50,6 +50,7 @@ import { AUTOCOMPLETE_SELECT_KEY, AUTOCOMPLETE_WIDTH } from './autocomplete/Auto
import { SlashCommandEnumValue, enumTypes } from './slash-commands/SlashCommandEnumValue.js';
import { commonEnumProviders, enumIcons } from './slash-commands/SlashCommandCommonEnumsProvider.js';
import { POPUP_TYPE, callGenericPopup } from './popup.js';
import { loadSystemPrompts } from './sysprompt.js';
export {
loadPowerUserSettings,
@ -205,11 +206,9 @@ let power_user = {
disable_group_trimming: false,
single_line: false,
default_instruct: '',
instruct: {
enabled: false,
preset: 'Alpaca',
system_prompt: 'Below is an instruction that describes a task. Write a response that appropriately completes the request.\n\nWrite {{char}}\'s next reply in a fictional roleplay chat between {{user}} and {{char}}.\n',
input_sequence: '### Instruction:',
input_suffix: '',
output_sequence: '### Response:',
@ -233,7 +232,6 @@ let power_user = {
separator_sequence: '',
},
default_context: 'Default',
context: {
preset: 'Default',
story_string: defaultStoryString,
@ -244,6 +242,12 @@ let power_user = {
names_as_stop_strings: true,
},
sysprompt: {
enabled: true,
name: 'Neutral - Chat',
content: 'Write {{char}}\'s next reply in a fictional chat between {{char}} and {{user}}.',
},
personas: {},
default_persona: null,
persona_descriptions: {},
@ -1595,6 +1599,7 @@ async function loadPowerUserSettings(settings, data) {
reloadMarkdownProcessor(power_user.render_formulas);
await loadInstructMode(data);
await loadContextSettings();
await loadSystemPrompts(data);
loadMaxContextUnlocked();
switchWaifuMode();
switchSpoilerMode();
@ -1814,29 +1819,8 @@ async function loadContextSettings() {
}
}
highlightDefaultContext();
saveSettingsDebounced();
});
$('#context_set_default').on('click', function () {
if (power_user.context.preset !== power_user.default_context) {
power_user.default_context = power_user.context.preset;
$(this).addClass('default');
toastr.info(`Default context template set to ${power_user.default_context}`);
highlightDefaultContext();
saveSettingsDebounced();
}
});
highlightDefaultContext();
}
function highlightDefaultContext() {
$('#context_set_default').toggleClass('default', power_user.default_context === power_user.context.preset);
$('#context_set_default').toggleClass('disabled', power_user.default_context === power_user.context.preset);
$('#context_delete_preset').toggleClass('disabled', power_user.default_context === power_user.context.preset);
}
/**

View File

@ -18,13 +18,15 @@ import {
import { groups, selected_group } from './group-chats.js';
import { instruct_presets } from './instruct-mode.js';
import { kai_settings } from './kai-settings.js';
import { Popup } from './popup.js';
import { Popup, POPUP_RESULT, POPUP_TYPE } from './popup.js';
import { context_presets, getContextSettings, power_user } from './power-user.js';
import { SlashCommand } from './slash-commands/SlashCommand.js';
import { ARGUMENT_TYPE, SlashCommandArgument } from './slash-commands/SlashCommandArgument.js';
import { enumIcons } from './slash-commands/SlashCommandCommonEnumsProvider.js';
import { SlashCommandEnumValue, enumTypes } from './slash-commands/SlashCommandEnumValue.js';
import { SlashCommandParser } from './slash-commands/SlashCommandParser.js';
import { checkForSystemPromptInInstructTemplate, system_prompts } from './sysprompt.js';
import { renderTemplateAsync } from './templates.js';
import {
textgenerationwebui_preset_names,
textgenerationwebui_presets,
@ -71,7 +73,7 @@ function autoSelectPreset() {
* @param {string} apiId API id
* @returns {PresetManager} Preset manager
*/
function getPresetManager(apiId = '') {
export function getPresetManager(apiId = '') {
if (!apiId) {
apiId = main_api == 'koboldhorde' ? 'kobold' : main_api;
}
@ -102,6 +104,215 @@ class PresetManager {
this.apiId = apiId;
}
static masterSections = {
'instruct': {
name: 'Instruct Template',
getData: () => {
const manager = getPresetManager('instruct');
const name = manager.getSelectedPresetName();
return manager.getPresetSettings(name);
},
setData: (data) => {
const manager = getPresetManager('instruct');
const name = data.name;
return manager.savePreset(name, data);
},
isValid: (data) => PresetManager.isPossiblyInstructData(data),
},
'context': {
name: 'Context Template',
getData: () => {
const manager = getPresetManager('context');
const name = manager.getSelectedPresetName();
return manager.getPresetSettings(name);
},
setData: (data) => {
const manager = getPresetManager('context');
const name = data.name;
return manager.savePreset(name, data);
},
isValid: (data) => PresetManager.isPossiblyContextData(data),
},
'sysprompt': {
name: 'System Prompt',
getData: () => {
const manager = getPresetManager('sysprompt');
const name = manager.getSelectedPresetName();
return manager.getPresetSettings(name);
},
setData: (data) => {
const manager = getPresetManager('sysprompt');
const name = data.name;
return manager.savePreset(name, data);
},
isValid: (data) => PresetManager.isPossiblySystemPromptData(data),
},
'preset': {
name: 'Text Completion Preset',
getData: () => {
const manager = getPresetManager('textgenerationwebui');
const name = manager.getSelectedPresetName();
const data = manager.getPresetSettings(name);
data['name'] = name;
return data;
},
setData: (data) => {
const manager = getPresetManager('textgenerationwebui');
const name = data.name;
return manager.savePreset(name, data);
},
isValid: (data) => PresetManager.isPossiblyTextCompletionData(data),
},
};
static isPossiblyInstructData(data) {
const instructProps = ['name', 'input_sequence', 'output_sequence'];
return data && instructProps.every(prop => Object.keys(data).includes(prop));
}
static isPossiblyContextData(data) {
const contextProps = ['name', 'story_string'];
return data && contextProps.every(prop => Object.keys(data).includes(prop));
}
static isPossiblySystemPromptData(data) {
const sysPromptProps = ['name', 'content'];
return data && sysPromptProps.every(prop => Object.keys(data).includes(prop));
}
static isPossiblyTextCompletionData(data) {
const textCompletionProps = ['temp', 'top_k', 'top_p', 'rep_pen'];
return data && textCompletionProps.every(prop => Object.keys(data).includes(prop));
}
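A quick illustration of how these shape checks classify legacy single-section files (the objects below are made-up examples, not presets shipped by this commit):
// Each call returns true because the required keys for that detector are present.
PresetManager.isPossiblyInstructData({ name: 'Alpaca', input_sequence: '### Instruction:', output_sequence: '### Response:' });
PresetManager.isPossiblyContextData({ name: 'Default', story_string: '{{description}}' });
PresetManager.isPossiblySystemPromptData({ name: 'Neutral - Chat', content: 'Write {{char}}\'s next reply.' });
PresetManager.isPossiblyTextCompletionData({ temp: 0.7, top_k: 40, top_p: 0.9, rep_pen: 1.1 });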
/**
* Imports master settings from JSON data.
* @param {object} data Data to import
* @param {string} fileName File name
* @returns {Promise<void>}
*/
static async performMasterImport(data, fileName) {
if (!data || typeof data !== 'object') {
toastr.error('Invalid data provided for master import');
return;
}
// Check for legacy file imports
// 1. Instruct Template
if (this.isPossiblyInstructData(data)) {
toastr.info('Importing as instruct template...', 'Instruct template detected');
return await getPresetManager('instruct').savePreset(data.name, data);
}
// 2. Context Template
if (this.isPossiblyContextData(data)) {
toastr.info('Importing as context template...', 'Context template detected');
return await getPresetManager('context').savePreset(data.name, data);
}
// 3. System Prompt
if (this.isPossiblySystemPromptData(data)) {
toastr.info('Importing as system prompt...', 'System prompt detected');
return await getPresetManager('sysprompt').savePreset(data.name, data);
}
// 4. Text Completion settings
if (this.isPossiblyTextCompletionData(data)) {
toastr.info('Importing as settings preset...', 'Text Completion settings detected');
return await getPresetManager('textgenerationwebui').savePreset(fileName, data);
}
const validSections = [];
for (const [key, section] of Object.entries(this.masterSections)) {
if (key in data && section.isValid(data[key])) {
validSections.push(key);
}
}
if (validSections.length === 0) {
toastr.error('No valid sections found in imported data');
return;
}
const sectionNames = validSections.reduce((acc, key) => {
acc[key] = { key: key, name: this.masterSections[key].name, preset: data[key]?.name || '' };
return acc;
}, {});
const html = $(await renderTemplateAsync('masterImport', { sections: sectionNames }));
const popup = new Popup(html, POPUP_TYPE.CONFIRM, '', {
okButton: 'Import',
cancelButton: 'Cancel',
});
const result = await popup.show();
// Import cancelled
if (result !== POPUP_RESULT.AFFIRMATIVE) {
return;
}
const importedSections = [];
const confirmedSections = html.find('input:checked').map((_, el) => el instanceof HTMLInputElement && el.value).get();
if (confirmedSections.length === 0) {
toastr.info('No sections selected for import');
return;
}
for (const section of confirmedSections) {
const sectionData = data[section];
const masterSection = this.masterSections[section];
if (sectionData && masterSection) {
await masterSection.setData(sectionData);
importedSections.push(masterSection.name);
}
}
toastr.success(`Imported ${importedSections.length} settings: ${importedSections.join(', ')}`);
}
/**
* Exports master settings to JSON data.
* @returns {Promise<string>} JSON data
*/
static async performMasterExport() {
const sectionNames = Object.entries(this.masterSections).reduce((acc, [key, section]) => {
acc[key] = { key: key, name: section.name, checked: key !== 'preset' };
return acc;
}, {});
const html = $(await renderTemplateAsync('masterExport', { sections: sectionNames }));
const popup = new Popup(html, POPUP_TYPE.CONFIRM, '', {
okButton: 'Export',
cancelButton: 'Cancel',
});
const result = await popup.show();
// Export cancelled
if (result !== POPUP_RESULT.AFFIRMATIVE) {
return;
}
const confirmedSections = html.find('input:checked').map((_, el) => el instanceof HTMLInputElement && el.value).get();
const data = {};
if (confirmedSections.length === 0) {
toastr.info('No sections selected for export');
return;
}
for (const section of confirmedSections) {
const masterSection = this.masterSections[section];
if (masterSection) {
data[section] = masterSection.getData();
}
}
return JSON.stringify(data, null, 4);
}
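Put together, a full export written to ST-formatting-<date>.json carries one key per selected section. The field values below are hypothetical placeholders, not data from this commit:
const exampleMasterExport = {
    instruct: { name: 'Alpaca', input_sequence: '### Instruction:', output_sequence: '### Response:' },
    context: { name: 'Default', story_string: '{{#if system}}{{system}}\n{{/if}}{{description}}' },
    sysprompt: { name: 'Neutral - Chat', content: 'Write {{char}}\'s next reply in a fictional chat between {{char}} and {{user}}.' },
    preset: { name: 'Simple', temp: 0.7, top_k: 40, top_p: 0.9, rep_pen: 1.1 },
};
// The Text Completion section starts unchecked in the export dialog, and
// performMasterImport() also accepts any one of these shapes as a standalone legacy file.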
/**
* Gets all preset names.
* @returns {string[]} List of preset names
@ -182,6 +393,10 @@ class PresetManager {
}
async savePreset(name, settings) {
if (this.apiId === 'instruct' && settings) {
await checkForSystemPromptInInstructTemplate(name, settings);
}
const preset = settings ?? this.getPresetSettings(name);
const response = await fetch('/api/presets/save', {
@ -228,6 +443,10 @@ class PresetManager {
presets = instruct_presets;
preset_names = instruct_presets.map(x => x.name);
break;
case 'sysprompt':
presets = system_prompts;
preset_names = system_prompts.map(x => x.name);
break;
default:
console.warn(`Unknown API ID ${this.apiId}`);
}
@ -236,11 +455,11 @@ class PresetManager {
}
isKeyedApi() {
return this.apiId == 'textgenerationwebui' || this.apiId == 'context' || this.apiId == 'instruct';
return this.apiId == 'textgenerationwebui' || this.isAdvancedFormatting();
}
isAdvancedFormatting() {
return this.apiId == 'context' || this.apiId == 'instruct';
return this.apiId == 'context' || this.apiId == 'instruct' || this.apiId == 'sysprompt';
}
updateList(name, preset) {
@ -298,6 +517,11 @@ class PresetManager {
instruct_preset['name'] = name || power_user.instruct.preset;
return instruct_preset;
}
case 'sysprompt': {
const sysprompt_preset = structuredClone(power_user.sysprompt);
sysprompt_preset['name'] = name || power_user.sysprompt.name;
return sysprompt_preset;
}
default:
console.warn(`Unknown API ID ${apiId}`);
return {};
@ -601,11 +825,6 @@ export async function initPresetManager() {
return;
}
// default context preset cannot be deleted
if (apiId == 'context' && power_user.default_context === power_user.context.preset) {
return;
}
const headerText = !presetManager.isAdvancedFormatting() ? 'Delete this preset?' : 'Delete this template?';
const confirm = await Popup.show.confirm(headerText, 'This action is irreversible and your current settings will be overwritten.');
if (!confirm) {
@ -682,4 +901,35 @@ export async function initPresetManager() {
toastr.success(successToast);
}
});
$('#af_master_import').on('click', () => {
$('#af_master_import_file').trigger('click');
});
$('#af_master_import_file').on('change', async function (e) {
if (!(e.target instanceof HTMLInputElement)) {
return;
}
const file = e.target.files[0];
if (!file) {
return;
}
const data = await parseJsonFile(file);
const fileName = file.name.replace('.json', '');
await PresetManager.performMasterImport(data, fileName);
e.target.value = null;
});
$('#af_master_export').on('click', async () => {
const data = await PresetManager.performMasterExport();
if (!data) {
return;
}
const shortDate = new Date().toISOString().split('T')[0];
download(data, `ST-formatting-${shortDate}.json`, 'application/json');
});
}

public/scripts/sysprompt.js (new file, 250 lines)
View File

@ -0,0 +1,250 @@
import { saveSettingsDebounced } from '../script.js';
import { callGenericPopup, POPUP_TYPE } from './popup.js';
import { power_user } from './power-user.js';
import { getPresetManager } from './preset-manager.js';
import { SlashCommand } from './slash-commands/SlashCommand.js';
import { ARGUMENT_TYPE, SlashCommandArgument, SlashCommandNamedArgument } from './slash-commands/SlashCommandArgument.js';
import { commonEnumProviders, enumIcons } from './slash-commands/SlashCommandCommonEnumsProvider.js';
import { enumTypes, SlashCommandEnumValue } from './slash-commands/SlashCommandEnumValue.js';
import { SlashCommandParser } from './slash-commands/SlashCommandParser.js';
import { renderTemplateAsync } from './templates.js';
import { isTrueBoolean, resetScrollHeight } from './utils.js';
export let system_prompts = [];
const $enabled = $('#sysprompt_enabled');
const $select = $('#sysprompt_select');
const $content = $('#sysprompt_content');
const $contentBlock = $('#SystemPromptBlock');
async function migrateSystemPromptFromInstructMode() {
if ('system_prompt' in power_user.instruct) {
const prompt = String(power_user.instruct.system_prompt);
delete power_user.instruct.system_prompt;
power_user.sysprompt.enabled = power_user.instruct.enabled;
power_user.sysprompt.content = prompt;
const existingPromptName = system_prompts.find(x => x.content === prompt)?.name;
if (existingPromptName) {
power_user.sysprompt.name = existingPromptName;
} else {
const data = { name: `[Migrated] ${power_user.instruct.preset}`, content: prompt };
await getPresetManager('sysprompt')?.savePreset(data.name, data);
power_user.sysprompt.name = data.name;
}
saveSettingsDebounced();
toastr.info('System prompt settings have been moved out of Instruct Mode.', 'Migration notice', { timeOut: 5000 });
}
}
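Roughly, this client-side migration reshapes the settings like so (values are illustrative, mirroring the old defaults removed elsewhere in this commit):
const before = {
    instruct: { enabled: true, preset: 'Alpaca', system_prompt: 'Below is an instruction that describes a task. …' },
};
const after = {
    instruct: { enabled: true, preset: 'Alpaca' }, // system_prompt removed
    sysprompt: { enabled: true, name: '[Migrated] Alpaca', content: 'Below is an instruction that describes a task. …' },
    // If the same content already exists as a saved system prompt, its name is reused
    // instead of creating a new "[Migrated]" preset.
};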
/**
* Loads sysprompt settings from the given data object.
* @param {object} data Settings data object.
*/
export async function loadSystemPrompts(data) {
if (data.sysprompt !== undefined) {
system_prompts = data.sysprompt;
}
await migrateSystemPromptFromInstructMode();
toggleSystemPromptDisabledControls();
for (const prompt of system_prompts) {
$('<option>').val(prompt.name).text(prompt.name).appendTo($select);
}
$enabled.prop('checked', power_user.sysprompt.enabled);
$select.val(power_user.sysprompt.name);
$content.val(power_user.sysprompt.content);
if (!CSS.supports('field-sizing', 'content')) {
await resetScrollHeight($content);
}
}
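Each entry in system_prompts is a plain preset object; the shipped defaults listed in the content index follow a minimal shape along these lines (values mirror the built-in default selection):
const exampleSystemPromptPreset = {
    name: 'Neutral - Chat',
    content: 'Write {{char}}\'s next reply in a fictional chat between {{char}} and {{user}}.',
};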
/**
* Checks if the instruct template has a system prompt and prompts the user to save it as a system prompt.
* @param {string} name Name of the instruct template
* @param {object} template Instruct template object
*/
export async function checkForSystemPromptInInstructTemplate(name, template) {
if (!template || !name || typeof name !== 'string' || typeof template !== 'object') {
return;
}
if ('system_prompt' in template && template.system_prompt) {
const existingName = system_prompts.find(x => x.content === template.system_prompt)?.name;
const html = await renderTemplateAsync('migrateInstructPrompt', { prompt: template.system_prompt, existing: existingName });
const confirm = await callGenericPopup(html, POPUP_TYPE.CONFIRM);
if (confirm) {
const migratedName = `[Migrated] ${name}`;
const prompt = { name: migratedName, content: template.system_prompt };
const presetManager = getPresetManager('sysprompt');
await presetManager.savePreset(migratedName, prompt);
toastr.success(`System prompt "${migratedName}" has been saved.`);
} else {
toastr.info('System prompt has been discarded.');
}
delete template.system_prompt;
}
}
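For example, saving a legacy instruct template that still carries a system_prompt field goes through this check (the object is illustrative; the call assumes an async context):
const legacyInstruct = {
    name: 'Alpaca',
    input_sequence: '### Instruction:',
    output_sequence: '### Response:',
    system_prompt: 'Below is an instruction that describes a task. …',
};
await checkForSystemPromptInInstructTemplate(legacyInstruct.name, legacyInstruct);
// Confirming the popup saves a "[Migrated] Alpaca" system prompt preset;
// either way, the system_prompt key is removed from the template before it is saved.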
function toggleSystemPromptDisabledControls() {
$enabled.parent().find('i').toggleClass('toggleEnabled', !!power_user.sysprompt.enabled);
$contentBlock.toggleClass('disabled', !power_user.sysprompt.enabled);
}
/**
* Sets the system prompt state.
* @param {boolean} state System prompt state
* @returns {string} Empty string
*/
function setSystemPromptStateCallback(state) {
power_user.sysprompt.enabled = state;
$enabled.prop('checked', state);
toggleSystemPromptDisabledControls();
saveSettingsDebounced();
return '';
}
function toggleSystemPromptCallback(_args, state) {
if (!state || typeof state !== 'string') {
return String(power_user.sysprompt.enabled);
}
const newState = isTrueBoolean(state);
setSystemPromptStateCallback(newState);
return String(power_user.sysprompt.enabled);
}
function selectSystemPromptCallback(args, name) {
if (!power_user.sysprompt.enabled && !isTrueBoolean(args.forceGet)) {
return '';
}
if (!name) {
return power_user.sysprompt.name ?? '';
}
const quiet = isTrueBoolean(args?.quiet);
const systemPromptNames = system_prompts.map(preset => preset.name);
let foundName = systemPromptNames.find(x => x.toLowerCase() === name.toLowerCase());
if (!foundName) {
const fuse = new Fuse(systemPromptNames);
const result = fuse.search(name);
if (result.length === 0) {
!quiet && toastr.warning(`System prompt "${name}" not found`);
return '';
}
foundName = result[0].item;
}
$select.val(foundName).trigger('input');
!quiet && toastr.success(`System prompt "${foundName}" selected`);
return foundName;
}
export function initSystemPrompts() {
$enabled.on('input', function () {
power_user.sysprompt.enabled = !!$(this).prop('checked');
toggleSystemPromptDisabledControls();
saveSettingsDebounced();
});
$select.on('change', async function () {
if (!power_user.sysprompt.enabled) {
$enabled.prop('checked', true).trigger('input');
}
const name = String($(this).val());
const prompt = system_prompts.find(p => p.name === name);
if (prompt) {
$content.val(prompt.content);
if (!CSS.supports('field-sizing', 'content')) {
await resetScrollHeight($content);
}
power_user.sysprompt.name = name;
power_user.sysprompt.content = prompt.content;
}
saveSettingsDebounced();
});
$content.on('input', function () {
power_user.sysprompt.content = String($(this).val());
saveSettingsDebounced();
});
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
name: 'sysprompt',
aliases: ['system-prompt'],
callback: selectSystemPromptCallback,
returns: 'current prompt name',
namedArgumentList: [
SlashCommandNamedArgument.fromProps({
name: 'quiet',
description: 'Suppress the toast message on prompt change',
typeList: [ARGUMENT_TYPE.BOOLEAN],
defaultValue: 'false',
enumList: commonEnumProviders.boolean('trueFalse')(),
}),
SlashCommandNamedArgument.fromProps({
name: 'forceGet',
description: 'Force getting a name even if system prompt is disabled',
typeList: [ARGUMENT_TYPE.BOOLEAN],
defaultValue: 'false',
enumList: commonEnumProviders.boolean('trueFalse')(),
}),
],
unnamedArgumentList: [
SlashCommandArgument.fromProps({
description: 'system prompt name',
typeList: [ARGUMENT_TYPE.STRING],
enumProvider: () => system_prompts.map(x => new SlashCommandEnumValue(x.name, null, enumTypes.enum, enumIcons.preset)),
}),
],
helpString: `
<div>
Selects a system prompt by name, using fuzzy search to find the closest match.
Returns the current system prompt name if no name is provided, as long as the system prompt is enabled or <code>forceGet=true</code> is passed.
</div>
<div>
<strong>Example:</strong>
<ul>
<li>
<pre><code class="language-stscript">/sysprompt </code></pre>
</li>
</ul>
</div>
`,
}));
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
name: 'sysprompt-on',
aliases: ['sysprompt-enable'],
callback: () => setSystemPromptStateCallback(true),
helpString: 'Enables system prompt.',
}));
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
name: 'sysprompt-off',
aliases: ['sysprompt-disable'],
callback: () => setSystemPromptStateCallback(false),
helpString: 'Disables system prompt.',
}));
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
name: 'sysprompt-state',
aliases: ['sysprompt-toggle'],
helpString: 'Gets the current system prompt state. If an argument is provided, it will set the system prompt state.',
unnamedArgumentList: [
SlashCommandArgument.fromProps({
description: 'system prompt state',
typeList: [ARGUMENT_TYPE.BOOLEAN],
enumList: commonEnumProviders.boolean('trueFalse')(),
}),
],
callback: toggleSystemPromptCallback,
}));
}

View File

@ -57,8 +57,8 @@
<li><tt>&lcub;&lcub;maxPrompt&rcub;&rcub;</tt> <span data-i18n="help_macros_41">max allowed prompt length in tokens = (context size - response length)</span></li>
<li><tt>&lcub;&lcub;exampleSeparator&rcub;&rcub;</tt> <span data-i18n="help_macros_42">context template example dialogues separator</span></li>
<li><tt>&lcub;&lcub;chatStart&rcub;&rcub;</tt> <span data-i18n="help_macros_43">context template chat start line</span></li>
<li><tt>&lcub;&lcub;systemPrompt&rcub;&rcub;</tt> <span data-i18n="help_macros_44">main system prompt (either character prompt override if chosen, or instructSystemPrompt)</span></li>
<li><tt>&lcub;&lcub;instructSystemPrompt&rcub;&rcub;</tt> <span data-i18n="help_macros_45">instruct system prompt</span></li>
<li><tt>&lcub;&lcub;systemPrompt&rcub;&rcub;</tt> <span data-i18n="help_macros_44">system prompt content when enabled (character prompt override if allowed, otherwise defaultSystemPrompt)</span></li>
<li><tt>&lcub;&lcub;defaultSystemPrompt&rcub;&rcub;</tt> <span data-i18n="help_macros_45">system prompt content</span></li>
<li><tt>&lcub;&lcub;instructSystemPromptPrefix&rcub;&rcub;</tt> <span data-i18n="help_macros_46">instruct system prompt prefix sequence</span></li>
<li><tt>&lcub;&lcub;instructSystemPromptSuffix&rcub;&rcub;</tt> <span data-i18n="help_macros_47">instruct system prompt suffix sequence</span></li>
<li><tt>&lcub;&lcub;instructUserPrefix&rcub;&rcub;</tt> <span data-i18n="help_macros_48">instruct user prefix sequence</span></li>
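A hypothetical story string exercising the renamed macros, for illustration only:
const exampleStoryString =
    '{{systemPrompt}}\n' +        // character prompt override if allowed, otherwise the selected system prompt; blank when the system prompt is disabled
    '{{defaultSystemPrompt}}\n' + // always the selected system prompt content; blank when the system prompt is disabled
    '{{instructSystemPromptPrefix}}'; // instruct sequence macros are unchanged by this commit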

View File

@ -0,0 +1,17 @@
<h3>
Choose what to export
</h3>
<div class="flex-container flexFlowColumn justifyLeft">
{{#each sections}}
{{#with this}}
<label class="checkbox_label">
{{#if checked}}
<input type="checkbox" value="{{key}}" checked />
{{else}}
<input type="checkbox" value="{{key}}" />
{{/if}}
<span data-i18n="{{name}}">{{name}}</span>
</label>
{{/with}}
{{/each}}
</div>

View File

@ -0,0 +1,17 @@
<h3>
Choose what to import
</h3>
<div class="flex-container flexFlowColumn justifyLeft">
{{#each sections}}
{{#with this}}
<label class="checkbox_label">
<input type="checkbox" value="{{key}}" checked>
<span data-i18n="{{name}}">{{name}}</span>
{{#if preset}}
<span>&ndash;</span>
<small>{{preset}}</small>
{{/if}}
</label>
{{/with}}
{{/each}}
</div>

View File

@ -0,0 +1,27 @@
<h3>
This instruct template also contains a system prompt.
</h3>
<div>
Would you like to migrate the system prompt from the template?
</div>
{{#if existing}}
<div class="marginTopBot5">
<b>Note:</b>
<span>you already have this prompt saved as:</span>
<span>{{existing}}</span>
</div>
{{/if}}
<div class="justifyLeft marginTop5">
<div>
<small>
<b>"Yes"</b> &ndash; The prompt will be imported and selected as a current system prompt.
</small>
</div>
<div>
<small>
<b>"No"</b> &ndash; The prompt will be ignored, no changes to the current system prompt.
</small>
</div>
</div>
<textarea class="wide100p textarea_compact" rows="10">{{prompt}}</textarea>

View File

@ -2573,7 +2573,7 @@ select {
-moz-appearance: none;
-webkit-appearance: none;
appearance: none;
padding: 3px 2px;
padding: 3px 5px;
background-color: var(--black30a);
border: 1px solid var(--SmartThemeBorderColor);
border-radius: 5px;
@ -2630,14 +2630,6 @@ select option:not(:checked) {
color: var(--active) !important;
}
#context_set_default.default {
color: var(--preferred) !important;
}
#instruct_set_default.default {
color: var(--preferred) !important;
}
.displayBlock {
display: block !important;
}
@ -5477,3 +5469,12 @@ body:not(.movingUI) .drawer-content.maximized {
#AdvancedFormatting .autoSetHeight {
overflow-wrap: anywhere;
}
#InstructSequencesColumn summary {
font-size: 0.95em;
cursor: pointer;
}
#InstructSequencesColumn details:not(:last-of-type) {
margin-bottom: 5px;
}

View File

@ -932,6 +932,7 @@ async function verifySecuritySettings() {
userModule.initUserStorage(dataRoot)
.then(userModule.ensurePublicDirectoriesExist)
.then(userModule.migrateUserData)
.then(userModule.migrateSystemPrompts)
.then(verifySecuritySettings)
.then(preSetupTasks)
.finally(startServer);

View File

@ -41,6 +41,7 @@ const USER_DIRECTORY_TEMPLATE = Object.freeze({
files: 'user/files',
vectors: 'vectors',
backups: 'backups',
sysprompt: 'sysprompt',
});
/**

View File

@ -43,6 +43,7 @@ const CONTENT_TYPES = {
CONTEXT: 'context',
MOVING_UI: 'moving_ui',
QUICK_REPLIES: 'quick_replies',
SYSPROMPT: 'sysprompt',
};
/**
@ -56,7 +57,7 @@ function getDefaultPresets(directories) {
const presets = [];
for (const contentItem of contentIndex) {
if (contentItem.type.endsWith('_preset') || contentItem.type === 'instruct' || contentItem.type === 'context') {
if (contentItem.type.endsWith('_preset') || contentItem.type === 'instruct' || contentItem.type === 'context' || contentItem.type === 'sysprompt') {
contentItem.name = path.parse(contentItem.filename).name;
contentItem.folder = getTargetByType(contentItem.type, directories);
presets.push(contentItem);
@ -217,6 +218,41 @@ function getContentIndex() {
return result;
}
/**
* Gets content by type and format.
* @param {string} type Type of content
* @param {'json'|'string'|'raw'} format Format of content
* @returns {string[]|Buffer[]} Array of content
*/
function getContentOfType(type, format) {
const contentIndex = getContentIndex();
const indexItems = contentIndex.filter((item) => item.type === type && item.folder);
const files = [];
for (const item of indexItems) {
if (!item.folder) {
continue;
}
try {
const filePath = path.join(item.folder, item.filename);
const fileContent = fs.readFileSync(filePath);
switch (format) {
case 'json':
files.push(JSON.parse(fileContent.toString()));
break;
case 'string':
files.push(fileContent.toString());
break;
case 'raw':
files.push(fileContent);
break;
}
} catch {
// Ignore errors
}
}
return files;
}
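A usage sketch within this module's scope (the call below simply gathers the bundled defaults):
// Collect all default system prompt presets from the content index as parsed JSON.
const defaultSysprompts = getContentOfType(CONTENT_TYPES.SYSPROMPT, 'json');
console.log(`Found ${defaultSysprompts.length} default system prompts`);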
/**
* Gets the target directory for the specified asset type.
* @param {ContentType} type Asset type
@ -257,6 +293,8 @@ function getTargetByType(type, directories) {
return directories.movingUI;
case CONTENT_TYPES.QUICK_REPLIES:
return directories.quickreplies;
case CONTENT_TYPES.SYSPROMPT:
return directories.sysprompt;
default:
return null;
}
@ -721,5 +759,6 @@ module.exports = {
checkForNewContent,
getDefaultPresets,
getDefaultPresetFile,
getContentOfType,
router,
};

View File

@ -27,6 +27,8 @@ function getPresetSettingsByAPI(apiId, directories) {
return { folder: directories.instruct, extension: '.json' };
case 'context':
return { folder: directories.context, extension: '.json' };
case 'sysprompt':
return { folder: directories.sysprompt, extension: '.json' };
default:
return { folder: null, extension: null };
}

View File

@ -251,6 +251,7 @@ router.post('/get', jsonParser, (request, response) => {
const instruct = readAndParseFromDirectory(request.user.directories.instruct);
const context = readAndParseFromDirectory(request.user.directories.context);
const sysprompt = readAndParseFromDirectory(request.user.directories.sysprompt);
response.send({
settings,
@ -268,6 +269,7 @@ router.post('/get', jsonParser, (request, response) => {
quickReplyPresets,
instruct,
context,
sysprompt,
enable_extensions: ENABLE_EXTENSIONS,
enable_extensions_auto_update: ENABLE_EXTENSIONS_AUTO_UPDATE,
enable_accounts: ENABLE_ACCOUNTS,

View File

@ -9,6 +9,8 @@ const storage = require('node-persist');
const express = require('express');
const mime = require('mime-types');
const archiver = require('archiver');
const writeFileAtomicSync = require('write-file-atomic').sync;
const _ = require('lodash');
const { USER_DIRECTORY_TEMPLATE, DEFAULT_USER, PUBLIC_DIRECTORIES, SETTINGS_FILE } = require('./constants');
const { getConfigValue, color, delay, setConfigValue, generateTimestamp } = require('./util');
@ -83,6 +85,7 @@ const STORAGE_KEYS = {
* @property {string} files - The directory where the uploaded files are stored
* @property {string} vectors - The directory where the vectors are stored
* @property {string} backups - The directory where the backups are stored
* @property {string} sysprompt - The directory where the system prompt data is stored
*/
/**
@ -323,6 +326,60 @@ async function migrateUserData() {
console.log(color.green('Migration completed!'));
}
async function migrateSystemPrompts() {
/**
* Gets the default system prompts.
* @returns {Promise<any[]>} - The list of default system prompts
*/
async function getDefaultSystemPrompts() {
try {
const { getContentOfType } = await import('./endpoints/content-manager.js');
return getContentOfType('sysprompt', 'json');
} catch {
return [];
}
}
const directories = await getUserDirectoriesList();
for (const directory of directories) {
try {
const migrateMarker = path.join(directory.sysprompt, '.migrated');
if (fs.existsSync(migrateMarker)) {
continue;
}
const defaultPrompts = await getDefaultSystemPrompts();
const instructs = fs.readdirSync(directory.instruct);
let migratedPrompts = [];
for (const instruct of instructs) {
const instructPath = path.join(directory.instruct, instruct);
const sysPromptPath = path.join(directory.sysprompt, instruct);
if (path.extname(instruct) === '.json' && !fs.existsSync(sysPromptPath)) {
const instructData = JSON.parse(fs.readFileSync(instructPath, 'utf8'));
if ('system_prompt' in instructData && 'name' in instructData) {
const syspromptData = { name: instructData.name, content: instructData.system_prompt };
migratedPrompts.push(syspromptData);
delete instructData.system_prompt;
writeFileAtomicSync(instructPath, JSON.stringify(instructData, null, 4));
}
}
}
// Keep only prompts with unique content
migratedPrompts = _.uniqBy(migratedPrompts, 'content');
// Drop prompts whose content matches one of the default prompts
migratedPrompts = migratedPrompts.filter(x => !defaultPrompts.some(y => y.content === x.content));
for (const sysPromptData of migratedPrompts) {
sysPromptData.name = `[Migrated] ${sysPromptData.name}`;
const syspromptPath = path.join(directory.sysprompt, `${sysPromptData.name}.json`);
writeFileAtomicSync(syspromptPath, JSON.stringify(sysPromptData, null, 4));
console.log(`Migrated system prompt ${sysPromptData.name} for ${directory.root.split(path.sep).pop()}`);
}
writeFileAtomicSync(migrateMarker, '');
} catch {
// Ignore errors
}
}
}
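Sketched outcome for a hypothetical user directory (paths are illustrative; the real roots come from getUserDirectoriesList()):
const path = require('path');
const userDir = { instruct: 'data/default-user/instruct', sysprompt: 'data/default-user/sysprompt' };
// After the migration: every instruct *.json has its system_prompt key stripped, each unique
// non-default prompt is written as '[Migrated] <name>.json' under the sysprompt folder, and
// this empty marker keeps the migration from running again:
console.log(path.join(userDir.sysprompt, '.migrated')); // e.g. data/default-user/sysprompt/.migrated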
/**
* Converts a user handle to a storage key.
* @param {string} handle User handle
@ -724,6 +781,7 @@ module.exports = {
requireLoginMiddleware,
requireAdminMiddleware,
migrateUserData,
migrateSystemPrompts,
getPasswordSalt,
getPasswordHash,
getCsrfSecret,