Old anchors removed

SillyLossy
2023-05-14 19:47:32 +03:00
parent 3b0d0b580a
commit 6a94bb5063
4 changed files with 27 additions and 149 deletions

View File

@@ -1348,23 +1348,12 @@
</div>
<div id="anchors-block">
<h4>
Anchors Order
<a href="/notes#anchors" class="notes-link" target="_blank">
<span class="note-link-span">?</span>
</a>
Anchors
</h4>
<select id="anchor_order">
<option value="0">Character then Style</option>
<option value="1">Style then Character</option>
</select>
<div id="anchor_checkbox">
<label for="character_anchor"><input id="character_anchor" type="checkbox" />
Character Anchor
</label>
<label for="style_anchor"><input id="style_anchor" type="checkbox" />
Style Anchor
</label>
</div>
<p>
This feature is obsolete and has been removed.
Something else is coming soon in its place!
</p>
</div>
</div>
</div>
@@ -1925,6 +1914,17 @@
</div>
</div>
</div>
<div class="inline-drawer wide100p flexFlowColumn">
<div id="groupCurrentMemberListToggle" class="inline-drawer-toggle inline-drawer-header">
Current Members
<div class="fa-solid fa-circle-chevron-down inline-drawer-icon down"></div>
</div>
<div class="inline-drawer-content">
<div name="Current Group Members" class="flex-container flexFlowColumn overflowYAuto flex1">
<div id="rm_group_members" class="overflowYAuto flex-container"></div>
</div>
</div>
</div>
<div class="inline-drawer wide100p flexFlowColumn">
<div id="groupAddMemberListToggle" class="inline-drawer-toggle inline-drawer-header">
Add Members
@@ -1940,17 +1940,6 @@
</div>
</div>
</div>
<div class="inline-drawer wide100p flexFlowColumn">
<div id="groupCurrentMemberListToggle" class="inline-drawer-toggle inline-drawer-header">
Current Members
<div class="fa-solid fa-circle-chevron-down inline-drawer-icon down"></div>
</div>
<div class="inline-drawer-content">
<div name="Current Group Members" class="flex-container flexFlowColumn overflowYAuto flex1">
<div id="rm_group_members" class="overflowYAuto flex-container"></div>
</div>
</div>
</div>
</div>

View File

@@ -393,26 +393,9 @@ _Lost API keys can't be restored! Make sure to keep it safe!_
## Anchors
Anchors are used to increase the length of messages.
There are two types of anchors: _Character Anchor_ and _Style Anchor_.
This feature is considered obsolete and has been removed.
_Character Anchor_ - affects the character played by the AI by motivating it to write longer messages.
Looks like: `[Elaborate speaker]`
_Style Anchor_ - affects the entire AI model, motivating the AI to write longer messages even when it is not acting as the character.
Looks like: `[Writing style: very long messages]`
***
Anchors Order sets the location of anchors in the prompt; the first anchor in the order is placed much further back in the context and thus has less influence than the second.
The second anchor is only turned on after 8-12 messages, because when the chat still only has a few messages, the first anchor creates enough effect on its own.
Sometimes an AI model may not perceive anchors correctly, or it may already generate sufficiently long messages. In these cases, you can disable the anchors by unchecking their respective boxes.
_When using Pygmalion models these anchors are automatically disabled, since Pygmalion already generates long enough messages._
Using the Author's Note extension is now the preferred way to add prompt injections at variable depth.
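For reference, below is a minimal sketch of the anchor placement logic this commit removes, condensed from the deleted code in `Generate()`; the `buildAnchors` helper is illustrative only and does not exist in the codebase:

```js
// Illustrative sketch only: condenses the deleted anchor logic from Generate().
// buildAnchors() is a hypothetical helper, not an actual SillyTavern function.
function buildAnchors({ characterAnchor, styleAnchor, anchorOrder, name2 }) {
    const charPart = characterAnchor ? `${name2} Elaborate speaker` : "";
    const stylePart = styleAnchor ? "Writing style: very long messages" : "";

    // anchorOrder 0 = "Character then Style", 1 = "Style then Character"
    const anchorTop = anchorOrder === 0 ? charPart : stylePart;
    let anchorBottom = anchorOrder === 0 ? stylePart : charPart;
    if (anchorBottom) {
        anchorBottom = `[${anchorBottom}]`;
    }

    // anchorTop was injected roughly 8 messages deep in the chat history;
    // anchorBottom was appended to the final user message once the chat
    // grew past 8 messages.
    return { anchorTop, anchorBottom };
}
```

The Author's Note extension covers the same use case, with the injected text and its depth configurable rather than hard-coded.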
## Instruct Mode
@@ -594,6 +577,8 @@ Characters are drafted based on the order they are presented in group members li
## Multigen
*This feature provides pseudo-streaming functionality, which conflicts with token streaming. When Multigen is enabled and the generation API supports streaming, only Multigen streaming will be used.*
SillyTavern tries to create faster and longer responses by chaining the generation using smaller batches.
### Default settings:
@@ -614,6 +599,7 @@ Next batches = 30 tokens
2. Character starts speaking for You.
3. <|endoftext|> token reached.
4. No text generated.
5. Stop sequence generated. (Instruct mode only)
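
As a rough illustration, here is a hedged sketch of how this kind of batch chaining could be structured; `generateBatch` and `hitStopCondition` are hypothetical stand-ins for the real generation call and stop-condition checks, not actual SillyTavern functions:

```js
// Illustrative sketch of Multigen-style chaining; generateBatch() and
// hitStopCondition() are hypothetical stand-ins, not SillyTavern functions.
async function multigen(prompt, { firstBatch = 50, nextBatches = 30 } = {}) {
    let response = "";
    let batchSize = firstBatch;

    for (;;) {
        // Ask the backend for one small batch, feeding back what was generated so far.
        const chunk = await generateBatch(prompt + response, batchSize);
        response += chunk;

        // Stop on the conditions listed above, e.g. the character starts
        // speaking for the user, <|endoftext|> is reached, nothing was
        // generated, or (in Instruct mode) a stop sequence appears.
        if (hitStopCondition(chunk, response)) {
            break;
        }

        batchSize = nextBatches; // every batch after the first uses the smaller size
    }

    return response;
}
```

The smaller follow-up batches keep perceived latency low: the first chunk arrives quickly, and each subsequent chunk extends the reply until one of the stop conditions is met.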
## User Settings

View File

@@ -531,10 +531,6 @@ var message_already_generated = "";
var cycle_count_generation = 0;
var swipes = true;
let anchor_order = 0;
let style_anchor = true;
let character_anchor = true;
let extension_prompts = {};
var main_api;// = "kobold";
@@ -1683,29 +1679,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
// bias from the latest message is top priority//
promptBias = messageBias ?? promptBias ?? '';
// Compute anchors
const topAnchorDepth = 8;
const bottomAnchorThreshold = 8;
let anchorTop = '';
let anchorBottom = '';
if (!is_pygmalion) {
console.log('saw not pyg');
let postAnchorChar = character_anchor ? name2 + " Elaborate speaker" : "";
let postAnchorStyle = style_anchor ? "Writing style: very long messages" : "";
if (anchor_order === 0) {
anchorTop = postAnchorChar;
anchorBottom = postAnchorStyle;
} else { // anchor_order === 1
anchorTop = postAnchorStyle;
anchorBottom = postAnchorChar;
}
if (anchorBottom) {
anchorBottom = "[" + anchorBottom + "]";
}
}
//*********************************
//PRE FORMATING STRING
//*********************************
@@ -1761,6 +1734,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
console.log(`Core/all messages: ${coreChat.length}/${chat.length}`);
if (main_api === 'openai') {
message_already_generated = ''; // OpenAI doesn't have multigen
setOpenAIMessages(coreChat, quiet_prompt);
setOpenAIMessageExamples(mesExamplesArray);
}
@@ -1773,11 +1747,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
storyString += appendToStoryString(Scenario, power_user.disable_scenario_formatting ? '' : 'Scenario: ');
} else {
storyString += appendToStoryString(charDescription, '');
if (coreChat.length < topAnchorDepth) {
storyString += appendToStoryString(charPersonality, power_user.disable_personality_formatting ? '' : name2 + "'s personality: ");
}
storyString += appendToStoryString(Scenario, power_user.disable_scenario_formatting ? '' : 'Circumstances and context of the dialogue: ');
}
@@ -1886,9 +1856,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
storyString,
examplesString,
chatString,
anchorTop,
anchorBottom,
charPersonality,
promptBias,
allAnchors,
quiet_prompt,
@@ -1950,7 +1917,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
generatedPromtCache += cycleGenerationPromt;
if (generatedPromtCache.length == 0) {
if (main_api === 'openai') {
generateOpenAIPromptCache(charPersonality, topAnchorDepth, anchorTop, bottomAnchorThreshold, anchorBottom);
generateOpenAIPromptCache();
}
console.log('generating prompt');
@@ -1973,21 +1940,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
//item = item.substr(0, item.length - 1);
}
}
if (i === arrMes.length - topAnchorDepth && !is_pygmalion) {
//chatString = chatString.substr(0,chatString.length-1);
//anchorAndPersonality = "[Genre: roleplay chat][Tone: very long messages with descriptions]";
let personalityAndAnchor = [charPersonality, anchorTop].filter(x => x).join(' ');
if (personalityAndAnchor) {
item += "[" + personalityAndAnchor + "]\n";
}
}
if (i === arrMes.length - 1 && coreChat.length > bottomAnchorThreshold && item.trim().startsWith(name1 + ":") && !is_pygmalion) {//For add anchor in end
//chatString+=postAnchor+"\n";//"[Writing style: very long messages]\n";
if (anchorBottom) {
item = item.replace(/\n$/, " ");
item += anchorBottom + "\n";
}
}
if (is_pygmalion && !isInstruct) {
if (i === arrMes.length - 1 && item.trim().startsWith(name1 + ":")) {//for add name2 when user sent
item = item + name2 + ":";
@@ -2075,9 +2027,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
storyString,
mesExmString,
mesSendString,
anchorTop,
anchorBottom,
charPersonality,
generatedPromtCache,
promptBias,
allAnchors,
@@ -3459,30 +3408,15 @@ async function getSettings(type) {
`#settings_perset_novel option[value=${novelai_setting_names[nai_settings.preset_settings_novel]}]`
).attr("selected", "true");
//Load AI model config settings (temp, context length, anchors, and anchor order)
//Load AI model config settings
amount_gen = settings.amount_gen;
if (settings.max_context !== undefined)
max_context = parseInt(settings.max_context);
if (settings.anchor_order !== undefined)
anchor_order = parseInt(settings.anchor_order);
if (settings.style_anchor !== undefined)
style_anchor = !!settings.style_anchor;
if (settings.character_anchor !== undefined)
character_anchor = !!settings.character_anchor;
$("#style_anchor").prop("checked", style_anchor);
$("#character_anchor").prop("checked", character_anchor);
$("#anchor_order option[value=" + anchor_order + "]").attr(
"selected",
"true"
);
swipes = settings.swipes !== undefined ? !!settings.swipes : true; // enable swipes by default
$('#swipes-checkbox').prop('checked', swipes); /// swipecode
//console.log('getSettings -- swipes = ' + swipes + '. toggling box');
hideSwipeButtons();
//console.log('getsettings calling showswipebtns');
showSwipeButtons();
// Kobold
@@ -3580,9 +3514,6 @@ async function saveSettings(type) {
user_avatar: user_avatar,
amount_gen: amount_gen,
max_context: max_context,
anchor_order: anchor_order,
style_anchor: style_anchor,
character_anchor: character_anchor,
main_api: main_api,
world_info: world_info,
world_info_depth: world_info_depth,
@@ -5519,17 +5450,6 @@ $(document).ready(function () {
//////////////////////////////////////////////////////////////
$("#style_anchor").change(function () {
style_anchor = !!$("#style_anchor").prop("checked");
saveSettingsDebounced();
});
$("#character_anchor").change(function () {
character_anchor = !!$("#character_anchor").prop("checked");
saveSettingsDebounced();
});
$("#select_chat_cross").click(function () {
$("#shadow_select_chat_popup").transition({
opacity: 0,
@@ -5809,11 +5729,6 @@ $(document).ready(function () {
is_api_button_press_novel = true;
});
$("#anchor_order").change(function () {
anchor_order = parseInt($("#anchor_order").find(":selected").val());
saveSettingsDebounced();
});
//**************************CHARACTER IMPORT EXPORT*************************//
$("#character_import_button").click(function () {
$("#character_import_file").click();

View File

@@ -205,22 +205,10 @@ function setOpenAIMessageExamples(mesExamplesArray) {
}
}
function generateOpenAIPromptCache(charPersonality, topAnchorDepth, anchorTop, bottomAnchorThreshold, anchorBottom) {
function generateOpenAIPromptCache() {
openai_msgs = openai_msgs.reverse();
openai_msgs.forEach(function (msg, i, arr) {//For added anchors and others
openai_msgs.forEach(function (msg, i, arr) {
let item = msg["content"];
if (i === openai_msgs.length - topAnchorDepth) {
let personalityAndAnchor = [charPersonality, anchorTop].filter(x => x).join(' ');
if (personalityAndAnchor) {
item = `[${name2} is ${personalityAndAnchor}]\n${item}`;
}
}
if (i === openai_msgs.length - 1 && openai_msgs.length > bottomAnchorThreshold && msg.role === "user") {//For add anchor in end
if (anchorBottom) {
item = anchorBottom + "\n" + item;
}
}
msg["content"] = item;
openai_msgs[i] = msg;
});