Prompt bias adjustments

This commit is contained in:
SillyLossy
2023-05-18 12:15:53 +03:00
parent 2e4bf2a41a
commit 3659b1d847

View File

@@ -1733,21 +1733,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
deactivateSendButtons(); deactivateSendButtons();
let promptBias = null; let { messageBias, promptBias } = getBiasStrings(textareaText);
let messageBias = extractMessageBias(textareaText);
// gets bias of the latest message where it was applied
for (let mes of chat.slice().reverse()) {
if (mes && mes.is_user && mes.extra && mes.extra.bias) {
if (mes.extra.bias.trim().length > 0) {
promptBias = mes.extra.bias;
}
break;
}
}
// bias from the latest message is top priority//
promptBias = messageBias ?? promptBias ?? '';
//********************************* //*********************************
//PRE FORMATING STRING //PRE FORMATING STRING
@@ -1809,7 +1795,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
} }
// Pygmalion does that anyway // Pygmalion does that anyway
if (power_user.always_force_name2 && !is_pygmalion) { if (promptBias || (power_user.always_force_name2 && !is_pygmalion)) {
force_name2 = true; force_name2 = true;
} }
@@ -1871,7 +1857,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
storyString, storyString,
examplesString, examplesString,
chatString, chatString,
promptBias,
allAnchors, allAnchors,
quiet_prompt, quiet_prompt,
].join('').replace(/\r/gm, ''); ].join('').replace(/\r/gm, '');
@@ -1893,15 +1878,14 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
} }
chatString = item + chatString; chatString = item + chatString;
if (canFitMessages()) { //(The number of tokens in the entire promt) need fix, it must count correctly (added +120, so that the description of the character does not hide) if (canFitMessages()) {
//if (is_pygmalion && i == chat2.length-1) item='<START>\n'+item;
arrMes[arrMes.length] = item; arrMes[arrMes.length] = item;
} else { } else {
break; break;
} }
await delay(1); //For disable slow down (encode gpt-2 need fix)
// Prevent UI thread lock on tokenization
await delay(1);
} }
if (main_api !== 'openai') { if (main_api !== 'openai') {
@@ -2005,19 +1989,28 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
const isBottom = j === mesSend.length - 1; const isBottom = j === mesSend.length - 1;
mesSendString += mesSend[j]; mesSendString += mesSend[j];
if (isBottom) {
mesSendString = modifyLastPromptLine(mesSendString);
}
}
}
function modifyLastPromptLine(mesSendString) {
// Add quiet generation prompt at depth 0 // Add quiet generation prompt at depth 0
if (isBottom && quiet_prompt && quiet_prompt.length) { if (quiet_prompt && quiet_prompt.length) {
const name = is_pygmalion ? 'You' : name1; const name = is_pygmalion ? 'You' : name1;
const quietAppend = isInstruct ? formatInstructModeChat(name, quiet_prompt, false, true) : `\n${name}: ${quiet_prompt}`; const quietAppend = isInstruct ? formatInstructModeChat(name, quiet_prompt, false, true) : `\n${name}: ${quiet_prompt}`;
mesSendString += quietAppend; mesSendString += quietAppend;
} }
if (isInstruct && isBottom && tokens_already_generated === 0) { // Get instruct mode line
if (isInstruct && tokens_already_generated === 0) {
const name = isImpersonate ? (is_pygmalion ? 'You' : name1) : name2; const name = isImpersonate ? (is_pygmalion ? 'You' : name1) : name2;
mesSendString += formatInstructModePrompt(name, isImpersonate); mesSendString += formatInstructModePrompt(name, isImpersonate);
} }
if (!isInstruct && isImpersonate && isBottom && tokens_already_generated === 0) { // Get non-instruct impersonation line
if (!isInstruct && isImpersonate && tokens_already_generated === 0) {
const name = is_pygmalion ? 'You' : name1; const name = is_pygmalion ? 'You' : name1;
if (!mesSendString.endsWith('\n')) { if (!mesSendString.endsWith('\n')) {
mesSendString += '\n'; mesSendString += '\n';
@@ -2025,13 +2018,15 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
mesSendString += name + ':'; mesSendString += name + ':';
} }
if (force_name2 && isBottom && tokens_already_generated === 0) { // Add character's name
if (force_name2 && tokens_already_generated === 0) {
if (!mesSendString.endsWith('\n')) { if (!mesSendString.endsWith('\n')) {
mesSendString += '\n'; mesSendString += '\n';
} }
mesSendString += name2 + ':'; mesSendString += name2 + ':' + promptBias;
}
} }
return mesSendString;
} }
function checkPromtSize() { function checkPromtSize() {
@@ -2043,7 +2038,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
mesExmString, mesExmString,
mesSendString, mesSendString,
generatedPromtCache, generatedPromtCache,
promptBias,
allAnchors, allAnchors,
quiet_prompt, quiet_prompt,
].join('').replace(/\r/gm, ''); ].join('').replace(/\r/gm, '');
@@ -2081,8 +2075,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
afterScenarioAnchor + afterScenarioAnchor +
mesExmString + mesExmString +
mesSendString + mesSendString +
generatedPromtCache + generatedPromtCache;
promptBias;
if (zeroDepthAnchor && zeroDepthAnchor.length) { if (zeroDepthAnchor && zeroDepthAnchor.length) {
if (!isMultigenEnabled() || tokens_already_generated == 0) { if (!isMultigenEnabled() || tokens_already_generated == 0) {
@@ -2182,9 +2175,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
type: 'POST', // type: 'POST', //
url: generate_url, // url: generate_url, //
data: JSON.stringify(generate_data), data: JSON.stringify(generate_data),
beforeSend: function () { beforeSend: () => {},
},
cache: false, cache: false,
dataType: "json", dataType: "json",
contentType: "application/json", contentType: "application/json",
@@ -2353,15 +2344,32 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
} //rungenerate ends } //rungenerate ends
} else { //generate's primary loop ends, after this is error handling for no-connection or safety-id } else { //generate's primary loop ends, after this is error handling for no-connection or safety-id
if (this_chid === undefined || this_chid === 'invalid-safety-id') { if (this_chid === undefined || this_chid === 'invalid-safety-id') {
//send ch sel toastr.warning('Сharacter is not selected');
popup_type = 'char_not_selected';
callPopup('<h3>Сharacter is not selected</h3>');
} }
is_send_press = false; is_send_press = false;
} }
//console.log('generate ending'); //console.log('generate ending');
} //generate ends } //generate ends
/**
 * Resolves the bias strings for the current generation.
 * @param {string} textareaText - Raw text from the send textarea; any inline
 *   bias markup in it is extracted via extractMessageBias.
 * @returns {{ messageBias: string, promptBias: string }} The bias taken from
 *   the textarea (messageBias) and the effective prompt bias, where a bias in
 *   the textarea takes priority over one stored on the newest user message.
 */
function getBiasStrings(textareaText) {
    const messageBias = extractMessageBias(textareaText);
    let storedBias = '';

    // Scan the chat from newest to oldest; only the most recent user message
    // is consulted, and only a non-blank stored bias is kept.
    for (let i = chat.length - 1; i >= 0; i--) {
        const mes = chat[i];
        if (mes && mes.is_user && mes.extra && mes.extra.bias) {
            if (mes.extra.bias.trim().length > 0) {
                storedBias = mes.extra.bias;
            }
            break;
        }
    }

    // Bias from the latest message (textarea) wins over the stored one.
    const promptBias = messageBias || storedBias || '';
    return { messageBias, promptBias };
}
function formatMessageHistoryItem(chatItem, isInstruct) { function formatMessageHistoryItem(chatItem, isInstruct) {
const isNarratorType = chatItem?.extra?.type === system_message_types.NARRATOR; const isNarratorType = chatItem?.extra?.type === system_message_types.NARRATOR;
const characterName = selected_group ? chatItem.name : name2; const characterName = selected_group ? chatItem.name : name2;