Merge pull request #968 from bdashore3/staging

CFG: Improvements
Cohee committed 2023-08-22 19:00:39 +03:00 (committed by GitHub)
6 changed files with 330 additions and 153 deletions
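
For context: classifier-free guidance (CFG) for text generation samples the model against both a positive and a negative prompt and blends the two logit distributions with a guidance scale; this PR builds the prompt-side plumbing for that inside Generate(). A minimal sketch of the blending idea, not code from this diff (names are illustrative):

function applyCfg(positiveLogits, negativeLogits, guidanceScale) {
    // guidanceScale === 1 reduces to plain sampling, which is why the
    // diff below skips CFG entirely when cfgGuidanceScale.value === 1
    return positiveLogits.map((pos, i) =>
        negativeLogits[i] + guidanceScale * (pos - negativeLogits[i]));
}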


@@ -163,6 +163,7 @@ import { deviceInfo } from "./scripts/RossAscends-mods.js";
import { registerPromptManagerMigration } from "./scripts/PromptManager.js";
import { getRegexedString, regex_placement } from "./scripts/extensions/regex/engine.js";
import { FILTER_TYPES, FilterHelper } from "./scripts/filters.js";
+import { getCfgPrompt, getGuidanceScale } from "./scripts/extensions/cfg/util.js";
import {
formatInstructModeChat,
formatInstructModePrompt,
@@ -2729,6 +2730,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
}
const anchorDepth = Math.abs(i - arrMes.length + 1);
+// NOTE: Depth injected here!
const extensionAnchor = getExtensionPrompt(extension_prompt_types.IN_CHAT, anchorDepth);
if (anchorDepth > 0 && extensionAnchor && extensionAnchor.length) {
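
A note on the depth arithmetic above: anchorDepth measures distance from the newest message, e.g. with arrMes.length === 5 the newest message (i === 4) gives |4 - 5 + 1| = 0 and the oldest (i === 0) gives 4. The CFG insertion added later in this diff uses the same bottom-up depth convention.
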
@@ -2740,7 +2742,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
}
let mesExmString = '';
-let mesSendString = '';
function setPromtString() {
if (main_api == 'openai') {
@@ -2749,65 +2750,60 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
        console.debug('--setting Prompt string');
        mesExmString = pinExmString ?? mesExamplesArray.slice(0, count_exm_add).join('');
-       mesSendString = '';
-       for (let j = 0; j < mesSend.length; j++) {
-           const isBottom = j === mesSend.length - 1;
-           mesSendString += mesSend[j];
-           if (isBottom) {
-               mesSendString = modifyLastPromptLine(mesSendString);
-           }
-       }
+       mesSend[mesSend.length - 1] = modifyLastPromptLine(mesSend[mesSend.length - 1]);
    }

-   function modifyLastPromptLine(mesSendString) {
+   function modifyLastPromptLine(lastMesString) {
        // Add quiet generation prompt at depth 0
        if (quiet_prompt && quiet_prompt.length) {
            const name = is_pygmalion ? 'You' : name1;
            const quietAppend = isInstruct ? formatInstructModeChat(name, quiet_prompt, false, true, false, name1, name2) : `\n${name}: ${quiet_prompt}`;
-           mesSendString += quietAppend;
+           lastMesString += quietAppend;
            // Bail out early
-           return mesSendString;
+           return lastMesString;
        }

        // Get instruct mode line
        if (isInstruct && tokens_already_generated === 0) {
            const name = isImpersonate ? (is_pygmalion ? 'You' : name1) : name2;
-           mesSendString += formatInstructModePrompt(name, isImpersonate, promptBias, name1, name2);
+           lastMesString += formatInstructModePrompt(name, isImpersonate, promptBias, name1, name2);
        }

        // Get non-instruct impersonation line
        if (!isInstruct && isImpersonate && tokens_already_generated === 0) {
            const name = is_pygmalion ? 'You' : name1;
-           if (!mesSendString.endsWith('\n')) {
-               mesSendString += '\n';
+           if (!lastMesString.endsWith('\n')) {
+               lastMesString += '\n';
            }
-           mesSendString += name + ':';
+           lastMesString += name + ':';
        }

-       // Add character's name
-       if (!isInstruct && force_name2 && tokens_already_generated === 0) {
-           if (!mesSendString.endsWith('\n')) {
-               mesSendString += '\n';
-           }
-           // Add a leading space to the prompt bias if applicable
-           if (!promptBias || promptBias.length === 0) {
-               console.debug("No prompt bias was found.");
-               mesSendString += `${name2}:`;
-           } else if (promptBias.startsWith(' ')) {
-               console.debug(`A prompt bias with a leading space was found: ${promptBias}`);
-               mesSendString += `${name2}:${promptBias}`
-           } else {
-               console.debug(`A prompt bias was found: ${promptBias}`);
-               mesSendString += `${name2}: ${promptBias}`;
-           }
-       } else if (power_user.user_prompt_bias && !isImpersonate && !isInstruct) {
-           console.debug(`A prompt bias was found without character's name appended: ${promptBias}`);
-           mesSendString += substituteParams(power_user.user_prompt_bias);
-       }
+       // Force name append on continue
+       if (!isInstruct && force_name2 && (tokens_already_generated === 0 || isContinue)) {
+           if (!lastMesString.endsWith('\n')) {
+               lastMesString += '\n';
+           }
+           lastMesString += `${name2}:`;
+       }

-       return mesSendString;
+       return lastMesString;
    }

+   // Clean up the already generated prompt for seamless addition
+   function cleanupPromptCache(promptCache) {
+       // Remove the first occurrence of character's name
+       if (promptCache.trimStart().startsWith(`${name2}:`)) {
+           promptCache = promptCache.replace(`${name2}:`, '').trimStart();
+       }
+
+       // Remove the first occurrence of prompt bias
+       if (promptCache.trimStart().startsWith(promptBias)) {
+           promptCache = promptCache.replace(promptBias, '');
+       }
+
+       return promptCache;
+   }
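
To illustrate the new cleanupPromptCache: on a continue, the cached generation may start with the character's name and prompt bias that were already appended to the prompt, so both are stripped before the cache is re-joined. A standalone sketch with assumed values (the real function closes over Generate()'s name2 and promptBias):

const name2 = 'Alice';
const promptBias = 'smiles and says';

function cleanupPromptCache(promptCache) {
    // strip a leading 'Alice:' once, then a leading prompt bias once
    if (promptCache.trimStart().startsWith(`${name2}:`)) {
        promptCache = promptCache.replace(`${name2}:`, '').trimStart();
    }
    if (promptCache.trimStart().startsWith(promptBias)) {
        promptCache = promptCache.replace(promptBias, '');
    }
    return promptCache;
}

console.log(cleanupPromptCache('Alice: smiles and says hello')); // ' hello'
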
function checkPromtSize() {
@@ -2816,7 +2812,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
const prompt = [
storyString,
mesExmString,
-mesSendString,
+mesSend.join(''),
generatedPromtCache,
allAnchors,
quiet_prompt,
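
These prompt bits are what checkPromtSize presumably joins and token-counts to decide whether more chat history must be trimmed. A rough sketch of that budget check (getTokenCount and the exact arithmetic are assumptions, not shown in this hunk):

function promptFitsContext(promptBits, maxContext, amountGen, getTokenCount) {
    // reserve the requested response length out of the context window
    return getTokenCount(promptBits.join('')) <= maxContext - amountGen;
}
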
@@ -2845,30 +2841,90 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
setPromtString();
}
-   // add chat preamble
-   mesSendString = addChatsPreamble(mesSendString);
-
-   // add a custom dingus (if defined)
-   mesSendString = addChatsSeparator(mesSendString);
-
-   let finalPromt =
-       storyString +
-       afterScenarioAnchor +
-       mesExmString +
-       mesSendString +
-       generatedPromtCache;
-
-   if (zeroDepthAnchor && zeroDepthAnchor.length) {
-       if (!isMultigenEnabled() || tokens_already_generated == 0) {
-           finalPromt = appendZeroDepthAnchor(force_name2, zeroDepthAnchor, finalPromt);
-       }
-   }
-
-   finalPromt = finalPromt.replace(/\r/gm, '');
-
-   if (power_user.collapse_newlines) {
-       finalPromt = collapseNewlines(finalPromt);
-   }
+   // Fetches the combined prompt for both negative and positive prompts
+   const cfgGuidanceScale = getGuidanceScale();
+   function getCombinedPrompt(isNegative) {
+       let finalMesSend = [...mesSend];
+       let cfgPrompt = {};
+       if (cfgGuidanceScale && cfgGuidanceScale?.value !== 1) {
+           cfgPrompt = getCfgPrompt(cfgGuidanceScale, isNegative);
+       }
+
+       if (cfgPrompt && cfgPrompt?.value) {
+           if (cfgPrompt?.depth === 0) {
+               finalMesSend[finalMesSend.length - 1] +=
+                   /\s/.test(finalMesSend[finalMesSend.length - 1].slice(-1))
+                       ? cfgPrompt.value
+                       : ` ${cfgPrompt.value}`;
+           } else {
+               // TODO: Switch from splice method to insertion depth method
+               finalMesSend.splice(mesSend.length - cfgPrompt.depth, 0, `${cfgPrompt.value}\n`);
+           }
+       }
+
+       // Add prompt bias after everything else
+       // Always run with continue
+       if (!isInstruct && !isImpersonate && (tokens_already_generated === 0 || isContinue)) {
+           if (promptBias.trim().length !== 0) {
+               finalMesSend[finalMesSend.length - 1] +=
+                   /\s/.test(finalMesSend[finalMesSend.length - 1].slice(-1))
+                       ? promptBias.trimStart()
+                       : ` ${promptBias.trimStart()}`;
+           }
+       }
+
+       // Prune from prompt cache if it exists
+       if (generatedPromtCache.length !== 0) {
+           generatedPromtCache = cleanupPromptCache(generatedPromtCache);
+       }
+
+       // Override for prompt bits data
+       if (!isNegative) {
+           mesSend = finalMesSend;
+       }
+
+       let mesSendString = finalMesSend.join('');
+
+       // add chat preamble
+       mesSendString = addChatsPreamble(mesSendString);
+
+       // add a custom dingus (if defined)
+       mesSendString = addChatsSeparator(mesSendString);
+
+       let combinedPrompt =
+           storyString +
+           afterScenarioAnchor +
+           mesExmString +
+           mesSendString +
+           generatedPromtCache;
+
+       // TODO: Move zero-depth anchor append to work like CFG and bias appends
+       if (zeroDepthAnchor && zeroDepthAnchor.length) {
+           if (!isMultigenEnabled() || tokens_already_generated == 0) {
+               combinedPrompt = appendZeroDepthAnchor(force_name2, zeroDepthAnchor, combinedPrompt);
+           }
+       }
+
+       combinedPrompt = combinedPrompt.replace(/\r/gm, '');
+
+       if (power_user.collapse_newlines) {
+           combinedPrompt = collapseNewlines(combinedPrompt);
+       }
+
+       return combinedPrompt;
+   }
+
+   // Get the negative prompt first since it has the unmodified mesSend array
+   let negativePrompt = main_api == 'textgenerationwebui' ? getCombinedPrompt(true) : undefined;
+   let finalPromt = getCombinedPrompt(false);
+
+   // Include the entire guidance scale object
+   const cfgValues = {
+       guidanceScale: cfgGuidanceScale,
+       negativePrompt: negativePrompt
+   };
let this_amount_gen = parseInt(amount_gen); // how many tokens the AI will be requested to generate
let this_settings = koboldai_settings[koboldai_setting_names[preset_settings]];
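
The call order above is load-bearing: getCombinedPrompt(false) overwrites the outer mesSend with its CFG- and bias-modified copy (the "Override for prompt bits data" branch), so the negative prompt must be built first, while mesSend is still pristine. A reduced illustration of the hazard:

let mesSend = ['A', 'B'];
function getCombinedPrompt(isNegative) {
    const finalMesSend = [...mesSend, isNegative ? '[neg]' : '[pos]'];
    if (!isNegative) {
        mesSend = finalMesSend; // later calls would see '[pos]' baked in
    }
    return finalMesSend.join('');
}
const negative = getCombinedPrompt(true);  // 'AB[neg]'
const positive = getCombinedPrompt(false); // 'AB[pos]'
// Swapping the two calls would leak '[pos]' into the negative prompt.
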
@@ -2879,6 +2935,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
let thisPromptBits = [];
+// TODO: Make this a switch
if (main_api == 'koboldhorde' && horde_settings.auto_adjust_response_length) {
this_amount_gen = Math.min(this_amount_gen, adjustedParams.maxLength);
this_amount_gen = Math.max(this_amount_gen, MIN_AMOUNT_GEN); // prevent validation errors
@@ -2901,12 +2958,12 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
}
}
else if (main_api == 'textgenerationwebui') {
-generate_data = getTextGenGenerationData(finalPromt, this_amount_gen, isImpersonate);
+generate_data = getTextGenGenerationData(finalPromt, this_amount_gen, isImpersonate, cfgValues);
generate_data.use_mancer = api_use_mancer_webui;
}
else if (main_api == 'novel') {
const this_settings = novelai_settings[novelai_setting_names[nai_settings.preset_settings_novel]];
-generate_data = getNovelGenerationData(finalPromt, this_settings, this_amount_gen, isImpersonate);
+generate_data = getNovelGenerationData(finalPromt, this_settings, this_amount_gen, isImpersonate, cfgValues);
}
else if (main_api == 'openai') {
let [prompt, counts] = prepareOpenAIMessages({
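
The new cfgValues argument lets each backend map the shared object onto its own request fields. A plausible shape for that mapping inside getTextGenGenerationData; the field names guidance_scale / negative_prompt follow text-generation-webui's CFG parameters, but treat them as an assumption since the diff does not show that file:

function applyCfgValues(generateData, cfgValues) {
    if (cfgValues?.guidanceScale && cfgValues.guidanceScale.value !== 1) {
        generateData.guidance_scale = cfgValues.guidanceScale.value;
        generateData.negative_prompt = cfgValues.negativePrompt;
    }
    return generateData;
}
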
@@ -2961,7 +3018,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
storyString: storyString,
afterScenarioAnchor: afterScenarioAnchor,
examplesString: examplesString,
-mesSendString: mesSendString,
+mesSendString: mesSend.join(''),
generatedPromtCache: generatedPromtCache,
promptBias: promptBias,
finalPromt: finalPromt,
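
A closing note on the splice in getCombinedPrompt: depth counts messages up from the bottom of the chat, so index mesSend.length - cfgPrompt.depth drops the CFG prompt that many messages above the newest one (depth 0 appends to the final message instead). A quick worked example with made-up messages:

const mesSend = ['msg1\n', 'msg2\n', 'msg3\n'];
const cfgPrompt = { value: '[CFG]', depth: 2 };
const finalMesSend = [...mesSend];
// 3 - 2 = 1: the CFG line lands two messages above the bottom
finalMesSend.splice(mesSend.length - cfgPrompt.depth, 0, `${cfgPrompt.value}\n`);
console.log(finalMesSend); // ['msg1\n', '[CFG]\n', 'msg2\n', 'msg3\n']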