This commit is contained in:
RossAscends
2023-04-26 21:15:34 +09:00
7 changed files with 68 additions and 89 deletions

View File

@@ -1498,7 +1498,12 @@ async function Generate(type, automatic_trigger, force_name2) {
if (mesExamples.replace(/<START>/gi, '').trim().length === 0) {
mesExamples = '';
}
let mesExamplesArray = mesExamples.split(/<START>/gi).slice(1).map(block => `<START>\n${block.trim()}\n`);
const blockHeading =
main_api === 'openai' ? '<START>' : // OpenAI handler always expects it
power_user.custom_chat_separator ? power_user.custom_chat_separator :
power_user.disable_examples_formatting ? '' :
is_pygmalion ? '<START>' : `This is how ${name2} should talk`;
let mesExamplesArray = mesExamples.split(/<START>/gi).slice(1).map(block => `${blockHeading}\n${block.trim()}\n`);
if (main_api === 'openai') {
const oai_chat = [...chat].filter(x => !x.is_system);
@@ -1523,22 +1528,8 @@ async function Generate(type, automatic_trigger, force_name2) {
if (count_view_mes < topAnchorDepth) {
storyString += appendToStoryString(charPersonality, power_user.disable_personality_formatting ? '' : name2 + "'s personality: ");
}
}
if (power_user.custom_chat_separator && power_user.custom_chat_separator.length) {
for (let i = 0; i < mesExamplesArray.length; i++) {
mesExamplesArray[i] = mesExamplesArray[i].replace(/<START>/gi, power_user.custom_chat_separator);
}
}
if (power_user.pin_examples && main_api !== 'openai') {
for (let example of mesExamplesArray) {
if (!is_pygmalion) {
const replaceString = power_user.disable_examples_formatting ? '' : `This is how ${name2} should talk`;
example = example.replace(/<START>/i, replaceString);
}
storyString += appendToStoryString(example, '');
}
storyString += appendToStoryString(Scenario, power_user.disable_scenario_formatting ? '' : 'Circumstances and context of the dialogue: ');
}
// Pygmalion does that anyway
@@ -1645,53 +1636,46 @@ async function Generate(type, automatic_trigger, force_name2) {
chat2.push('');
}
// Collect enough messages to fill the context
let examplesString = '';
let chatString = '';
function canFitMessages() {
const encodeString = JSON.stringify(worldInfoString + storyString + examplesString + chatString + anchorTop + anchorBottom + charPersonality + promptBias + allAnchors);
return getTokenCount(encodeString, padding_tokens) < this_max_context;
}
// Force pinned examples into the context
let pinExmString;
if (power_user.pin_examples) {
pinExmString = examplesString = mesExamplesArray.join('');
}
// Collect enough messages to fill the context
let arrMes = [];
for (let item of chat2) {
chatString = item + chatString;
const encodeString = JSON.stringify(
worldInfoString + storyString + chatString +
anchorTop + anchorBottom +
charPersonality + promptBias + allAnchors
);
const tokenCount = getTokenCount(encodeString, padding_tokens);
if (tokenCount < this_max_context) { //(The number of tokens in the entire promt) need fix, it must count correctly (added +120, so that the description of the character does not hide)
if (canFitMessages()) { //(The number of tokens in the entire promt) need fix, it must count correctly (added +120, so that the description of the character does not hide)
//if (is_pygmalion && i == chat2.length-1) item='<START>\n'+item;
arrMes[arrMes.length] = item;
} else {
break;
}
await delay(1); //For disable slow down (encode gpt-2 need fix)
}
// Prepare unpinned example messages
// Estimate how many unpinned example messages fit in the context
let count_exm_add = 0;
if (!power_user.pin_examples) {
let mesExmString = '';
for (let i = 0; i < mesExamplesArray.length; i++) {
mesExmString += mesExamplesArray[i];
const prompt = JSON.stringify(worldInfoString + storyString + mesExmString + chatString + anchorTop + anchorBottom + charPersonality + promptBias + allAnchors);
const tokenCount = getTokenCount(prompt, padding_tokens);
if (tokenCount < this_max_context) {
if (power_user.disable_examples_formatting) {
mesExamplesArray[i] = mesExamplesArray[i].replace(/<START>/i, '');
} else if (!is_pygmalion) {
mesExamplesArray[i] = mesExamplesArray[i].replace(/<START>/i, `This is how ${name2} should talk`);
}
for (let example of mesExamplesArray) {
examplesString += example;
if (canFitMessages()) {
count_exm_add++;
await delay(1);
} else {
break;
}
await delay(1);
}
}
if (!is_pygmalion && Scenario && Scenario.length > 0) {
storyString += !power_user.disable_scenario_formatting ? `Circumstances and context of the dialogue: ${Scenario}\n` : `${Scenario}\n`;
}
let mesSend = [];
console.log('calling runGenerate');
await runGenerate();
@@ -1765,15 +1749,12 @@ async function Generate(type, automatic_trigger, force_name2) {
});
}
let mesSendString = '';
let mesExmString = '';
let mesSendString = '';
function setPromtString() {
mesExmString = pinExmString ?? mesExamplesArray.slice(0, count_exm_add).join('');
mesSendString = '';
mesExmString = '';
for (let j = 0; j < count_exm_add; j++) {
mesExmString += mesExamplesArray[j];
}
for (let j = 0; j < mesSend.length; j++) {
mesSendString += mesSend[j];