Commit by SillyLossy
2023-05-02 16:01:26 +03:00
2 changed files with 16 additions and 3 deletions

@@ -1722,8 +1722,14 @@ async function Generate(type, automatic_trigger, force_name2) {
if (canFitMessages()) { // FIXME: counting tokens for the entire prompt needs fixing so it is accurate (+120 was added so the character description is not pushed out of context)
//if (is_pygmalion && i == chat2.length-1) item='<START>\n'+item;
arrMes[arrMes.length] = item;
} else {
$("#chat").children().removeClass('lastInContext');
console.log(arrMes.length);
$(`#chat .mes:nth-last-child(${arrMes.length})`).addClass('lastInContext');
break;
}
await delay(1); // yield so the UI does not stall (the GPT-2 encoding here is slow and needs fixing)
}
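The added branch above marks which chat message is the oldest one still inside the context window. Assuming arrMes is filled from the newest message backwards, the element that is arrMes.length positions from the end of #chat is exactly that boundary message. A minimal sketch of the same idea as an isolated helper (the function name is ours, not the project's):

// Sketch only: clear any previous marker, then flag the oldest message that
// still fits in the prompt. includedCount is how many messages were kept.
function markLastInContext(includedCount) {
    $('#chat').children().removeClass('lastInContext');
    $(`#chat .mes:nth-last-child(${includedCount})`).addClass('lastInContext');
}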
@@ -1817,6 +1823,7 @@ async function Generate(type, automatic_trigger, force_name2) {
let mesSendString = '';
function setPromtString() {
console.log('--setting Prompt string');
mesExmString = pinExmString ?? mesExamplesArray.slice(0, count_exm_add).join('');
mesSendString = '';
for (let j = 0; j < mesSend.length; j++) {
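The changed line in setPromtString uses the nullish coalescing operator: when pinExmString is null or undefined, the examples block falls back to joining the first count_exm_add entries of mesExamplesArray. A tiny self-contained sketch of that fallback (the data is invented for illustration):

// Illustration only; the values below are made up.
const pinExmString = null; // would hold a pre-built block when examples are pinned
const mesExamplesArray = ['<START>\nExample A\n', '<START>\nExample B\n'];
const count_exm_add = 1;
const mesExmString = pinExmString ?? mesExamplesArray.slice(0, count_exm_add).join('');
// -> '<START>\nExample A\n' (the fallback path was taken because pinExmString is null)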
@@ -1840,6 +1847,7 @@ async function Generate(type, automatic_trigger, force_name2) {
}
function checkPromtSize() {
console.log('---checking Prompt size');
setPromtString();
const prompt = [
worldInfoString,
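checkPromtSize rebuilds the prompt string and then measures it against the model's context budget. A hedged sketch of that general pattern follows; countTokens, maxContext and reservedForGeneration are placeholder names, not the project's actual API:

// Sketch of a prompt-size check: join the parts, count tokens, and report
// whether everything fits once room for the reply is reserved.
function fitsInContext(parts, countTokens, maxContext, reservedForGeneration) {
    const prompt = parts.join('');
    return countTokens(prompt) + reservedForGeneration <= maxContext;
}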
@@ -1864,15 +1872,16 @@ async function Generate(type, automatic_trigger, force_name2) {
checkPromtSize(); // and check size again..
} else {
//end
console.log(`---mesSend.length = ${mesSend.length}`);
}
}
}
if (generatedPromtCache.length > 0) {
//console.log('Generated Prompt Cache length: '+generatedPromtCache.length);
console.log('---Generated Prompt Cache length: ' + generatedPromtCache.length);
checkPromtSize();
} else {
//console.log('calling setPromtString')
console.log('---calling setPromtString ' + generatedPromtCache.length)
setPromtString();
}
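If a generated prompt is already cached its size is simply re-checked; otherwise the prompt string is built fresh. As the hunk above shows, an oversized prompt is handled by dropping the oldest queued message and checking the size again. A sketch of that trim-and-recheck idea, written as a loop with placeholder helpers:

// Sketch only: keep removing the oldest queued message until the prompt fits.
// buildPrompt and fits are placeholder callbacks, not the project's functions.
function trimToFit(mesSend, buildPrompt, fits) {
    while (mesSend.length > 0 && !fits(buildPrompt(mesSend))) {
        mesSend.shift(); // drop the oldest message still queued for sending
    }
    return buildPrompt(mesSend);
}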
@@ -1924,7 +1933,7 @@ async function Generate(type, automatic_trigger, force_name2) {
if (power_user.collapse_newlines) {
finalPromt = collapseNewlines(finalPromt);
}
//console.log(`---Calculated Prompt Tokens: ${getTokenCount(finalPromt, padding_tokens)}`);
let this_amount_gen = parseInt(amount_gen); // how many tokens the AI will be requested to generate
let this_settings = koboldai_settings[koboldai_setting_names[preset_settings]];
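When power_user.collapse_newlines is enabled, the final prompt is passed through collapseNewlines before the token count is taken. That helper is not shown in this diff; one plausible implementation, assuming it only squashes runs of consecutive newlines, would be:

// Assumption: collapseNewlines merges consecutive newlines into a single one.
// This is a guess at its behavior, not the implementation from the repository.
function collapseNewlines(text) {
    return text.replace(/\n+/g, '\n');
}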