Add padding to cache key. Fix Safari display issues. Fix 400 on empty translate. Reset bias cache on changing model.

Cohee
2023-08-23 10:32:48 +03:00
parent b385bd190a
commit e77da62b85
7 changed files with 35 additions and 20 deletions

View File

@@ -217,6 +217,10 @@ async function translateProviderDeepl(text, lang) {
 async function translate(text, lang) {
     try {
+        if (text == '') {
+            return '';
+        }
         switch (extension_settings.translate.provider) {
             case 'google':
                 return await translateProviderGoogle(text, lang);
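
The early return is what fixes the 400 on empty translate: when there is no text, the function now returns before any provider request is made, so backends that reject empty payloads are never called. A minimal sketch of the pattern, where translateProvider is a stub standing in for the real per-provider dispatch (Google, DeepL, and so on):

    // Stub standing in for the per-provider calls the real translate() dispatches to.
    const translateProvider = async (text, lang) => `[${lang}] ${text}`;

    async function translate(text, lang) {
        if (text == '') {
            return '';                       // nothing to translate, so no request is sent
        }
        return await translateProvider(text, lang);
    }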

View File

@@ -128,8 +128,8 @@ function loadNovelPreset(preset) {
 function loadNovelSettings(settings) {
     //load the rest of the Novel settings without any checks
     nai_settings.model_novel = settings.model_novel;
-    $(`#model_novel_select option[value=${nai_settings.model_novel}]`).attr("selected", true);
+    $('#model_novel_select').val(nai_settings.model_novel);
+    $(`#model_novel_select option[value=${nai_settings.model_novel}]`).attr("selected", true);
     if (settings.nai_preamble !== undefined) {
         nai_settings.preamble = settings.nai_preamble;
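
This is one of the Safari display fixes: toggling the selected attribute on an option does not reliably update what the select shows in Safari, whereas jQuery's .val() sets the element's actual value, which every browser reflects in the rendered control. A rough sketch of the difference, with model standing in for nai_settings.model_novel (the value shown is only illustrative):

    // "model" stands in for nai_settings.model_novel in this sketch.
    const model = 'clio-v1';

    // Attribute only: Safari may keep displaying the previously selected option.
    $(`#model_novel_select option[value=${model}]`).attr('selected', true);

    // Value property: the dropdown updates in all browsers, Safari included.
    $('#model_novel_select').val(model);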

View File

@@ -1647,6 +1647,11 @@ class ChatCompletion {
         const index = this.findMessageIndex(identifier);
         const message = this.messages.collection[index].collection.pop();
+        if (!message) {
+            this.log(`No message to remove from ${identifier}`);
+            return;
+        }
         this.increaseTokenBudgetBy(message.getTokens());
         this.log(`Removed ${message.identifier} from ${identifier}. Remaining tokens: ${this.tokenBudget}`);
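
The added guard covers the case where the collection under identifier is already empty: Array.prototype.pop() then returns undefined, and the old code would have thrown on message.getTokens(). With the early return the removal becomes a logged no-op. Roughly, with collection and budget as stand-ins for the message collection and token budget tracked by ChatCompletion:

    // Sketch only: refund the tokens of the last message, if there is one.
    function removeLast(collection, budget) {
        const message = collection.pop();    // undefined when the collection is empty
        if (!message) {
            return budget;                   // nothing to remove, nothing to refund
        }
        return budget + message.getTokens();
    }
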
@@ -2542,6 +2547,7 @@ function getMaxContextWindowAI(value) {
 }
 
 async function onModelChange() {
+    biasCache = undefined;
     let value = String($(this).val());
 
     if ($(this).is('#model_claude_select')) {
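
Clearing biasCache at the top of onModelChange is the "reset bias cache on changing model" part of the commit: cached logit bias entries are built against the previously selected model's tokenizer, so its token IDs no longer apply once the model changes. The sketch below assumes a lazy-rebuild shape, with calculateBias as a stub standing in for however the bias list is actually computed:

    let biasCache = undefined;
    const calculateBias = () => [];          // stub for the real bias computation

    function getLogitBias() {
        if (biasCache === undefined) {
            biasCache = calculateBias();     // rebuilt for the currently selected model
        }
        return biasCache;
    }

    function onModelChange() {
        biasCache = undefined;               // old token IDs would not match the new tokenizer
    }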

View File

@@ -282,11 +282,11 @@ function setNameCallback(_, name) {
     setUserName(name); //this prevented quickReply usage
 }
 
-function setNarratorName(_, text) {
+async function setNarratorName(_, text) {
     const name = text || NARRATOR_NAME_DEFAULT;
     chat_metadata[NARRATOR_NAME_KEY] = name;
     toastr.info(`System narrator name set to ${name}`);
-    saveChatConditional();
+    await saveChatConditional();
 }
 
 async function sendMessageAs(_, text) {
@@ -341,7 +341,7 @@ async function sendMessageAs(_, text) {
     await eventSource.emit(event_types.MESSAGE_SENT, (chat.length - 1));
     addOneMessage(message);
     await eventSource.emit(event_types.USER_MESSAGE_RENDERED, (chat.length - 1));
-    saveChatConditional();
+    await saveChatConditional();
 }
 
 async function sendNarratorMessage(_, text) {
@@ -373,7 +373,7 @@ async function sendNarratorMessage(_, text) {
     await eventSource.emit(event_types.MESSAGE_SENT, (chat.length - 1));
     addOneMessage(message);
     await eventSource.emit(event_types.USER_MESSAGE_RENDERED, (chat.length - 1));
-    saveChatConditional();
+    await saveChatConditional();
 }
 
 async function sendCommentMessage(_, text) {
@@ -399,7 +399,7 @@ async function sendCommentMessage(_, text) {
     await eventSource.emit(event_types.MESSAGE_SENT, (chat.length - 1));
     addOneMessage(message);
     await eventSource.emit(event_types.USER_MESSAGE_RENDERED, (chat.length - 1));
-    saveChatConditional();
+    await saveChatConditional();
 }
 
 function helpCommandCallback(_, type) {
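
The four handlers shown here now await saveChatConditional() instead of firing it and moving on, and setNarratorName becomes async so it can do the same. Saving the chat is asynchronous, so awaiting it guarantees the new message or metadata is actually persisted before the slash command resolves and any follow-up action runs. A minimal self-contained sketch of why the await matters, with saveChat standing in for the real conditional save:

    const chat = [];

    // Stand-in for saveChatConditional(); the real one performs an HTTP request.
    async function saveChat() {
        await new Promise(resolve => setTimeout(resolve, 50));
    }

    async function sendNarratorMessage(text) {
        chat.push({ name: 'Narrator', mes: text });
        // Without "await", the handler resolves while the save is still in flight,
        // and a chained command could act on an unsaved chat.
        await saveChat();
    }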

View File

@@ -134,7 +134,7 @@ export function getTokenCount(str, padding = undefined) {
     const cacheObject = getTokenCacheObject();
     const hash = getStringHash(str);
-    const cacheKey = `${tokenizerType}-${hash}`;
+    const cacheKey = `${tokenizerType}-${hash}+${padding}`;
 
     if (typeof cacheObject[cacheKey] === 'number') {
         return cacheObject[cacheKey];
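
Because the cached count reflects the padding argument, two calls with the same string but different padding could previously collide on one cache entry and return whichever padded count was stored first. Folding padding into the key gives each combination its own entry. A rough self-contained sketch of the fixed behaviour, with hash and tokenize as crude stand-ins for getStringHash and the active tokenizer:

    const cache = {};
    const hash = (s) => [...s].reduce((a, c) => (a * 31 + c.charCodeAt(0)) | 0, 0);
    const tokenize = (s) => s.split(/\s+/).filter(Boolean);   // crude stand-in tokenizer

    function getTokenCount(str, padding = 0) {
        const key = `simple-${hash(str)}+${padding}`;          // padding is part of the key
        if (typeof cache[key] === 'number') {
            return cache[key];                                 // cached value already includes padding
        }
        const count = tokenize(str).length + padding;
        cache[key] = count;
        return count;
    }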