-
+
Cooldown
@@ -5919,7 +5958,7 @@
-
+
Delay
diff --git a/public/script.js b/public/script.js
index 4d97e91b9..716c9f86f 100644
--- a/public/script.js
+++ b/public/script.js
@@ -242,7 +242,7 @@ import { BulkEditOverlay, CharacterContextMenu } from './scripts/BulkEditOverlay
import { loadFeatherlessModels, loadMancerModels, loadOllamaModels, loadTogetherAIModels, loadInfermaticAIModels, loadOpenRouterModels, loadVllmModels, loadAphroditeModels, loadDreamGenModels, initTextGenModels, loadTabbyModels } from './scripts/textgen-models.js';
import { appendFileContent, hasPendingFileAttachment, populateFileAttachment, decodeStyleTags, encodeStyleTags, isExternalMediaAllowed, getCurrentEntityId, preserveNeutralChat, restoreNeutralChat } from './scripts/chats.js';
import { initPresetManager } from './scripts/preset-manager.js';
-import { MacrosParser, evaluateMacros, getLastMessageId } from './scripts/macros.js';
+import { MacrosParser, evaluateMacros, getLastMessageId, initMacros } from './scripts/macros.js';
import { currentUser, setUserControls } from './scripts/user.js';
import { POPUP_RESULT, POPUP_TYPE, Popup, callGenericPopup, fixToastrForDialogs } from './scripts/popup.js';
import { renderTemplate, renderTemplateAsync } from './scripts/templates.js';
@@ -957,6 +957,7 @@ async function firstLoadInit() {
initDynamicStyles();
initTags();
initBookmarks();
+ initMacros();
await getUserAvatars(true, user_avatar);
await getCharacters();
await getBackgrounds();
@@ -1238,8 +1239,9 @@ async function getStatusTextgen() {
const wantsInstructDerivation = (power_user.instruct.enabled && power_user.instruct.derived);
const wantsContextDerivation = power_user.context_derived;
+ const wantsContextSize = power_user.context_size_derived;
const supportsChatTemplate = [textgen_types.KOBOLDCPP, textgen_types.LLAMACPP].includes(textgen_settings.type);
- if (supportsChatTemplate && (wantsInstructDerivation || wantsContextDerivation)) {
+ if (supportsChatTemplate && (wantsInstructDerivation || wantsContextDerivation || wantsContextSize)) {
const response = await fetch('/api/backends/text-completions/props', {
method: 'POST',
headers: getRequestHeaders(),
@@ -1253,6 +1255,17 @@ async function getStatusTextgen() {
const data = await response.json();
if (data) {
const { chat_template, chat_template_hash } = data;
+ if (wantsContextSize && 'default_generation_settings' in data) {
+ const backend_max_context = data['default_generation_settings']['n_ctx'];
+ const old_value = max_context;
+ if (max_context !== backend_max_context) {
+ setGenerationParamsFromPreset({ max_length: backend_max_context });
+ }
+ if (old_value !== max_context) {
+ console.log(`Auto-switched max context from ${old_value} to ${max_context}`);
+ toastr.info(`${old_value} ⇒ ${max_context}`, 'Context Size Changed');
+ }
+ }
console.log(`We have chat template ${chat_template.split('\n')[0]}...`);
const templates = await deriveTemplatesFromChatTemplate(chat_template, chat_template_hash);
if (templates) {
@@ -6822,6 +6835,10 @@ export async function saveSettings(type) {
});
}
+/**
+ * Sets the generation parameters from a preset object.
+ * @param {{ genamt?: number, max_length?: number }} preset Preset object
+ */
export function setGenerationParamsFromPreset(preset) {
const needsUnlock = (preset.max_length ?? max_context) > MAX_CONTEXT_DEFAULT || (preset.genamt ?? amount_gen) > MAX_RESPONSE_DEFAULT;
$('#max_context_unlocked').prop('checked', needsUnlock).trigger('change');
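
The new wantsContextSize branch above reads default_generation_settings.n_ctx from the llama.cpp/KoboldCpp /props response and applies it through setGenerationParamsFromPreset. Below is a minimal sketch of the payload shape that branch expects; the field names come from the hunk above, but the sample values are illustrative only, not real backend output.

    // Illustrative /props payload (values are made up; field names match the diff).
    const data = {
        chat_template: '{% for message in messages %}{{ message.content }}{% endfor %}',
        chat_template_hash: 'deadbeef',
        default_generation_settings: { n_ctx: 16384 },
    };

    // Same guard and extraction as the new code in getStatusTextgen():
    if ('default_generation_settings' in data) {
        const backend_max_context = data.default_generation_settings.n_ctx;
        console.log(`Backend reports a ${backend_max_context}-token context window`);
        // In script.js this value is then applied via:
        // setGenerationParamsFromPreset({ max_length: backend_max_context });
    }
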
diff --git a/public/scripts/extensions/expressions/settings.html b/public/scripts/extensions/expressions/settings.html
index a38362439..f2b7b79ac 100644
--- a/public/scripts/extensions/expressions/settings.html
+++ b/public/scripts/extensions/expressions/settings.html
@@ -40,7 +40,7 @@
Default / Fallback Expression
Set the default and fallback expression being used when no matching expression is found.
-
+
Custom Expressions
diff --git a/public/scripts/extensions/tts/settings.html b/public/scripts/extensions/tts/settings.html
index ecbdd65c6..dc005a406 100644
--- a/public/scripts/extensions/tts/settings.html
+++ b/public/scripts/extensions/tts/settings.html
@@ -57,7 +57,7 @@
-
+
Audio Playback Speed
@@ -80,4 +80,4 @@
-
\ No newline at end of file
+
diff --git a/public/scripts/macros.js b/public/scripts/macros.js
index 1efd42a14..0e4d7a8da 100644
--- a/public/scripts/macros.js
+++ b/public/scripts/macros.js
@@ -1,5 +1,5 @@
import { Handlebars, moment, seedrandom, droll } from '../lib.js';
-import { chat, chat_metadata, main_api, getMaxContextSize, getCurrentChatId, substituteParams } from '../script.js';
+import { chat, chat_metadata, main_api, getMaxContextSize, getCurrentChatId, substituteParams, eventSource, event_types } from '../script.js';
import { timestampToMoment, isDigitsOnly, getStringHash, escapeRegex, uuidv4 } from './utils.js';
import { textgenerationwebui_banned_in_macros } from './textgen-settings.js';
import { getInstructMacros } from './instruct-mode.js';
@@ -521,3 +521,22 @@ export function evaluateMacros(content, env, postProcessFn) {
return content;
}
+
+export function initMacros() {
+ function initLastGenerationType() {
+ let lastGenerationType = '';
+
+ MacrosParser.registerMacro('lastGenerationType', () => lastGenerationType);
+
+ eventSource.on(event_types.GENERATION_STARTED, (type, _params, isDryRun) => {
+ if (isDryRun) return;
+ lastGenerationType = type || 'normal';
+ });
+
+ eventSource.on(event_types.CHAT_CHANGED, () => {
+ lastGenerationType = '';
+ });
+ }
+
+ initLastGenerationType();
+}
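
initMacros() registers {{lastGenerationType}} as a closure-backed macro: the GENERATION_STARTED handler updates the captured value and CHAT_CHANGED clears it. The sketch below shows how such a callback macro resolves, using a simplified stand-in registry rather than the real MacrosParser (which also handles sanitization, descriptions, and the macro environment).

    // Simplified stand-in for MacrosParser to illustrate the closure pattern above.
    const macros = new Map();
    const registerMacro = (name, fn) => macros.set(name, fn);
    const expand = (text) => text.replace(/{{(\w+)}}/g, (match, name) =>
        macros.has(name) ? String(macros.get(name)()) : match);

    let lastGenerationType = '';
    registerMacro('lastGenerationType', () => lastGenerationType);

    console.log(expand('Last request: {{lastGenerationType}}')); // "Last request: " (empty before any generation)
    lastGenerationType = 'regenerate'; // what the GENERATION_STARTED handler would set
    console.log(expand('Last request: {{lastGenerationType}}')); // "Last request: regenerate"
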
diff --git a/public/scripts/power-user.js b/public/scripts/power-user.js
index 931067333..504d53b13 100644
--- a/public/scripts/power-user.js
+++ b/public/scripts/power-user.js
@@ -246,6 +246,7 @@ let power_user = {
},
context_derived: false,
+ context_size_derived: false,
sysprompt: {
enabled: true,
@@ -1482,6 +1483,7 @@ async function loadPowerUserSettings(settings, data) {
$('#example_messages_behavior').val(getExampleMessagesBehavior());
$(`#example_messages_behavior option[value="${getExampleMessagesBehavior()}"]`).prop('selected', true);
$('#context_derived').parent().find('i').toggleClass('toggleEnabled', !!power_user.context_derived);
+ $('#context_size_derived').prop('checked', !!power_user.context_size_derived);
$('#console_log_prompts').prop('checked', power_user.console_log_prompts);
$('#request_token_probabilities').prop('checked', power_user.request_token_probabilities);
@@ -3079,6 +3081,16 @@ $(document).ready(() => {
$('#context_derived').parent().find('i').toggleClass('toggleEnabled', !!power_user.context_derived);
});
+ $('#context_size_derived').on('input', function () {
+ const value = !!$(this).prop('checked');
+ power_user.context_size_derived = value;
+ saveSettingsDebounced();
+ });
+
+ $('#context_size_derived').on('change', function () {
+ $('#context_size_derived').prop('checked', !!power_user.context_size_derived);
+ });
+
$('#always-force-name2-checkbox').change(function () {
power_user.always_force_name2 = !!$(this).prop('checked');
saveSettingsDebounced();
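
The power-user.js hunk persists the new checkbox with the usual pattern: the input handler copies the checked state into power_user.context_size_derived and schedules a debounced save, while loadPowerUserSettings restores it on startup. A framework-free sketch of that round trip follows, assuming a browser DOM and an element with id "context_size_derived"; the real code uses jQuery and saveSettingsDebounced().

    // Simplified persist/restore round trip for the new setting.
    const power_user = { context_size_derived: false };

    function loadContextSizeDerived(saved) {
        Object.assign(power_user, saved);
        const checkbox = document.getElementById('context_size_derived');
        if (checkbox) checkbox.checked = !!power_user.context_size_derived;
    }

    function bindContextSizeDerived() {
        const checkbox = document.getElementById('context_size_derived');
        if (!checkbox) return;
        checkbox.addEventListener('input', () => {
            power_user.context_size_derived = checkbox.checked;
            // saveSettingsDebounced(); // debounced persistence in the real code
        });
    }
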
diff --git a/public/scripts/templates/macros.html b/public/scripts/templates/macros.html
index 6e96ae4fa..38f6dec0c 100644
--- a/public/scripts/templates/macros.html
+++ b/public/scripts/templates/macros.html
@@ -8,6 +8,7 @@
{{noop}} – no operation, just an empty string.
{{original}} – global prompts defined in API settings. Only valid in Advanced Definitions prompt overrides.
{{input}} – the user input
+
+{{lastGenerationType}} – the type of the last queued generation request. Empty if no generations were performed yet or the active chat was switched. Possible values: "normal", "impersonate", "regenerate", "quiet", "swipe", "continue".
{{charPrompt}} – the Character's Main Prompt override
{{charInstruction}} – the Character's Post-History Instructions override
{{description}} – the Character's Description
diff --git a/src/endpoints/backends/text-completions.js b/src/endpoints/backends/text-completions.js
index 34c215bce..80955696e 100644
--- a/src/endpoints/backends/text-completions.js
+++ b/src/endpoints/backends/text-completions.js
@@ -152,7 +152,7 @@ router.post('/status', jsonParser, async function (request, response) {
if (!modelsReply.ok) {
console.log('Models endpoint is offline.');
- return response.status(400);
+ return response.sendStatus(400);
}
/** @type {any} */
@@ -173,7 +173,7 @@ router.post('/status', jsonParser, async function (request, response) {
if (!Array.isArray(data.data)) {
console.log('Models response is not an array.');
- return response.status(400);
+ return response.sendStatus(400);
}
const modelIds = data.data.map(x => x.id);
@@ -224,7 +224,7 @@ router.post('/status', jsonParser, async function (request, response) {
return response.send({ result, data: data.data });
} catch (error) {
console.error(error);
- return response.status(500);
+ return response.sendStatus(500);
}
});
@@ -244,7 +244,7 @@ router.post('/props', jsonParser, async function (request, response) {
const propsReply = await fetch(propsUrl, args);
if (!propsReply.ok) {
- return response.status(400);
+ return response.sendStatus(400);
}
/** @type {any} */
@@ -258,7 +258,7 @@ router.post('/props', jsonParser, async function (request, response) {
return response.send(props);
} catch (error) {
console.error(error);
- return response.status(500);
+ return response.sendStatus(500);
}
});
@@ -450,7 +450,7 @@ ollama.post('/download', jsonParser, async function (request, response) {
return response.send({ ok: true });
} catch (error) {
console.error(error);
- return response.status(500);
+ return response.sendStatus(500);
}
});
@@ -493,7 +493,7 @@ ollama.post('/caption-image', jsonParser, async function (request, response) {
return response.send({ caption });
} catch (error) {
console.error(error);
- return response.status(500);
+ return response.sendStatus(500);
}
});
@@ -540,7 +540,7 @@ llamacpp.post('/caption-image', jsonParser, async function (request, response) {
} catch (error) {
console.error(error);
- return response.status(500);
+ return response.sendStatus(500);
}
});
@@ -569,7 +569,7 @@ llamacpp.post('/props', jsonParser, async function (request, response) {
} catch (error) {
console.error(error);
- return response.status(500);
+ return response.sendStatus(500);
}
});
@@ -619,7 +619,7 @@ llamacpp.post('/slots', jsonParser, async function (request, response) {
} catch (error) {
console.error(error);
- return response.status(500);
+ return response.sendStatus(500);
}
});
@@ -665,7 +665,7 @@ tabby.post('/download', jsonParser, async function (request, response) {
return response.send({ ok: true });
} catch (error) {
console.error(error);
- return response.status(500);
+ return response.sendStatus(500);
}
});
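
The repeated response.status(...) → response.sendStatus(...) replacements fix handlers that never answered the client: Express's res.status(code) only sets the status and returns the response object for chaining, so `return response.status(400);` leaves the request hanging, whereas res.sendStatus(code) sets the status and immediately sends its status text as the body. A minimal Express sketch of the difference (routes and port are illustrative):

    import express from 'express';

    const app = express();

    // Bug pattern removed by the diff: the status is set but nothing is sent,
    // so the client waits until it times out.
    app.get('/hangs', (req, res) => {
        return res.status(400); // no .send()/.json()/.end() follows
    });

    // Fixed pattern: sets the status and sends "Bad Request" as the body in one call.
    app.get('/responds', (req, res) => {
        return res.sendStatus(400);
    });

    app.listen(3000);
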
diff --git a/tests/package-lock.json b/tests/package-lock.json
index 70616dcef..c64c228b7 100644
--- a/tests/package-lock.json
+++ b/tests/package-lock.json
@@ -2214,9 +2214,9 @@
}
},
"node_modules/cross-spawn": {
- "version": "7.0.3",
- "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
- "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
+ "version": "7.0.6",
+ "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
+ "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
"license": "MIT",
"dependencies": {
"path-key": "^3.1.0",