From 46d5f79fd96a1a30562e4b4c3be0e0fa08adc2b8 Mon Sep 17 00:00:00 2001
From: Cohee <18619528+Cohee1207@users.noreply.github.com>
Date: Tue, 18 Mar 2025 21:33:11 +0200
Subject: [PATCH 01/25] OpenRouter: Allow applying prompt post-processing Fixes
#3689
---
public/index.html | 16 +++++++++-------
public/scripts/openai.js | 2 +-
src/endpoints/backends/chat-completions.js | 17 +++++++++--------
3 files changed, 19 insertions(+), 16 deletions(-)
diff --git a/public/index.html b/public/index.html
index ec0e088a3..d82d7f833 100644
--- a/public/index.html
+++ b/public/index.html
@@ -3415,13 +3415,6 @@
- Prompt Post-Processing
-
@@ -3440,6 +3433,15 @@
+
+
Prompt Post-Processing
+
+
diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index 80d1e9ed7..36e224bf9 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -2020,6 +2020,7 @@ async function sendOpenAIRequest(type, messages, signal) {
'reasoning_effort': String(oai_settings.reasoning_effort),
'enable_web_search': Boolean(oai_settings.enable_web_search),
'request_images': Boolean(oai_settings.request_images),
+ 'custom_prompt_post_processing': oai_settings.custom_prompt_post_processing,
};
if (!canMultiSwipe && ToolManager.canPerformToolCalls(type)) {
@@ -2100,7 +2101,6 @@ async function sendOpenAIRequest(type, messages, signal) {
generate_data['custom_include_body'] = oai_settings.custom_include_body;
generate_data['custom_exclude_body'] = oai_settings.custom_exclude_body;
generate_data['custom_include_headers'] = oai_settings.custom_include_headers;
- generate_data['custom_prompt_post_processing'] = oai_settings.custom_prompt_post_processing;
}
if (isCohere) {
diff --git a/src/endpoints/backends/chat-completions.js b/src/endpoints/backends/chat-completions.js
index 4d2eee774..e8d3842ad 100644
--- a/src/endpoints/backends/chat-completions.js
+++ b/src/endpoints/backends/chat-completions.js
@@ -1121,14 +1121,6 @@ router.post('/generate', function (request, response) {
mergeObjectWithYaml(bodyParams, request.body.custom_include_body);
mergeObjectWithYaml(headers, request.body.custom_include_headers);
-
- if (request.body.custom_prompt_post_processing) {
- console.info('Applying custom prompt post-processing of type', request.body.custom_prompt_post_processing);
- request.body.messages = postProcessPrompt(
- request.body.messages,
- request.body.custom_prompt_post_processing,
- getPromptNames(request));
- }
} else if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.PERPLEXITY) {
apiUrl = API_PERPLEXITY;
apiKey = readSecret(request.user.directories, SECRET_KEYS.PERPLEXITY);
@@ -1160,6 +1152,15 @@ router.post('/generate', function (request, response) {
return response.status(400).send({ error: true });
}
+ const postProcessTypes = [CHAT_COMPLETION_SOURCES.CUSTOM, CHAT_COMPLETION_SOURCES.OPENROUTER];
+ if (postProcessTypes.includes(request.body.chat_completion_source) && request.body.custom_prompt_post_processing) {
+ console.info('Applying custom prompt post-processing of type', request.body.custom_prompt_post_processing);
+ request.body.messages = postProcessPrompt(
+ request.body.messages,
+ request.body.custom_prompt_post_processing,
+ getPromptNames(request));
+ }
+
// A few of OpenAIs reasoning models support reasoning effort
if ([CHAT_COMPLETION_SOURCES.CUSTOM, CHAT_COMPLETION_SOURCES.OPENAI].includes(request.body.chat_completion_source)) {
if (['o1', 'o3-mini', 'o3-mini-2025-01-31'].includes(request.body.model)) {
From fcaea46a54fdcda610dbbdd94446ee6b383dddde Mon Sep 17 00:00:00 2001
From: Cohee <18619528+Cohee1207@users.noreply.github.com>
Date: Tue, 18 Mar 2025 23:17:26 +0200
Subject: [PATCH 02/25] Apply post-process before setting cache at depth
---
src/endpoints/backends/chat-completions.js | 18 +++++++++---------
1 file changed, 9 insertions(+), 9 deletions(-)
diff --git a/src/endpoints/backends/chat-completions.js b/src/endpoints/backends/chat-completions.js
index e8d3842ad..a39dcbe2f 100644
--- a/src/endpoints/backends/chat-completions.js
+++ b/src/endpoints/backends/chat-completions.js
@@ -1048,6 +1048,15 @@ router.post('/generate', function (request, response) {
let bodyParams;
const isTextCompletion = Boolean(request.body.model && TEXT_COMPLETION_MODELS.includes(request.body.model)) || typeof request.body.messages === 'string';
+ const postProcessTypes = [CHAT_COMPLETION_SOURCES.CUSTOM, CHAT_COMPLETION_SOURCES.OPENROUTER];
+ if (Array.isArray(request.body.messages) && postProcessTypes.includes(request.body.chat_completion_source) && request.body.custom_prompt_post_processing) {
+ console.info('Applying custom prompt post-processing of type', request.body.custom_prompt_post_processing);
+ request.body.messages = postProcessPrompt(
+ request.body.messages,
+ request.body.custom_prompt_post_processing,
+ getPromptNames(request));
+ }
+
if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.OPENAI) {
apiUrl = new URL(request.body.reverse_proxy || API_OPENAI).toString();
apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(request.user.directories, SECRET_KEYS.OPENAI);
@@ -1152,15 +1161,6 @@ router.post('/generate', function (request, response) {
return response.status(400).send({ error: true });
}
- const postProcessTypes = [CHAT_COMPLETION_SOURCES.CUSTOM, CHAT_COMPLETION_SOURCES.OPENROUTER];
- if (postProcessTypes.includes(request.body.chat_completion_source) && request.body.custom_prompt_post_processing) {
- console.info('Applying custom prompt post-processing of type', request.body.custom_prompt_post_processing);
- request.body.messages = postProcessPrompt(
- request.body.messages,
- request.body.custom_prompt_post_processing,
- getPromptNames(request));
- }
-
// A few of OpenAIs reasoning models support reasoning effort
if ([CHAT_COMPLETION_SOURCES.CUSTOM, CHAT_COMPLETION_SOURCES.OPENAI].includes(request.body.chat_completion_source)) {
if (['o1', 'o3-mini', 'o3-mini-2025-01-31'].includes(request.body.model)) {
From 5a6058d319196c5e8326678ea177f17ec2bf4ade Mon Sep 17 00:00:00 2001
From: Wolfsblvt
Date: Wed, 19 Mar 2025 03:06:50 +0100
Subject: [PATCH 03/25] Adds sprite-based filtering for expressions -
Functionality only available for LLM/webLLM - New toggle to filter
expressions on available sprites - `getExpressionsList` filters cached
expressions when checked (using sprite folder name/override) -
`/expression-list` slash command has "filter" arg to filter list -
`/expression-classify` slash command has "filter" arg now, to use filtered
list for classification - `getExpressionLabel` uses filtered expressions when
LLM/webLLM
---
.../scripts/extensions/expressions/index.js | 68 +++++++++++++++----
.../extensions/expressions/settings.html | 6 +-
2 files changed, 58 insertions(+), 16 deletions(-)
diff --git a/public/scripts/extensions/expressions/index.js b/public/scripts/extensions/expressions/index.js
index aa592cfd7..a94908130 100644
--- a/public/scripts/extensions/expressions/index.js
+++ b/public/scripts/extensions/expressions/index.js
@@ -4,7 +4,7 @@ import { characters, eventSource, event_types, generateRaw, getRequestHeaders, m
import { dragElement, isMobile } from '../../RossAscends-mods.js';
import { getContext, getApiUrl, modules, extension_settings, ModuleWorkerWrapper, doExtrasFetch, renderExtensionTemplateAsync } from '../../extensions.js';
import { loadMovingUIState, performFuzzySearch, power_user } from '../../power-user.js';
-import { onlyUnique, debounce, getCharaFilename, trimToEndSentence, trimToStartSentence, waitUntilCondition, findChar } from '../../utils.js';
+import { onlyUnique, debounce, getCharaFilename, trimToEndSentence, trimToStartSentence, waitUntilCondition, findChar, isTrueBoolean } from '../../utils.js';
import { hideMutedSprites, selected_group } from '../../group-chats.js';
import { isJsonSchemaSupported } from '../../textgen-settings.js';
import { debounce_timeout } from '../../constants.js';
@@ -678,7 +678,7 @@ async function setSpriteFolderCommand(_, folder) {
return '';
}
-async function classifyCallback(/** @type {{api: string?, prompt: string?}} */ { api = null, prompt = null }, text) {
+async function classifyCallback(/** @type {{api: string?, filter: string?, prompt: string?}} */ { api = null, filter = null, prompt = null }, text) {
if (!text) {
toastr.error('No text provided');
return '';
@@ -689,13 +689,14 @@ async function classifyCallback(/** @type {{api: string?, prompt: string?}} */ {
}
const expressionApi = EXPRESSION_API[api] || extension_settings.expressions.api;
+ const filterAvailable = isTrueBoolean(filter);
if (!modules.includes('classify') && expressionApi == EXPRESSION_API.extras) {
toastr.warning('Text classification is disabled or not available');
return '';
}
- const label = await getExpressionLabel(text, expressionApi, { customPrompt: prompt });
+ const label = await getExpressionLabel(text, expressionApi, { filterAvailable: filterAvailable, customPrompt: prompt });
console.debug(`Classification result for "${text}": ${label}`);
return label;
}
@@ -988,10 +989,11 @@ function onTextGenSettingsReady(args) {
* @param {string} text - The text to classify and retrieve the expression label for.
* @param {EXPRESSION_API} [expressionsApi=extension_settings.expressions.api] - The expressions API to use for classification.
* @param {object} [options={}] - Optional arguments.
+ * @param {boolean?} [options.filterAvailable=null] - Whether to filter available expressions. If not specified, uses the extension setting.
* @param {string?} [options.customPrompt=null] - The custom prompt to use for classification.
* @returns {Promise} - The label of the expression.
*/
-export async function getExpressionLabel(text, expressionsApi = extension_settings.expressions.api, { customPrompt = null } = {}) {
+export async function getExpressionLabel(text, expressionsApi = extension_settings.expressions.api, { filterAvailable = null, customPrompt = null } = {}) {
// Return if text is undefined, saving a costly fetch request
if ((!modules.includes('classify') && expressionsApi == EXPRESSION_API.extras) || !text) {
return extension_settings.expressions.fallback_expression;
@@ -1003,6 +1005,11 @@ export async function getExpressionLabel(text, expressionsApi = extension_settin
text = sampleClassifyText(text);
+ filterAvailable ??= extension_settings.expressions.filterAvailable;
+ if (filterAvailable && ![EXPRESSION_API.llm, EXPRESSION_API.webllm].includes(expressionsApi)) {
+ console.warn('Filter available is only supported for LLM and WebLLM expressions');
+ }
+
try {
switch (expressionsApi) {
// Local BERT pipeline
@@ -1027,7 +1034,7 @@ export async function getExpressionLabel(text, expressionsApi = extension_settin
return extension_settings.expressions.fallback_expression;
}
- const expressionsList = await getExpressionsList();
+ const expressionsList = await getExpressionsList({ filterAvailable: filterAvailable });
const prompt = substituteParamsExtended(customPrompt, { labels: expressionsList }) || await getLlmPrompt(expressionsList);
eventSource.once(event_types.TEXT_COMPLETION_SETTINGS_READY, onTextGenSettingsReady);
const emotionResponse = await generateRaw(text, main_api, false, false, prompt);
@@ -1040,7 +1047,7 @@ export async function getExpressionLabel(text, expressionsApi = extension_settin
return extension_settings.expressions.fallback_expression;
}
- const expressionsList = await getExpressionsList();
+ const expressionsList = await getExpressionsList({ filterAvailable: filterAvailable });
const prompt = substituteParamsExtended(customPrompt, { labels: expressionsList }) || await getLlmPrompt(expressionsList);
const messages = [
{ role: 'user', content: text + '\n\n' + prompt },
@@ -1320,12 +1327,28 @@ function getCachedExpressions() {
return [...expressionsList, ...extension_settings.expressions.custom].filter(onlyUnique);
}
-export async function getExpressionsList() {
- // Return cached list if available
- if (Array.isArray(expressionsList)) {
- return getCachedExpressions();
+export async function getExpressionsList({ filterAvailable = false } = {}) {
+ // If there is no cached list, load and cache it
+ if (!Array.isArray(expressionsList)) {
+ expressionsList = await resolveExpressionsList();
}
+ const expressions = getCachedExpressions();
+
+ // Filtering is only available for llm and webllm APIs
+ if (!filterAvailable || ![EXPRESSION_API.llm, EXPRESSION_API.webllm].includes(extension_settings.expressions.api)) {
+ return expressions;
+ }
+
+ // Get expressions with available sprites
+ const currentLastMessage = selected_group ? getLastCharacterMessage() : null;
+ const spriteFolderName = getSpriteFolderName(currentLastMessage, currentLastMessage?.name);
+
+ return expressions.filter(label => {
+ const expression = spriteCache[spriteFolderName]?.find(x => x.label === label);
+ return (expression?.files.length ?? 0) > 0;
+ });
+
/**
* Returns the list of expressions from the API or fallback in offline mode.
* @returns {Promise}
@@ -1372,9 +1395,6 @@ export async function getExpressionsList() {
expressionsList = DEFAULT_EXPRESSIONS.slice();
return expressionsList;
}
-
- const result = await resolveExpressionsList();
- return [...result, ...extension_settings.expressions.custom].filter(onlyUnique);
}
/**
@@ -2102,6 +2122,10 @@ function migrateSettings() {
extension_settings.expressions.rerollIfSame = !!$(this).prop('checked');
saveSettingsDebounced();
});
+ $('#expressions_filter_available').prop('checked', extension_settings.expressions.filterAvailable).on('input', function () {
+ extension_settings.expressions.filterAvailable = !!$(this).prop('checked');
+ saveSettingsDebounced();
+ });
$('#expression_override_cleanup_button').on('click', onClickExpressionOverrideRemoveAllButton);
$(document).on('dragstart', '.expression', (e) => {
e.preventDefault();
@@ -2279,13 +2303,13 @@ function migrateSettings() {
SlashCommandParser.addCommandObject(SlashCommand.fromProps({
name: 'expression-list',
aliases: ['expressions'],
- /** @type {(args: {return: string}) => Promise} */
+ /** @type {(args: {return: string, filterAvailable: string}) => Promise} */
callback: async (args) => {
let returnType =
/** @type {import('../../slash-commands/SlashCommandReturnHelper.js').SlashCommandReturnType} */
(args.return);
- const list = await getExpressionsList();
+ const list = await getExpressionsList({ filterAvailable: isTrueBoolean(args.filterAvailable) });
return await slashCommandReturnHelper.doReturn(returnType ?? 'pipe', list, { objectToStringFunc: list => list.join(', ') });
},
@@ -2298,6 +2322,13 @@ function migrateSettings() {
enumList: slashCommandReturnHelper.enumList({ allowObject: true }),
forceEnum: true,
}),
+ SlashCommandNamedArgument.fromProps({
+ name: 'filter',
+ description: 'Filter the list to only include expressions that have available sprites for the current character.',
+ typeList: [ARGUMENT_TYPE.BOOLEAN],
+ enumList: commonEnumProviders.boolean('trueFalse')(),
+ defaultValue: 'true',
+ }),
],
returns: 'The comma-separated list of available expressions, including custom expressions.',
helpString: 'Returns a list of available expressions, including custom expressions.',
@@ -2313,6 +2344,13 @@ function migrateSettings() {
typeList: [ARGUMENT_TYPE.STRING],
enumList: Object.keys(EXPRESSION_API).map(api => new SlashCommandEnumValue(api, null, enumTypes.enum)),
}),
+ SlashCommandNamedArgument.fromProps({
+ name: 'filter',
+ description: 'Filter the list to only include expressions that have available sprites for the current character.',
+ typeList: [ARGUMENT_TYPE.BOOLEAN],
+ enumList: commonEnumProviders.boolean('trueFalse')(),
+ defaultValue: 'true',
+ }),
SlashCommandNamedArgument.fromProps({
name: 'prompt',
description: 'Custom prompt for classification. Only relevant if Classifier API is set to LLM.',
diff --git a/public/scripts/extensions/expressions/settings.html b/public/scripts/extensions/expressions/settings.html
index 06e407d7d..98938cf4d 100644
--- a/public/scripts/extensions/expressions/settings.html
+++ b/public/scripts/extensions/expressions/settings.html
@@ -29,7 +29,11 @@
-