Merge branch 'staging' into welcome-screen

This commit is contained in:
Cohee
2025-05-13 19:52:40 +03:00
4 changed files with 15 additions and 6 deletions

View File

@ -28,6 +28,7 @@ import { ARGUMENT_TYPE, SlashCommandArgument, SlashCommandNamedArgument } from '
import { MacrosParser } from '../../macros.js'; import { MacrosParser } from '../../macros.js';
import { countWebLlmTokens, generateWebLlmChatPrompt, getWebLlmContextSize, isWebLlmSupported } from '../shared.js'; import { countWebLlmTokens, generateWebLlmChatPrompt, getWebLlmContextSize, isWebLlmSupported } from '../shared.js';
import { commonEnumProviders } from '../../slash-commands/SlashCommandCommonEnumsProvider.js'; import { commonEnumProviders } from '../../slash-commands/SlashCommandCommonEnumsProvider.js';
import { removeReasoningFromString } from '../../reasoning.js';
export { MODULE_NAME }; export { MODULE_NAME };
const MODULE_NAME = '1_memory'; const MODULE_NAME = '1_memory';
@ -504,7 +505,7 @@ async function summarizeCallback(args, text) {
case summary_sources.extras: case summary_sources.extras:
return await callExtrasSummarizeAPI(text); return await callExtrasSummarizeAPI(text);
case summary_sources.main: case summary_sources.main:
return await generateRaw(text, '', false, false, prompt, extension_settings.memory.overrideResponseLength); return removeReasoningFromString(await generateRaw(text, '', false, false, prompt, extension_settings.memory.overrideResponseLength));
case summary_sources.webllm: { case summary_sources.webllm: {
const messages = [{ role: 'system', content: prompt }, { role: 'user', content: text }].filter(m => m.content); const messages = [{ role: 'system', content: prompt }, { role: 'user', content: text }].filter(m => m.content);
const params = extension_settings.memory.overrideResponseLength > 0 ? { max_tokens: extension_settings.memory.overrideResponseLength } : {}; const params = extension_settings.memory.overrideResponseLength > 0 ? { max_tokens: extension_settings.memory.overrideResponseLength } : {};
@ -699,7 +700,8 @@ async function summarizeChatMain(context, force, skipWIAN) {
return null; return null;
} }
summary = await generateRaw(rawPrompt, '', false, false, prompt, extension_settings.memory.overrideResponseLength); const rawSummary = await generateRaw(rawPrompt, '', false, false, prompt, extension_settings.memory.overrideResponseLength);
summary = removeReasoningFromString(rawSummary);
index = lastUsedIndex; index = lastUsedIndex;
} finally { } finally {
inApiCall = false; inApiCall = false;

View File

@ -36,6 +36,7 @@ import { slashCommandReturnHelper } from '../../slash-commands/SlashCommandRetur
import { callGenericPopup, POPUP_RESULT, POPUP_TYPE } from '../../popup.js'; import { callGenericPopup, POPUP_RESULT, POPUP_TYPE } from '../../popup.js';
import { generateWebLlmChatPrompt, isWebLlmSupported } from '../shared.js'; import { generateWebLlmChatPrompt, isWebLlmSupported } from '../shared.js';
import { WebLlmVectorProvider } from './webllm.js'; import { WebLlmVectorProvider } from './webllm.js';
import { removeReasoningFromString } from '../../reasoning.js';
/** /**
* @typedef {object} HashedMessage * @typedef {object} HashedMessage
@ -260,7 +261,7 @@ async function summarizeExtra(element) {
* @returns {Promise<boolean>} Success * @returns {Promise<boolean>} Success
*/ */
async function summarizeMain(element) { async function summarizeMain(element) {
element.text = await generateRaw(element.text, '', false, false, settings.summary_prompt); element.text = removeReasoningFromString(await generateRaw(element.text, '', false, false, settings.summary_prompt));
return true; return true;
} }

View File

@ -765,7 +765,10 @@ async function populationInjectionPrompts(prompts, messages) {
const wrap = false; const wrap = false;
// Group prompts by priority // Group prompts by priority
const orderGroups = {}; const extensionPromptsOrder = '0';
const orderGroups = {
[extensionPromptsOrder]: [],
};
for (const prompt of depthPrompts) { for (const prompt of depthPrompts) {
const order = prompt.injection_order || 0; const order = prompt.injection_order || 0;
if (!orderGroups[order]) { if (!orderGroups[order]) {
@ -788,7 +791,9 @@ async function populationInjectionPrompts(prompts, messages) {
.join(separator); .join(separator);
// Get extension prompt // Get extension prompt
const extensionPrompt = await getExtensionPrompt(extension_prompt_types.IN_CHAT, i, separator, roleTypes[role], wrap); const extensionPrompt = order === extensionPromptsOrder
? await getExtensionPrompt(extension_prompt_types.IN_CHAT, i, separator, roleTypes[role], wrap)
: '';
const jointPrompt = [rolePrompts, extensionPrompt].filter(x => x).map(x => x.trim()).join(separator); const jointPrompt = [rolePrompts, extensionPrompt].filter(x => x).map(x => x.trim()).join(separator);
if (jointPrompt && jointPrompt.length) { if (jointPrompt && jointPrompt.length) {

View File

@ -824,7 +824,8 @@ pollinations.post('/generate', async (request, response) => {
height: String(request.body.height ?? 1024), height: String(request.body.height ?? 1024),
nologo: String(true), nologo: String(true),
nofeed: String(true), nofeed: String(true),
referer: 'sillytavern', private: String(true),
referrer: 'sillytavern',
}); });
promptUrl.search = params.toString(); promptUrl.search = params.toString();