Merge branch 'staging' into staging

Yokayo 2024-05-21 20:42:08 +07:00 committed by GitHub
commit 2c69d77fae
9 changed files with 54 additions and 55 deletions

package-lock.json (generated)

@@ -25,7 +25,6 @@
"express": "^4.19.2",
"form-data": "^4.0.0",
"google-translate-api-browser": "^3.0.1",
"gpt3-tokenizer": "^1.1.5",
"he": "^1.2.0",
"helmet": "^7.1.0",
"ip-matching": "^2.1.2",
@@ -1213,10 +1212,6 @@
"version": "1.1.1",
"license": "MIT"
},
"node_modules/array-keyed-map": {
"version": "2.1.3",
"license": "ISC"
},
"node_modules/async": {
"version": "3.2.5",
"resolved": "https://registry.npmjs.org/async/-/async-3.2.5.tgz",
@@ -2738,16 +2733,6 @@
"version": "1.1.4",
"license": "MIT"
},
"node_modules/gpt3-tokenizer": {
"version": "1.1.5",
"license": "MIT",
"dependencies": {
"array-keyed-map": "^2.1.3"
},
"engines": {
"node": ">=12"
}
},
"node_modules/graceful-fs": {
"version": "4.2.11",
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz",

@@ -15,7 +15,6 @@
"express": "^4.19.2",
"form-data": "^4.0.0",
"google-translate-api-browser": "^3.0.1",
"gpt3-tokenizer": "^1.1.5",
"he": "^1.2.0",
"helmet": "^7.1.0",
"ip-matching": "^2.1.2",

@@ -1684,7 +1684,7 @@
</div>
</div>
<div class="range-block" data-source="openai,openrouter,makersuite,claude,custom">
<label for="openai_image_inlining" class="checkbox_label flexWrap widthFreeExpand marginBot10">
<label for="openai_image_inlining" class="checkbox_label flexWrap widthFreeExpand">
<input id="openai_image_inlining" type="checkbox" />
<span data-i18n="Send inline images">Send inline images</span>
<div id="image_inlining_hint" class="flexBasis100p toggle-description justifyLeft">
@@ -1693,7 +1693,7 @@
<code><i class="fa-solid fa-wand-magic-sparkles"></i></code> <span data-i18n="image_inlining_hint_3">menu to attach an image file to the chat.</span>
</div>
</label>
<div class="flex-container flexFlowColumn wide100p textAlignCenter">
<div class="flex-container flexFlowColumn wide100p textAlignCenter marginTop10" data-source="openai,custom">
<label for="openai_inline_image_quality" data-i18n="Inline Image Quality">
Inline Image Quality
</label>
@@ -1729,8 +1729,8 @@
</span>
</label>
<div class="toggle-description justifyLeft marginBot5">
<span data-i18n="Merges all system messages up until the first message with a non system role, and sends them through google's system_instruction field instead of with the rest of the prompt contents.">
Merges all system messages up until the first message with a non system role, and sends them through google's system_instruction field instead of with the rest of the prompt contents.
<span data-i18n="Merges all system messages up until the first message with a non-system role, and sends them in a system_instruction field.">
Merges all system messages up until the first message with a non-system role, and sends them in a <code>system_instruction</code> field.
</span>
</div>
</div>
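For context, a minimal sketch of the request shape this toggle targets, assuming Google's generateContent payload layout; the example strings are hypothetical. The merged system messages travel in system_instruction, while the remaining turns stay in contents:

// Sketch only: hypothetical values; field layout assumed from Google's generateContent API.
const body = {
    system_instruction: { parts: [{ text: 'You are a helpful roleplay assistant.' }] },
    contents: [
        { role: 'user', parts: [{ text: 'Hello!' }] },
    ],
};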

@@ -469,7 +469,6 @@ let settingsReady = false;
let currentVersion = '0.0.0';
let displayVersion = 'SillyTavern';
export const default_ch_mes = 'Hello';
let generatedPromptCache = '';
let generation_started = new Date();
/** @type {import('scripts/char-data.js').v1CharData[]} */
@@ -5785,8 +5784,10 @@ async function getChatResult() {
name2 = characters[this_chid].name;
if (chat.length === 0) {
const message = getFirstMessage();
chat.push(message);
await saveChatConditional();
if (message.mes) {
chat.push(message);
await saveChatConditional();
}
}
await loadItemizedPrompts(getCurrentChatId());
await printMessages();
@@ -5802,7 +5803,7 @@ async function getChatResult() {
}
function getFirstMessage() {
const firstMes = characters[this_chid].first_mes || default_ch_mes;
const firstMes = characters[this_chid].first_mes || '';
const alternateGreetings = characters[this_chid]?.data?.alternate_greetings;
const message = {
@@ -5816,10 +5817,17 @@ function getFirstMessage() {
if (Array.isArray(alternateGreetings) && alternateGreetings.length > 0) {
const swipes = [message.mes, ...(alternateGreetings.map(greeting => getRegexedString(greeting, regex_placement.AI_OUTPUT)))];
if (!message.mes) {
swipes.shift();
message.mes = swipes[0];
}
message['swipe_id'] = 0;
message['swipes'] = swipes;
message['swipe_info'] = [];
}
return message;
}
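A worked illustration of the fallback above, using hypothetical character data and omitting the getRegexedString pass: with an empty first_mes, the empty entry is dropped from the swipe list and the first alternate greeting becomes the visible message.

// Hypothetical data, mirroring the logic in getFirstMessage():
const firstMes = '';                                   // character.first_mes is empty
const alternateGreetings = ['Hi there.', 'Greetings.'];
const message = { mes: firstMes };
const swipes = [message.mes, ...alternateGreetings];   // ['', 'Hi there.', 'Greetings.']
if (!message.mes) {
    swipes.shift();                                    // drop the empty entry
    message.mes = swipes[0];                           // 'Hi there.'
}
message.swipe_id = 0;
message.swipes = swipes;                               // ['Hi there.', 'Greetings.']
message.swipe_info = [];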
@@ -6381,9 +6389,9 @@ async function messageEditDone(div) {
appendMediaToMessage(mes, div.closest('.mes'));
addCopyToCodeBlocks(div.closest('.mes'));
await eventSource.emit(event_types.MESSAGE_UPDATED, this_edit_mes_id);
this_edit_mes_id = undefined;
await saveChatConditional();
await eventSource.emit(event_types.MESSAGE_UPDATED, this_edit_mes_id);
}
/**
@@ -7400,8 +7408,8 @@ function openAlternateGreetings() {
template.find('.add_alternate_greeting').on('click', function () {
const array = getArray();
const index = array.length;
array.push(default_ch_mes);
addAlternateGreeting(template, default_ch_mes, index, getArray);
array.push('');
addAlternateGreeting(template, '', index, getArray);
updateAlternateGreetingsHintVisibility(template);
});
@@ -7572,15 +7580,20 @@ async function createOrEditCharacter(e) {
eventSource.emit(event_types.CHARACTER_EDITED, { detail: { id: this_chid, character: characters[this_chid] } });
// Recreate the chat if it hasn't been used at least once (i.e. with continue).
if (chat.length === 1 && !selected_group && !chat_metadata['tainted']) {
const firstMessage = getFirstMessage();
chat[0] = firstMessage;
const message = getFirstMessage();
const shouldRegenerateMessage =
message.mes &&
!selected_group &&
!chat_metadata['tainted'] &&
(chat.length === 0 || (chat.length === 1 && !chat[0].is_user && !chat[0].is_system));
const chat_id = (chat.length - 1);
await eventSource.emit(event_types.MESSAGE_RECEIVED, chat_id);
if (shouldRegenerateMessage) {
chat.splice(0, chat.length, message);
const messageId = (chat.length - 1);
await eventSource.emit(event_types.MESSAGE_RECEIVED, messageId);
await clearChat();
await printMessages();
await eventSource.emit(event_types.CHARACTER_MESSAGE_RENDERED, chat_id);
await eventSource.emit(event_types.CHARACTER_MESSAGE_RENDERED, messageId);
await saveChatConditional();
}
},

@@ -31,13 +31,6 @@ import { ARGUMENT_TYPE, SlashCommandArgument, SlashCommandNamedArgument } from '
import { resolveVariable } from '../../variables.js';
export { MODULE_NAME };
// Wraps a string into monospace font-face span
const m = x => `<span class="monospace">${x}</span>`;
// Joins an array of strings with ' / '
const j = a => a.join(' / ');
// Wraps a string into paragraph block
const p = a => `<p>${a}</p>`;
const MODULE_NAME = 'sd';
const UPDATE_INTERVAL = 1000;
// This is a 1x1 transparent PNG
@@ -151,11 +144,6 @@ const promptTemplates = {
[generationMode.USER_MULTIMODAL]: 'Provide an exhaustive comma-separated list of tags describing the appearance of the character on this image in great detail. Start with "full body portrait".',
};
const helpString = [
`${m('[quiet=false/true] (argument)')} requests to generate an image and posts it to chat (unless quiet=true argument is specified). Supported arguments: ${m(j(Object.values(triggerWords).flat()))}.`,
'Anything else would trigger a "free mode" to make generate whatever you prompted. Example: \'/imagine apple tree\' would generate a picture of an apple tree. Returns a link to the generated image.',
].join(' ');
const defaultPrefix = 'best quality, absurdres, aesthetic,';
const defaultNegative = 'lowres, bad anatomy, bad hands, text, error, cropped, worst quality, low quality, normal quality, jpeg artifacts, signature, watermark, username, blurry';
@@ -1897,7 +1885,7 @@ function processReply(str) {
str = str.replaceAll('“', '');
str = str.replaceAll('.', ',');
str = str.replaceAll('\n', ', ');
str = str.replace(/[^a-zA-Z0-9,:()\-']+/g, ' '); // Replace everything except alphanumeric characters and commas with spaces
str = str.replace(/[^a-zA-Z0-9,:(){}\-']+/g, ' '); // Replace everything except alphanumeric characters and commas with spaces
str = str.replace(/\s+/g, ' '); // Collapse multiple whitespaces into one
str = str.trim();
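A quick before-and-after check of the widened character class, on a hypothetical prompt fragment: the old pattern stripped curly braces, the new one preserves them, so brace-weighted tags survive processReply.

// Sketch only: hypothetical input; the other processReply steps are collapsed into the trailing whitespace cleanup.
const oldPattern = /[^a-zA-Z0-9,:()\-']+/g;
const newPattern = /[^a-zA-Z0-9,:(){}\-']+/g;
const raw = '{masterpiece}, (large hat)';
console.log(raw.replace(oldPattern, ' ').replace(/\s+/g, ' ').trim()); // masterpiece , (large hat)
console.log(raw.replace(newPattern, ' ').replace(/\s+/g, ' ').trim()); // {masterpiece}, (large hat)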
@@ -2849,18 +2837,20 @@ async function onComfyDeleteWorkflowClick() {
*/
async function sendMessage(prompt, image, generationType, additionalNegativePrefix) {
const context = getContext();
const messageText = `[${context.name2} sends a picture that contains: ${prompt}]`;
const name = context.groupId ? systemUserName : context.name2;
const messageText = `[${name} sends a picture that contains: ${prompt}]`;
const message = {
name: context.groupId ? systemUserName : context.name2,
name: name,
is_user: false,
is_system: true,
send_date: getMessageTimeStamp(),
mes: context.groupId ? p(messageText) : messageText,
mes: messageText,
extra: {
image: image,
title: prompt,
generationType: generationType,
negative: additionalNegativePrefix,
inline_image: false,
},
};
context.chat.push(message);

@@ -646,7 +646,7 @@ jQuery(() => {
eventSource.makeFirst(event_types.USER_MESSAGE_RENDERED, handleOutgoingMessage);
eventSource.on(event_types.MESSAGE_SWIPED, handleIncomingMessage);
eventSource.on(event_types.IMPERSONATE_READY, handleImpersonateReady);
eventSource.on(event_types.MESSAGE_EDITED, handleMessageEdit);
eventSource.on(event_types.MESSAGE_UPDATED, handleMessageEdit);
document.body.classList.add('translate');
});

@@ -40,7 +40,6 @@ import {
online_status,
talkativeness_default,
selectRightMenuWithAnimation,
default_ch_mes,
deleteLastMessage,
showSwipeButtons,
hideSwipeButtons,
@@ -204,6 +203,12 @@ export async function getGroupChat(groupId, reload = false) {
}
const mes = await getFirstCharacterMessage(character);
// No first message
if (!(mes?.mes)) {
continue;
}
chat.push(mes);
await eventSource.emit(event_types.MESSAGE_RECEIVED, (chat.length - 1));
addOneMessage(mes);
@@ -452,7 +457,7 @@ async function getFirstCharacterMessage(character) {
mes['extra'] = { 'gen_id': Date.now() * Math.random() * 1000000 };
mes['mes'] = messageText
? substituteParams(messageText.trim(), name1, character.name)
: default_ch_mes;
: '';
mes['force_avatar'] =
character.avatar != 'none'
? getThumbnailUrl('avatar', character.avatar)

@@ -7,4 +7,5 @@
<li><span data-i18n="char_import_4">Pygmalion.chat Character (Direct Link or UUID)</span><br><span data-i18n="char_import_example">Example:</span> <tt>a7ca95a1-0c88-4e23-91b3-149db1e78ab9</tt></li>
<li><span data-i18n="char_import_5">AICharacterCard.com Character (Direct Link or ID)</span><br><span data-i18n="char_import_example">Example:</span> <tt>AICC/aicharcards/the-game-master</tt></li>
<li><span data-i18n="char_import_6">Direct PNG Link (refer to</span> <code>config.yaml</code><span data-i18n="char_import_7"> for allowed hosts)</span><br><span data-i18n="char_import_example">Example:</span> <tt>https://files.catbox.moe/notarealfile.png</tt></li>
</ul>
<li><span data-i18n="char_import_7">RisuRealm Character (Direct Link)</span><br><span data-i18n="char_import_example">Example:</span> <tt>https://realm.risuai.net/character/3ca54c71-6efe-46a2-b9d0-4f62df23d712</tt></li>
</ul>

@@ -263,8 +263,14 @@ function convertGooglePrompt(messages, model, useSysPrompt = false, charName = '
const PNG_PIXEL = 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mNkYAAAAAYAAjCB0C8AAAAASUVORK5CYII=';
const visionSupportedModels = [
'gemini-1.0-pro-vision-latest',
'gemini-1.5-flash-latest',
'gemini-1.5-pro-latest',
'gemini-1.0-pro-vision-latest',
'gemini-pro-vision',
];
const dummyRequiredModels = [
'gemini-1.0-pro-vision-latest',
'gemini-pro-vision',
];
@@ -343,7 +349,7 @@ function convertGooglePrompt(messages, model, useSysPrompt = false, charName = '
});
// pro 1.5 doesn't require a dummy image to be attached, other vision models do
if (isMultimodal && model !== 'gemini-1.5-pro-latest' && !hasImage) {
if (isMultimodal && dummyRequiredModels.includes(model) && !hasImage) {
contents[0].parts.push({
inlineData: {
mimeType: 'image/png',