Merge pull request #1594 from valadaptive/generate-cleanups-3

Clean up Generate(), part 3
Committed by Cohee on 2024-01-01 18:33:27 +02:00 (via GitHub)
5 changed files with 981 additions and 1009 deletions

View File

@@ -320,7 +320,6 @@ export const event_types = {
SETTINGS_LOADED_AFTER: 'settings_loaded_after',
CHATCOMPLETION_SOURCE_CHANGED: 'chatcompletion_source_changed',
CHATCOMPLETION_MODEL_CHANGED: 'chatcompletion_model_changed',
OAI_BEFORE_CHATCOMPLETION: 'oai_before_chatcompletion',
OAI_PRESET_CHANGED_BEFORE: 'oai_preset_changed_before',
OAI_PRESET_CHANGED_AFTER: 'oai_preset_changed_after',
WORLDINFO_SETTINGS_UPDATED: 'worldinfo_settings_updated',
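
The OAI_BEFORE_CHATCOMPLETION hook is removed here, and its emit is dropped in the openai.js hunk further down, so nothing fires it anymore. A minimal, hypothetical sketch of migrating a listener to one of the surviving events, assuming the usual eventSource.on API; the chosen event, payload, and import path are illustrative only:

// Hypothetical extension listener: OAI_BEFORE_CHATCOMPLETION no longer fires, so code like
// this must target an event that still exists (payload shape not shown in this diff).
import { eventSource, event_types } from '../script.js';

eventSource.on(event_types.OAI_PRESET_CHANGED_AFTER, (data) => {
    console.debug('Chat Completion preset changed', data);
});
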
@@ -1473,6 +1472,29 @@ export async function reloadCurrentChat() {
showSwipeButtons();
}
/**
* Send the message currently typed into the chat box.
*/
export function sendTextareaMessage() {
if (is_send_press) return;
let generateType;
// "Continue on send" is activated when the user hits "send" (or presses enter) on an empty chat box, and the last
// message was sent from a character (not the user or the system).
const textareaText = String($('#send_textarea').val());
if (power_user.continue_on_send &&
!textareaText &&
!selected_group &&
chat.length &&
!chat[chat.length - 1]['is_user'] &&
!chat[chat.length - 1]['is_system']
) {
generateType = 'continue';
}
Generate(generateType);
}
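
The "continue on send" decision that previously lived inside Generate() (removed in the @@ -3038 hunk below) now happens here, before Generate() is called. The same condition as a standalone predicate, purely for illustration; the helper name is hypothetical:

// Hypothetical helper mirroring the decision sendTextareaMessage() makes: continue the
// last character message only when the box is empty, no group chat is active, and the
// newest message is neither from the user nor the system.
function shouldContinueOnSend(textareaText, selectedGroup, chat, continueOnSend) {
    const lastMessage = chat[chat.length - 1];
    return Boolean(
        continueOnSend &&
        !textareaText &&
        !selectedGroup &&
        chat.length &&
        lastMessage && !lastMessage.is_user && !lastMessage.is_system,
    );
}
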
function messageFormatting(mes, ch_name, isSystem, isUser) {
if (!mes) {
return '';
@@ -2002,7 +2024,7 @@ function formatGenerationTimer(gen_started, gen_finished, tokenCount) {
tokenCount > 0 ? `Token rate: ${Number(tokenCount / seconds).toFixed(1)} t/s` : '',
].join('\n');
if (isNaN(seconds)) {
if (isNaN(seconds) || seconds < 0) {
return { timerValue: '', timerTitle };
}
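
formatGenerationTimer() now hides the timer for negative durations as well as NaN (e.g., when the clock moves backwards between start and finish). A small self-contained sketch of the rate arithmetic with that guard; the millisecond-timestamp inputs are an assumption for illustration:

// Sketch of the token-rate math with the extended guard (inputs assumed to be ms timestamps).
function tokenRate(genStartedMs, genFinishedMs, tokenCount) {
    const seconds = (genFinishedMs - genStartedMs) / 1000;
    if (isNaN(seconds) || seconds < 0) return '';   // bad or backwards clock: show nothing
    return tokenCount > 0 ? `${(tokenCount / seconds).toFixed(1)} t/s` : '';
}
// Example: 120 tokens over 8 seconds -> "15.0 t/s"
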
@@ -2354,11 +2376,7 @@ export async function generateQuietPrompt(quiet_prompt, quietToLoud, skipWIAN, q
return generateFinished;
}
async function processCommands(message, type, dryRun) {
if (dryRun || type == 'regenerate' || type == 'swipe' || type == 'quiet') {
return null;
}
async function processCommands(message) {
const previousText = String($('#send_textarea').val());
const result = await executeSlashCommands(message);
@@ -2946,13 +2964,15 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
let message_already_generated = isImpersonate ? `${name1}: ` : `${name2}: `;
const interruptedByCommand = await processCommands($('#send_textarea').val(), type, dryRun);
if (!(dryRun || type == 'regenerate' || type == 'swipe' || type == 'quiet')) {
const interruptedByCommand = await processCommands($('#send_textarea').val());
if (interruptedByCommand) {
//$("#send_textarea").val('').trigger('input');
unblockGeneration();
return Promise.resolve();
}
}
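
processCommands() no longer filters itself by type or dryRun; the caller decides whether slash commands should run at all. A reduced, self-contained illustration of the hoisted guard (the helper is hypothetical, the type names come from the diff):

// Reduced illustration: the callee keeps one job, the caller owns the applicability check.
function shouldProcessCommands(type, dryRun) {
    return !dryRun && !['regenerate', 'swipe', 'quiet'].includes(type);
}
// Only when this returns true does Generate() call processCommands() and bail out via
// unblockGeneration() if a slash command handled the input.
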
if (main_api == 'kobold' && kai_settings.streaming_kobold && !kai_flags.can_use_streaming) {
toastr.error('Streaming is enabled, but the version of Kobold used does not support token streaming.', undefined, { timeOut: 10000, preventDuplicates: true });
@@ -2981,10 +3001,12 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
chat_metadata['tainted'] = true;
}
if (selected_group && !is_group_generating && !dryRun) {
if (selected_group && !is_group_generating) {
if (!dryRun) {
// Returns the promise that generateGroupWrapper returns; resolves when generation is done
return generateGroupWrapper(false, type, { quiet_prompt, force_chid, signal: abortController.signal, quietImage, maxLoops });
} else if (selected_group && !is_group_generating && dryRun) {
}
const characterIndexMap = new Map(characters.map((char, index) => [char.avatar, index]));
const group = groups.find((x) => x.id === selected_group);
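
When it's a dry run, the group branch no longer returns early; it builds an avatar-to-index Map so the group's members can be resolved without kicking off generation. A self-contained illustration of that lookup pattern with made-up data:

// Self-contained illustration of the avatar -> index lookup used in the dry-run group path.
const charactersExample = [
    { avatar: 'alice.png', name: 'Alice' },
    { avatar: 'bob.png', name: 'Bob' },
];
const characterIndexMap = new Map(charactersExample.map((char, index) => [char.avatar, index]));
const groupExample = { id: 'g1', members: ['bob.png', 'alice.png'] };
// Resolve member avatars back to character indices (order preserved from the group).
const memberIndices = groupExample.members.map((avatar) => characterIndexMap.get(avatar));
console.assert(memberIndices[0] === 1 && memberIndices[1] === 0);
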
@@ -3016,8 +3038,18 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
quiet_prompt = main_api == 'novel' && !quietToLoud ? adjustNovelInstructionPrompt(quiet_prompt) : quiet_prompt;
}
if (true === dryRun ||
(online_status != 'no_connection' && this_chid != undefined && this_chid !== 'invalid-safety-id')) {
const isChatValid = online_status != 'no_connection' && this_chid != undefined && this_chid !== 'invalid-safety-id';
// We can't do anything because we're not in a chat right now. (Unless it's a dry run, in which case we need to
// assemble the prompt so we can count its tokens regardless of whether a chat is active.)
if (!dryRun && !isChatValid) {
if (this_chid === undefined || this_chid === 'invalid-safety-id') {
toastr.warning('Character is not selected');
}
is_send_press = false;
return Promise.resolve();
}
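
The "no chat / no connection" handling that used to sit at the very end of Generate() (removed in the @@ -3953 hunk below) becomes this early guard. The same predicate as a pure function, for illustration only:

// Illustrative predicate mirroring the new early guard: generation can proceed only when
// connected and a concrete character is selected; dry runs bypass the check entirely.
function canGenerate(onlineStatus, chid, dryRun) {
    const isChatValid = onlineStatus !== 'no_connection'
        && chid !== undefined
        && chid !== 'invalid-safety-id';
    return dryRun || isChatValid;
}
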
let textareaText;
if (type !== 'regenerate' && type !== 'swipe' && type !== 'quiet' && !isImpersonate && !dryRun) {
is_send_press = true;
@@ -3038,10 +3070,6 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
}
}
if (!type && !textareaText && power_user.continue_on_send && !selected_group && chat.length && !chat[chat.length - 1]['is_user'] && !chat[chat.length - 1]['is_system']) {
type = 'continue';
}
const isContinue = type == 'continue';
// Rewrite the generation timer to account for the time passed for all the continuations.
@@ -3655,8 +3683,6 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
scenario,
char: name2,
user: name1,
worldInfoBefore,
worldInfoAfter,
beforeScenarioAnchor,
afterScenarioAnchor,
mesExmString,
@@ -3745,9 +3771,12 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
}
}
async function finishGenerating() {
if (dryRun) return { error: 'dryRun' };
if (dryRun) {
generatedPromptCache = '';
return Promise.resolve();
}
async function finishGenerating() {
if (power_user.console_log_prompts) {
console.log(generate_data.prompt);
}
@@ -3832,12 +3861,15 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
if (!data) return;
let messageChunk = '';
if (data.error == 'dryRun') {
if (data.error) {
generatedPromptCache = '';
return;
if (data?.response) {
toastr.error(data.response, 'API Error');
}
throw data?.response;
}
if (!data.error) {
//const getData = await response.json();
let getMessage = extractMessageFromData(data);
let title = extractTitleFromData(data);
@@ -3924,14 +3956,6 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
return;
}
}
} else {
generatedPromptCache = '';
if (data?.response) {
toastr.error(data.response, 'API Error');
}
throw data?.response;
}
console.debug('/api/chats/save called by /Generate');
await saveChatConditional();
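
With the 'dryRun' sentinel gone (dry runs now return before finishGenerating() runs), onSuccess treats any data.error uniformly: clear the prompt cache, toast the response if present, and throw. A hypothetical caller therefore sees failures as promise rejections:

// Hypothetical caller: onSuccess throws the backend's data.response on error, so failures
// surface here as a rejected promise.
async function sendAndReport() {
    try {
        await Generate();                         // exported from script.js
    } catch (err) {
        console.error('Generation failed:', err); // err may be undefined if no response body
    }
}
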
@@ -3953,12 +3977,6 @@ async function Generate(type, { automatic_trigger, force_name2, quiet_prompt, qu
streamingProcessor = null;
throw exception;
}
} else { //generate's primary loop ends, after this is error handling for no-connection or safety-id
if (this_chid === undefined || this_chid === 'invalid-safety-id') {
toastr.warning('Character is not selected');
}
is_send_press = false;
}
}
function flushWIDepthInjections() {
@@ -5434,20 +5452,7 @@ function changeMainAPI() {
getHordeModels();
}
switch (oai_settings.chat_completion_source) {
case chat_completion_sources.SCALE:
case chat_completion_sources.OPENROUTER:
case chat_completion_sources.WINDOWAI:
case chat_completion_sources.CLAUDE:
case chat_completion_sources.OPENAI:
case chat_completion_sources.AI21:
case chat_completion_sources.MAKERSUITE:
case chat_completion_sources.MISTRALAI:
case chat_completion_sources.CUSTOM:
default:
setupChatCompletionPromptManager(oai_settings);
break;
}
}
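
Every chat_completion_sources case in this switch fell through to the same default, so the whole construct reduces to one unconditional setupChatCompletionPromptManager() call. A tiny self-contained illustration of the equivalence (the source names are placeholders):

// When every case and the default share one body, the switch is dead weight.
function withSwitch(source) {
    switch (source) {
        case 'openai':
        case 'claude':
        default:
            return 'setup';
    }
}
const withoutSwitch = () => 'setup';
console.assert(withSwitch('openai') === withoutSwitch() && withSwitch('anything') === withoutSwitch());
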
////////////////////////////////////////////////////
@@ -7971,12 +7976,7 @@ jQuery(async function () {
});
$('#send_but').on('click', function () {
if (is_send_press == false) {
// This prevents the /trigger command from running via the send button
// But send on Enter doesn't set is_send_press (Generate itself does that)
// is_send_press = true;
Generate();
}
sendTextareaMessage();
});
//menu buttons setup

View File

@@ -1293,34 +1293,6 @@ class PromptManager {
this.log('Updated token usage with ' + this.tokenUsage);
}
/**
* Populates legacy token counts
*
* @deprecated This might serve no purpose and should be evaluated for removal
*
* @param {MessageCollection} messages
*/
populateLegacyTokenCounts(messages) {
// Update general token counts
const chatHistory = messages.getItemByIdentifier('chatHistory');
const startChat = chatHistory?.getCollection()[0]?.getTokens() || 0;
const continueNudge = chatHistory?.getCollection().find(message => message.identifier === 'continueNudge')?.getTokens() || 0;
this.tokenHandler.counts = {
...this.tokenHandler.counts,
...{
'start_chat': startChat,
'prompt': 0,
'bias': this.tokenHandler.counts.bias ?? 0,
'nudge': continueNudge,
'jailbreak': this.tokenHandler.counts.jailbreak ?? 0,
'impersonate': 0,
'examples': this.tokenHandler.counts.dialogueExamples ?? 0,
'conversation': this.tokenHandler.counts.chatHistory ?? 0,
},
};
}
/**
* Empties, then re-assembles the container containing the prompt list.
*/

View File

@@ -1,5 +1,4 @@
import {
Generate,
characters,
online_status,
main_api,
@@ -18,6 +17,7 @@ import {
menu_type,
substituteParams,
callPopup,
sendTextareaMessage,
} from '../script.js';
import {
@@ -954,9 +954,9 @@ export function initRossMods() {
//Enter to send when send_textarea in focus
if ($(':focus').attr('id') === 'send_textarea') {
const sendOnEnter = shouldSendOnEnter();
if (!event.shiftKey && !event.ctrlKey && !event.altKey && event.key == 'Enter' && is_send_press == false && sendOnEnter) {
if (!event.shiftKey && !event.ctrlKey && !event.altKey && event.key == 'Enter' && sendOnEnter) {
event.preventDefault();
Generate();
sendTextareaMessage();
}
}
if ($(':focus').attr('id') === 'dialogue_popup_input' && !isMobile()) {

View File

@@ -482,7 +482,10 @@ function setOpenAIMessageExamples(mesExamplesArray) {
*/
function setupChatCompletionPromptManager(openAiSettings) {
// Do not set up prompt manager more than once
if (promptManager) return promptManager;
if (promptManager) {
promptManager.render(false);
return promptManager;
}
promptManager = new PromptManager();
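
setupChatCompletionPromptManager() used to return the existing instance untouched; it now refreshes its rendering first, which matters because changeMainAPI() above calls it unconditionally. A self-contained sketch of that "create once, refresh on later calls" shape; the stub class and the meaning of the boolean render argument are placeholders, not the real PromptManager API:

// Placeholder stub standing in for PromptManager; only the call pattern is the point here.
class PromptManagerStub {
    render(flag) { console.debug('re-render requested, flag =', flag); }
}

let manager = null;
function setupManager() {
    if (manager) {
        manager.render(false);   // already initialized: refresh instead of returning silently
        return manager;
    }
    manager = new PromptManagerStub();
    return manager;
}
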
@@ -1031,9 +1034,6 @@ function preparePromptsForChatCompletion({ Scenario, charPersonality, name2, wor
prompts.set(jbReplacement, prompts.index('jailbreak'));
}
// Allow subscribers to manipulate the prompts object
eventSource.emit(event_types.OAI_BEFORE_CHATCOMPLETION, prompts);
return prompts;
}

View File

@@ -443,7 +443,7 @@ async function sendMistralAIRequest(request, response) {
const messages = Array.isArray(request.body.messages) ? request.body.messages : [];
const lastMsg = messages[messages.length - 1];
if (messages.length > 0 && lastMsg && (lastMsg.role === 'system' || lastMsg.role === 'assistant')) {
if (lastMsg.role === 'assistant') {
if (lastMsg.role === 'assistant' && lastMsg.name) {
lastMsg.content = lastMsg.name + ': ' + lastMsg.content;
} else if (lastMsg.role === 'system') {
lastMsg.content = '[INST] ' + lastMsg.content + ' [/INST]';
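
The Mistral backend rewrites a trailing system or assistant message before sending; the added lastMsg.name check means an unnamed assistant message is no longer prefixed with the literal string "undefined: ". A self-contained sketch of the rewrite with sample data (message shape taken from the diff; the surrounding role handling is not shown here):

// Illustration of the last-message rewrite with sample data.
function fixupLastMessage(messages) {
    const lastMsg = messages[messages.length - 1];
    if (messages.length > 0 && lastMsg && (lastMsg.role === 'system' || lastMsg.role === 'assistant')) {
        if (lastMsg.role === 'assistant' && lastMsg.name) {
            lastMsg.content = lastMsg.name + ': ' + lastMsg.content;
        } else if (lastMsg.role === 'system') {
            lastMsg.content = '[INST] ' + lastMsg.content + ' [/INST]';
        }
    }
    return messages;
}

// Example: an unnamed assistant message is now left untouched instead of gaining "undefined: ".
console.log(fixupLastMessage([{ role: 'assistant', content: 'Hello.' }]));
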