diff --git a/public/index.html b/public/index.html
index b4bc6fbec..35e234d89 100644
--- a/public/index.html
+++ b/public/index.html
@@ -1704,6 +1704,10 @@
                             Auto-scroll Chat
+
+
+                            Log prompts to console
+
                             Render Formulas
diff --git a/public/script.js b/public/script.js
index 90e5d221e..5a29de6af 100644
--- a/public/script.js
+++ b/public/script.js
@@ -2177,21 +2177,16 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
             generate_data = getKoboldGenerationData(finalPromt, this_settings, this_amount_gen, maxContext, isImpersonate);
         }
     }
-
-    if (main_api == 'textgenerationwebui') {
+    else if (main_api == 'textgenerationwebui') {
         generate_data = getTextGenGenerationData(finalPromt, this_amount_gen, isImpersonate);
     }
-
-    if (main_api == 'novel') {
+    else if (main_api == 'novel') {
         const this_settings = novelai_settings[novelai_setting_names[nai_settings.preset_settings_novel]];
         generate_data = getNovelGenerationData(finalPromt, this_settings);
     }
-
-    let generate_url = getGenerateUrl();
-    console.log('rungenerate calling API');
-
-    if (main_api == 'openai') {
+    else if (main_api == 'openai') {
         let [prompt, counts] = await prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldInfoAfter, afterScenarioAnchor, promptBias, type);
+        generate_data = { prompt : prompt };

         // counts will return false if the user has not enabled the token breakdown feature
         if (counts) {
@@ -2232,12 +2227,23 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
         }

         setInContextMessages(openai_messages_count, type);

+    } else if (main_api == 'poe') {
+        generate_data = { prompt : finalPromt };
+    }
+    if (power_user.console_log_prompts) {
+        console.log(generate_data.prompt);
+    }
+
+    let generate_url = getGenerateUrl();
+    console.log('rungenerate calling API');
+
+    if (main_api == 'openai') {
         if (isStreamingEnabled() && type !== 'quiet') {
-            streamingProcessor.generator = await sendOpenAIRequest(type, prompt, streamingProcessor.abortController.signal);
+            streamingProcessor.generator = await sendOpenAIRequest(type, generate_data.prompt, streamingProcessor.abortController.signal);
         }
         else {
-            sendOpenAIRequest(type, prompt).then(onSuccess).catch(onError);
+            sendOpenAIRequest(type, generate_data.prompt).then(onSuccess).catch(onError);
         }
     }
     else if (main_api == 'kobold' && horde_settings.use_horde) {
diff --git a/public/scripts/power-user.js b/public/scripts/power-user.js
index ed0c83664..d27453f12 100644
--- a/public/scripts/power-user.js
+++ b/public/scripts/power-user.js
@@ -110,6 +110,7 @@ let power_user = {
     auto_scroll_chat_to_bottom: true,
     auto_fix_generated_markdown: true,
     send_on_enter: send_on_enter_options.AUTO,
+    console_log_prompts: false,
     render_formulas: false,
     allow_name1_display: false,
     allow_name2_display: false,
@@ -477,6 +478,7 @@ function loadPowerUserSettings(settings, data) {
     power_user.font_scale = Number(localStorage.getItem(storage_keys.font_scale) ?? 1);
     power_user.blur_strength = Number(localStorage.getItem(storage_keys.blur_strength) ?? 10);
+    $("#console_log_prompts").prop("checked", power_user.console_log_prompts);
     $('#auto_fix_generated_markdown').prop("checked", power_user.auto_fix_generated_markdown);
     $('#auto_scroll_chat_to_bottom').prop("checked", power_user.auto_scroll_chat_to_bottom);
     $(`#tokenizer option[value="${power_user.tokenizer}"]`).attr('selected', true);
@@ -1021,6 +1023,11 @@ $(document).ready(() => {
         saveSettingsDebounced();
     });

+    $("#console_log_prompts").on('input', function () {
+        power_user.console_log_prompts = !!$(this).prop('checked');
+        saveSettingsDebounced();
+    });
+
     $('#auto_scroll_chat_to_bottom').on("input", function () {
         power_user.auto_scroll_chat_to_bottom = !!$(this).prop('checked');
         saveSettingsDebounced();
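Taken together, the script.js changes make every backend branch fill a single generate_data.prompt before anything is dispatched, which is why one gated console.log now covers Kobold, TextGen, NovelAI, OpenAI, and Poe alike. Below is a minimal, self-contained sketch of that pattern, not the actual SillyTavern code: main_api, power_user.console_log_prompts, and generate_data mirror names from the diff, while buildGenerateData and its stub branches are hypothetical stand-ins for the real get*GenerationData / prepareOpenAIMessages helpers.

// Sketch only: illustrates the "build generate_data first, log once, then dispatch" flow
// introduced by the diff. buildGenerateData is a hypothetical stand-in for the real helpers.
const power_user = { console_log_prompts: true }; // toggled by the new settings checkbox

function buildGenerateData(main_api, finalPrompt) {
    if (main_api === 'kobold') {
        return { prompt: finalPrompt /* plus Kobold sampler settings in the real code */ };
    }
    else if (main_api === 'openai') {
        return { prompt: finalPrompt /* prepared chat-completion messages in the real code */ };
    }
    else if (main_api === 'poe') {
        return { prompt: finalPrompt };
    }
    throw new Error(`Unknown API: ${main_api}`);
}

const generate_data = buildGenerateData('poe', 'You are a helpful assistant...');

// A single gated log covers every backend, because they all converge on generate_data.prompt.
if (power_user.console_log_prompts) {
    console.log(generate_data.prompt);
}

// ...after this point the real Generate() computes generate_url and sends the request.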