Merge branch 'SillyTavern:staging' into staging

Tony Ribeiro authored on 2023-08-23 18:52:10 +02:00; committed by GitHub
14 changed files with 193 additions and 142 deletions

View File

@@ -91,7 +91,7 @@
#top-settings-holder,
#top-bar {
position: fixed;
padding-top: 8px;
padding-top: 3px;
width: 100vw;
width: 100svw;
}
@@ -114,14 +114,14 @@
/* ,
#world_popup */
{
max-height: calc(100vh - 40px);
max-height: calc(100svh - 40px);
max-height: calc(100vh - 36px);
max-height: calc(100svh - 36px);
width: 100% !important;
margin: 0 auto;
max-width: 100%;
left: 0 !important;
resize: none !important;
top: 40px;
top: 36px;
}
.wi-settings {
@@ -179,7 +179,7 @@
border-right: 1px solid var(--grey30);
border-bottom: 1px solid var(--grey30);
border-radius: 0 0 20px 20px;
top: 40px !important;
top: 36px !important;
left: 0 !important;
backdrop-filter: blur(calc(var(--SmartThemeBlurStrength) * 2));
}
@@ -354,8 +354,8 @@
max-height: unset;
width: 100vw;
width: 100svw;
height: calc(100vh - 40px);
height: calc(100svh - 40px);
height: calc(100vh - 36px);
height: calc(100svh - 36px);
padding-right: max(env(safe-area-inset-right), 0px);
padding-left: max(env(safe-area-inset-left), 0px);
padding-bottom: 0;
@@ -396,7 +396,7 @@
#character_popup,
#world_popup,
.drawer-content {
margin-top: 40px;
margin-top: 36px;
}
.scrollableInner {

View File

@@ -349,7 +349,7 @@
}
.textarea_compact {
font-size: calc(var(--mainFontSize) * 0.9);
font-size: calc(var(--mainFontSize) * 0.95);
line-height: 1.2;
}

View File

@@ -165,6 +165,7 @@ import {
formatInstructModeExamples,
getInstructStoppingSequences,
autoSelectInstructPreset,
formatInstructModeSystemPrompt,
} from "./scripts/instruct-mode.js";
import { applyLocale } from "./scripts/i18n.js";
import { getTokenCount, getTokenizerModel, saveTokenCache } from "./scripts/tokenizers.js";
@@ -322,6 +323,7 @@ let generatedPromtCache = "";
let generation_started = new Date();
let characters = [];
let this_chid;
let saveCharactersPage = 0;
let backgrounds = [];
const default_avatar = "img/ai4.png";
export const system_avatar = "img/five.png";
@@ -893,6 +895,7 @@ async function printCharacters(fullRefresh = false) {
pageSize: Number(localStorage.getItem(storageKey)) || 50,
sizeChangerOptions: [10, 25, 50, 100, 250, 500, 1000],
pageRange: 1,
pageNumber: saveCharactersPage || 1,
position: 'top',
showPageNumbers: false,
showSizeChanger: true,
@@ -913,10 +916,14 @@ async function printCharacters(fullRefresh = false) {
},
afterSizeSelectorChange: function (e) {
localStorage.setItem(storageKey, e.target.value);
}
},
afterPaging: function (e) {
saveCharactersPage = e;
},
});
favsToHotswap();
saveCharactersPage = 0;
if (fullRefresh) {
printTagFilters(tag_filter_types.character);
@@ -2354,6 +2361,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
if (isInstruct) {
systemPrompt = power_user.prefer_character_prompt && systemPrompt ? systemPrompt : baseChatReplace(power_user.instruct.system_prompt, name1, name2);
systemPrompt = formatInstructModeSystemPrompt(substituteParams(systemPrompt, name1, name2, power_user.instruct.system_prompt));
}
// Parse example messages
@@ -2733,6 +2741,12 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
// Fetches the combined prompt for both negative and positive prompts
const cfgGuidanceScale = getGuidanceScale();
function getCombinedPrompt(isNegative) {
// Only return if the guidance scale doesn't exist or the value is 1
// Also don't return if constructing the neutral prompt
if (isNegative && (!cfgGuidanceScale || cfgGuidanceScale?.value === 1)) {
return;
}
let finalMesSend = [...mesSend];
let cfgPrompt = {};
if (cfgGuidanceScale && cfgGuidanceScale?.value !== 1) {
@@ -2746,7 +2760,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
? cfgPrompt.value
: ` ${cfgPrompt.value}`;
} else {
// TODO: Switch from splice method to insertion depth method
// TODO: Make all extension prompts use an array/splice method
finalMesSend.splice(mesSend.length - cfgPrompt.depth, 0, `${cfgPrompt.value}\n`);
}
}
@@ -2809,12 +2823,9 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
let finalPromt = getCombinedPrompt(false);
// Include the entire guidance scale object
const cfgValues = {
guidanceScale: cfgGuidanceScale,
negativePrompt: negativePrompt
};
const cfgValues = cfgGuidanceScale && cfgGuidanceScale?.value !== 1 ? ({guidanceScale: cfgGuidanceScale, negativePrompt: negativePrompt }) : null;
let this_amount_gen = parseInt(amount_gen); // how many tokens the AI will be requested to generate
let this_amount_gen = Number(amount_gen); // how many tokens the AI will be requested to generate
let this_settings = koboldai_settings[koboldai_setting_names[preset_settings]];
if (isMultigenEnabled() && type !== 'quiet') {
@@ -2991,8 +3002,6 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
return;
}
hideStopButton();
is_send_press = false;
if (!data.error) {
//const getData = await response.json();
let getMessage = extractMessageFromData(data);
@@ -3113,6 +3122,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
}
if (generatedTextFiltered(getMessage)) {
console.debug('swiping right automatically');
is_send_press = false;
swipe_right();
return
}
@@ -3132,7 +3142,9 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
}
console.debug('/savechat called by /Generate');
saveChatConditional();
await saveChatConditional();
is_send_press = false;
hideStopButton();
activateSendButtons();
showSwipeButtons();
setGenerationProgress(0);
@@ -6113,7 +6125,7 @@ async function deleteMessageImage() {
delete message.extra.inline_image;
mesBlock.find('.mes_img_container').removeClass('img_extra');
mesBlock.find('.mes_img').attr('src', '');
saveChatConditional();
await saveChatConditional();
}
function enlargeMessageImage() {
@@ -7908,7 +7920,7 @@ $(document).ready(function () {
});
//confirms message deletion with the "ok" button
$("#dialogue_del_mes_ok").click(function () {
$("#dialogue_del_mes_ok").click(async function () {
$("#dialogue_del_mes").css("display", "none");
$("#send_form").css("display", css_send_form_display);
$(".del_checkbox").each(function () {
@@ -7924,7 +7936,7 @@ $(document).ready(function () {
$(".mes[mesid='" + this_del_mes + "']").remove();
chat.length = this_del_mes;
count_view_mes = this_del_mes;
saveChatConditional();
await saveChatConditional();
var $textchat = $("#chat");
$textchat.scrollTop($textchat[0].scrollHeight);
eventSource.emit(event_types.MESSAGE_DELETED, chat.length);
@@ -8199,7 +8211,7 @@ $(document).ready(function () {
this_edit_mes_id = undefined;
});
$(document).on("click", ".mes_edit_up", function () {
$(document).on("click", ".mes_edit_up", async function () {
if (is_send_press || this_edit_mes_id <= 0) {
return;
}
@@ -8224,11 +8236,11 @@ $(document).ready(function () {
this_edit_mes_id = targetId;
updateViewMessageIds();
saveChatConditional();
await saveChatConditional();
showSwipeButtons();
});
$(document).on("click", ".mes_edit_down", function () {
$(document).on("click", ".mes_edit_down", async function () {
if (is_send_press || this_edit_mes_id >= chat.length - 1) {
return;
}
@@ -8253,7 +8265,7 @@ $(document).ready(function () {
this_edit_mes_id = targetId;
updateViewMessageIds();
saveChatConditional();
await saveChatConditional();
showSwipeButtons();
});
@@ -8277,14 +8289,14 @@ $(document).ready(function () {
addOneMessage(clone, { insertAfter: this_edit_mes_id });
updateViewMessageIds();
saveChatConditional();
await saveChatConditional();
$('#chat')[0].scrollTop = oldScroll;
showSwipeButtons();
});
$(document).on("click", ".mes_edit_delete", async function (event, customData) {
const fromSlashCommand = customData?.fromSlashCommand || false;
const swipeExists = (!chat[this_edit_mes_id].swipes || chat[this_edit_mes_id].swipes.length <= 1 || chat.is_user || parseInt(this_edit_mes_id) !== chat.length - 1);
const swipeExists = (!Array.isArray(chat[this_edit_mes_id].swipes) || chat[this_edit_mes_id].swipes.length <= 1 || chat[this_edit_mes_id].is_user || parseInt(this_edit_mes_id) !== chat.length - 1);
if (power_user.confirm_message_delete && fromSlashCommand !== true) {
const confirmation = swipeExists ? await callPopup("Are you sure you want to delete this message?", 'confirm')
: await callPopup("<h3>Delete this...</h3> <select id='del_type'><option value='swipe'>Swipe</option><option value='message'>Message</option></select>", 'confirm')
@@ -8316,7 +8328,7 @@ $(document).ready(function () {
this_edit_mes_id = undefined;
updateViewMessageIds();
saveChatConditional();
await saveChatConditional();
eventSource.emit(event_types.MESSAGE_DELETED, count_view_mes);
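Note on the edit/delete handlers above: they are now declared async so that saveChatConditional() can be awaited before the follow-up work (updating message ids, scrolling, emitting MESSAGE_DELETED, re-enabling buttons) runs against a chat that has not been written yet. A minimal sketch of the before/after pattern, with the handler body trimmed to the relevant lines:

    // Before: the save is fired and forgotten, so the event can race the write
    $("#dialogue_del_mes_ok").click(function () {
        saveChatConditional();
        eventSource.emit(event_types.MESSAGE_DELETED, chat.length);
    });

    // After: the handler is async and waits for the save to complete first
    $("#dialogue_del_mes_ok").click(async function () {
        await saveChatConditional();
        eventSource.emit(event_types.MESSAGE_DELETED, chat.length);
    });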

View File

@@ -521,7 +521,7 @@ export function dragElement(elmnt) {
}
//prevent resizing from top left into the top bar
if (top < 40 && maxX >= topBarFirstX && left <= topBarFirstX
if (top < 35 && maxX >= topBarFirstX && left <= topBarFirstX
) {
console.debug('prevent topbar underlap resize')
elmnt.css('width', width - 1 + "px");
@@ -556,7 +556,7 @@ export function dragElement(elmnt) {
}
//prevent underlap with topbar div
if (top < 40
if (top < 35
&& (maxX >= topBarFirstX && left <= topBarFirstX //elmnt is hitting topbar from left side
|| left <= topBarLastX && maxX >= topBarLastX //elmnt is hitting topbar from right side
|| left >= topBarFirstX && maxX <= topBarLastX) //elmnt hitting topbar in the middle
@@ -841,7 +841,7 @@ jQuery(async function () {
//this makes the chat input text area resize vertically to match the text size (limited by CSS at 50% window height)
$('#send_textarea').on('input', function () {
this.style.height = '40px';
this.style.height = '30px';
this.style.height = (this.scrollHeight) + 'px';
});

View File

@@ -348,7 +348,7 @@ jQuery(async () => {
.filter(":checked")
.map(function() { return parseInt($(this).val()) })
.get()
.filter((e) => e !== NaN) || [];
.filter((e) => !Number.isNaN(e)) || [];
chat_metadata[metadataKeys.prompt_combine] = values;
saveMetadataDebounced();
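The filter change above matters because NaN is the only JavaScript value that does not compare equal to itself, so the old predicate e !== NaN is always true and never removes anything; Number.isNaN() is the reliable check. For illustration:

    NaN === NaN;                               // false: NaN never compares equal, even to itself
    [1, NaN].filter((e) => e !== NaN);         // [1, NaN]; the old predicate removes nothing
    [1, NaN].filter((e) => !Number.isNaN(e));  // [1]; the new predicate drops NaN entries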

View File

@@ -39,10 +39,12 @@ export function getGuidanceScale() {
};
}
if (extension_settings.cfg.global && extension_settings.cfg.global?.guidance_scale !== 1) {
return {
type: cfgType.global,
value: extension_settings.cfg.global.guidance_scale
};
}
}
// Gets the CFG prompt

View File

@@ -217,6 +217,10 @@ async function translateProviderDeepl(text, lang) {
async function translate(text, lang) {
try {
if (text == '') {
return '';
}
switch (extension_settings.translate.provider) {
case 'google':
return await translateProviderGoogle(text, lang);

View File

@@ -189,6 +189,20 @@ export function formatInstructModeChat(name, mes, isUser, isNarrator, forceAvata
return text;
}
/**
* Formats instruct mode system prompt.
* @param {string} systemPrompt System prompt string.
* @returns {string} Formatted instruct mode system prompt.
*/
export function formatInstructModeSystemPrompt(systemPrompt){
if (power_user.instruct.system_sequence) {
const separator = power_user.instruct.wrap ? '\n' : '';
return power_user.instruct.system_sequence + separator + systemPrompt;
}
return systemPrompt;
}
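For reference, the new formatInstructModeSystemPrompt helper only prepends the configured system sequence (plus a newline when wrapping is enabled); a quick illustration with hypothetical instruct settings:

    // Assuming power_user.instruct = { system_sequence: '### System:', wrap: true }
    formatInstructModeSystemPrompt('Write the next reply in the story.');
    // -> '### System:\nWrite the next reply in the story.'
    // With wrap disabled the separator is '', and with no system_sequence set
    // the prompt is returned unchanged.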
/**
* Formats example messages according to instruct mode settings.
* @param {string} mesExamples Example messages string.

View File

@@ -128,8 +128,8 @@ function loadNovelPreset(preset) {
function loadNovelSettings(settings) {
//load the rest of the Novel settings without any checks
nai_settings.model_novel = settings.model_novel;
$(`#model_novel_select option[value=${nai_settings.model_novel}]`).attr("selected", true);
$('#model_novel_select').val(nai_settings.model_novel);
$(`#model_novel_select option[value=${nai_settings.model_novel}]`).attr("selected", true);
if (settings.nai_preamble !== undefined) {
nai_settings.preamble = settings.nai_preamble;
@@ -396,7 +396,7 @@ function getBadWordPermutations(text) {
}
export function getNovelGenerationData(finalPrompt, this_settings, this_amount_gen, isImpersonate, cfgValues) {
if (cfgValues.guidanceScale && cfgValues.guidanceScale?.value !== 1) {
if (cfgValues && cfgValues.guidanceScale && cfgValues.guidanceScale?.value !== 1) {
cfgValues.negativePrompt = (getCfgPrompt(cfgValues.guidanceScale, true))?.value;
}
@@ -425,22 +425,22 @@ export function getNovelGenerationData(finalPrompt, this_settings, this_amount_g
"input": finalPrompt,
"model": nai_settings.model_novel,
"use_string": true,
"temperature": parseFloat(nai_settings.temperature),
"temperature": Number(nai_settings.temperature),
"max_length": this_amount_gen < maximum_output_length ? this_amount_gen : maximum_output_length,
"min_length": parseInt(nai_settings.min_length),
"tail_free_sampling": parseFloat(nai_settings.tail_free_sampling),
"repetition_penalty": parseFloat(nai_settings.repetition_penalty),
"repetition_penalty_range": parseInt(nai_settings.repetition_penalty_range),
"repetition_penalty_slope": parseFloat(nai_settings.repetition_penalty_slope),
"repetition_penalty_frequency": parseFloat(nai_settings.repetition_penalty_frequency),
"repetition_penalty_presence": parseFloat(nai_settings.repetition_penalty_presence),
"top_a": parseFloat(nai_settings.top_a),
"top_p": parseFloat(nai_settings.top_p),
"top_k": parseInt(nai_settings.top_k),
"typical_p": parseFloat(nai_settings.typical_p),
"mirostat_lr": parseFloat(nai_settings.mirostat_lr),
"mirostat_tau": parseFloat(nai_settings.mirostat_tau),
"cfg_scale": cfgValues?.guidanceScale?.value ?? parseFloat(nai_settings.cfg_scale),
"min_length": Number(nai_settings.min_length),
"tail_free_sampling": Number(nai_settings.tail_free_sampling),
"repetition_penalty": Number(nai_settings.repetition_penalty),
"repetition_penalty_range": Number(nai_settings.repetition_penalty_range),
"repetition_penalty_slope": Number(nai_settings.repetition_penalty_slope),
"repetition_penalty_frequency": Number(nai_settings.repetition_penalty_frequency),
"repetition_penalty_presence": Number(nai_settings.repetition_penalty_presence),
"top_a": Number(nai_settings.top_a),
"top_p": Number(nai_settings.top_p),
"top_k": Number(nai_settings.top_k),
"typical_p": Number(nai_settings.typical_p),
"mirostat_lr": Number(nai_settings.mirostat_lr),
"mirostat_tau": Number(nai_settings.mirostat_tau),
"cfg_scale": cfgValues?.guidanceScale?.value ?? Number(nai_settings.cfg_scale),
"cfg_uc": cfgValues?.negativePrompt ?? nai_settings.cfg_uc ?? "",
"phrase_rep_pen": nai_settings.phrase_rep_pen,
"stop_sequences": stopSequences,
@@ -448,7 +448,6 @@ export function getNovelGenerationData(finalPrompt, this_settings, this_amount_g
"logit_bias_exp": logitBias,
"generate_until_sentence": true,
"use_cache": false,
"use_string": true,
"return_full_text": false,
"prefix": prefix,
"order": nai_settings.order || this_settings.order || default_order,
@@ -640,7 +639,7 @@ export async function generateNovelWithStreaming(generate_data, signal) {
}
$("#nai_preamble_textarea").on('input', function () {
nai_settings.preamble = $('#nai_preamble_textarea').val();
nai_settings.preamble = String($('#nai_preamble_textarea').val());
saveSettingsDebounced();
});
@@ -668,7 +667,7 @@ jQuery(function () {
});
$("#model_novel_select").change(function () {
nai_settings.model_novel = $("#model_novel_select").find(":selected").val();
nai_settings.model_novel = String($("#model_novel_select").find(":selected").val());
saveSettingsDebounced();
// Update the selected preset to something appropriate
@@ -679,12 +678,12 @@ jQuery(function () {
});
$("#nai_prefix").on('change', function () {
nai_settings.prefix = $("#nai_prefix").find(":selected").val();
nai_settings.prefix = String($("#nai_prefix").find(":selected").val());
saveSettingsDebounced();
});
$("#phrase_rep_pen_novel").on('change', function () {
nai_settings.phrase_rep_pen = $("#phrase_rep_pen_novel").find(":selected").val();
nai_settings.phrase_rep_pen = String($("#phrase_rep_pen_novel").find(":selected").val());
saveSettingsDebounced();
});
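A note on the parseInt/parseFloat to Number swaps in this file (and the matching ones elsewhere in the commit): Number() converts the whole string or yields NaN, rather than parsing a leading numeric prefix, and it does not truncate fractional values the way parseInt does. Illustrative cases:

    Number('2.75');       // 2.75
    parseInt('2.75');     // 2, parseInt truncates to an integer
    Number('0.1abc');     // NaN, Number rejects trailing garbage outright
    parseFloat('0.1abc'); // 0.1, parseFloat silently accepts the leading prefix
    Number('');           // 0, note that empty input becomes 0, not NaN

Related: cfgValues is now null whenever CFG is inactive (no guidance scale or a value of 1), which is why getNovelGenerationData above guards with cfgValues && cfgValues.guidanceScale before dereferencing it.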

View File

@@ -165,6 +165,7 @@ const default_settings = {
new_group_chat_prompt: default_new_group_chat_prompt,
new_example_chat_prompt: default_new_example_chat_prompt,
continue_nudge_prompt: default_continue_nudge_prompt,
nsfw_avoidance_prompt: default_nsfw_avoidance_prompt,
bias_preset_selected: default_bias,
bias_presets: default_bias_presets,
wi_format: default_wi_format,
@@ -228,6 +229,7 @@ const oai_settings = {
use_ai21_tokenizer: false,
exclude_assistant: false,
use_alt_scale: false,
nsfw_avoidance_prompt: default_nsfw_avoidance_prompt,
};
let openai_setting_names;
@@ -772,6 +774,7 @@ function preparePromptsForChatCompletion(Scenario, charPersonality, name2, world
* @param {string} content.bias - The bias to be added in the conversation.
* @param {string} content.type - The type of the chat, can be 'impersonate'.
* @param {string} content.quietPrompt - The quiet prompt to be used in the conversation.
* @param {string} content.cyclePrompt - The last prompt used for chat message continuation.
* @param {Array} content.extensionPrompts - An array of additional prompts.
* @param dryRun - Whether this is a live call or not.
* @returns {(*[]|boolean)[]} An array where the first element is the prepared chat and the second element is a boolean flag.
@@ -882,7 +885,7 @@ async function sendWindowAIRequest(openai_msgs_tosend, signal, stream) {
let finished = false;
const currentModel = await window.ai.getCurrentModel();
let temperature = parseFloat(oai_settings.temp_openai);
let temperature = Number(oai_settings.temp_openai);
if ((currentModel.includes('claude') || currentModel.includes('palm-2')) && temperature > claude_max_temp) {
console.warn(`Claude and PaLM models only supports temperature up to ${claude_max_temp}. Clamping ${temperature} to ${claude_max_temp}.`);
@@ -1014,7 +1017,7 @@ function saveModelList(data) {
$('#model_openrouter_select').empty();
$('#model_openrouter_select').append($('<option>', { value: openrouter_website_model, text: 'Use OpenRouter website setting' }));
model_list.forEach((model) => {
let tokens_dollar = parseFloat(1 / (1000 * model.pricing.prompt));
let tokens_dollar = Number(1 / (1000 * model.pricing.prompt));
let tokens_rounded = (Math.round(tokens_dollar * 1000) / 1000).toFixed(0);
let model_description = `${model.id} | ${tokens_rounded}k t/$ | ${model.context_length} ctx`;
$('#model_openrouter_select').append(
@@ -1067,9 +1070,9 @@ async function sendAltScaleRequest(openai_msgs_tosend, logit_bias, signal) {
const generate_data = {
sysprompt: joinedSysMsgs,
prompt: openai_msgs_tosend,
temp: parseFloat(oai_settings.temp_openai),
top_p: parseFloat(oai_settings.top_p_openai),
max_tokens: parseFloat(oai_settings.openai_max_tokens),
temp: Number(oai_settings.temp_openai),
top_p: Number(oai_settings.top_p_openai),
max_tokens: Number(oai_settings.openai_max_tokens),
logit_bias: logit_bias,
}
@@ -1137,10 +1140,10 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
const generate_data = {
"messages": openai_msgs_tosend,
"model": model,
"temperature": parseFloat(oai_settings.temp_openai),
"frequency_penalty": parseFloat(oai_settings.freq_pen_openai),
"presence_penalty": parseFloat(oai_settings.pres_pen_openai),
"top_p": parseFloat(oai_settings.top_p_openai),
"temperature": Number(oai_settings.temp_openai),
"frequency_penalty": Number(oai_settings.freq_pen_openai),
"presence_penalty": Number(oai_settings.pres_pen_openai),
"top_p": Number(oai_settings.top_p_openai),
"max_tokens": oai_settings.openai_max_tokens,
"stream": stream,
"logit_bias": logit_bias,
@@ -1155,7 +1158,7 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
if (isClaude) {
generate_data['use_claude'] = true;
generate_data['top_k'] = parseFloat(oai_settings.top_k_openai);
generate_data['top_k'] = Number(oai_settings.top_k_openai);
generate_data['exclude_assistant'] = oai_settings.exclude_assistant;
// Don't add a prefill on quiet gens (summarization)
if (!isQuiet && !oai_settings.exclude_assistant) {
@@ -1165,7 +1168,7 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
if (isOpenRouter) {
generate_data['use_openrouter'] = true;
generate_data['top_k'] = parseFloat(oai_settings.top_k_openai);
generate_data['top_k'] = Number(oai_settings.top_k_openai);
}
if (isScale) {
@@ -1175,8 +1178,8 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
if (isAI21) {
generate_data['use_ai21'] = true;
generate_data['top_k'] = parseFloat(oai_settings.top_k_openai);
generate_data['count_pen'] = parseFloat(oai_settings.count_pen);
generate_data['top_k'] = Number(oai_settings.top_k_openai);
generate_data['count_pen'] = Number(oai_settings.count_pen);
generate_data['stop_tokens'] = [name1 + ':', oai_settings.new_chat_prompt, oai_settings.new_group_chat_prompt];
}
@@ -1644,6 +1647,11 @@ class ChatCompletion {
const index = this.findMessageIndex(identifier);
const message = this.messages.collection[index].collection.pop();
if (!message) {
this.log(`No message to remove from ${identifier}`);
return;
}
this.increaseTokenBudgetBy(message.getTokens());
this.log(`Removed ${message.identifier} from ${identifier}. Remaining tokens: ${this.tokenBudget}`);
@@ -1860,7 +1868,6 @@ function loadOpenAISettings(data, settings) {
oai_settings.new_example_chat_prompt = settings.new_example_chat_prompt ?? default_settings.new_example_chat_prompt;
oai_settings.continue_nudge_prompt = settings.continue_nudge_prompt ?? default_settings.continue_nudge_prompt;
if (settings.keep_example_dialogue !== undefined) oai_settings.keep_example_dialogue = !!settings.keep_example_dialogue;
if (settings.wrap_in_quotes !== undefined) oai_settings.wrap_in_quotes = !!settings.wrap_in_quotes;
if (settings.names_in_completion !== undefined) oai_settings.names_in_completion = !!settings.names_in_completion;
if (settings.openai_model !== undefined) oai_settings.openai_model = settings.openai_model;
@@ -1886,11 +1893,8 @@ function loadOpenAISettings(data, settings) {
$('#openai_max_tokens').val(oai_settings.openai_max_tokens);
$('#nsfw_toggle').prop('checked', oai_settings.nsfw_toggle);
$('#keep_example_dialogue').prop('checked', oai_settings.keep_example_dialogue);
$('#wrap_in_quotes').prop('checked', oai_settings.wrap_in_quotes);
$('#names_in_completion').prop('checked', oai_settings.names_in_completion);
$('#nsfw_first').prop('checked', oai_settings.nsfw_first);
$('#jailbreak_system').prop('checked', oai_settings.jailbreak_system);
$('#legacy_streaming').prop('checked', oai_settings.legacy_streaming);
$('#openai_show_external_models').prop('checked', oai_settings.show_external_models);
@@ -2130,7 +2134,7 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
}
function onLogitBiasPresetChange() {
const value = $('#openai_logit_bias_preset').find(':selected').val();
const value = String($('#openai_logit_bias_preset').find(':selected').val());
const preset = oai_settings.bias_presets[value];
if (!Array.isArray(preset)) {
@@ -2164,20 +2168,33 @@ function createLogitBiasListItem(entry) {
const template = $('#openai_logit_bias_template .openai_logit_bias_form').clone();
template.data('id', id);
template.find('.openai_logit_bias_text').val(entry.text).on('input', function () {
oai_settings.bias_presets[oai_settings.bias_preset_selected][id].text = $(this).val();
oai_settings.bias_presets[oai_settings.bias_preset_selected][id].text = String($(this).val());
biasCache = undefined;
saveSettingsDebounced();
});
template.find('.openai_logit_bias_value').val(entry.value).on('input', function () {
oai_settings.bias_presets[oai_settings.bias_preset_selected][id].value = Number($(this).val());
const min = Number($(this).attr('min'));
const max = Number($(this).attr('max'));
let value = Number($(this).val());
if (value < min) {
$(this).val(min);
value = min;
}
if (value > max) {
$(this).val(max);
value = max;
}
oai_settings.bias_presets[oai_settings.bias_preset_selected][id].value = value;
biasCache = undefined;
saveSettingsDebounced();
});
template.find('.openai_logit_bias_remove').on('click', function () {
$(this).closest('.openai_logit_bias_form').remove();
oai_settings.bias_presets[oai_settings.bias_preset_selected][id] = undefined;
biasCache = undefined;
saveSettingsDebounced();
oai_settings.bias_presets[oai_settings.bias_preset_selected].splice(id, 1);
onLogitBiasPresetChange();
});
$('.openai_logit_bias_list').prepend(template);
}
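Two behaviors worth noting in the logit-bias handlers above: the value input is clamped to its min/max attributes before being stored, and removing an entry now splices it out of the preset array and re-renders the list instead of leaving an undefined hole at that index. The clamp is equivalent to the usual one-liner (illustrative only):

    // Same effect as the explicit if/if clamp in the handler above
    const value = Math.min(max, Math.max(min, Number($(this).val())));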
@@ -2312,18 +2329,17 @@ async function onLogitBiasPresetImportFileChange(e) {
return;
}
const validEntries = [];
for (const entry of importedFile) {
if (typeof entry == 'object') {
if (typeof entry == 'object' && entry !== null) {
if (entry.hasOwnProperty('text') && entry.hasOwnProperty('value')) {
continue;
validEntries.push(entry);
}
}
}
callPopup('Invalid logit bias preset file.', 'text');
return;
}
oai_settings.bias_presets[name] = importedFile;
oai_settings.bias_presets[name] = validEntries;
oai_settings.bias_preset_selected = name;
addLogitBiasPresetOption(name);
@@ -2496,7 +2512,6 @@ function getMaxContextOpenAI(value) {
}
}
function getMaxContextWindowAI(value) {
if (oai_settings.max_context_unlocked) {
return unlocked_max;
@@ -2532,7 +2547,8 @@ function getMaxContextWindowAI(value) {
}
async function onModelChange() {
let value = $(this).val();
biasCache = undefined;
let value = String($(this).val());
if ($(this).is('#model_claude_select')) {
console.log('Claude model changed to', value);
@@ -2700,7 +2716,7 @@ async function onNewPresetClick() {
}
function onReverseProxyInput() {
oai_settings.reverse_proxy = $(this).val();
oai_settings.reverse_proxy = String($(this).val());
$(".reverse_proxy_warning").toggle(oai_settings.reverse_proxy != '');
saveSettingsDebounced();
}
@@ -2716,7 +2732,7 @@ async function onConnectButtonClick(e) {
}
if (oai_settings.chat_completion_source == chat_completion_sources.OPENROUTER) {
const api_key_openrouter = $('#api_key_openrouter').val().trim();
const api_key_openrouter = String($('#api_key_openrouter').val()).trim();
if (api_key_openrouter.length) {
await writeSecret(SECRET_KEYS.OPENROUTER, api_key_openrouter);
@@ -2729,8 +2745,8 @@ async function onConnectButtonClick(e) {
}
if (oai_settings.chat_completion_source == chat_completion_sources.SCALE) {
const api_key_scale = $('#api_key_scale').val().trim();
const scale_cookie = $('#scale_cookie').val().trim();
const api_key_scale = String($('#api_key_scale').val()).trim();
const scale_cookie = String($('#scale_cookie').val()).trim();
if (api_key_scale.length) {
await writeSecret(SECRET_KEYS.SCALE, api_key_scale);
@@ -2758,7 +2774,7 @@ async function onConnectButtonClick(e) {
}
if (oai_settings.chat_completion_source == chat_completion_sources.CLAUDE) {
const api_key_claude = $('#api_key_claude').val().trim();
const api_key_claude = String($('#api_key_claude').val()).trim();
if (api_key_claude.length) {
await writeSecret(SECRET_KEYS.CLAUDE, api_key_claude);
@@ -2771,7 +2787,7 @@ async function onConnectButtonClick(e) {
}
if (oai_settings.chat_completion_source == chat_completion_sources.OPENAI) {
const api_key_openai = $('#api_key_openai').val().trim();
const api_key_openai = String($('#api_key_openai').val()).trim();
if (api_key_openai.length) {
await writeSecret(SECRET_KEYS.OPENAI, api_key_openai);
@@ -2784,7 +2800,7 @@ async function onConnectButtonClick(e) {
}
if (oai_settings.chat_completion_source == chat_completion_sources.AI21) {
const api_key_ai21 = $('#api_key_ai21').val().trim();
const api_key_ai21 = String($('#api_key_ai21').val()).trim();
if (api_key_ai21.length) {
await writeSecret(SECRET_KEYS.AI21, api_key_ai21);
@@ -2924,14 +2940,14 @@ $(document).ready(async function () {
});
$(document).on('input', '#openai_max_context', function () {
oai_settings.openai_max_context = parseInt($(this).val());
oai_settings.openai_max_context = Number($(this).val());
$('#openai_max_context_counter').text(`${$(this).val()}`);
calculateOpenRouterCost();
saveSettingsDebounced();
});
$(document).on('input', '#openai_max_tokens', function () {
oai_settings.openai_max_tokens = parseInt($(this).val());
oai_settings.openai_max_tokens = Number($(this).val());
calculateOpenRouterCost();
saveSettingsDebounced();
});
@@ -2966,42 +2982,42 @@ $(document).ready(async function () {
});
$("#send_if_empty_textarea").on('input', function () {
oai_settings.send_if_empty = $('#send_if_empty_textarea').val();
oai_settings.send_if_empty = String($('#send_if_empty_textarea').val());
saveSettingsDebounced();
});
$("#impersonation_prompt_textarea").on('input', function () {
oai_settings.impersonation_prompt = $('#impersonation_prompt_textarea').val();
oai_settings.impersonation_prompt = String($('#impersonation_prompt_textarea').val());
saveSettingsDebounced();
});
$("#newchat_prompt_textarea").on('input', function () {
oai_settings.new_chat_prompt = $('#newchat_prompt_textarea').val();
oai_settings.new_chat_prompt = String($('#newchat_prompt_textarea').val());
saveSettingsDebounced();
});
$("#newgroupchat_prompt_textarea").on('input', function () {
oai_settings.new_group_chat_prompt = $('#newgroupchat_prompt_textarea').val();
oai_settings.new_group_chat_prompt = String($('#newgroupchat_prompt_textarea').val());
saveSettingsDebounced();
});
$("#newexamplechat_prompt_textarea").on('input', function () {
oai_settings.new_example_chat_prompt = $('#newexamplechat_prompt_textarea').val();
oai_settings.new_example_chat_prompt = String($('#newexamplechat_prompt_textarea').val());
saveSettingsDebounced();
});
$("#continue_nudge_prompt_textarea").on('input', function () {
oai_settings.continue_nudge_prompt = $('#continue_nudge_prompt_textarea').val();
oai_settings.continue_nudge_prompt = String($('#continue_nudge_prompt_textarea').val());
saveSettingsDebounced();
});
$("#nsfw_avoidance_prompt_textarea").on('input', function () {
oai_settings.nsfw_avoidance_prompt = $('#nsfw_avoidance_prompt_textarea').val();
oai_settings.nsfw_avoidance_prompt = String($('#nsfw_avoidance_prompt_textarea').val());
saveSettingsDebounced();
});
$("#wi_format_textarea").on('input', function () {
oai_settings.wi_format = $('#wi_format_textarea').val();
oai_settings.wi_format = String($('#wi_format_textarea').val());
saveSettingsDebounced();
});
@@ -3082,7 +3098,7 @@ $(document).ready(async function () {
});
$('#chat_completion_source').on('change', function () {
oai_settings.chat_completion_source = $(this).find(":selected").val();
oai_settings.chat_completion_source = String($(this).find(":selected").val());
toggleChatCompletionForms();
saveSettingsDebounced();
@@ -3100,7 +3116,7 @@ $(document).ready(async function () {
});
$('#api_url_scale').on('input', function () {
oai_settings.api_url_scale = $(this).val();
oai_settings.api_url_scale = String($(this).val());
saveSettingsDebounced();
});
@@ -3111,12 +3127,12 @@ $(document).ready(async function () {
});
$('#openai_proxy_password').on('input', function () {
oai_settings.proxy_password = $(this).val();
oai_settings.proxy_password = String($(this).val());
saveSettingsDebounced();
});
$('#claude_assistant_prefill').on('input', function () {
oai_settings.assistant_prefill = $(this).val();
oai_settings.assistant_prefill = String($(this).val());
saveSettingsDebounced();
});

View File

@@ -282,11 +282,11 @@ function setNameCallback(_, name) {
setUserName(name); //this prevented quickReply usage
}
function setNarratorName(_, text) {
async function setNarratorName(_, text) {
const name = text || NARRATOR_NAME_DEFAULT;
chat_metadata[NARRATOR_NAME_KEY] = name;
toastr.info(`System narrator name set to ${name}`);
saveChatConditional();
await saveChatConditional();
}
async function sendMessageAs(_, text) {
@@ -341,7 +341,7 @@ async function sendMessageAs(_, text) {
await eventSource.emit(event_types.MESSAGE_SENT, (chat.length - 1));
addOneMessage(message);
await eventSource.emit(event_types.USER_MESSAGE_RENDERED, (chat.length - 1));
saveChatConditional();
await saveChatConditional();
}
async function sendNarratorMessage(_, text) {
@@ -373,7 +373,7 @@ async function sendNarratorMessage(_, text) {
await eventSource.emit(event_types.MESSAGE_SENT, (chat.length - 1));
addOneMessage(message);
await eventSource.emit(event_types.USER_MESSAGE_RENDERED, (chat.length - 1));
saveChatConditional();
await saveChatConditional();
}
async function sendCommentMessage(_, text) {
@@ -399,7 +399,7 @@ async function sendCommentMessage(_, text) {
await eventSource.emit(event_types.MESSAGE_SENT, (chat.length - 1));
addOneMessage(message);
await eventSource.emit(event_types.USER_MESSAGE_RENDERED, (chat.length - 1));
saveChatConditional();
await saveChatConditional();
}
function helpCommandCallback(_, type) {

View File

@@ -233,9 +233,9 @@ async function generateTextGenWithStreaming(generate_data, signal) {
}
}
export function getTextGenGenerationData(finalPromt, this_amount_gen, isImpersonate, cfgValues) {
export function getTextGenGenerationData(finalPrompt, this_amount_gen, isImpersonate, cfgValues) {
return {
'prompt': finalPromt,
'prompt': finalPrompt,
'max_new_tokens': this_amount_gen,
'do_sample': textgenerationwebui_settings.do_sample,
'temperature': textgenerationwebui_settings.temp,

View File

@@ -134,7 +134,7 @@ export function getTokenCount(str, padding = undefined) {
const cacheObject = getTokenCacheObject();
const hash = getStringHash(str);
const cacheKey = `${tokenizerType}-${hash}`;
const cacheKey = `${tokenizerType}-${hash}+${padding}`;
if (typeof cacheObject[cacheKey] === 'number') {
return cacheObject[cacheKey];
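The cache-key change above folds the padding argument into the key; previously two calls for the same string with different padding values would collide on a single cached count. Roughly (tokenizer type and hash are hypothetical):

    // Before: getTokenCount(str, 0) and getTokenCount(str, 64) share one entry
    //   cacheKey = 'gpt2-81234'
    // After: each (text, padding) pair gets its own entry
    //   cacheKey = 'gpt2-81234+0'  vs  'gpt2-81234+64'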

View File

@@ -202,6 +202,10 @@ table.responsiveTable {
color: var(--white50a);
}
.mes[is_system="true"] .mes_text br {
display: none;
}
.mes_text table {
border-spacing: 0;
border-collapse: collapse;
@@ -373,7 +377,7 @@ hr {
left: 0;
right: 0;
display: inline-block;
height: 40px;
height: 35px;
position: absolute;
border-bottom: 1px solid var(--grey30a);
box-shadow: 0 2px 20px 0 var(--black70a);
@@ -386,14 +390,14 @@ hr {
#sheld {
display: grid;
grid-template-rows: auto min-content;
height: calc(100vh - 42px);
height: calc(100svh - 42px);
height: calc(100vh - 36px);
height: calc(100svh - 36px);
overflow-x: hidden;
/* max-width: 50vw; */
position: absolute;
left: calc((100vw - var(--sheldWidth))/2);
left: calc((100svw - var(--sheldWidth))/2);
top: 41px;
top: 36px;
margin: 0 auto;
left: 0;
right: 0;
@@ -509,19 +513,19 @@ hr {
#send_but_sheld {
padding: 0;
border: 0;
height: 40px;
height: 30px;
position: relative;
background-position: center;
display: flex;
flex-direction: row;
column-gap: 5px;
font-size: 30px;
font-size: 25px;
overflow: hidden;
}
#send_but_sheld>div {
width: 40px;
height: 40px;
width: 30px;
height: 30px;
margin: 0;
outline: none;
border: none;
@@ -549,8 +553,8 @@ hr {
}
#options_button {
width: 40px;
height: 40px;
width: 30px;
height: 30px;
margin: 0;
outline: none;
border: none;
@@ -558,10 +562,10 @@ hr {
opacity: 0.7;
cursor: pointer;
z-index: 2001;
padding-left: 10px;
padding-top: 0;
margin-left: 10px;
padding: 0;
transition: 0.3s;
font-size: 30px;
font-size: 25px;
display: flex;
align-items: center;
}
@@ -872,17 +876,17 @@ select {
}
#send_textarea {
min-height: 40px;
min-height: 30px;
max-height: 50vh;
max-height: 50svh;
word-wrap: break-word;
height: 40px;
height: 30px;
resize: vertical;
display: block;
background-color: rgba(255, 0, 0, 0);
border: 0;
box-shadow: none;
padding: 6px;
padding-top: 6px;
font-family: "Noto Sans", "Noto Color Emoji", sans-serif;
margin: 0;
text-shadow: 0px 0px calc(var(--shadowWidth) * 1px) var(--SmartThemeShadowColor);
@@ -2161,7 +2165,7 @@ input[type='checkbox']:not(#nav-toggle):not(#rm_button_panel_pin):not(#lm_button
width: max-content;
margin-left: 5px;
margin-right: 15px;
font-size: calc(var(--mainFontSize) * 0.8);
font-size: calc(var(--mainFontSize) * 0.85);
color: var(--SmartThemeEmColor);
text-align: center;
}
@@ -3016,8 +3020,8 @@ a {
#top-settings-holder {
margin: 0 auto;
padding-top: 5px;
height: 40px;
padding-top: 2px;
height: 30px;
/* max-width: var(--sheldWidth); */
justify-content: center;
display: grid;
@@ -3039,7 +3043,7 @@ a {
.drawer-icon {
display: inline-block;
cursor: pointer;
font-size: 1.8rem;
font-size: 25px;
}
.drawer-icon.openIcon {
@@ -3164,7 +3168,7 @@ a {
.drawer-content select {
width: 100%;
font-size: calc(var(--mainFontSize) * 0.9);
font-size: calc(var(--mainFontSize) * 0.95);
}
.settingsSectionWrap {