This commit is contained in:
SillyLossy
2023-05-14 19:47:34 +03:00
7 changed files with 733 additions and 108 deletions

View File

@@ -101,6 +101,7 @@ const default_settings = {
openai_model: 'gpt-3.5-turbo',
jailbreak_system: false,
reverse_proxy: '',
oai_breakdown: false,
};
const oai_settings = {
@@ -125,6 +126,7 @@ const oai_settings = {
openai_model: 'gpt-3.5-turbo',
jailbreak_system: false,
reverse_proxy: '',
oai_breakdown: false,
};
let openai_setting_names;
@@ -305,16 +307,18 @@ async function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldI
let openai_msgs_tosend = [];
// todo: static value, maybe include in the initial context calculation
const handler_instance = new TokenHandler(countTokens);
let new_chat_msg = { "role": "system", "content": "[Start a new chat]" };
let start_chat_count = countTokens([new_chat_msg], true);
let start_chat_count = handler_instance.count([new_chat_msg], true, 'start_chat');
await delay(1);
let total_count = countTokens([prompt_msg], true) + start_chat_count;
let total_count = handler_instance.count([prompt_msg], true, 'prompt') + start_chat_count;
await delay(1);
if (bias && bias.trim().length) {
let bias_msg = { "role": "system", "content": bias.trim() };
openai_msgs.push(bias_msg);
total_count += countTokens([bias_msg], true);
total_count += handler_instance.count([bias_msg], true, 'bias');
await delay(1);
}
@@ -331,13 +335,14 @@ async function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldI
openai_msgs.push(group_nudge);
// add a group nudge count
let group_nudge_count = countTokens([group_nudge], true);
let group_nudge_count = handler_instance.count([group_nudge], true, 'nudge');
await delay(1);
total_count += group_nudge_count;
// recount tokens for new start message
total_count -= start_chat_count
start_chat_count = countTokens([new_chat_msg], true);
handler_instance.uncount(start_chat_count, 'start_chat');
start_chat_count = handler_instance.count([new_chat_msg], true);
await delay(1);
total_count += start_chat_count;
}
@@ -346,7 +351,7 @@ async function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldI
const jailbreakMessage = { "role": "system", "content": substituteParams(oai_settings.jailbreak_prompt) };
openai_msgs.push(jailbreakMessage);
total_count += countTokens([jailbreakMessage], true);
total_count += handler_instance.count([jailbreakMessage], true, 'jailbreak');
await delay(1);
}
@@ -354,7 +359,7 @@ async function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldI
const impersonateMessage = { "role": "system", "content": substituteParams(oai_settings.impersonation_prompt) };
openai_msgs.push(impersonateMessage);
total_count += countTokens([impersonateMessage], true);
total_count += handler_instance.count([impersonateMessage], true, 'impersonate');
await delay(1);
}
@@ -377,12 +382,12 @@ async function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldI
examples_tosend.push(example);
}
}
total_count += countTokens(examples_tosend, true);
total_count += handler_instance.count(examples_tosend, true, 'examples');
await delay(1);
// go from newest message to oldest, because we want to delete the older ones from the context
for (let j = openai_msgs.length - 1; j >= 0; j--) {
let item = openai_msgs[j];
let item_count = countTokens(item, true);
let item_count = handler_instance.count(item, true, 'conversation');
await delay(1);
// If we have enough space for this message, also account for the max assistant reply size
if ((total_count + item_count) < (this_max_context - oai_settings.openai_max_tokens)) {
@@ -391,13 +396,14 @@ async function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldI
}
else {
// early break since if we still have more messages, they just won't fit anyway
handler_instance.uncount(item_count, 'conversation');
break;
}
}
} else {
for (let j = openai_msgs.length - 1; j >= 0; j--) {
let item = openai_msgs[j];
let item_count = countTokens(item, true);
let item_count = handler_instance.count(item, true, 'conversation');
await delay(1);
// If we have enough space for this message, also account for the max assistant reply size
if ((total_count + item_count) < (this_max_context - oai_settings.openai_max_tokens)) {
@@ -406,11 +412,12 @@ async function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldI
}
else {
// early break since if we still have more messages, they just won't fit anyway
handler_instance.uncount(item_count, 'conversation');
break;
}
}
console.log(total_count);
//console.log(total_count);
// each example block contains multiple user/bot messages
for (let example_block of openai_msgs_example) {
@@ -420,7 +427,7 @@ async function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldI
example_block = [new_chat_msg, ...example_block];
// add the block only if there is enough space for all its messages
const example_count = countTokens(example_block, true);
const example_count = handler_instance.count(example_block, true, 'examples');
await delay(1);
if ((total_count + example_count) < (this_max_context - oai_settings.openai_max_tokens)) {
examples_tosend.push(...example_block)
@@ -428,6 +435,7 @@ async function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldI
}
else {
// early break since more examples probably won't fit anyway
handler_instance.uncount(example_count, 'examples');
break;
}
}
@@ -439,10 +447,14 @@ async function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldI
openai_msgs_tosend.reverse();
openai_msgs_tosend = [prompt_msg, ...examples_tosend, new_chat_msg, ...openai_msgs_tosend]
console.log("We're sending this:")
console.log(openai_msgs_tosend);
console.log(`Calculated the total context to be ${total_count} tokens`);
return openai_msgs_tosend;
//console.log("We're sending this:")
//console.log(openai_msgs_tosend);
//console.log(`Calculated the total context to be ${total_count} tokens`);
handler_instance.log();
return [
openai_msgs_tosend,
oai_settings.oai_breakdown ? handler_instance.counts : false,
];
}
function getSystemPrompt(nsfw_toggle_prompt, enhance_definitions_prompt, wiBefore, storyString, wiAfter, extensionPrompt, isImpersonate) {
@@ -604,9 +616,42 @@ async function calculateLogitBias() {
}
}
/**
 * Accumulates token counts per prompt category so the UI can show a
 * breakdown of what the context budget was spent on.
 */
class TokenHandler {
    /**
     * @param {Function} countTokenFn - Callback `(messages, full) => number`
     *     that returns the token count for the given message(s).
     */
    constructor(countTokenFn) {
        this.countTokenFn = countTokenFn;
        // One running total per prompt section; the keys double as the
        // labels used when the breakdown is logged or displayed.
        const categories = [
            'start_chat',
            'prompt',
            'bias',
            'nudge',
            'jailbreak',
            'impersonate',
            'examples',
            'conversation',
        ];
        this.counts = Object.fromEntries(categories.map((category) => [category, 0]));
    }

    /**
     * Removes previously counted tokens from a category, e.g. when a
     * message ends up being dropped from the context after all.
     * @param {number} value - Token count to subtract.
     * @param {string} type - Category key in `this.counts`.
     */
    uncount(value, type) {
        this.counts[type] -= value;
    }

    /**
     * Counts tokens for `messages`, adds them to the `type` category,
     * and returns the count so callers can keep their own running totals.
     * @param {object|Array} messages - Message(s) to measure.
     * @param {boolean} full - Passed through to the counting callback.
     * @param {string} type - Category key in `this.counts`.
     * @return {number} Token count of `messages`.
     */
    count(messages, full, type) {
        const tokens = this.countTokenFn(messages, full);
        this.counts[type] += tokens;
        return tokens;
    }

    /** Dumps the per-category counts plus their total to the console. */
    log() {
        const total = Object.values(this.counts).reduce((sum, n) => sum + n);
        console.table({ ...this.counts, 'total': total });
    }
}
function countTokens(messages, full = false) {
let chatId = 'undefined';
try {
if (selected_group) {
chatId = groups.find(x => x.id == selected_group)?.chat_id;
@@ -693,6 +738,7 @@ function loadOpenAISettings(data, settings) {
if (settings.nsfw_first !== undefined) oai_settings.nsfw_first = !!settings.nsfw_first;
if (settings.openai_model !== undefined) oai_settings.openai_model = settings.openai_model;
if (settings.jailbreak_system !== undefined) oai_settings.jailbreak_system = !!settings.jailbreak_system;
if (settings.oai_breakdown !== undefined) oai_settings.oai_breakdown = !!settings.oai_breakdown;
$('#stream_toggle').prop('checked', oai_settings.stream_openai);
@@ -708,6 +754,7 @@ function loadOpenAISettings(data, settings) {
$('#wrap_in_quotes').prop('checked', oai_settings.wrap_in_quotes);
$('#nsfw_first').prop('checked', oai_settings.nsfw_first);
$('#jailbreak_system').prop('checked', oai_settings.jailbreak_system);
$('#oai_breakdown').prop('checked', oai_settings.oai_breakdown);
if (settings.main_prompt !== undefined) oai_settings.main_prompt = settings.main_prompt;
if (settings.nsfw_prompt !== undefined) oai_settings.nsfw_prompt = settings.nsfw_prompt;
@@ -827,6 +874,7 @@ async function saveOpenAIPreset(name, settings) {
jailbreak_system: settings.jailbreak_system,
impersonation_prompt: settings.impersonation_prompt,
bias_preset_selected: settings.bias_preset_selected,
oai_breakdown: settings.oai_breakdown,
};
const savePresetSettings = await fetch(`/savepreset_openai?name=${name}`, {
@@ -1034,7 +1082,7 @@ async function onDeletePresetClick() {
const response = await fetch('/deletepreset_openai', {
method: 'POST',
headers: getRequestHeaders(),
body: JSON.stringify({name: nameToDelete}),
body: JSON.stringify({ name: nameToDelete }),
});
if (!response.ok) {
@@ -1085,6 +1133,7 @@ function onSettingsPresetChange() {
wrap_in_quotes: ['#wrap_in_quotes', 'wrap_in_quotes', true],
nsfw_first: ['#nsfw_first', 'nsfw_first', true],
jailbreak_system: ['#jailbreak_system', 'jailbreak_system', true],
oai_breakdown: ['#oai_breakdown', 'oai_breakdown', true],
main_prompt: ['#main_prompt_textarea', 'main_prompt', false],
nsfw_prompt: ['#nsfw_prompt_textarea', 'nsfw_prompt', false],
jailbreak_prompt: ['#jailbreak_prompt_textarea', 'jailbreak_prompt', false],
@@ -1151,7 +1200,7 @@ function onReverseProxyInput() {
async function onConnectButtonClick(e) {
e.stopPropagation();
const api_key_openai = $('#api_key_openai').val().trim();
if (api_key_openai.length) {
await writeSecret(SECRET_KEYS.OPENAI, api_key_openai);
}
@@ -1257,6 +1306,16 @@ $(document).ready(function () {
saveSettingsDebounced();
});
$("#oai_breakdown").on('change', function () {
oai_settings.oai_breakdown = !!$(this).prop("checked");
if (!oai_settings.oai_breakdown) {
$("#token_breakdown").css('display', 'none');
} else {
$("#token_breakdown").css('display', 'flex');
}
saveSettingsDebounced();
});
// auto-select a preset based on character/group name
$(document).on("click", ".character_select", function () {
const chid = $(this).attr('chid');
@@ -1310,18 +1369,18 @@ $(document).ready(function () {
saveSettingsDebounced();
});
$("#api_button_openai").on('click', onConnectButtonClick);
$("#openai_reverse_proxy").on('input', onReverseProxyInput);
$("#model_openai_select").on('change', onModelChange);
$("#settings_perset_openai").on('change', onSettingsPresetChange);
$("#new_oai_preset").on('click', onNewPresetClick);
$("#delete_oai_preset").on('click', onDeletePresetClick);
$("#openai_api_usage").on('click', showApiKeyUsage);
$('#openai_logit_bias_preset').on('change', onLogitBiasPresetChange);
$('#openai_logit_bias_new_preset').on('click', createNewLogitBiasPreset);
$('#openai_logit_bias_new_entry').on('click', createNewLogitBiasEntry);
$('#openai_logit_bias_import_file').on('input', onLogitBiasPresetImportFileChange);
$('#openai_logit_bias_import_preset').on('click', onLogitBiasPresetImportClick);
$('#openai_logit_bias_export_preset').on('click', onLogitBiasPresetExportClick);
$('#openai_logit_bias_delete_preset').on('click', onLogitBiasPresetDeleteClick);
$("#api_button_openai").on("click", onConnectButtonClick);
$("#openai_reverse_proxy").on("input", onReverseProxyInput);
$("#model_openai_select").on("change", onModelChange);
$("#settings_perset_openai").on("change", onSettingsPresetChange);
$("#new_oai_preset").on("click", onNewPresetClick);
$("#delete_oai_preset").on("click", onDeletePresetClick);
$("#openai_api_usage").on("click", showApiKeyUsage);
$("#openai_logit_bias_preset").on("change", onLogitBiasPresetChange);
$("#openai_logit_bias_new_preset").on("click", createNewLogitBiasPreset);
$("#openai_logit_bias_new_entry").on("click", createNewLogitBiasEntry);
$("#openai_logit_bias_import_file").on("input", onLogitBiasPresetImportFileChange);
$("#openai_logit_bias_import_preset").on("click", onLogitBiasPresetImportClick);
$("#openai_logit_bias_export_preset").on("click", onLogitBiasPresetExportClick);
$("#openai_logit_bias_delete_preset").on("click", onLogitBiasPresetDeleteClick);
});

303
public/scripts/uniqolor.js Normal file
View File

@@ -0,0 +1,303 @@
// Inclusive bounds (percent) for the saturation and lightness channels.
const SATURATION_BOUND = [0, 100];
const LIGHTNESS_BOUND = [0, 100];

// Left-pads a one-character string with '0' (for two-digit hex channels).
const pad2 = str => (str.length === 1 ? `0${str}` : `${str}`);

// Clamps `num` into [min, max]; `min` wins if the bounds are inverted.
const clamp = (num, min, max) => Math.max(Math.min(num, max), min);

// Uniform random integer from the inclusive range [min, max].
const random = (min, max) => min + Math.floor(Math.random() * (max - min + 1));
/**
 * Random integer in [min, max] that avoids the given ranges.
 * Re-rolls recursively whenever the draw lands inside one of the
 * `[lo, hi]` pairs in `exclude`.
 * NOTE: never terminates if `exclude` covers the whole [min, max] span —
 * callers must leave at least one allowed value.
 * @param {number} min
 * @param {number} max
 * @param {Array} [exclude] - List of two-element [lo, hi] ranges to avoid.
 * @return {number}
 */
const randomExclude = (min, max, exclude) => {
  const draw = random(min, max);
  for (const range of exclude ?? []) {
    const inExcludedRange = range?.length === 2 && draw >= range[0] && draw <= range[1];
    if (inExcludedRange) {
      return randomExclude(min, max, exclude);
    }
  }
  return draw;
};
/**
 * Computes a deterministic 32-bit hash of a string (the classic
 * `hash * 31 + charCode` scheme, iterating UTF-16 code units).
 * @param {string} str
 * @return {number} Signed 32-bit integer hash.
 */
const hashCode = str => {
  let hash = 0;
  for (let i = 0; i < str.length; i++) {
    hash = (hash << 5) - hash + str.charCodeAt(i); // hash * 31 + code
    hash |= 0; // coerce back into a signed 32-bit integer
  }
  return hash;
};
/**
 * Maps `num` onto the span described by `range`.
 * A numeric `range` is a fixed value and is returned unchanged; an array
 * range wraps `num` (via modulo) into an interval starting at `range[0]`.
 * @param {number} num
 * @param {number|Array} range
 * @return {number}
 */
const boundHashCode = (num, range) => {
  if (typeof range === 'number') {
    return range;
  }
  const span = Math.abs(range[1] - range[0]);
  return (num % span) + range[0];
};
/**
 * Normalizes `range` so it fits inside `bound`.
 * - a number collapses to its clamped absolute value
 * - a one-element array, or a degenerate [x, x] pair, collapses likewise
 * - a proper [lo, hi] pair has both endpoints clamped into `bound`
 * @param {number|Array} range
 * @param {Array} bound - [min, max] limits.
 * @return {number|Array}
 */
const sanitizeRange = (range, bound) => {
  if (typeof range === 'number') {
    return clamp(Math.abs(range), ...bound);
  }
  const [lo, hi] = range;
  if (range.length === 1 || lo === hi) {
    return clamp(Math.abs(lo), ...bound);
  }
  return [
    Math.abs(clamp(lo, ...bound)),
    clamp(Math.abs(hi), ...bound),
  ];
};
/**
 * HSL→RGB helper: resolves one colour channel from the chroma endpoints
 * `p`/`q` and a (possibly out-of-range) hue offset `t`.
 * @param {number} p
 * @param {number} q
 * @param {number} t
 * @return {number}
 */
const hueToRgb = (p, q, t) => {
  // Wrap t back toward [0, 1]; a single step suffices for the caller's
  // h ± 1/3 offsets.
  if (t < 0) {
    t += 1;
  } else if (t > 1) {
    t -= 1;
  }
  if (t < 1 / 6) {
    return p + ((q - p) * 6 * t);
  }
  if (t < 1 / 2) {
    return q;
  }
  if (t < 2 / 3) {
    return p + ((q - p) * ((2 / 3) - t) * 6);
  }
  return p;
};
/**
 * Converts an HSL colour to an RGB triplet.
 * @param {number} h Hue in degrees [0, 360)
 * @param {number} s Saturation percent [0, 100]
 * @param {number} l Lightness percent [0, 100]
 * @return {Array} [r, g, b] with each channel rounded into [0, 255].
 */
const hslToRgb = (h, s, l) => {
  h /= 360;
  s /= 100;
  l /= 100;

  let r;
  let g;
  let b;
  if (s === 0) {
    // Zero saturation is pure grey: every channel equals the lightness.
    r = g = b = l;
  } else {
    const q = l < 0.5
      ? l * (1 + s)
      : (l + s) - (l * s);
    const p = (2 * l) - q;
    r = hueToRgb(p, q, h + (1 / 3));
    g = hueToRgb(p, q, h);
    b = hueToRgb(p, q, h - (1 / 3));
  }

  return [r, g, b].map(channel => Math.round(channel * 255));
};
/**
 * Determines whether an RGB colour reads as "light" using the W3C/AERT
 * perceived-brightness formula: http://www.w3.org/TR/AERT#color-contrast
 * @param {number} r Red
 * @param {number} g Green
 * @param {number} b Blue
 * @param {number} differencePoint Brightness threshold in [0, 255].
 * @return {boolean}
 */
const rgbIsLight = (r, g, b, differencePoint) => {
  const brightness = ((r * 299) + (g * 587) + (b * 114)) / 1000;
  return brightness >= differencePoint;
};
/**
 * Formats an HSL colour as a CSS `hsl()` function string,
 * e.g. `hsl(210, 50%, 60%)`.
 * @param {number} h Hue
 * @param {number} s Saturation
 * @param {number} l Lightness
 * @return {string}
 */
const hslToString = (h, s, l) => `hsl(${h}, ${s}%, ${l}%)`;
/**
 * Formats an RGB triplet in the requested colour format.
 * @param {number} r Red
 * @param {number} g Green
 * @param {number} b Blue
 * @param {string} format 'rgb' for `rgb(r, g, b)`; 'hex' or any other
 *     value yields a `#rrggbb` hex string.
 * @return {string}
 */
const rgbFormat = (r, g, b, format) => {
  if (format === 'rgb') {
    return `rgb(${r}, ${g}, ${b})`;
  }
  // Default branch ('hex' and unrecognized formats): #rrggbb.
  const hex = [r, g, b].map(channel => pad2(channel.toString(16))).join('');
  return `#${hex}`;
};
/**
 * Generates a deterministic colour from `value` — the same input always
 * produces the same colour.
 * @param {string|number} value
 * @param {Object} [options={}]
 * @param {string} [options.format='hex']
 *   The colour format: one of `hex`, `rgb` or `hsl`.
 * @param {number|Array} [options.saturation=[50, 55]]
 *   Colour saturation: a number or a range between 0 and 100.
 * @param {number|Array} [options.lightness=[50, 60]]
 *   Colour lightness: a number or a range between 0 and 100.
 * @param {number} [options.differencePoint=130]
 *   Brightness threshold (0–255) used to derive the `isLight` flag.
 * @return {Object} `{ color, isLight }`
 * @example
 *
 * ```js
 * uniqolor('Hello world!')
 * // { color: "#5cc653", isLight: true }
 *
 * uniqolor('Hello world!', { format: 'rgb' })
 * // { color: "rgb(92, 198, 83)", isLight: true }
 *
 * uniqolor('Hello world!', {
 *   saturation: 30,
 *   lightness: [70, 80],
 * })
 * // { color: "#afd2ac", isLight: true }
 *
 * uniqolor('Hello world!', {
 *   saturation: 30,
 *   lightness: [70, 80],
 *   differencePoint: 200,
 * })
 * // { color: "#afd2ac", isLight: false }
 * ```
 */
const uniqolor = (value, {
  format = 'hex',
  saturation = [50, 55],
  lightness = [50, 60],
  differencePoint = 130,
} = {}) => {
  // Hash the stringified value, then bound each HSL channel with it.
  const hash = Math.abs(hashCode(String(value)));
  const h = boundHashCode(hash, [0, 360]);
  const s = boundHashCode(hash, sanitizeRange(saturation, SATURATION_BOUND));
  const l = boundHashCode(hash, sanitizeRange(lightness, LIGHTNESS_BOUND));
  const [r, g, b] = hslToRgb(h, s, l);
  const color = format === 'hsl'
    ? hslToString(h, s, l)
    : rgbFormat(r, g, b, format);
  return {
    color,
    isLight: rgbIsLight(r, g, b, differencePoint),
  };
};
/**
 * Generates a random colour.
 * @param {Object} [options={}]
 * @param {string} [options.format='hex']
 *   The colour format: one of `hex`, `rgb` or `hsl`.
 * @param {number|Array} [options.saturation=[50, 55]]
 *   Colour saturation: a number or a range between 0 and 100.
 * @param {number|Array} [options.lightness=[50, 60]]
 *   Colour lightness: a number or a range between 0 and 100.
 * @param {number} [options.differencePoint=130]
 *   Brightness threshold (0–255) used to derive the `isLight` flag.
 * @param {Array} [options.excludeHue]
 *   Hue ranges to avoid, e.g. exclude reds with `[[0, 20], [325, 359]]`.
 * @return {Object} `{ color, isLight }`
 * @example
 *
 * ```js
 * // Generate a random color
 * uniqolor.random()
 * // { color: "#644cc8", isLight: false }
 *
 * // Generate a random color with HSL format
 * uniqolor.random({ format: 'hsl' })
 * // { color: "hsl(89, 55%, 60%)", isLight: true }
 *
 * // Generate a random color in specific saturation and lightness
 * uniqolor.random({
 *   saturation: 80,
 *   lightness: [70, 80],
 * })
 * // { color: "#c7b9da", isLight: true }
 *
 * // Generate a random color but exclude red color range
 * uniqolor.random({
 *   excludeHue: [[0, 20], [325, 359]],
 * })
 * // {color: '#53caab', isLight: true}
 * ```
 */
uniqolor.random = ({
  format = 'hex',
  saturation = [50, 55],
  lightness = [50, 60],
  differencePoint = 130,
  excludeHue,
} = {}) => {
  const satRange = sanitizeRange(saturation, SATURATION_BOUND);
  const lightRange = sanitizeRange(lightness, LIGHTNESS_BOUND);
  const h = excludeHue ? randomExclude(0, 359, excludeHue) : random(0, 359);
  const s = typeof satRange === 'number'
    ? satRange
    : random(...satRange);
  const l = typeof lightRange === 'number'
    ? lightRange
    : random(...lightRange);
  const [r, g, b] = hslToRgb(h, s, l);
  const color = format === 'hsl'
    ? hslToString(h, s, l)
    : rgbFormat(r, g, b, format);
  return {
    color,
    isLight: rgbIsLight(r, g, b, differencePoint),
  };
};

export default uniqolor;