Merge branch 'SillyTavern:staging' into staging

Tony Ribeiro, 2023-08-16 14:58:14 +02:00 (committed by GitHub)
commit 9e49ad0a77
9 changed files with 86 additions and 72 deletions

View File

@@ -334,20 +334,20 @@
 ]
 },
 "nai_settings": {
-"temperature": 0.63,
-"repetition_penalty": 1.148125,
+"temperature": 1.5,
+"repetition_penalty": 2.25,
 "repetition_penalty_range": 2048,
 "repetition_penalty_slope": 0.09,
 "repetition_penalty_frequency": 0,
-"repetition_penalty_presence": 0,
+"repetition_penalty_presence": 0.005,
 "tail_free_sampling": 0.975,
-"top_k": 0,
-"top_p": 0.975,
-"top_a": 1,
-"typical_p": 1,
+"top_k": 10,
+"top_p": 0.75,
+"top_a": 0.08,
+"typical_p": 0.975,
 "min_length": 1,
-"model_novel": "euterpe-v2",
-"preset_settings_novel": "Classic-Euterpe",
+"model_novel": "clio-v1",
+"preset_settings_novel": "Talker-Chat-Clio",
 "streaming_novel": false
 },
 "kai_settings": {

View File

@@ -1,16 +1,16 @@
 {
-"order": [5, 0, 1, 3, 7],
-"temperature": 1.35,
+"order": [5, 0, 1, 3],
+"temperature": 1.16,
 "max_length": 300,
 "min_length": 1,
-"top_k": 225,
-"top_g": 8,
-"typical_p": 0.975,
-"tail_free_sampling": 0.984,
-"repetition_penalty": 1.7,
-"repetition_penalty_range": 3200,
+"top_k": 175,
+"typical_p": 0.96,
+"tail_free_sampling": 0.994,
+"repetition_penalty": 1.68,
+"repetition_penalty_range": 2240,
+"repetition_penalty_slope": 1.5,
 "repetition_penalty_frequency": 0,
-"repetition_penalty_presence": 0.02,
+"repetition_penalty_presence": 0.005,
 "use_cache": false,
 "return_full_text": false,
 "prefix": "vanilla",

View File

@@ -0,0 +1,20 @@
+{
+"order": [8, 6, 5, 0, 3],
+"temperature": 0.9,
+"max_length": 300,
+"min_length": 1,
+"typical_p": 0.95,
+"tail_free_sampling": 0.92,
+"mirostat_lr": 0.22,
+"mirostat_tau": 4.95,
+"repetition_penalty": 3,
+"repetition_penalty_range": 4000,
+"repetition_penalty_frequency": 0,
+"repetition_penalty_presence": 0,
+"use_cache": false,
+"return_full_text": false,
+"prefix": "vanilla",
+"phrase_rep_pen": "off",
+"cfg_scale": 1.48,
+"max_context": 7800
+}

View File

@@ -3,15 +3,15 @@
 "temperature": 2.5,
 "max_length": 300,
 "min_length": 1,
-"typical_p": 0.966,
-"tail_free_sampling": 0.933,
+"typical_p": 0.969,
+"tail_free_sampling": 0.941,
 "repetition_penalty": 1,
-"repetition_penalty_range": 2048,
+"repetition_penalty_range": 1024,
 "repetition_penalty_frequency": 0,
 "repetition_penalty_presence": 0,
 "use_cache": false,
 "return_full_text": false,
 "prefix": "vanilla",
-"phrase_rep_pen": "aggressive",
+"phrase_rep_pen": "medium",
 "max_context": 7800
 }

View File

@@ -7,9 +7,9 @@
 "top_p": 0.95,
 "typical_p": 0.95,
 "tail_free_sampling": 0.95,
-"mirostat_lr": 0.2,
+"mirostat_lr": 0.25,
 "mirostat_tau": 5,
-"repetition_penalty": 1.6,
+"repetition_penalty": 1.625,
 "repetition_penalty_range": 2016,
 "repetition_penalty_frequency": 0,
 "repetition_penalty_presence": 0,

View File

@@ -877,7 +877,7 @@
 </div>
 </div>
 <div class="toggle-description justifyLeft" data-i18n="Use style tags to modify the writing style of the output">
-Use style tags to modify the writing style of the output
+Use style tags to modify the writing style of the output.
 </div>
 <div class="wide100p">
 <textarea id="nai_preamble_textarea" class="text_pole textarea_compact" name="nai_preamble" rows="2" placeholder=""></textarea>
@@ -967,21 +967,6 @@
 </div>
 </div>
 </div>
-<div class="range-block">
-<div class="range-block-title" data-i18n="Top G">
-Top G
-</div>
-<div class="range-block-range-and-counter">
-<div class="range-block-range">
-<input type="range" id="top_g_novel" name="volume" min="0" max="20" step="1">
-</div>
-<div class="range-block-counter">
-<div contenteditable="true" data-for="top_g_novel" id="top_g_counter_novel">
-select
-</div>
-</div>
-</div>
-</div>
 <div class="range-block">
 <div class="range-block-title" data-i18n="Mirostat Tau">
 Mirostat Tau

View File

@@ -4021,7 +4021,7 @@ function saveReply(type, getMessage, this_mes_is_name, title) {
 chat[chat.length - 1]['gen_started'] = generation_started;
 chat[chat.length - 1]['gen_finished'] = generationFinished;
 chat[chat.length - 1]['send_date'] = getMessageTimeStamp();
-chat[chat.length - 1]['extra']['api'] = main_api;
+chat[chat.length - 1]['extra']['api'] = getGeneratingApi();
 chat[chat.length - 1]['extra']['model'] = getGeneratingModel();
 addOneMessage(chat[chat.length - 1], { type: 'swipe' });
 } else {
@@ -4035,7 +4035,7 @@ function saveReply(type, getMessage, this_mes_is_name, title) {
 chat[chat.length - 1]['gen_started'] = generation_started;
 chat[chat.length - 1]['gen_finished'] = generationFinished;
 chat[chat.length - 1]['send_date'] = getMessageTimeStamp();
-chat[chat.length - 1]["extra"]["api"] = main_api;
+chat[chat.length - 1]["extra"]["api"] = getGeneratingApi();
 chat[chat.length - 1]["extra"]["model"] = getGeneratingModel();
 addOneMessage(chat[chat.length - 1], { type: 'swipe' });
 } else if (type === 'appendFinal') {
@@ -4046,7 +4046,7 @@ function saveReply(type, getMessage, this_mes_is_name, title) {
 chat[chat.length - 1]['gen_started'] = generation_started;
 chat[chat.length - 1]['gen_finished'] = generationFinished;
 chat[chat.length - 1]['send_date'] = getMessageTimeStamp();
-chat[chat.length - 1]["extra"]["api"] = main_api;
+chat[chat.length - 1]["extra"]["api"] = getGeneratingApi();
 chat[chat.length - 1]["extra"]["model"] = getGeneratingModel();
 addOneMessage(chat[chat.length - 1], { type: 'swipe' });
@@ -4058,7 +4058,7 @@ function saveReply(type, getMessage, this_mes_is_name, title) {
 chat[chat.length - 1]['is_user'] = false;
 chat[chat.length - 1]['is_name'] = this_mes_is_name;
 chat[chat.length - 1]['send_date'] = getMessageTimeStamp();
-chat[chat.length - 1]["extra"]["api"] = main_api;
+chat[chat.length - 1]["extra"]["api"] = getGeneratingApi();
 chat[chat.length - 1]["extra"]["model"] = getGeneratingModel();
 if (power_user.trim_spaces) {
 getMessage = getMessage.trim();
@@ -4121,6 +4121,15 @@ function saveImageToMessage(img, mes) {
 }
 }
+function getGeneratingApi() {
+switch (main_api) {
+case 'openai':
+return oai_settings.chat_completion_source || 'openai';
+default:
+return main_api;
+}
+}
 function getGeneratingModel(mes) {
 let model = '';
 switch (main_api) {

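Illustrative sketch (not part of the patch): what the new getGeneratingApi() helper returns when the branches above stamp chat[...].extra.api. The globals below are stand-ins mirroring the names used in script.js, and the 'claude' value for chat_completion_source is an assumption for the example.

// Stand-in globals (assumed values for illustration only).
const oai_settings = { chat_completion_source: 'claude' };
let main_api = 'openai';

function getGeneratingApi() {
    switch (main_api) {
        case 'openai':
            // record the concrete Chat Completion source instead of the generic 'openai'
            return oai_settings.chat_completion_source || 'openai';
        default:
            return main_api;
    }
}

console.log(getGeneratingApi()); // 'claude' (before this change, extra.api stored main_api, i.e. 'openai')
main_api = 'novel';
console.log(getGeneratingApi()); // 'novel' (non-Chat-Completion APIs are recorded as before)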
View File

@@ -1,8 +1,9 @@
 import {
 getRequestHeaders,
-saveSettingsDebounced,
 getStoppingStrings,
-getTextTokens
+getTextTokens,
+novelai_setting_names,
+saveSettingsDebounced
 } from "../script.js";
 import { getCfg } from "./extensions/cfg/util.js";
 import { tokenizers } from "./power-user.js";
@@ -17,23 +18,28 @@ export {
 const default_preamble = "[ Style: chat, complex, sensory, visceral ]";
 const maximum_output_length = 150;
+const default_presets = {
+"euterpe-v2": "Classic-Euterpe",
+"krake-v2": "Classic-Krake",
+"clio-v1": "Talker-Chat-Clio",
+"kayra-v1": "Carefree-Kayra"
+}
 const nai_settings = {
-temperature: 0.5,
-repetition_penalty: 1,
-repetition_penalty_range: 100,
-repetition_penalty_slope: 0,
+temperature: 1.5,
+repetition_penalty: 2.25,
+repetition_penalty_range: 2048,
+repetition_penalty_slope: 0.09,
 repetition_penalty_frequency: 0,
-repetition_penalty_presence: 0,
-tail_free_sampling: 0.68,
-top_k: 0,
-top_p: 1,
-top_a: 1,
-top_g: 0,
-typical_p: 1,
-min_length: 0,
-model_novel: "euterpe-v2",
-preset_settings_novel: "Classic-Euterpe",
+repetition_penalty_presence: 0.005,
+tail_free_sampling: 0.975,
+top_k: 10,
+top_p: 0.75,
+top_a: 0.08,
+typical_p: 0.975,
+min_length: 1,
+model_novel: "clio-v1",
+preset_settings_novel: "Talker-Chat-Clio",
 streaming_novel: false,
 nai_preamble: default_preamble,
 prefix: '',
@@ -97,7 +103,6 @@ function loadNovelPreset(preset) {
 nai_settings.min_length = preset.min_length;
 nai_settings.cfg_scale = preset.cfg_scale;
 nai_settings.phrase_rep_pen = preset.phrase_rep_pen;
-nai_settings.top_g = preset.top_g;
 nai_settings.mirostat_lr = preset.mirostat_lr;
 nai_settings.mirostat_tau = preset.mirostat_tau;
 nai_settings.prefix = preset.prefix;
@@ -128,7 +133,6 @@ function loadNovelSettings(settings) {
 nai_settings.min_length = settings.min_length;
 nai_settings.phrase_rep_pen = settings.phrase_rep_pen;
 nai_settings.cfg_scale = settings.cfg_scale;
-nai_settings.top_g = settings.top_g;
 nai_settings.mirostat_lr = settings.mirostat_lr;
 nai_settings.mirostat_tau = settings.mirostat_tau;
 nai_settings.streaming_novel = !!settings.streaming_novel;
@@ -165,8 +169,6 @@ function loadNovelSettingsUi(ui_settings) {
 $("#cfg_scale_novel").val(ui_settings.cfg_scale);
 $("#cfg_scale_counter_novel").text(Number(ui_settings.cfg_scale).toFixed(2));
 $("#phrase_rep_pen_novel").val(ui_settings.phrase_rep_pen || "off");
-$("#top_g_novel").val(ui_settings.top_g);
-$("#top_g_counter_novel").text(Number(ui_settings.top_g).toFixed(0));
 $("#mirostat_lr_novel").val(ui_settings.mirostat_lr);
 $("#mirostat_lr_counter_novel").text(Number(ui_settings.mirostat_lr).toFixed(2));
 $("#mirostat_tau_novel").val(ui_settings.mirostat_tau);
@@ -248,12 +250,6 @@ const sliders = [
 format: (val) => Number(val).toFixed(2),
 setValue: (val) => { nai_settings.typical_p = Number(val).toFixed(2); },
 },
-{
-sliderId: "#top_g_novel",
-counterId: "#top_g_counter_novel",
-format: (val) => Number(val).toFixed(0),
-setValue: (val) => { nai_settings.top_g = Number(val).toFixed(0); },
-},
 {
 sliderId: "#mirostat_tau_novel",
 counterId: "#mirostat_tau_counter_novel",
@@ -411,7 +407,6 @@ export function getNovelGenerationData(finalPrompt, this_settings, this_amount_g
 "top_p": parseFloat(nai_settings.top_p),
 "top_k": parseInt(nai_settings.top_k),
 "typical_p": parseFloat(nai_settings.typical_p),
-"top_g": parseFloat(nai_settings.top_g),
 "mirostat_lr": parseFloat(nai_settings.mirostat_lr),
 "mirostat_tau": parseFloat(nai_settings.mirostat_tau),
 "cfg_scale": cfgSettings?.guidanceScale ?? parseFloat(nai_settings.cfg_scale),
@@ -520,6 +515,12 @@ $(document).ready(function () {
 $("#model_novel_select").change(function () {
 nai_settings.model_novel = $("#model_novel_select").find(":selected").val();
 saveSettingsDebounced();
+// Update the selected preset to something appropriate
+const default_preset = default_presets[nai_settings.model_novel];
+$(`#settings_perset_novel`).val(novelai_setting_names[default_preset]);
+$(`#settings_perset_novel option[value=${novelai_setting_names[default_preset]}]`).attr("selected", "true")
+$(`#settings_perset_novel`).trigger("change");
 });
 $("#nai_prefix").on('change', function () {

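Illustrative sketch (not part of the patch): the lookup chain behind the new model-change handler, which selects a matching preset whenever the NovelAI model changes. default_presets mirrors the mapping added in the diff; the novelai_setting_names indices and the helper name are assumptions made only for this example.

// default_presets as added in the diff: model id -> default preset name.
const default_presets = {
    "euterpe-v2": "Classic-Euterpe",
    "krake-v2": "Classic-Krake",
    "clio-v1": "Talker-Chat-Clio",
    "kayra-v1": "Carefree-Kayra"
};
// Assumed mapping of preset names to the option values used by the #settings_perset_novel dropdown.
const novelai_setting_names = { "Talker-Chat-Clio": 2, "Carefree-Kayra": 5 };

// Hypothetical helper: resolves the option value the handler writes into the preset
// dropdown before triggering its change event (which then applies the preset).
function presetOptionForModel(model_novel) {
    const presetName = default_presets[model_novel];
    return novelai_setting_names[presetName];
}

console.log(presetOptionForModel("kayra-v1")); // 5 -> "Carefree-Kayra" gets selected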
View File

@@ -1883,7 +1883,6 @@ app.post("/generate_novelai", jsonParser, async function (request, response_gene
 "top_p": request.body.top_p,
 "top_k": request.body.top_k,
 "typical_p": request.body.typical_p,
-"top_g": request.body.top_g,
 "mirostat_lr": request.body.mirostat_lr,
 "mirostat_tau": request.body.mirostat_tau,
 "cfg_scale": request.body.cfg_scale,