diff --git a/public/index.html b/public/index.html
index 99a9913c4..08e10f774 100644
--- a/public/index.html
+++ b/public/index.html
@@ -417,6 +417,21 @@
+                        <div class="range-block">
+                            <div class="range-block-title">
+                                Top-p
+                            </div>
+                            <div class="range-block-range-and-counter">
+                                <div class="range-block-range">
+                                    <input type="range" id="top_p_openai" name="volume" min="0" max="1" step="0.01">
+                                </div>
+                                <div class="range-block-counter">
+                                    <div contenteditable="true" data-for="top_p_openai" id="top_p_counter_openai">
+                                        select
+                                    </div>
+                                </div>
+                            </div>
+                        </div>
diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index 92f4f0d50..4d3cdcbaf 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -79,6 +79,7 @@ const default_settings = {
     temp_openai: 0.9,
     freq_pen_openai: 0.7,
     pres_pen_openai: 0.7,
+    top_p_openai: 1.0,
     stream_openai: false,
     openai_max_context: gpt3_max,
     openai_max_tokens: 300,
@@ -103,6 +104,7 @@ const oai_settings = {
     temp_openai: 1.0,
     freq_pen_openai: 0,
     pres_pen_openai: 0,
+    top_p_openai: 1.0,
     stream_openai: false,
     openai_max_context: gpt3_max,
     openai_max_tokens: 300,
@@ -511,6 +513,7 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
         "temperature": parseFloat(oai_settings.temp_openai),
         "frequency_penalty": parseFloat(oai_settings.freq_pen_openai),
         "presence_penalty": parseFloat(oai_settings.pres_pen_openai),
+        "top_p": parseFloat(oai_settings.top_p_openai),
         "max_tokens": oai_settings.openai_max_tokens,
         "stream": stream,
         "reverse_proxy": oai_settings.reverse_proxy,
@@ -660,6 +663,7 @@ function loadOpenAISettings(data, settings) {
     oai_settings.temp_openai = settings.temp_openai ?? default_settings.temp_openai;
     oai_settings.freq_pen_openai = settings.freq_pen_openai ?? default_settings.freq_pen_openai;
     oai_settings.pres_pen_openai = settings.pres_pen_openai ?? default_settings.pres_pen_openai;
+    oai_settings.top_p_openai = settings.top_p_openai ?? default_settings.top_p_openai;
     oai_settings.stream_openai = settings.stream_openai ?? default_settings.stream_openai;
     oai_settings.openai_max_context = settings.openai_max_context ?? default_settings.openai_max_context;
     oai_settings.openai_max_tokens = settings.openai_max_tokens ?? default_settings.openai_max_tokens;
@@ -707,6 +711,9 @@ function loadOpenAISettings(data, settings) {
     $('#pres_pen_openai').val(oai_settings.pres_pen_openai);
     $('#pres_pen_counter_openai').text(Number(oai_settings.pres_pen_openai).toFixed(2));
 
+    $('#top_p_openai').val(oai_settings.top_p_openai);
+    $('#top_p_counter_openai').text(Number(oai_settings.top_p_openai).toFixed(2));
+
     if (settings.reverse_proxy !== undefined) oai_settings.reverse_proxy = settings.reverse_proxy;
     $('#openai_reverse_proxy').val(oai_settings.reverse_proxy);
 
@@ -792,6 +799,7 @@ async function saveOpenAIPreset(name, settings) {
         temperature: settings.temp_openai,
         frequency_penalty: settings.freq_pen_openai,
         presence_penalty: settings.pres_pen_openai,
+        top_p: settings.top_p_openai,
         openai_max_context: settings.openai_max_context,
         openai_max_tokens: settings.openai_max_tokens,
         nsfw_toggle: settings.nsfw_toggle,
@@ -1056,6 +1064,7 @@ function onSettingsPresetChange() {
         temperature: ['#temp_openai', 'temp_openai', false],
         frequency_penalty: ['#freq_pen_openai', 'freq_pen_openai', false],
         presence_penalty: ['#pres_pen_openai', 'pres_pen_openai', false],
+        top_p: ['#top_p_openai', 'top_p_openai', false],
         openai_model: ['#model_openai_select', 'openai_model', false],
         openai_max_context: ['#openai_max_context', 'openai_max_context', false],
         openai_max_tokens: ['#openai_max_tokens', 'openai_max_tokens', false],
@@ -1160,6 +1169,13 @@ $(document).ready(function () {
     });
 
+    $(document).on('input', '#top_p_openai', function () {
+        oai_settings.top_p_openai = $(this).val();
+        $('#top_p_counter_openai').text(Number($(this).val()).toFixed(2));
+        saveSettingsDebounced();
+
+    });
+
     $(document).on('input', '#openai_max_context', function () {
         oai_settings.openai_max_context = parseInt($(this).val());
         $('#openai_max_context_counter').text(`${$(this).val()}`);
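For reference, a minimal standalone sketch of how the new value flows from the slider into the request body: the input handler stores the raw string from `$(this).val()`, and `parseFloat()` converts it when the payload is assembled, matching the pattern already used for temperature and the penalty fields. The variable names below are illustrative only, not taken verbatim from openai.js.

```js
// Illustrative sketch, not part of the patch.
// oai_settings holds whatever the UI last wrote; sliders store strings.
const oai_settings = {
    temp_openai: 0.9,
    freq_pen_openai: 0.7,
    pres_pen_openai: 0.7,
    top_p_openai: '0.95', // string, as written by the 'input' handler
};

// Payload assembly mirrors the added "top_p" line in sendOpenAIRequest.
const payload = {
    temperature: parseFloat(oai_settings.temp_openai),
    frequency_penalty: parseFloat(oai_settings.freq_pen_openai),
    presence_penalty: parseFloat(oai_settings.pres_pen_openai),
    top_p: parseFloat(oai_settings.top_p_openai), // new field introduced by this diff
};

console.log(JSON.stringify(payload));
// {"temperature":0.9,"frequency_penalty":0.7,"presence_penalty":0.7,"top_p":0.95}
```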