diff --git a/public/index.html b/public/index.html index 8bdd53465..8c686b095 100644 --- a/public/index.html +++ b/public/index.html @@ -573,9 +573,24 @@ -
+
+
+ Top K +
+
+
+ +
+
+
+ select +
+
+
+
+
- Top-p + Top P
diff --git a/public/scripts/openai.js b/public/scripts/openai.js index 5ee9bd2b2..3c6c7d6eb 100644 --- a/public/scripts/openai.js +++ b/public/scripts/openai.js @@ -102,6 +102,7 @@ const default_settings = { freq_pen_openai: 0.7, pres_pen_openai: 0.7, top_p_openai: 1.0, + top_k_openai: 0, stream_openai: false, openai_max_context: gpt3_max, openai_max_tokens: 300, @@ -132,6 +133,7 @@ const oai_settings = { freq_pen_openai: 0, pres_pen_openai: 0, top_p_openai: 1.0, + top_k_openai: 0, stream_openai: false, openai_max_context: gpt3_max, openai_max_tokens: 300, @@ -673,6 +675,7 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) { "frequency_penalty": parseFloat(oai_settings.freq_pen_openai), "presence_penalty": parseFloat(oai_settings.pres_pen_openai), "top_p": parseFloat(oai_settings.top_p_openai), + "top_k": parseInt(oai_settings.top_k_openai, 10), "max_tokens": oai_settings.openai_max_tokens, "stream": stream, "reverse_proxy": oai_settings.reverse_proxy, @@ -926,6 +929,7 @@ function loadOpenAISettings(data, settings) { oai_settings.freq_pen_openai = settings.freq_pen_openai ?? default_settings.freq_pen_openai; oai_settings.pres_pen_openai = settings.pres_pen_openai ?? default_settings.pres_pen_openai; oai_settings.top_p_openai = settings.top_p_openai ?? default_settings.top_p_openai; + oai_settings.top_k_openai = settings.top_k_openai ?? default_settings.top_k_openai; oai_settings.stream_openai = settings.stream_openai ?? default_settings.stream_openai; oai_settings.openai_max_context = settings.openai_max_context ?? default_settings.openai_max_context; oai_settings.openai_max_tokens = settings.openai_max_tokens ?? 
default_settings.openai_max_tokens; @@ -986,6 +990,9 @@ function loadOpenAISettings(data, settings) { $('#top_p_openai').val(oai_settings.top_p_openai); $('#top_p_counter_openai').text(Number(oai_settings.top_p_openai).toFixed(2)); + $('#top_k_openai').val(oai_settings.top_k_openai); + $('#top_k_counter_openai').text(Number(oai_settings.top_k_openai).toFixed(0)); + if (settings.reverse_proxy !== undefined) oai_settings.reverse_proxy = settings.reverse_proxy; $('#openai_reverse_proxy').val(oai_settings.reverse_proxy); @@ -1103,6 +1110,7 @@ async function saveOpenAIPreset(name, settings) { frequency_penalty: settings.freq_pen_openai, presence_penalty: settings.pres_pen_openai, top_p: settings.top_p_openai, + top_k: settings.top_k_openai, openai_max_context: settings.openai_max_context, openai_max_tokens: settings.openai_max_tokens, nsfw_toggle: settings.nsfw_toggle, @@ -1374,6 +1382,7 @@ function onSettingsPresetChange() { frequency_penalty: ['#freq_pen_openai', 'freq_pen_openai', false], presence_penalty: ['#pres_pen_openai', 'pres_pen_openai', false], top_p: ['#top_p_openai', 'top_p_openai', false], + top_k: ['#top_k_openai', 'top_k_openai', false], max_context_unlocked: ['#oai_max_context_unlocked', 'max_context_unlocked', true], openai_model: ['#model_openai_select', 'openai_model', false], claude_model: ['#model_claude_select', 'claude_model', false], @@ -1569,7 +1578,12 @@ $(document).ready(function () { oai_settings.top_p_openai = $(this).val(); $('#top_p_counter_openai').text(Number($(this).val()).toFixed(2)); saveSettingsDebounced(); + }); + $(document).on('input', '#top_k_openai', function () { + oai_settings.top_k_openai = $(this).val(); + $('#top_k_counter_openai').text(Number($(this).val()).toFixed(0)); + saveSettingsDebounced(); }); $(document).on('input', '#openai_max_context', function () { diff --git a/server.js b/server.js index 08b1f1df1..f3a00497f 100644 --- a/server.js +++ b/server.js @@ -2770,6 +2770,8 @@ async function 
sendClaudeRequest(request, response) { max_tokens_to_sample: request.body.max_tokens, stop_sequences: ["\n\nHuman:", "\n\nSystem:", "\n\nAssistant:"], temperature: request.body.temperature, + top_p: request.body.top_p, + top_k: request.body.top_k, stream: request.body.stream, }), headers: {