From e33ac6a78abc940e395d7fbe9cdcc90f50a371ee Mon Sep 17 00:00:00 2001
From: Cohee <18619528+Cohee1207@users.noreply.github.com>
Date: Fri, 12 Jan 2024 17:15:13 +0200
Subject: [PATCH] Add min_p and top_a for OpenRouter

---
 public/index.html                          | 26 ++++++++++++
 public/scripts/openai.js                   | 46 +++++++++++++++++-----
 src/endpoints/backends/chat-completions.js |  8 ++++
 3 files changed, 71 insertions(+), 9 deletions(-)

diff --git a/public/index.html b/public/index.html
index 0fd9b2906..46995ef0c 100644
--- a/public/index.html
+++ b/public/index.html
@@ -523,6 +523,32 @@
+                        <div class="range-block" data-source="openrouter">
+                            <div class="range-block-title">
+                                Min P
+                            </div>
+                            <div class="range-block-range-and-counter">
+                                <div class="range-block-range">
+                                    <input type="range" id="min_p_openai" min="0" max="1" step="0.01">
+                                </div>
+                                <div class="range-block-counter">
+                                    <input type="number" id="min_p_counter_openai" min="0" max="1" step="0.01" data-for="min_p_openai">
+                                </div>
+                            </div>
+                        </div>
+                        <div class="range-block" data-source="openrouter">
+                            <div class="range-block-title">
+                                Top A
+                            </div>
+                            <div class="range-block-range-and-counter">
+                                <div class="range-block-range">
+                                    <input type="range" id="top_a_openai" min="0" max="1" step="0.05">
+                                </div>
+                                <div class="range-block-counter">
+                                    <input type="number" id="top_a_counter_openai" min="0" max="1" step="0.05" data-for="top_a_openai">
+                                </div>
+                            </div>
+                        </div>
                         Quick Prompts Edit
diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index 8ac693cb9..85a650901 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -188,6 +188,8 @@ const default_settings = {
     count_pen: 0.0,
     top_p_openai: 1.0,
     top_k_openai: 0,
+    min_p_openai: 0,
+    top_a_openai: 1,
     stream_openai: false,
     openai_max_context: max_4k,
     openai_max_tokens: 300,
@@ -253,6 +255,8 @@ const oai_settings = {
     count_pen: 0.0,
     top_p_openai: 1.0,
     top_k_openai: 0,
+    min_p_openai: 0,
+    top_a_openai: 1,
     stream_openai: false,
     openai_max_context: max_4k,
     openai_max_tokens: 300,
@@ -1299,7 +1303,7 @@ function getChatCompletionModel() {
     }
 }
 
-function getOpenRouterModelTemplate(option){
+function getOpenRouterModelTemplate(option) {
     const model = model_list.find(x => x.id === option?.element?.value);
 
     if (!option.id || !model) {
@@ -1600,6 +1604,8 @@ async function sendOpenAIRequest(type, messages, signal) {
 
     if (isOpenRouter) {
         generate_data['top_k'] = Number(oai_settings.top_k_openai);
+        generate_data['min_p'] = Number(oai_settings.min_p_openai);
+        generate_data['top_a'] = Number(oai_settings.top_a_openai);
         generate_data['use_fallback'] = oai_settings.openrouter_use_fallback;
 
         if (isTextCompletion) {
@@ -2361,6 +2367,8 @@ function loadOpenAISettings(data, settings) {
     oai_settings.count_pen = settings.count_pen ?? default_settings.count_pen;
     oai_settings.top_p_openai = settings.top_p_openai ?? default_settings.top_p_openai;
     oai_settings.top_k_openai = settings.top_k_openai ?? default_settings.top_k_openai;
+    oai_settings.top_a_openai = settings.top_a_openai ?? default_settings.top_a_openai;
+    oai_settings.min_p_openai = settings.min_p_openai ?? default_settings.min_p_openai;
     oai_settings.stream_openai = settings.stream_openai ?? default_settings.stream_openai;
     oai_settings.openai_max_context = settings.openai_max_context ?? default_settings.openai_max_context;
     oai_settings.openai_max_tokens = settings.openai_max_tokens ?? default_settings.openai_max_tokens;
@@ -2492,6 +2500,10 @@ function loadOpenAISettings(data, settings) {
     $('#top_k_openai').val(oai_settings.top_k_openai);
     $('#top_k_counter_openai').val(Number(oai_settings.top_k_openai).toFixed(0));
 
+    $('#top_a_openai').val(oai_settings.top_a_openai);
+    $('#top_a_counter_openai').val(Number(oai_settings.top_a_openai));
+    $('#min_p_openai').val(oai_settings.min_p_openai);
+    $('#min_p_counter_openai').val(Number(oai_settings.min_p_openai));
     $('#seed_openai').val(oai_settings.seed);
 
     if (settings.reverse_proxy !== undefined) oai_settings.reverse_proxy = settings.reverse_proxy;
@@ -2636,6 +2648,8 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
         count_penalty: settings.count_pen,
         top_p: settings.top_p_openai,
         top_k: settings.top_k_openai,
+        top_a: settings.top_a_openai,
+        min_p: settings.min_p_openai,
         openai_max_context: settings.openai_max_context,
         openai_max_tokens: settings.openai_max_tokens,
         wrap_in_quotes: settings.wrap_in_quotes,
@@ -2995,6 +3009,8 @@ function onSettingsPresetChange() {
         count_penalty: ['#count_pen', 'count_pen', false],
         top_p: ['#top_p_openai', 'top_p_openai', false],
         top_k: ['#top_k_openai', 'top_k_openai', false],
+        top_a: ['#top_a_openai', 'top_a_openai', false],
+        min_p: ['#min_p_openai', 'min_p_openai', false],
         max_context_unlocked: ['#oai_max_context_unlocked', 'max_context_unlocked', true],
         openai_model: ['#model_openai_select', 'openai_model', false],
         claude_model: ['#model_claude_select', 'claude_model', false],
@@ -3683,50 +3699,62 @@ $(document).ready(async function () {
         updateScaleForm();
     });
 
-    $(document).on('input', '#temp_openai', function () {
+    $('#temp_openai').on('input', function () {
         oai_settings.temp_openai = Number($(this).val());
         $('#temp_counter_openai').val(Number($(this).val()).toFixed(2));
         saveSettingsDebounced();
     });
 
-    $(document).on('input', '#freq_pen_openai', function () {
+    $('#freq_pen_openai').on('input', function () {
         oai_settings.freq_pen_openai = Number($(this).val());
         $('#freq_pen_counter_openai').val(Number($(this).val()).toFixed(2));
         saveSettingsDebounced();
     });
 
-    $(document).on('input', '#pres_pen_openai', function () {
+    $('#pres_pen_openai').on('input', function () {
         oai_settings.pres_pen_openai = Number($(this).val());
         $('#pres_pen_counter_openai').val(Number($(this).val()).toFixed(2));
         saveSettingsDebounced();
     });
 
-    $(document).on('input', '#count_pen', function () {
+    $('#count_pen').on('input', function () {
         oai_settings.count_pen = Number($(this).val());
         $('#count_pen_counter').val(Number($(this).val()).toFixed(2));
         saveSettingsDebounced();
     });
 
-    $(document).on('input', '#top_p_openai', function () {
+    $('#top_p_openai').on('input', function () {
         oai_settings.top_p_openai = Number($(this).val());
         $('#top_p_counter_openai').val(Number($(this).val()).toFixed(2));
         saveSettingsDebounced();
     });
 
-    $(document).on('input', '#top_k_openai', function () {
+    $('#top_k_openai').on('input', function () {
         oai_settings.top_k_openai = Number($(this).val());
         $('#top_k_counter_openai').val(Number($(this).val()).toFixed(0));
         saveSettingsDebounced();
     });
 
-    $(document).on('input', '#openai_max_context', function () {
+    $('#top_a_openai').on('input', function () {
+        oai_settings.top_a_openai = Number($(this).val());
+        $('#top_a_counter_openai').val(Number($(this).val()));
+        saveSettingsDebounced();
+    });
+
+    $('#min_p_openai').on('input', function () {
+        oai_settings.min_p_openai = Number($(this).val());
+        $('#min_p_counter_openai').val(Number($(this).val()));
+        saveSettingsDebounced();
+    });
+
+    $('#openai_max_context').on('input', function () {
         oai_settings.openai_max_context = Number($(this).val());
         $('#openai_max_context_counter').val(`${$(this).val()}`);
         calculateOpenRouterCost();
         saveSettingsDebounced();
     });
 
-    $(document).on('input', '#openai_max_tokens', function () {
+    $('#openai_max_tokens').on('input', function () {
         oai_settings.openai_max_tokens = Number($(this).val());
         calculateOpenRouterCost();
         saveSettingsDebounced();
diff --git a/src/endpoints/backends/chat-completions.js b/src/endpoints/backends/chat-completions.js
index 247f16612..3685f3bb8 100644
--- a/src/endpoints/backends/chat-completions.js
+++ b/src/endpoints/backends/chat-completions.js
@@ -721,6 +721,14 @@ router.post('/generate', jsonParser, function (request, response) {
         headers = { 'HTTP-Referer': request.headers.referer };
         bodyParams = { 'transforms': ['middle-out'] };
 
+        if (request.body.min_p !== undefined) {
+            bodyParams['min_p'] = request.body.min_p;
+        }
+
+        if (request.body.top_a !== undefined) {
+            bodyParams['top_a'] = request.body.top_a;
+        }
+
        if (request.body.use_fallback) {
            bodyParams['route'] = 'fallback';
        }
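
Reviewer note: the patch only plumbs the two new values through (slider -> oai_settings -> generate_data -> bodyParams, and only when the source is OpenRouter); the actual sampling happens on OpenRouter's side. For orientation, below is a rough sketch of one commonly used formulation of Min P and Top A. It is not code from this patch; applyMinPTopA is a made-up helper and the probabilities are illustrative.

// Illustrative only: common definitions of the two samplers this patch exposes.
// Min P keeps tokens whose probability is at least minP * (best token's probability).
// Top A keeps tokens whose probability is at least topA * (best token's probability)^2.
function applyMinPTopA(probs, minP, topA) {
    const maxP = Math.max(...probs.map(x => x.p));
    return probs.filter(x => x.p >= minP * maxP && x.p >= topA * maxP * maxP);
}

// Example: best token at p = 0.6, minP = 0.1 (cutoff 0.06), topA = 0.2 (cutoff 0.072).
const kept = applyMinPTopA(
    [{ token: 'a', p: 0.6 }, { token: 'b', p: 0.3 }, { token: 'c', p: 0.02 }],
    0.1,
    0.2,
);
console.log(kept.map(x => x.token)); // ['a', 'b'] ('c' falls below both cutoffs)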