Add min_p and top_a for OpenRouter

Cohee 2024-01-12 17:15:13 +02:00
parent 1367642eb4
commit e33ac6a78a
3 changed files with 71 additions and 9 deletions
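For context: min_p and top_a are OpenRouter sampler settings in the same family as top_k and top_p. As commonly defined, min_p drops tokens whose probability falls below min_p times the top token's probability, and top_a drops tokens below top_a times the square of the top token's probability. The sketch below is illustrative only; the function name and exact formulas are assumptions based on those common definitions, not code from this commit.

// Illustrative sketch (not from this commit): commonly cited cutoff rules for
// min_p and top_a sampling, applied to a token probability distribution.
// Treats 0 as "disabled" for both parameters.
function applyMinPTopA(probs, minP = 0, topA = 0) {
    const pMax = Math.max(...probs);
    const minPCut = minP > 0 ? minP * pMax : 0;          // min_p: cutoff linear in pMax
    const topACut = topA > 0 ? topA * pMax * pMax : 0;   // top_a: cutoff quadratic in pMax
    const kept = probs.map((p) => (p >= minPCut && p >= topACut ? p : 0));
    const total = kept.reduce((a, b) => a + b, 0);
    return kept.map((p) => p / total);                   // renormalize the surviving mass
}

// Example: with min_p = 0.25, the flat tail below 0.25 * 0.5 = 0.125 is pruned.
console.log(applyMinPTopA([0.5, 0.3, 0.1, 0.05, 0.05], 0.25, 0));
// → roughly [0.625, 0.375, 0, 0, 0]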

View File

@@ -523,6 +523,32 @@
            </div>
        </div>
    </div>
    <div data-newbie-hidden class="range-block" data-source="openrouter">
        <div class="range-block-title" data-i18n="Min P">
            Min P
        </div>
        <div class="range-block-range-and-counter">
            <div class="range-block-range">
                <input type="range" id="min_p_openai" name="volume" min="0" max="1" step="0.001">
            </div>
            <div class="range-block-counter">
                <input type="number" min="0" max="1" step="0.001" data-for="min_p_openai" id="min_p_counter_openai">
            </div>
        </div>
    </div>
    <div data-newbie-hidden class="range-block" data-source="openrouter">
        <div class="range-block-title" data-i18n="Top A">
            Top A
        </div>
        <div class="range-block-range-and-counter">
            <div class="range-block-range">
                <input type="range" id="top_a_openai" name="volume" min="0" max="1" step="0.001">
            </div>
            <div class="range-block-counter">
                <input type="number" min="0" max="1" step="0.001" data-for="top_a_openai" id="top_a_counter_openai">
            </div>
        </div>
    </div>
    <div class="inline-drawer m-t-1 wide100p">
        <div class="inline-drawer-toggle inline-drawer-header">
            <b data-i18n="Quick Edit">Quick Prompts Edit</b>
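Both new controls follow the existing range-block pattern: the block is shown only for the OpenRouter source (data-source="openrouter"), hidden in simplified mode (data-newbie-hidden), and pairs a slider with a number input linked through the counter's data-for attribute. A minimal sketch of that pairing, assuming jQuery as in the rest of the UI code; the app's actual generic binding lives outside this diff and may differ:

// Hypothetical sketch of the data-for pairing used by the markup above; the
// real binding in the app is generic and not part of this commit.
$('input[type="number"][data-for]').on('input', function () {
    const sliderId = $(this).data('for');                   // e.g. "min_p_openai" or "top_a_openai"
    $('#' + sliderId).val($(this).val()).trigger('input');  // mirror the counter onto its slider
});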

View File

@@ -188,6 +188,8 @@ const default_settings = {
    count_pen: 0.0,
    top_p_openai: 1.0,
    top_k_openai: 0,
    min_p_openai: 0,
    top_a_openai: 1,
    stream_openai: false,
    openai_max_context: max_4k,
    openai_max_tokens: 300,
@@ -253,6 +255,8 @@ const oai_settings = {
    count_pen: 0.0,
    top_p_openai: 1.0,
    top_k_openai: 0,
    min_p_openai: 0,
    top_a_openai: 1,
    stream_openai: false,
    openai_max_context: max_4k,
    openai_max_tokens: 300,
@@ -1299,7 +1303,7 @@ function getChatCompletionModel() {
    }
}
function getOpenRouterModelTemplate(option){
function getOpenRouterModelTemplate(option) {
    const model = model_list.find(x => x.id === option?.element?.value);
    if (!option.id || !model) {
@@ -1600,6 +1604,8 @@ async function sendOpenAIRequest(type, messages, signal) {
    if (isOpenRouter) {
        generate_data['top_k'] = Number(oai_settings.top_k_openai);
        generate_data['min_p'] = Number(oai_settings.min_p_openai);
        generate_data['top_a'] = Number(oai_settings.top_a_openai);
        generate_data['use_fallback'] = oai_settings.openrouter_use_fallback;
        if (isTextCompletion) {
@@ -2361,6 +2367,8 @@ function loadOpenAISettings(data, settings) {
    oai_settings.count_pen = settings.count_pen ?? default_settings.count_pen;
    oai_settings.top_p_openai = settings.top_p_openai ?? default_settings.top_p_openai;
    oai_settings.top_k_openai = settings.top_k_openai ?? default_settings.top_k_openai;
    oai_settings.top_a_openai = settings.top_a_openai ?? default_settings.top_a_openai;
    oai_settings.min_p_openai = settings.min_p_openai ?? default_settings.min_p_openai;
    oai_settings.stream_openai = settings.stream_openai ?? default_settings.stream_openai;
    oai_settings.openai_max_context = settings.openai_max_context ?? default_settings.openai_max_context;
    oai_settings.openai_max_tokens = settings.openai_max_tokens ?? default_settings.openai_max_tokens;
@@ -2492,6 +2500,10 @@ function loadOpenAISettings(data, settings) {
    $('#top_k_openai').val(oai_settings.top_k_openai);
    $('#top_k_counter_openai').val(Number(oai_settings.top_k_openai).toFixed(0));
    $('#top_a_openai').val(oai_settings.top_a_openai);
    $('#top_a_counter_openai').val(Number(oai_settings.top_a_openai));
    $('#min_p_openai').val(oai_settings.min_p_openai);
    $('#min_p_counter_openai').val(Number(oai_settings.min_p_openai));
    $('#seed_openai').val(oai_settings.seed);
    if (settings.reverse_proxy !== undefined) oai_settings.reverse_proxy = settings.reverse_proxy;
@@ -2636,6 +2648,8 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
        count_penalty: settings.count_pen,
        top_p: settings.top_p_openai,
        top_k: settings.top_k_openai,
        top_a: settings.top_a_openai,
        min_p: settings.min_p_openai,
        openai_max_context: settings.openai_max_context,
        openai_max_tokens: settings.openai_max_tokens,
        wrap_in_quotes: settings.wrap_in_quotes,
@@ -2995,6 +3009,8 @@ function onSettingsPresetChange() {
        count_penalty: ['#count_pen', 'count_pen', false],
        top_p: ['#top_p_openai', 'top_p_openai', false],
        top_k: ['#top_k_openai', 'top_k_openai', false],
        top_a: ['#top_a_openai', 'top_a_openai', false],
        min_p: ['#min_p_openai', 'min_p_openai', false],
        max_context_unlocked: ['#oai_max_context_unlocked', 'max_context_unlocked', true],
        openai_model: ['#model_openai_select', 'openai_model', false],
        claude_model: ['#model_claude_select', 'claude_model', false],
@@ -3683,50 +3699,62 @@ $(document).ready(async function () {
        updateScaleForm();
    });
    $(document).on('input', '#temp_openai', function () {
    $('#temp_openai').on('input', function () {
        oai_settings.temp_openai = Number($(this).val());
        $('#temp_counter_openai').val(Number($(this).val()).toFixed(2));
        saveSettingsDebounced();
    });
    $(document).on('input', '#freq_pen_openai', function () {
    $('#freq_pen_openai').on('input', function () {
        oai_settings.freq_pen_openai = Number($(this).val());
        $('#freq_pen_counter_openai').val(Number($(this).val()).toFixed(2));
        saveSettingsDebounced();
    });
    $(document).on('input', '#pres_pen_openai', function () {
    $('#pres_pen_openai').on('input', function () {
        oai_settings.pres_pen_openai = Number($(this).val());
        $('#pres_pen_counter_openai').val(Number($(this).val()).toFixed(2));
        saveSettingsDebounced();
    });
    $(document).on('input', '#count_pen', function () {
    $('#count_pen').on('input', function () {
        oai_settings.count_pen = Number($(this).val());
        $('#count_pen_counter').val(Number($(this).val()).toFixed(2));
        saveSettingsDebounced();
    });
    $(document).on('input', '#top_p_openai', function () {
    $('#top_p_openai').on('input', function () {
        oai_settings.top_p_openai = Number($(this).val());
        $('#top_p_counter_openai').val(Number($(this).val()).toFixed(2));
        saveSettingsDebounced();
    });
    $(document).on('input', '#top_k_openai', function () {
    $('#top_k_openai').on('input', function () {
        oai_settings.top_k_openai = Number($(this).val());
        $('#top_k_counter_openai').val(Number($(this).val()).toFixed(0));
        saveSettingsDebounced();
    });
    $(document).on('input', '#openai_max_context', function () {
    $('#top_a_openai').on('input', function () {
        oai_settings.top_a_openai = Number($(this).val());
        $('#top_a_counter_openai').val(Number($(this).val()));
        saveSettingsDebounced();
    });
    $('#min_p_openai').on('input', function () {
        oai_settings.min_p_openai = Number($(this).val());
        $('#min_p_counter_openai').val(Number($(this).val()));
        saveSettingsDebounced();
    });
    $('#openai_max_context').on('input', function () {
        oai_settings.openai_max_context = Number($(this).val());
        $('#openai_max_context_counter').val(`${$(this).val()}`);
        calculateOpenRouterCost();
        saveSettingsDebounced();
    });
    $(document).on('input', '#openai_max_tokens', function () {
    $('#openai_max_tokens').on('input', function () {
        oai_settings.openai_max_tokens = Number($(this).val());
        calculateOpenRouterCost();
        saveSettingsDebounced();
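A note on the handler block above: the repeated-looking lines are before/after pairs, with the delegated bindings of the form $(document).on('input', '#id', ...) rewritten as direct bindings $('#id').on('input', ...), and new handlers added for #top_a_openai and #min_p_openai. The new values then follow the same path as top_k: the handlers write min_p_openai and top_a_openai into oai_settings, sendOpenAIRequest forwards them only for OpenRouter, saveOpenAIPreset stores them in presets under the short keys top_a and min_p, and onSettingsPresetChange maps the short keys back. A hypothetical, self-contained sketch of that key mapping (not the app's actual code, values are examples):

// Hypothetical sketch of the preset-to-settings key renaming implied by the
// saveOpenAIPreset and onSettingsPresetChange hunks above.
const preset = { top_k: 0, top_a: 0.2, min_p: 0.05 };                        // example preset JSON fragment
const oai_settings = { top_k_openai: 0, top_a_openai: 1, min_p_openai: 0 };  // stand-in for the real object
const presetToSetting = { top_k: 'top_k_openai', top_a: 'top_a_openai', min_p: 'min_p_openai' };
for (const [presetKey, settingKey] of Object.entries(presetToSetting)) {
    if (preset[presetKey] !== undefined) {
        oai_settings[settingKey] = preset[presetKey];                        // e.g. top_a → top_a_openai
    }
}
console.log(oai_settings); // { top_k_openai: 0, top_a_openai: 0.2, min_p_openai: 0.05 }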

View File

@@ -721,6 +721,14 @@ router.post('/generate', jsonParser, function (request, response) {
        headers = { 'HTTP-Referer': request.headers.referer };
        bodyParams = { 'transforms': ['middle-out'] };
        if (request.body.min_p !== undefined) {
            bodyParams['min_p'] = request.body.min_p;
        }
        if (request.body.top_a !== undefined) {
            bodyParams['top_a'] = request.body.top_a;
        }
        if (request.body.use_fallback) {
            bodyParams['route'] = 'fallback';
        }
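On the server side, the new parameters are copied into bodyParams (the OpenRouter-specific extras, presumably merged into the request forwarded upstream) only when the client actually sent them, so other chat completion sources and older clients are unaffected. A hypothetical helper that mirrors the same guard logic, with two quick checks:

// Hypothetical helper (not in the repo) reproducing the guard above.
function buildOpenRouterBodyParams(body) {
    const bodyParams = { 'transforms': ['middle-out'] };
    if (body.min_p !== undefined) bodyParams['min_p'] = body.min_p;
    if (body.top_a !== undefined) bodyParams['top_a'] = body.top_a;
    if (body.use_fallback) bodyParams['route'] = 'fallback';
    return bodyParams;
}

console.log(buildOpenRouterBodyParams({ min_p: 0.05, top_a: 0.2 }));
// → { transforms: [ 'middle-out' ], min_p: 0.05, top_a: 0.2 }
console.log(buildOpenRouterBodyParams({}));
// → { transforms: [ 'middle-out' ] }   (nothing extra is forwarded)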