#1782 OpenAI multiswipe
Parent: 4c148c2da2
Commit: 7ac6ed267f
@@ -427,6 +427,14 @@
     <input type="number" id="openai_max_tokens" name="openai_max_tokens" class="text_pole" min="50" max="8000">
 </div>
 </div>
+<div class="range-block" data-source="openai,custom">
+    <div class="range-block-title" data-i18n="Multiple swipes per generation">
+        Multiple swipes per generation
+    </div>
+    <div class="wide100p">
+        <input type="number" id="n_openai" name="n_openai" class="text_pole" min="1" value="1">
+    </div>
+</div>
 <div data-source="openrouter">
     Max prompt cost: <span id="openrouter_max_prompt_cost">Unknown</span>
 </div>
@@ -4466,11 +4466,11 @@ function extractMessageFromData(data) {
         case 'koboldhorde':
             return data.text;
         case 'textgenerationwebui':
-            return data.choices?.[0]?.text ?? data.content ?? data.response;
+            return data.choices?.[0]?.text ?? data.content ?? data.response ?? '';
         case 'novel':
             return data.output;
         case 'openai':
-            return data;
+            return data?.choices?.[0]?.message?.content ?? data?.choices?.[0]?.text ?? '';
         default:
             return '';
     }
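For context (not part of the diff): the reworked 'openai' branch assumes the standard chat-completion response shape, where the generated text sits at choices[0].message.content for chat models and choices[0].text for text-completion models. A minimal sketch:

// Sketch only: an abridged chat-completion body as the backend would return it.
const data = {
    choices: [
        { index: 0, message: { content: 'Hello there!' } },
    ],
};

// With the change above, extractMessageFromData(data) resolves to 'Hello there!'
// for the 'openai' case instead of handing back the whole response object.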
@@ -4485,11 +4485,19 @@ function extractMessageFromData(data) {
 function extractMultiSwipes(data, type) {
     const swipes = [];
+
+    if (!data) {
+        return swipes;
+    }
+
+    if (type === 'continue' || type === 'impersonate' || type === 'quiet') {
+        return swipes;
+    }
 
-    if (main_api === 'textgenerationwebui' && textgen_settings.type === textgen_types.APHRODITE) {
+    if (main_api === 'openai' || (main_api === 'textgenerationwebui' && textgen_settings.type === textgen_types.APHRODITE)) {
         if (!Array.isArray(data.choices)) {
             return swipes;
         }
 
         const multiSwipeCount = data.choices.length - 1;
 
         if (multiSwipeCount <= 0) {
@@ -4497,8 +4505,9 @@ function extractMultiSwipes(data, type) {
         }
 
         for (let i = 1; i < data.choices.length; i++) {
-            const text = cleanUpMessage(data.choices[i].text, false, false, false);
-            swipes.push(text);
+            const text = data?.choices[i]?.message?.content ?? data?.choices[i]?.text ?? '';
+            const cleanedText = cleanUpMessage(text, false, false, false);
+            swipes.push(cleanedText);
         }
     }
 
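For illustration (not part of the diff), a minimal sketch of how the two helpers split a multi-choice response, assuming main_api === 'openai' and a generation type outside the excluded continue/impersonate/quiet cases ('normal' below is just a stand-in):

// Sketch only: a hypothetical response produced with n = 3.
const data = {
    choices: [
        { index: 0, message: { content: 'First answer' } },
        { index: 1, message: { content: 'Second answer' } },
        { index: 2, message: { content: 'Third answer' } },
    ],
};

// extractMessageFromData(data)       → 'First answer' (becomes the visible message)
// extractMultiSwipes(data, 'normal') → ['Second answer', 'Third answer'] (after cleanUpMessage)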
@@ -247,6 +247,7 @@ const default_settings = {
     bypass_status_check: false,
     continue_prefill: false,
     seed: -1,
+    n: 1,
 };
 
 const oai_settings = {
@@ -315,6 +316,7 @@ const oai_settings = {
     bypass_status_check: false,
     continue_prefill: false,
     seed: -1,
+    n: 1,
 };
 
 export let proxies = [
@@ -1545,6 +1547,7 @@ async function sendOpenAIRequest(type, messages, signal) {
     const isContinue = type === 'continue';
     const stream = oai_settings.stream_openai && !isQuiet && !isScale && !isAI21 && !(isGoogle && oai_settings.google_model.includes('bison'));
     const useLogprobs = !!power_user.request_token_probabilities;
+    const canMultiSwipe = oai_settings.n > 1 && !isContinue && !isImpersonate && !isQuiet && (isOAI || isCustom);
 
     if (isTextCompletion && isOpenRouter) {
         messages = convertChatCompletionToInstruct(messages, type);
@@ -1592,6 +1595,7 @@ async function sendOpenAIRequest(type, messages, signal) {
         'logit_bias': logit_bias,
         'stop': getCustomStoppingStrings(openai_max_stop_strings),
         'chat_completion_source': oai_settings.chat_completion_source,
+        'n': canMultiSwipe ? oai_settings.n : undefined,
     };
 
     // Empty array will produce a validation error
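For illustration (not part of the diff): assuming oai_settings.n = 3 on a regular generation against the OpenAI or Custom source, canMultiSwipe evaluates to true and the request body gains the extra field; for continue/impersonate/quiet prompts or other sources it stays undefined and is dropped when the body is serialized to JSON:

// Sketch only, values illustrative.
const generate_data_fragment = {
    'chat_completion_source': 'openai',
    'n': 3, // undefined (and therefore omitted from the JSON body) when canMultiSwipe is false
};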
@@ -1699,6 +1703,7 @@ async function sendOpenAIRequest(type, messages, signal) {
         return async function* streamData() {
             let text = '';
             let utf8Decoder = new TextDecoder();
+            const swipes = [];
             while (true) {
                 const { done, value } = await reader.read();
                 if (done) return;
@ -1706,8 +1711,15 @@ async function sendOpenAIRequest(type, messages, signal) {
|
|||
if (isSSEStream && rawData === '[DONE]') return;
|
||||
tryParseStreamingError(response, rawData);
|
||||
const parsed = JSON.parse(rawData);
|
||||
|
||||
if (Array.isArray(parsed?.choices) && parsed?.choices?.[0]?.index > 0) {
|
||||
const swipeIndex = parsed.choices[0].index - 1;
|
||||
swipes[swipeIndex] = (swipes[swipeIndex] || '') + getStreamingReply(parsed);
|
||||
} else {
|
||||
text += getStreamingReply(parsed);
|
||||
yield { text, swipes: [], logprobs: parseChatCompletionLogprobs(parsed) };
|
||||
}
|
||||
|
||||
yield { text, swipes: swipes, logprobs: parseChatCompletionLogprobs(parsed) };
|
||||
}
|
||||
};
|
||||
}
|
||||
|
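For illustration (not part of the diff), a self-contained mock of the routing logic above, assuming each streamed chunk carries the usual { choices: [{ index, delta: { content } }] } shape and that getStreamingReply() returns the delta text:

// Sketch only: deltas with index 0 extend `text`; higher indices fill swipe slots.
let text = '';
const swipes = [];

function routeChunk(parsed) {
    const content = parsed?.choices?.[0]?.delta?.content ?? '';
    if (Array.isArray(parsed?.choices) && parsed?.choices?.[0]?.index > 0) {
        const swipeIndex = parsed.choices[0].index - 1;
        swipes[swipeIndex] = (swipes[swipeIndex] || '') + content;
    } else {
        text += content;
    }
}

routeChunk({ choices: [{ index: 0, delta: { content: 'Hel' } }] }); // text = 'Hel'
routeChunk({ choices: [{ index: 1, delta: { content: 'Hi' } }] });  // swipes[0] = 'Hi'
routeChunk({ choices: [{ index: 0, delta: { content: 'lo' } }] });  // text = 'Hello'

Every parsed chunk then yields { text, swipes, logprobs }, so the UI can grow the main message and the pending swipes in parallel.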
@@ -1729,7 +1741,7 @@ async function sendOpenAIRequest(type, messages, signal) {
             delay(1).then(() => saveLogprobsForActiveMessage(logprobs, null));
         }
 
-        return !isTextCompletion ? data.choices[0]['message']['content'] : data.choices[0]['text'];
+        return data;
     }
 }
 
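For context (not part of the diff): with sendOpenAIRequest now returning the raw response object in the non-streaming path, extraction is deferred to the helpers shown earlier. A hedged sketch of the expected call site (the caller itself is not shown in this diff):

// Sketch only: the non-streaming result is consumed roughly like this downstream.
const data = await sendOpenAIRequest(type, messages, signal); // full chat-completion body
const message = extractMessageFromData(data);  // text of choice 0
const swipes = extractMultiSwipes(data, type); // choices 1..n-1, cleaned up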
@@ -2524,6 +2536,8 @@ function loadOpenAISettings(data, settings) {
     oai_settings.human_sysprompt_message = settings.human_sysprompt_message ?? default_settings.human_sysprompt_message;
     oai_settings.image_inlining = settings.image_inlining ?? default_settings.image_inlining;
     oai_settings.bypass_status_check = settings.bypass_status_check ?? default_settings.bypass_status_check;
     oai_settings.seed = settings.seed ?? default_settings.seed;
+    oai_settings.n = settings.n ?? default_settings.n;
+
     oai_settings.prompts = settings.prompts ?? default_settings.prompts;
     oai_settings.prompt_order = settings.prompt_order ?? default_settings.prompt_order;
@@ -2628,6 +2642,7 @@ function loadOpenAISettings(data, settings) {
     $('#repetition_penalty_openai').val(oai_settings.repetition_penalty_openai);
     $('#repetition_penalty_counter_openai').val(Number(oai_settings.repetition_penalty_openai));
     $('#seed_openai').val(oai_settings.seed);
+    $('#n_openai').val(oai_settings.n);
 
     if (settings.reverse_proxy !== undefined) oai_settings.reverse_proxy = settings.reverse_proxy;
     $('#openai_reverse_proxy').val(oai_settings.reverse_proxy);
@@ -2812,6 +2827,7 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
         bypass_status_check: settings.bypass_status_check,
         continue_prefill: settings.continue_prefill,
         seed: settings.seed,
+        n: settings.n,
     };
 
     const savePresetSettings = await fetch(`/api/presets/save-openai?name=${name}`, {
@@ -3187,6 +3203,7 @@ function onSettingsPresetChange() {
         image_inlining: ['#openai_image_inlining', 'image_inlining', true],
         continue_prefill: ['#continue_prefill', 'continue_prefill', true],
         seed: ['#seed_openai', 'seed', false],
+        n: ['#n_openai', 'n', false],
     };
 
     const presetName = $('#settings_preset_openai').find(':selected').text();
@@ -4244,6 +4261,11 @@ $(document).ready(async function () {
         saveSettingsDebounced();
     });
 
+    $('#n_openai').on('input', function () {
+        oai_settings.n = Number($(this).val());
+        saveSettingsDebounced();
+    });
+
     $('#custom_api_url_text').on('input', function () {
         oai_settings.custom_url = String($(this).val());
         saveSettingsDebounced();
@@ -794,6 +794,7 @@ router.post('/generate', jsonParser, function (request, response) {
         'stop': isTextCompletion === false ? request.body.stop : undefined,
         'logit_bias': request.body.logit_bias,
         'seed': request.body.seed,
+        'n': request.body.n,
         ...bodyParams,
     };
 
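For illustration (not part of the diff): with the pass-through above, the body the server forwards to the upstream chat completions endpoint now carries the client-requested swipe count, e.g. (field values illustrative):

// Sketch only.
const upstreamBody = {
    model: 'gpt-4-turbo',    // whatever model the client selected
    messages: [/* built prompt */],
    seed: -1,
    n: 3,                    // request.body.n, forwarded unchanged
};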