Mirror of https://github.com/SillyTavern/SillyTavern.git (synced 2025-06-05 21:59:27 +02:00)
Added switch to unbrick streaming on some unsupported proxies
@@ -366,6 +366,17 @@
 <input id="openai_reverse_proxy" type="text" class="text_pole" placeholder="https://api.openai.com/v1" maxlength="100" />
 </div>
 </div>
+<div class="range-block">
+    <div class="range-block-title justifyLeft">
+        <label for="legacy_streaming" class="checkbox_label">
+            <input id="legacy_streaming" type="checkbox" />
+            Legacy Streaming Processing
+        </label>
+    </div>
+    <div class="toggle-description justifyLeft">
+        Enable this if the streaming doesn't work with your proxy.
+    </div>
+</div>
 <div class="range-block">
 <div class="range-block-title">
 Context Size (tokens)
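
For context (not part of the commit): a standards-compliant SSE stream separates events with a blank line, but some reverse proxies relay the data as single newline-delimited lines, which is the breakage this toggle works around. A minimal illustration with hypothetical payloads:

    // Standard SSE framing: events are separated by a blank line ("\n\n").
    const standardChunk = 'data: {"choices":[{"delta":{"content":"Hel"}}]}\n\n'
        + 'data: {"choices":[{"delta":{"content":"lo"}}]}\n\n';

    // Some proxies drop the blank-line separators and emit one event per line.
    const proxyChunk = 'data: {"choices":[{"delta":{"content":"Hel"}}]}\n'
        + 'data: {"choices":[{"delta":{"content":"lo"}}]}\n';

    console.log(standardChunk.split("\n\n").length); // 3: two events plus a trailing ""
    console.log(proxyChunk.split("\n\n").length);    // 1: no event boundary is ever found

Splitting the proxy output on "\n\n" never yields a complete event, so streaming appears to hang until the legacy mode below splits on "\n" instead.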
@@ -101,6 +101,7 @@ const default_settings = {
     openai_model: 'gpt-3.5-turbo',
     jailbreak_system: false,
     reverse_proxy: '',
+    legacy_streaming: false,
 };

 const oai_settings = {
@@ -125,6 +126,7 @@ const oai_settings = {
     openai_model: 'gpt-3.5-turbo',
     jailbreak_system: false,
     reverse_proxy: '',
+    legacy_streaming: false,
 };

 let openai_setting_names;
@@ -562,12 +564,18 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {

         tryParseStreamingError(response);

+        let eventList = [];
+
         // ReadableStream's buffer is not guaranteed to contain full SSE messages as they arrive in chunks
         // We need to buffer chunks until we have one or more full messages (separated by double newlines)
-        messageBuffer += response;
-        let eventList = messageBuffer.split("\n\n");
-        // Last element will be an empty string or a leftover partial message
-        messageBuffer = eventList.pop();
+        if (!oai_settings.legacy_streaming) {
+            messageBuffer += response;
+            eventList = messageBuffer.split("\n\n");
+            // Last element will be an empty string or a leftover partial message
+            messageBuffer = eventList.pop();
+        } else {
+            eventList = response.split("\n");
+        }

         for (let event of eventList) {
             if (!event.startsWith("data"))
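
As a standalone sketch of the branch above (hypothetical helper name and state object, not code from the repository), the two parsing strategies can be isolated like this:

    // `state.buffer` persists across calls, mirroring `messageBuffer` above.
    function collectStreamEvents(chunk, state, legacyStreaming) {
        if (!legacyStreaming) {
            // Default path: accumulate chunks and split on the SSE event delimiter,
            // keeping any trailing partial message for the next chunk.
            state.buffer += chunk;
            const events = state.buffer.split("\n\n");
            state.buffer = events.pop();
            return events;
        }
        // Legacy path: treat every line as a complete event, matching proxies
        // that forward line-delimited data without blank separators.
        return chunk.split("\n");
    }

    // Usage: feed chunks as they arrive from the ReadableStream reader.
    const state = { buffer: "" };
    console.log(collectStreamEvents('data: {"a":1}\n\ndata: {"b"', state, false)); // [ 'data: {"a":1}' ]
    console.log(state.buffer); // 'data: {"b"' is held back until the rest arrives

The trade-off is that the legacy path cannot reassemble an event split across two network chunks, which is presumably why it is an opt-in rather than the default.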
@@ -703,6 +711,7 @@ function loadOpenAISettings(data, settings) {
     oai_settings.openai_max_tokens = settings.openai_max_tokens ?? default_settings.openai_max_tokens;
     oai_settings.bias_preset_selected = settings.bias_preset_selected ?? default_settings.bias_preset_selected;
     oai_settings.bias_presets = settings.bias_presets ?? default_settings.bias_presets;
+    oai_settings.legacy_streaming = settings.legacy_streaming ?? default_settings.legacy_streaming;

     if (settings.nsfw_toggle !== undefined) oai_settings.nsfw_toggle = !!settings.nsfw_toggle;
     if (settings.keep_example_dialogue !== undefined) oai_settings.keep_example_dialogue = !!settings.keep_example_dialogue;
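
Because the loader uses nullish coalescing, settings saved before this commit (which lack the key entirely) simply fall back to the default. A quick illustration with hypothetical objects:

    const default_settings = { legacy_streaming: false };

    const savedBeforeThisCommit = {};                        // key absent
    const savedAfterTogglingOn = { legacy_streaming: true };

    console.log(savedBeforeThisCommit.legacy_streaming ?? default_settings.legacy_streaming); // false
    console.log(savedAfterTogglingOn.legacy_streaming ?? default_settings.legacy_streaming);  // true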
@@ -726,6 +735,7 @@ function loadOpenAISettings(data, settings) {
     $('#wrap_in_quotes').prop('checked', oai_settings.wrap_in_quotes);
     $('#nsfw_first').prop('checked', oai_settings.nsfw_first);
     $('#jailbreak_system').prop('checked', oai_settings.jailbreak_system);
+    $('#legacy_streaming').prop('checked', oai_settings.legacy_streaming);

     if (settings.main_prompt !== undefined) oai_settings.main_prompt = settings.main_prompt;
     if (settings.nsfw_prompt !== undefined) oai_settings.nsfw_prompt = settings.nsfw_prompt;
@@ -845,6 +855,7 @@ async function saveOpenAIPreset(name, settings) {
         jailbreak_system: settings.jailbreak_system,
         impersonation_prompt: settings.impersonation_prompt,
         bias_preset_selected: settings.bias_preset_selected,
+        legacy_streaming: settings.legacy_streaming,
     };

     const savePresetSettings = await fetch(`/savepreset_openai?name=${name}`, {
@@ -1108,6 +1119,7 @@ function onSettingsPresetChange() {
         jailbreak_prompt: ['#jailbreak_prompt_textarea', 'jailbreak_prompt', false],
         impersonation_prompt: ['#impersonation_prompt_textarea', 'impersonation_prompt', false],
         bias_preset_selected: ['#openai_logit_bias_preset', 'bias_preset_selected', false],
+        legacy_streaming: ['#legacy_streaming', 'legacy_streaming', false],
     };

     for (const [key, [selector, setting, isCheckbox]] of Object.entries(settingsToUpdate)) {
@@ -1328,6 +1340,11 @@ $(document).ready(function () {
         saveSettingsDebounced();
     });

+    $('#legacy_streaming').on('input', function () {
+        oai_settings.legacy_streaming = !!$(this).prop('checked');
+        saveSettingsDebounced();
+    });
+
     $("#api_button_openai").on('click', onConnectButtonClick);
     $("#openai_reverse_proxy").on('input', onReverseProxyInput);
     $("#model_openai_select").on('change', onModelChange);
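
The new handler follows the same pattern as the surrounding toggles: read the checkbox, coerce it with `!!`, store it, and schedule a debounced save so rapid clicking does not hammer the settings file. A generic debounce sketch (illustrative only, not SillyTavern's implementation):

    // Only the last call within the delay window actually runs `fn`.
    function debounce(fn, delayMs) {
        let timer = null;
        return function (...args) {
            clearTimeout(timer);
            timer = setTimeout(() => fn.apply(this, args), delayMs);
        };
    }

    // Stand-in for the real saveSettingsDebounced: three quick toggles,
    // but "settings persisted" is logged once.
    const persistSettings = debounce(() => console.log("settings persisted"), 500);
    persistSettings();
    persistSettings();
    persistSettings();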