From f0c7c96d3c25707ab8463f0694672361e9acb2ae Mon Sep 17 00:00:00 2001
From: SillyLossy
Date: Sun, 21 May 2023 01:36:35 +0300
Subject: [PATCH] Added switch to unbrick streaming on some unsupported
 proxies

---
 public/index.html        | 13 ++++++++++++-
 public/scripts/openai.js | 37 +++++++++++++++++++++++++++----------
 2 files changed, 39 insertions(+), 11 deletions(-)

diff --git a/public/index.html b/public/index.html
index fe3b2d347..df8c33efc 100644
--- a/public/index.html
+++ b/public/index.html
@@ -366,6 +366,17 @@
                             </div>
                         </div>
 
+                        <div class="range-block">
+                            <div class="range-block-title justifyLeft">
+                                Legacy Streaming Processing
+                            </div>
+                            <label class="checkbox_label" for="legacy_streaming">
+                                <input id="legacy_streaming" type="checkbox" />
+                            </label>
+                            <div class="toggle-description justifyLeft">
+                                Enable this if the streaming doesn't work with your proxy.
+                            </div>
+                        </div>
                         <div class="range-block">
                             <div class="range-block-title">
                                 Context Size (tokens)
@@ -2489,4 +2500,4 @@
     </script>
 </body>
 
-</html>
\ No newline at end of file
+</html>
diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index 82a0c7849..ef5f41cd6 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -101,6 +101,7 @@ const default_settings = {
     openai_model: 'gpt-3.5-turbo',
     jailbreak_system: false,
     reverse_proxy: '',
+    legacy_streaming: false,
 };
 
 const oai_settings = {
@@ -125,6 +126,7 @@
     openai_model: 'gpt-3.5-turbo',
     jailbreak_system: false,
     reverse_proxy: '',
+    legacy_streaming: false,
 };
 
 let openai_setting_names;
@@ -561,13 +563,19 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
             let response = decoder.decode(value);
 
             tryParseStreamingError(response);
-            
+
+            let eventList = [];
+
             // ReadableStream's buffer is not guaranteed to contain full SSE messages as they arrive in chunks
             // We need to buffer chunks until we have one or more full messages (separated by double newlines)
-            messageBuffer += response;
-            let eventList = messageBuffer.split("\n\n");
-            // Last element will be an empty string or a leftover partial message
-            messageBuffer = eventList.pop();
+            if (!oai_settings.legacy_streaming) {
+                messageBuffer += response;
+                eventList = messageBuffer.split("\n\n");
+                // Last element will be an empty string or a leftover partial message
+                messageBuffer = eventList.pop();
+            } else {
+                eventList = response.split("\n");
+            }
 
             for (let event of eventList) {
                 if (!event.startsWith("data"))
@@ -624,7 +632,7 @@ async function calculateLogitBias() {
 
 function countTokens(messages, full = false) {
     let chatId = 'undefined';
-    
+
     try {
         if (selected_group) {
             chatId = groups.find(x => x.id == selected_group)?.chat_id;
@@ -656,7 +664,7 @@ function countTokens(messages, full = false) {
         else {
             jQuery.ajax({
                 async: false,
-                type: 'POST', // 
+                type: 'POST', //
                 url: `/tokenize_openai?model=${oai_settings.openai_model}`,
                 data: JSON.stringify([message]),
                 dataType: "json",
@@ -703,6 +711,7 @@ function loadOpenAISettings(data, settings) {
     oai_settings.openai_max_tokens = settings.openai_max_tokens ?? default_settings.openai_max_tokens;
     oai_settings.bias_preset_selected = settings.bias_preset_selected ?? default_settings.bias_preset_selected;
     oai_settings.bias_presets = settings.bias_presets ?? default_settings.bias_presets;
+    oai_settings.legacy_streaming = settings.legacy_streaming ?? default_settings.legacy_streaming;
 
     if (settings.nsfw_toggle !== undefined) oai_settings.nsfw_toggle = !!settings.nsfw_toggle;
     if (settings.keep_example_dialogue !== undefined) oai_settings.keep_example_dialogue = !!settings.keep_example_dialogue;
@@ -726,6 +735,7 @@ function loadOpenAISettings(data, settings) {
     $('#wrap_in_quotes').prop('checked', oai_settings.wrap_in_quotes);
     $('#nsfw_first').prop('checked', oai_settings.nsfw_first);
     $('#jailbreak_system').prop('checked', oai_settings.jailbreak_system);
+    $('#legacy_streaming').prop('checked', oai_settings.legacy_streaming);
 
     if (settings.main_prompt !== undefined) oai_settings.main_prompt = settings.main_prompt;
     if (settings.nsfw_prompt !== undefined) oai_settings.nsfw_prompt = settings.nsfw_prompt;
@@ -774,8 +784,8 @@ async function getStatusOpen() {
     };
 
     return jQuery.ajax({
-        type: 'POST', // 
-        url: '/getstatus_openai', // 
+        type: 'POST', //
+        url: '/getstatus_openai', //
         data: JSON.stringify(data),
        beforeSend: function () {
             if (oai_settings.reverse_proxy) {
@@ -845,6 +855,7 @@ async function saveOpenAIPreset(name, settings) {
         jailbreak_system: settings.jailbreak_system,
         impersonation_prompt: settings.impersonation_prompt,
         bias_preset_selected: settings.bias_preset_selected,
+        legacy_streaming: settings.legacy_streaming,
     };
 
     const savePresetSettings = await fetch(`/savepreset_openai?name=${name}`, {
@@ -1108,6 +1119,7 @@ function onSettingsPresetChange() {
         jailbreak_prompt: ['#jailbreak_prompt_textarea', 'jailbreak_prompt', false],
         impersonation_prompt: ['#impersonation_prompt_textarea', 'impersonation_prompt', false],
         bias_preset_selected: ['#openai_logit_bias_preset', 'bias_preset_selected', false],
+        legacy_streaming: ['#legacy_streaming', 'legacy_streaming', false],
     };
 
     for (const [key, [selector, setting, isCheckbox]] of Object.entries(settingsToUpdate)) {
@@ -1169,7 +1181,7 @@ function onReverseProxyInput() {
 async function onConnectButtonClick(e) {
     e.stopPropagation();
     const api_key_openai = $('#api_key_openai').val().trim();
-    
+
     if (api_key_openai.length) {
         await writeSecret(SECRET_KEYS.OPENAI, api_key_openai);
     }
@@ -1328,6 +1340,11 @@ $(document).ready(function () {
         saveSettingsDebounced();
     });
 
+    $('#legacy_streaming').on('input', function () {
+        oai_settings.legacy_streaming = !!$(this).prop('checked');
+        saveSettingsDebounced();
+    });
+
     $("#api_button_openai").on('click', onConnectButtonClick);
     $("#openai_reverse_proxy").on('input', onReverseProxyInput);
     $("#model_openai_select").on('change', onModelChange);
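
For readers skimming the diff, here is a minimal standalone sketch of the two parsing modes the new legacy_streaming flag switches between. It is not code from openai.js: the function name parseSseChunk and the sample payload are illustrative assumptions. The default path buffers incoming chunks and only emits events once a complete SSE message (terminated by a double newline) has arrived; the legacy path splits every chunk on single newlines, which helps with proxies that re-chunk the stream and break the double-newline framing.

// Illustrative sketch only; parseSseChunk is not a function in openai.js.
// Default mode: buffer chunks and emit only complete SSE events ("\n\n"-separated).
// Legacy mode: split each chunk on single newlines for proxies that break SSE framing.
let messageBuffer = '';

function parseSseChunk(chunk, legacyStreaming) {
    let events = [];

    if (!legacyStreaming) {
        messageBuffer += chunk;
        events = messageBuffer.split('\n\n');
        // The last element is an empty string or a partial event; keep it buffered.
        messageBuffer = events.pop();
    } else {
        events = chunk.split('\n');
    }

    // Only "data: ..." lines carry payloads.
    return events
        .filter(event => event.startsWith('data'))
        .map(event => event.substring('data: '.length));
}

// One complete event plus the start of a second; only the complete one is returned.
const payloads = parseSseChunk('data: {"choices":[{"delta":{"content":"Hi"}}]}\n\ndata: {"cho', false);
console.log(payloads); // [ '{"choices":[{"delta":{"content":"Hi"}}]}' ]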