mirror of
				https://github.com/SillyTavern/SillyTavern.git
				synced 2025-06-05 21:59:27 +02:00 
			
		
		
		
	Merge branch 'main' into dev
This commit is contained in:
		| @@ -370,6 +370,17 @@ | ||||
|                                         <input id="openai_reverse_proxy" type="text" class="text_pole" placeholder="https://api.openai.com/v1" maxlength="100" /> | ||||
|                                     </div> | ||||
|                                 </div> | ||||
|                                 <div class="range-block"> | ||||
|                                     <div class="range-block-title justifyLeft"> | ||||
|                                         <label for="legacy_streaming" class="checkbox_label"> | ||||
|                                             <input id="legacy_streaming" type="checkbox" /> | ||||
|                                             Legacy Streaming Processing | ||||
|                                         </label> | ||||
|                                     </div> | ||||
|                                     <div class="toggle-description justifyLeft"> | ||||
|                                         Enable this if the streaming doesn't work with your proxy. | ||||
|                                     </div> | ||||
|                                 </div> | ||||
|                                 <div class="range-block"> | ||||
|                                     <div class="range-block-title"> | ||||
|                                         Context Size (tokens) | ||||
|   | ||||
| @@ -103,6 +103,7 @@ const default_settings = { | ||||
|     openai_model: 'gpt-3.5-turbo', | ||||
|     jailbreak_system: false, | ||||
|     reverse_proxy: '', | ||||
|     legacy_streaming: false, | ||||
| }; | ||||
|  | ||||
| const oai_settings = { | ||||
| @@ -127,6 +128,7 @@ const oai_settings = { | ||||
|     openai_model: 'gpt-3.5-turbo', | ||||
|     jailbreak_system: false, | ||||
|     reverse_proxy: '', | ||||
|     legacy_streaming: false, | ||||
| }; | ||||
|  | ||||
| let openai_setting_names; | ||||
| @@ -567,12 +569,18 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) { | ||||
|  | ||||
|                 tryParseStreamingError(response); | ||||
|  | ||||
|                 let eventList = []; | ||||
|  | ||||
|                 // ReadableStream's buffer is not guaranteed to contain full SSE messages as they arrive in chunks | ||||
|                 // We need to buffer chunks until we have one or more full messages (separated by double newlines) | ||||
|                 if (!oai_settings.legacy_streaming) { | ||||
|                     messageBuffer += response; | ||||
| -                   let eventList = messageBuffer.split("\n\n"); | ||||
| +                   eventList = messageBuffer.split("\n\n"); | ||||
|                     // Last element will be an empty string or a leftover partial message | ||||
|                     messageBuffer = eventList.pop(); | ||||
|                 } else { | ||||
|                     eventList = response.split("\n"); | ||||
|                 } | ||||
|  | ||||
|                 for (let event of eventList) { | ||||
|                     if (!event.startsWith("data")) | ||||
| @@ -741,6 +749,7 @@ function loadOpenAISettings(data, settings) { | ||||
|     oai_settings.openai_max_tokens = settings.openai_max_tokens ?? default_settings.openai_max_tokens; | ||||
|     oai_settings.bias_preset_selected = settings.bias_preset_selected ?? default_settings.bias_preset_selected; | ||||
|     oai_settings.bias_presets = settings.bias_presets ?? default_settings.bias_presets; | ||||
|     oai_settings.legacy_streaming = settings.legacy_streaming ?? default_settings.legacy_streaming; | ||||
|  | ||||
|     if (settings.nsfw_toggle !== undefined) oai_settings.nsfw_toggle = !!settings.nsfw_toggle; | ||||
|     if (settings.keep_example_dialogue !== undefined) oai_settings.keep_example_dialogue = !!settings.keep_example_dialogue; | ||||
| @@ -764,6 +773,7 @@ function loadOpenAISettings(data, settings) { | ||||
|     $('#wrap_in_quotes').prop('checked', oai_settings.wrap_in_quotes); | ||||
|     $('#nsfw_first').prop('checked', oai_settings.nsfw_first); | ||||
|     $('#jailbreak_system').prop('checked', oai_settings.jailbreak_system); | ||||
|     $('#legacy_streaming').prop('checked', oai_settings.legacy_streaming); | ||||
|  | ||||
|     if (settings.main_prompt !== undefined) oai_settings.main_prompt = settings.main_prompt; | ||||
|     if (settings.nsfw_prompt !== undefined) oai_settings.nsfw_prompt = settings.nsfw_prompt; | ||||
| @@ -884,6 +894,7 @@ async function saveOpenAIPreset(name, settings) { | ||||
|         impersonation_prompt: settings.impersonation_prompt, | ||||
|         bias_preset_selected: settings.bias_preset_selected, | ||||
|         reverse_proxy: settings.reverse_proxy, | ||||
|         legacy_streaming: settings.legacy_streaming, | ||||
|     }; | ||||
|  | ||||
|     const savePresetSettings = await fetch(`/savepreset_openai?name=${name}`, { | ||||
| @@ -1148,6 +1159,7 @@ function onSettingsPresetChange() { | ||||
|         impersonation_prompt: ['#impersonation_prompt_textarea', 'impersonation_prompt', false], | ||||
|         bias_preset_selected: ['#openai_logit_bias_preset', 'bias_preset_selected', false], | ||||
|         reverse_proxy: ['#openai_reverse_proxy', 'reverse_proxy', false], | ||||
|         legacy_streaming: ['#legacy_streaming', 'legacy_streaming', false], | ||||
|     }; | ||||
|  | ||||
|     for (const [key, [selector, setting, isCheckbox]] of Object.entries(settingsToUpdate)) { | ||||
| @@ -1368,6 +1380,11 @@ $(document).ready(function () { | ||||
|         saveSettingsDebounced(); | ||||
|     }); | ||||
|  | ||||
|     $('#legacy_streaming').on('input', function () { | ||||
|         oai_settings.legacy_streaming = !!$(this).prop('checked'); | ||||
|         saveSettingsDebounced(); | ||||
|     }); | ||||
|  | ||||
|     $("#api_button_openai").on("click", onConnectButtonClick); | ||||
|     $("#openai_reverse_proxy").on("input", onReverseProxyInput); | ||||
|     $("#model_openai_select").on("change", onModelChange); | ||||
|   | ||||
		Reference in New Issue
	
	Block a user