From f633f620655a66a5003250d6af01493cea1477f8 Mon Sep 17 00:00:00 2001
From: Cohee <18619528+Cohee1207@users.noreply.github.com>
Date: Wed, 23 Aug 2023 03:36:04 +0300
Subject: [PATCH 01/13] Don't save null values to OpenAI logit bias

---
 public/scripts/openai.js | 103 ++++++++++++++++++++++-----------
 1 file changed, 57 insertions(+), 46 deletions(-)

diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index aef41beb8..1b62a8336 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -165,6 +165,7 @@ const default_settings = {
     new_group_chat_prompt: default_new_group_chat_prompt,
     new_example_chat_prompt: default_new_example_chat_prompt,
     continue_nudge_prompt: default_continue_nudge_prompt,
+    nsfw_avoidance_prompt: default_nsfw_avoidance_prompt,
     bias_preset_selected: default_bias,
     bias_presets: default_bias_presets,
     wi_format: default_wi_format,
@@ -228,6 +229,7 @@ const oai_settings = {
     use_ai21_tokenizer: false,
     exclude_assistant: false,
     use_alt_scale: false,
+    nsfw_avoidance_prompt: default_nsfw_avoidance_prompt,
 };

 let openai_setting_names;
@@ -772,6 +774,7 @@ function preparePromptsForChatCompletion(Scenario, charPersonality, name2, world
  * @param {string} content.bias - The bias to be added in the conversation.
  * @param {string} content.type - The type of the chat, can be 'impersonate'.
  * @param {string} content.quietPrompt - The quiet prompt to be used in the conversation.
+ * @param {string} content.cyclePrompt - The last prompt used for chat message continuation.
  * @param {Array} content.extensionPrompts - An array of additional prompts.
  * @param dryRun - Whether this is a live call or not.
  * @returns {(*[]|boolean)[]} An array where the first element is the prepared chat and the second element is a boolean flag.
@@ -882,7 +885,7 @@ async function sendWindowAIRequest(openai_msgs_tosend, signal, stream) {
     let finished = false;
     const currentModel = await window.ai.getCurrentModel();
-    let temperature = parseFloat(oai_settings.temp_openai);
+    let temperature = Number(oai_settings.temp_openai);

     if ((currentModel.includes('claude') || currentModel.includes('palm-2')) && temperature > claude_max_temp) {
         console.warn(`Claude and PaLM models only supports temperature up to ${claude_max_temp}. Clamping ${temperature} to ${claude_max_temp}.`);
@@ -1014,7 +1017,7 @@ function saveModelList(data) {
         $('#model_openrouter_select').empty();
         $('#model_openrouter_select').append($('
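
Note on the @@ -882,7 hunk: parseFloat() and Number() disagree on strings that are not clean numerals, which is the practical difference the swap touches. A quick illustration in plain JavaScript (standard language semantics, no SillyTavern code assumed):

    // parseFloat() stops at the first character it cannot parse;
    // Number() converts the whole value or yields NaN.
    console.log(parseFloat('0.9a')); // 0.9  (trailing junk ignored)
    console.log(Number('0.9a'));     // NaN  (whole string must be numeric)
    console.log(parseFloat(''));     // NaN
    console.log(Number(''));         // 0
    console.log(parseFloat(null));   // NaN  (null stringifies to 'null' first)
    console.log(Number(null));       // 0

Both functions can still produce NaN for garbage input; the swap mainly tightens partial parses like '0.9a'. A NaN temperature makes the NaN > claude_max_temp comparison in the context lines above evaluate to false, so the clamp branch is skipped in that case.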
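
The hunk that actually implements the subject line ("Don't save null values to OpenAI logit bias") falls outside the excerpt, which is cut off mid-hunk above. As a rough sketch of the kind of guard the subject describes (the entry shape and function name here are hypothetical, not taken from the patch):

    // Hypothetical entry shape: { text: string, value: number|null }.
    // Drop entries whose bias value is null or undefined so they are
    // never persisted into the saved preset.
    function sanitizeLogitBias(entries) {
        return entries.filter((entry) => entry != null && entry.value != null);
    }

    // Usage sketch:
    // preset.logit_bias = sanitizeLogitBias(preset.logit_bias);

Note that filter() returns a new array, so a caller that mutates the stored preset in place would need to reassign the result rather than rely on side effects.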