From ba925f388c4ce270feb0c258b1b00bb0d3bab195 Mon Sep 17 00:00:00 2001
From: based
Date: Tue, 22 Aug 2023 21:29:18 +1000
Subject: [PATCH] added more options to the scale request

---
 public/index.html        |  4 ++--
 public/scripts/openai.js | 36 ++++++++++++++++++++++++------------
 server.js                |  9 +++------
 3 files changed, 29 insertions(+), 20 deletions(-)

diff --git a/public/index.html b/public/index.html
index d61fb0250..14327cb34 100644
--- a/public/index.html
+++ b/public/index.html
@@ -710,7 +710,7 @@
-            <div class="range-block" data-source="openai,claude,windowai,openrouter,ai21">
+            <div class="range-block" data-source="openai,claude,windowai,openrouter,ai21,scale">
                 <div class="range-block-title" data-i18n="Top P">
                     Top P
                 </div>
@@ -1578,7 +1578,7 @@
-            <div class="range-block" data-source="openai,openrouter">
+            <div class="range-block" data-source="openai,openrouter,scale">
                 <div class="range-block-title" data-i18n="Logit Bias">
                     Logit Bias
                 </div>
diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index 0d64843aa..535660ac8 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -1084,23 +1084,34 @@ function saveModelList(data) {
     }
 }
 
-async function sendAltScaleRequest(openai_msgs_tosend, signal) {
+async function sendAltScaleRequest(openai_msgs_tosend, logit_bias, signal) {
     const generate_url = '/generate_altscale';
 
-    let firstMsg = substituteParams(openai_msgs_tosend[0].content);
-    let subsequentMsgs = openai_msgs_tosend.slice(1);
+    let firstSysMsgs = []
+    for(let msg of openai_msgs_tosend){
+        if(msg.role === 'system') {
+            firstSysMsgs.push(substituteParams(msg.content));
+        } else {
+            break;
+        }
+    }
 
-    const joinedMsgs = subsequentMsgs.reduce((acc, obj) => {
+    let subsequentMsgs = openai_msgs_tosend.slice(firstSysMsgs.length);
+
+    const joinedSysMsgs = substituteParams(firstSysMsgs.join("\n"));
+    const joinedSubsequentMsgs = subsequentMsgs.reduce((acc, obj) => {
         return acc + obj.role + ": " + obj.content + "\n";
     }, "");
-    openai_msgs_tosend = substituteParams(joinedMsgs);
-    console.log(openai_msgs_tosend)
+
+    openai_msgs_tosend = substituteParams(joinedSubsequentMsgs);
 
     const generate_data = {
-        sysprompt: firstMsg,
+        sysprompt: joinedSysMsgs,
         prompt: openai_msgs_tosend,
         temp: parseFloat(oai_settings.temp_openai),
+        top_p: parseFloat(oai_settings.top_p_openai),
         max_tokens: parseFloat(oai_settings.openai_max_tokens),
+        logit_bias: logit_bias,
     }
 
     const response = await fetch(generate_url, {
@@ -1109,6 +1120,7 @@ async function sendAltScaleRequest(openai_msgs_tosend, signal) {
         headers: getRequestHeaders(),
         signal: signal
     });
+
     const data = await response.json();
     return data.output;
 }
@@ -1143,17 +1155,13 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
         openai_msgs_tosend = substituteParams(joinedMsgs);
     }
 
-    if (isScale && !!$('#scale-alt').prop('checked')) {
-        return sendAltScaleRequest(openai_msgs_tosend, signal)
-    }
-
     // If we're using the window.ai extension, use that instead
     // Doesn't support logit bias yet
     if (oai_settings.chat_completion_source == chat_completion_sources.WINDOWAI) {
         return sendWindowAIRequest(openai_msgs_tosend, signal, stream);
     }
 
-    const logitBiasSources = [chat_completion_sources.OPENAI, chat_completion_sources.OPENROUTER];
+    const logitBiasSources = [chat_completion_sources.OPENAI, chat_completion_sources.OPENROUTER, chat_completion_sources.SCALE];
     if (oai_settings.bias_preset_selected
         && logitBiasSources.includes(oai_settings.chat_completion_source)
         && Array.isArray(oai_settings.bias_presets[oai_settings.bias_preset_selected])
@@ -1162,6 +1170,10 @@ async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
         biasCache = logit_bias;
     }
 
+    if (isScale && oai_settings.use_alt_scale) {
+        return sendAltScaleRequest(openai_msgs_tosend, logit_bias, signal)
+    }
+
     const model = getChatCompletionModel();
     const generate_data = {
         "messages": openai_msgs_tosend,
diff --git a/server.js b/server.js
index fea7e4a37..f990e049a 100644
--- a/server.js
+++ b/server.js
@@ -3329,11 +3329,11 @@ app.post("/generate_altscale", jsonParser, function (request, response_generate_
             modelType: 'OpenAi',
             maxTokens: request.body.max_tokens,
             temperature: request.body.temp,
-            stop: null,
+            stop: "user:",
             suffix: null,
-            topP: null,
+            topP: request.body.top_p,
             logprobs: null,
-            logitBias: null
+            logitBias: request.body.logit_bias
         },
         inputs: [
             {
@@ -3348,11 +3348,8 @@
             values: {
                 'variant.taxonomy': ['undefined'],
                 'prompt.variablesSourceDataId': ['undefined'],
-                'modelParameters.stop': ['undefined'],
                 'modelParameters.suffix': ['undefined'],
-                'modelParameters.topP': ['undefined'],
                 'modelParameters.logprobs': ['undefined'],
-                'modelParameters.logitBias': ['undefined']
             }
         }
     })
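
Note: with this patch applied, the body that sendAltScaleRequest() POSTs to
/generate_altscale looks roughly like the sketch below. This is illustrative only;
the concrete values are placeholders, and the real ones come from oai_settings and
the current chat history.

    // Sketch of the new request body (placeholder values, not defaults):
    const generate_data = {
        sysprompt: "sys msg A\nsys msg B",            // leading 'system' messages, joined with "\n"
        prompt: "user: Hello\nassistant: Hi there\n", // remaining messages flattened as "role: content"
        temp: 0.9,
        top_p: 0.95,                   // new: parseFloat(oai_settings.top_p_openai)
        max_tokens: 300,
        logit_bias: { "9776": -100 },  // new: bias map computed in sendOpenAIRequest (token id is a placeholder)
    };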
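On the server side, /generate_altscale now forwards these fields to the Scale
spellbook call instead of hard-coding them to null, and drops the matching
'undefined' entries from the variables block. With the placeholder values above,
the upstream modelParameters would look roughly like this (again a sketch, not
captured output):

    // Sketch of the forwarded model parameters (placeholder values):
    modelParameters: {
        modelType: 'OpenAi',
        maxTokens: 300,              // request.body.max_tokens
        temperature: 0.9,            // request.body.temp
        stop: "user:",               // now fixed, presumably to stop generation before the next user turn
        suffix: null,
        topP: 0.95,                  // request.body.top_p
        logprobs: null,
        logitBias: { "9776": -100 }  // request.body.logit_bias
    }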