diff --git a/public/script.js b/public/script.js
index fdfc4c058..c4719cf22 100644
--- a/public/script.js
+++ b/public/script.js
@@ -104,6 +104,7 @@ export {
     nai_settings,
     token,
     name1,
+    name2,
     is_send_press,
     api_server_textgenerationwebui,
     count_view_mes,
@@ -1457,7 +1458,7 @@ async function Generate(type, automatic_trigger, force_name2) {//encode("dsfs").
 
 
         if (main_api == 'openai') {
-            let prompt = prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldInfoAfter, extension_prompt);
+            let prompt = prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldInfoAfter, extension_prompt, promptBias);
             sendOpenAIRequest(prompt).then(onSuccess).catch(onError);
         }
         else {
@@ -3277,6 +3278,10 @@ $(document).ready(function () {
                 }
             },
             error: function (jqXHR, exception) {
+                if (jqXHR.status == 403) {
+                    callPopup(`Character can't be imported due to an invalid name. Please choose another name.`, 'text');
+                }
+
                 //alert('ERROR: '+xhr.status+ ' Status Text: '+xhr.statusText+' '+xhr.responseText);
                 $("#create_button").removeAttr("disabled");
             },
@@ -3941,6 +3946,9 @@ $(document).ready(function () {
                 }
             },
             error: function (jqXHR, exception) {
+                if (jqXHR.status == 403) {
+                    callPopup(`Character can't be imported due to an invalid name. Please choose another name.`, 'text');
+                }
                 $("#create_button").removeAttr("disabled");
             },
         });
diff --git a/public/scripts/openai.js b/public/scripts/openai.js
index fa4766130..e7454c2c8 100644
--- a/public/scripts/openai.js
+++ b/public/scripts/openai.js
@@ -15,6 +15,7 @@ import {
     setOnlineStatus,
     token,
     name1,
+    name2,
 } from "../script.js";
 
 import {
@@ -184,7 +185,7 @@ function formatWorldInfo(value) {
     return `[Details of the fictional world the RP set in:\n${value}\n]`;
 }
 
-function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldInfoAfter, extensionPrompt) {
+function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldInfoAfter, extensionPrompt, bias) {
     let this_max_context = oai_settings.openai_max_context;
     let nsfw_toggle_prompt = "";
     let enhance_definitions_prompt = "";
@@ -220,6 +221,11 @@ function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldInfoAft
     let start_chat_count = countTokens([new_chat_msg]);
     let total_count = countTokens([prompt_msg], true) + start_chat_count;
 
+    if (bias) {
+        let bias_msg = { "role": "system", "content": bias };
+        openai_msgs.push(bias_msg);
+    }
+
     // The user wants to always have all example messages in the context
     if (pin_examples) {
         // first we send *all* example messages
@@ -314,6 +320,7 @@ async function sendOpenAIRequest(openai_msgs_tosend) {
     };
 
     const generate_url = '/generate_openai';
+    // TODO: fix streaming
     const streaming = oai_settings.stream_openai;
     const last_view_mes = count_view_mes;
 
@@ -392,7 +399,7 @@ function countTokens(messages, full = false) {
     jQuery.ajax({
         async: false,
         type: 'POST', //
-        url: '/tokenize_openai', //
+        url: `/tokenize_openai?model=${oai_settings.openai_model}`,
         data: JSON.stringify(messages),
         dataType: "json",
         contentType: "application/json",
diff --git a/server.js b/server.js
index 5553b5c9a..9280a2a80 100644
--- a/server.js
+++ b/server.js
@@ -557,7 +557,7 @@ app.post("/createcharacter", urlencodedParser, function (request, response) {
 
     if (request.body.ch_name !== sanitize(request.body.ch_name)) {
         console.error('Malicious character name prevented');
-        return response.send(400);
+        return response.sendStatus(403);
     }
 
     console.log('/createcharacter -- looking for -- ' + (charactersPath + request.body.ch_name + '.png'));
@@ -636,7 +636,7 @@ app.post("/deletecharacter", urlencodedParser, function (request, response) {
 
     if (request.body.avatar_url !== sanitize(request.body.avatar_url)) {
         console.error('Malicious filename prevented');
-        return response.sendStatus(400);
+        return response.sendStatus(403);
     }
 
     const avatarPath = charactersPath + request.body.avatar_url;
@@ -649,7 +649,7 @@ app.post("/deletecharacter", urlencodedParser, function (request, response) {
 
     if (dir_name !== sanitize(dir_name)) {
        console.error('Malicious dirname prevented');
-        return response.sendStatus(400);
+        return response.sendStatus(403);
     }
 
     rimraf(path.join(chatsPath, sanitize(dir_name)), (err) => {
@@ -797,7 +797,7 @@ app.post("/delbackground", jsonParser, function (request, response) {
 
     if (request.body.bg !== sanitize(request.body.bg)) {
         console.error('Malicious bg name prevented');
-        return response.sendStatus(400);
+        return response.sendStatus(403);
     }
 
     const fileName = path.join('public/backgrounds/', sanitize(request.body.bg));
@@ -1239,7 +1239,7 @@ app.post("/importcharacter", urlencodedParser, async function (request, response
         if (jsonData.name !== undefined) {
             if (jsonData.name !== sanitize(jsonData.name)) {
                 console.error('Malicious character name prevented');
-                return response.send(400);
+                return response.sendStatus(403);
             }
 
             png_name = getPngName(jsonData.name);
@@ -1249,7 +1249,7 @@ app.post("/importcharacter", urlencodedParser, async function (request, response
         } else if (jsonData.char_name !== undefined) {//json Pygmalion notepad
             if (jsonData.char_name !== sanitize(jsonData.char_name)) {
                 console.error('Malicious character name prevented');
-                return response.send(400);
+                return response.sendStatus(403);
             }
 
             png_name = getPngName(jsonData.char_name);
@@ -1269,7 +1269,7 @@ app.post("/importcharacter", urlencodedParser, async function (request, response
 
             if (jsonData.name !== sanitize(jsonData.name)) {
                 console.error('Malicious character name prevented');
-                return response.send(400);
+                return response.sendStatus(403);
             }
 
             png_name = getPngName(jsonData.name);
@@ -1687,16 +1687,31 @@ app.post("/generate_openai", jsonParser, function(request, response_generate_ope
     });
 });
 
-const turbo_encoder = tiktoken.get_encoding("cl100k_base");
+const tokenizers = {
+    'gpt-3.5-turbo-0301': tiktoken.encoding_for_model('gpt-3.5-turbo-0301'),
+};
+
+function getTokenizer(model) {
+    let tokenizer = tokenizers[model];
+
+    if (!tokenizer) {
+        tokenizer = tiktoken.encoding_for_model(model);
+        tokenizers[model] = tokenizer;
+    }
+
+    return tokenizer;
+}
 
 app.post("/tokenize_openai", jsonParser, function(request, response_tokenize_openai = response){
     if(!request.body) return response_tokenize_openai.sendStatus(400);
 
+    const tokenizer = getTokenizer(request.query.model);
+
     let num_tokens = 0;
     for (var msg of request.body) {
         num_tokens += 4;
         for (const [key, value] of Object.entries(msg)) {
-            num_tokens += turbo_encoder.encode(value).length;
+            num_tokens += tokenizer.encode(value).length;
            if (key == "name") {
                num_tokens += -1;
            }
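
Note on the server.js tokenizer change: getTokenizer() memoizes one tiktoken encoder per model name so repeated /tokenize_openai calls don't rebuild the encoding. A minimal sketch of the same caching pattern is below, using only the tiktoken calls already present in this diff (encoding_for_model, get_encoding); the try/catch and the cl100k_base fallback for unrecognized or missing model strings are suggestions and assumptions on my part, not behavior this diff implements, and the names tokenizerCache/getTokenizerSketch are hypothetical.

```js
// Sketch only: per-model encoder cache for the /tokenize_openai endpoint.
// Assumes the same `tiktoken` import that server.js already uses.
const tokenizerCache = {};

function getTokenizerSketch(model) {
    if (tokenizerCache[model]) {
        return tokenizerCache[model];
    }

    let tokenizer;
    try {
        // Resolve the encoding for a known model name.
        tokenizer = tiktoken.encoding_for_model(model);
    } catch {
        // Unknown or missing model string: fall back to the cl100k_base
        // encoding that the old turbo_encoder constant used (assumption).
        tokenizer = tiktoken.get_encoding("cl100k_base");
    }

    tokenizerCache[model] = tokenizer;
    return tokenizer;
}
```

Caching matters here because the client-side countTokens() is called more than once per prompt build (start_chat_count and total_count above), so constructing a fresh encoder on every request would be wasteful.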
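
For reference, with the countTokens() change the client now passes the active model as a query parameter. A rough way to exercise the updated endpoint by hand might look like the sketch below; the message payload is illustrative, any CSRF/auth headers the real UI attaches are omitted, and the shape of the JSON response is not shown in the truncated hunk above.

```js
// Illustrative request against the updated /tokenize_openai endpoint
// (assumes a locally running server; headers sent by the real UI omitted).
const messages = [
    { role: "system", content: "You are a helpful assistant." },
    { role: "user", content: "Hello there!" },
];

const res = await fetch(`/tokenize_openai?model=${encodeURIComponent("gpt-3.5-turbo-0301")}`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(messages),
});

// Exact response payload shape is not visible in this diff.
console.log(await res.json());
```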