Properly highlight last OpenAI message

This commit is contained in:
SillyLossy
2023-05-02 18:05:24 +03:00
parent a310c03082
commit d159ddf4d1
2 changed files with 17 additions and 11 deletions

View File

@@ -70,7 +70,7 @@ import {
generateOpenAIPromptCache, generateOpenAIPromptCache,
oai_settings, oai_settings,
is_get_status_openai, is_get_status_openai,
openai_msgs, openai_messages_count,
} from "./scripts/openai.js"; } from "./scripts/openai.js";
import { import {
@@ -1724,16 +1724,7 @@ async function Generate(type, automatic_trigger, force_name2) {
arrMes[arrMes.length] = item; arrMes[arrMes.length] = item;
} else { } else {
$("#chat").children().removeClass('lastInContext'); setInContextMessages(arrMes.length, type);
let lastmsg = arrMes.length;
if (type === 'swipe') {
lastmsg++;
}
//console.log(arrMes.length);
//console.log(lastmsg);
$(`#chat .mes:nth-last-child(${lastmsg} of :not([is_system="true"])`).addClass('lastInContext');
break; break;
} }
@@ -2005,6 +1996,7 @@ async function Generate(type, automatic_trigger, force_name2) {
if (main_api == 'openai') { if (main_api == 'openai') {
let prompt = await prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldInfoAfter, afterScenarioAnchor, promptBias, type); let prompt = await prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldInfoAfter, afterScenarioAnchor, promptBias, type);
setInContextMessages(openai_messages_count, type);
if (isStreamingEnabled()) { if (isStreamingEnabled()) {
streamingProcessor.generator = await sendOpenAIRequest(prompt, streamingProcessor.abortController.signal); streamingProcessor.generator = await sendOpenAIRequest(prompt, streamingProcessor.abortController.signal);
@@ -2174,6 +2166,16 @@ async function Generate(type, automatic_trigger, force_name2) {
//console.log('generate ending'); //console.log('generate ending');
} //generate ends } //generate ends
/**
 * Marks the trailing run of visible (non-system) chat messages as being part
 * of the current generation context by toggling the `lastInContext` CSS class.
 * Clears the class from all chat children first so only one run is highlighted.
 *
 * @param {number} lastmsg - Number of trailing messages inside the context window.
 * @param {string} type - Generation type; for 'swipe' the last message is being
 *   regenerated, so the highlight window is shifted back by one.
 */
function setInContextMessages(lastmsg, type) {
$("#chat").children().removeClass('lastInContext');
if (type === 'swipe') {
lastmsg++;
}
// Fix: the :nth-last-child(<n> of <selector>) selector was missing its closing
// parenthesis, making the whole selector invalid so the class was never applied.
$(`#chat .mes:nth-last-child(${lastmsg} of :not([is_system="true"]))`).addClass('lastInContext');
}
// TODO: move to textgen-settings.js // TODO: move to textgen-settings.js
function getTextGenGenerationData(finalPromt, this_amount_gen, isImpersonate) { function getTextGenGenerationData(finalPromt, this_amount_gen, isImpersonate) {
return { return {

View File

@@ -33,6 +33,7 @@ import {
export { export {
is_get_status_openai, is_get_status_openai,
openai_msgs, openai_msgs,
openai_messages_count,
oai_settings, oai_settings,
loadOpenAISettings, loadOpenAISettings,
setOpenAIMessages, setOpenAIMessages,
@@ -45,6 +46,7 @@ export {
let openai_msgs = []; let openai_msgs = [];
let openai_msgs_example = []; let openai_msgs_example = [];
let openai_messages_count = 0;
let is_get_status_openai = false; let is_get_status_openai = false;
let is_api_button_press_openai = false; let is_api_button_press_openai = false;
@@ -414,6 +416,8 @@ async function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldI
} }
} }
} }
openai_messages_count = openai_msgs_tosend.filter(x => x.role == "user" || x.role == "assistant").length;
// reverse the messages array because we had the newest at the top to remove the oldest, // reverse the messages array because we had the newest at the top to remove the oldest,
// now we want proper order // now we want proper order
openai_msgs_tosend.reverse(); openai_msgs_tosend.reverse();