Move chat renaming logic to server side. Add "quiet" reply generation mode.

SillyLossy
2023-05-03 21:02:23 +03:00
parent bcce613a5d
commit e639666b34
5 changed files with 138 additions and 44 deletions
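
Only three of the five changed files are shown below; the server-side half of the rename (presumably in server.js) is not part of this excerpt. As a rough illustration of what that half entails, here is a minimal sketch assuming an Express handler with JSON body parsing; the route name, request fields, and on-disk layout are assumptions, not code from this commit:

    // Hypothetical sketch only: rename a chat file on disk (route, fields, and paths are assumed).
    // `app` is the existing Express instance.
    const fs = require('fs');
    const path = require('path');

    app.post('/renamechat', express.json(), (request, response) => {
        const { original_file, renamed_file, avatar_url, is_group } = request.body;

        if (!original_file || !renamed_file) {
            return response.sendStatus(400);
        }

        // Assumed layout: group chats in a shared folder, character chats in per-character folders.
        const baseDir = is_group ? path.join('chats', '_groups') : path.join('chats', String(avatar_url));
        const oldPath = path.join(baseDir, original_file);
        const newPath = path.join(baseDir, renamed_file);

        if (!fs.existsSync(oldPath) || fs.existsSync(newPath)) {
            return response.sendStatus(400);
        }

        fs.renameSync(oldPath, newPath);
        return response.send({ ok: true });
    });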

View File

@@ -1159,6 +1159,25 @@ export async function openGroupChat(groupId, chatId) {
     await getGroupChat(groupId);
 }
 
+export async function renameGroupChat(groupId, oldChatId, newChatId) {
+    const group = groups.find(x => x.id === groupId);
+
+    if (!group || !group.chats.includes(oldChatId)) {
+        return;
+    }
+
+    if (group.chat_id === oldChatId) {
+        group.chat_id = newChatId;
+    }
+
+    group.chats.splice(group.chats.indexOf(oldChatId), 1);
+    group.chats.push(newChatId);
+    group.past_metadata[newChatId] = (group.past_metadata[oldChatId] || {});
+    delete group.past_metadata[oldChatId];
+
+    await editGroup(groupId, true, true);
+}
+
 export async function deleteGroupChat(groupId, chatId) {
     const group = groups.find(x => x.id === groupId);
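
renameGroupChat keeps the in-memory group consistent once the chat file itself has been renamed: it swaps the active chat_id if it pointed at the old chat, replaces the id in group.chats, carries past_metadata over to the new id, and persists the result via editGroup. A hedged sketch of how a rename flow could combine the server call and this helper (the route and payload mirror the hypothetical endpoint above and do not appear in this diff):

    // Hypothetical caller: rename the chat file on the server, then patch the group locally.
    async function renameActiveGroupChat(groupId, oldChatId, newChatId) {
        const result = await fetch('/renamechat', {   // assumed route
            method: 'POST',
            headers: getRequestHeaders(),
            body: JSON.stringify({
                original_file: `${oldChatId}.jsonl`,  // assumed file naming
                renamed_file: `${newChatId}.jsonl`,
                is_group: true,
            }),
        });

        if (!result.ok) {
            throw new Error('Server-side chat rename failed');
        }

        await renameGroupChat(groupId, oldChatId, newChatId);
    }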

View File

@@ -144,7 +144,7 @@ function setOpenAIOnlineStatus(value) {
     is_get_status_openai = value;
 }
 
-function setOpenAIMessages(chat) {
+function setOpenAIMessages(chat, quietPrompt) {
     let j = 0;
     // clean openai msgs
     openai_msgs = [];
@@ -176,6 +176,10 @@ function setOpenAIMessages(chat) {
             openai_msgs.splice(i, 0, { "role": 'system', 'content': anchor.trim() })
         }
     }
+
+    if (quietPrompt) {
+        openai_msgs.splice(0, 0, { role: 'system', content: quietPrompt });
+    }
 }
 
 function setOpenAIMessageExamples(mesExamplesArray) {
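
With the new parameter, a quiet prompt travels through the normal prompt-assembly path as a leading system message instead of a visible chat entry. Illustrative values only:

    // Illustrative only: what openai_msgs starts with after passing a quiet prompt.
    setOpenAIMessages(chat, 'Summarize the story so far in one paragraph.');
    // openai_msgs[0] -> { role: 'system', content: 'Summarize the story so far in one paragraph.' }
    // The rest of the array holds the converted chat messages and any in-chat anchors, as before.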
@@ -481,7 +485,7 @@ function checkQuotaError(data) {
     }
 }
 
-async function sendOpenAIRequest(openai_msgs_tosend, signal) {
+async function sendOpenAIRequest(type, openai_msgs_tosend, signal) {
     // Provide default abort signal
     if (!signal) {
         signal = new AbortController().signal;
@@ -492,6 +496,7 @@ async function sendOpenAIRequest(openai_msgs_tosend, signal) {
     }
 
     let logit_bias = {};
+    const stream = type !== 'quiet' && oai_settings.stream_openai;
 
     if (oai_settings.bias_preset_selected
         && Array.isArray(oai_settings.bias_presets[oai_settings.bias_preset_selected])
@@ -507,7 +512,7 @@ async function sendOpenAIRequest(openai_msgs_tosend, signal) {
"frequency_penalty": parseFloat(oai_settings.freq_pen_openai),
"presence_penalty": parseFloat(oai_settings.pres_pen_openai),
"max_tokens": oai_settings.openai_max_tokens,
"stream": oai_settings.stream_openai,
"stream": stream,
"reverse_proxy": oai_settings.reverse_proxy,
"logit_bias": logit_bias,
};
@@ -520,7 +525,7 @@ async function sendOpenAIRequest(openai_msgs_tosend, signal) {
         signal: signal,
     });
 
-    if (oai_settings.stream_openai) {
+    if (stream) {
         return async function* streamData() {
             const decoder = new TextDecoder();
             const reader = response.body.getReader();
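
Because a 'quiet' request never streams, callers get back a single resolved completion rather than the async generator used for streamed replies. A sketch of a call site, assuming the non-streaming branch resolves to the finished message text as it does for regular non-streamed generations:

    // Hypothetical caller for a background ("quiet") OpenAI generation.
    // openai_msgs_tosend stands in for the assembled prompt messages, built elsewhere.
    const controller = new AbortController();
    const quietReply = await sendOpenAIRequest('quiet', openai_msgs_tosend, controller.signal);
    // quietReply holds the completed text; no streaming generator is produced when type === 'quiet'.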

View File

@@ -115,7 +115,8 @@ async function generatePoe(type, finalPrompt, signal) {
         console.log('Could not jailbreak the bot');
     }
 
-    const isImpersonate = type == 'impersonate';
+    const isImpersonate = type === 'impersonate';
+    const isQuiet = type === 'quiet';
 
     if (poe_settings.character_nudge && !isImpersonate) {
         let characterNudge = '\n' + substituteParams(poe_settings.character_nudge_message);
@@ -136,7 +137,7 @@ async function generatePoe(type, finalPrompt, signal) {
         finalPrompt = sentences.join('');
     }
 
-    const reply = await sendMessage(finalPrompt, true, signal);
+    const reply = await sendMessage(finalPrompt, !isQuiet, signal);
     got_reply = true;
     return reply;
 }
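
For Poe, a quiet generation flips sendMessage's second argument off, mirroring the streaming opt-out in the OpenAI path (the argument's declaration is outside this excerpt, so that reading is an inference). The files that actually dispatch the new type are not shown either; a hedged sketch of what a quiet-generation helper might look like, where the name and routing are assumptions rather than code from this commit:

    // Hypothetical helper: run a background ("quiet") generation and return the reply text.
    async function runQuietGeneration(prompt) {
        const signal = new AbortController().signal;

        if (main_api === 'poe') {
            // Real code would build the full Poe context first; the raw prompt stands in here.
            return await generatePoe('quiet', prompt, signal);
        }

        if (main_api === 'openai') {
            // The quiet prompt is injected during message assembly, then sent non-streaming.
            setOpenAIMessages(chat, prompt);
            const messagesToSend = openai_msgs.slice(); // simplified; real assembly does much more
            return await sendOpenAIRequest('quiet', messagesToSend, signal);
        }

        throw new Error(`Quiet generation is not wired up for API: ${main_api}`);
    }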