Mirror of https://github.com/SillyTavern/SillyTavern.git (synced 2025-06-05 21:59:27 +02:00)
Compare commits
6 Commits
- 77f76ae315
- c3af38f5b8
- 3cf23f277c
- a74828df15
- bf7f04e3b2
- 5df7d2d1dc
package-lock.json (generated): 4 changed lines
@@ -1,12 +1,12 @@
 {
   "name": "sillytavern",
-  "version": "1.6.2",
+  "version": "1.6.5",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "sillytavern",
-      "version": "1.6.2",
+      "version": "1.6.5",
       "license": "AGPL-3.0",
       "dependencies": {
         "@dqbd/tiktoken": "^1.0.2",
@@ -46,7 +46,7 @@
     "type": "git",
     "url": "https://github.com/Cohee1207/SillyTavern.git"
   },
-  "version": "1.6.2",
+  "version": "1.6.5",
   "scripts": {
     "start": "node server.js",
     "pkg": "pkg --compress Gzip --no-bytecode --public ."
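The first hunk above is the generated package-lock.json; the second, judging by the repository and scripts fields, appears to come from package.json. Both bump the project version from 1.6.2 to 1.6.5. Below is a minimal Node sketch (a hypothetical helper, not part of the repository) that checks the two files report the same version after such a bump:

```js
// check-version-sync.js: hypothetical helper, not part of the repository.
// Confirms package.json and package-lock.json report the same version after a bump.
const fs = require('fs');

const pkg = JSON.parse(fs.readFileSync('package.json', 'utf8'));
const lock = JSON.parse(fs.readFileSync('package-lock.json', 'utf8'));

// lockfileVersion 3 repeats the version at the top level and under packages[""].
const versions = [pkg.version, lock.version, lock.packages?.['']?.version];

if (new Set(versions).size !== 1) {
    console.error('Version mismatch:', versions);
    process.exit(1);
}
console.log('Versions in sync:', pkg.version);
```

Judging by the function names, the remaining hunks cover the Generate pipeline (public/script.js), group chat generation (public/scripts/group-chats.js), and OpenAI prompt preparation (public/scripts/openai.js).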
@@ -1764,8 +1764,9 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
         abortController = new AbortController();
     }
 
+    // OpenAI doesn't need instruct mode. Use OAI main prompt instead.
+    const isInstruct = power_user.instruct.enabled && main_api !== 'openai';
     const isImpersonate = type == "impersonate";
-    const isInstruct = power_user.instruct.enabled;
 
     message_already_generated = isImpersonate ? `${name1}: ` : `${name2}: `;
     // Name for the multigen prefix
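This hunk in the Generate function is the core of the instruct-mode change: the isInstruct flag is now computed once and is forced off whenever the active API is OpenAI, since that path is driven by the OAI main prompt instead. A standalone sketch of the gating, with simplified names:

```js
// Minimal sketch of the gating logic introduced above (simplified, standalone names).
// Instruct formatting only applies when the active backend is not OpenAI, because
// the OpenAI path uses the OAI main prompt instead.
function isInstructActive(powerUser, mainApi) {
    return Boolean(powerUser.instruct.enabled) && mainApi !== 'openai';
}

// Usage: even with instruct mode enabled, the OpenAI backend ignores it.
console.log(isInstructActive({ instruct: { enabled: true } }, 'openai')); // false
console.log(isInstructActive({ instruct: { enabled: true } }, 'kobold')); // true
```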
@@ -2031,7 +2032,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
     let mesSend = [];
     console.log('calling runGenerate');
     streamingProcessor = isStreamingEnabled() ? new StreamingProcessor(type, force_name2) : false;
-    await runGenerate();
+    runGenerate();
 
     async function runGenerate(cycleGenerationPromt = '') {
         is_send_press = true;
@@ -2385,7 +2386,7 @@ async function Generate(type, { automatic_trigger, force_name2, resolve, reject,
            $('#send_textarea').val(extract.getMessage).trigger('input');
         }
 
-        if (shouldContinueMultigen(getMessage, isImpersonate)) {
+        if (shouldContinueMultigen(getMessage, isImpersonate, isInstruct)) {
             hideSwipeButtons();
             tokens_already_generated += this_amount_gen; // add new gen amt to any prev gen counter..
             getMessage = message_already_generated;
@@ -3024,8 +3025,8 @@ function getGenerateUrl() {
     return generate_url;
 }
 
-function shouldContinueMultigen(getMessage, isImpersonate) {
-    if (power_user.instruct.enabled && power_user.instruct.stop_sequence) {
+function shouldContinueMultigen(getMessage, isImpersonate, isInstruct) {
+    if (isInstruct && power_user.instruct.stop_sequence) {
         if (message_already_generated.indexOf(power_user.instruct.stop_sequence) !== -1) {
             return false;
         }
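With the flag computed up front, shouldContinueMultigen takes it as a third parameter instead of re-reading power_user.instruct.enabled, so the instruct stop sequence only ends a multigen run when instruct mode is actually in effect. A simplified sketch of just the stop-sequence part (the rest of the function is not shown in this hunk):

```js
// Sketch of the reworked multigen stop check with the extra parameter; names simplified.
function shouldContinueMultigen(accumulated, stopSequence, isInstruct) {
    if (isInstruct && stopSequence && accumulated.includes(stopSequence)) {
        return false; // the instruct stop sequence appeared, so stop chaining generations
    }
    return true;
}

console.log(shouldContinueMultigen('Alice: Hello\n### Instruction:', '### Instruction:', true)); // false
console.log(shouldContinueMultigen('Alice: Hello', '### Instruction:', true));                   // true
```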
@@ -3151,17 +3152,17 @@ function cleanUpMessage(getMessage, isImpersonate, displayIncompleteSentences =
     }
     if (getMessage.indexOf('<|endoftext|>') != -1) {
         getMessage = getMessage.substr(0, getMessage.indexOf('<|endoftext|>'));
-
     }
-    if (power_user.instruct.enabled && power_user.instruct.stop_sequence) {
+    const isInstruct = power_user.instruct.enabled && main_api !== 'openai';
+    if (isInstruct && power_user.instruct.stop_sequence) {
         if (getMessage.indexOf(power_user.instruct.stop_sequence) != -1) {
             getMessage = getMessage.substring(0, getMessage.indexOf(power_user.instruct.stop_sequence));
         }
     }
-    if (power_user.instruct.enabled && power_user.instruct.input_sequence && isImpersonate) {
+    if (isInstruct && power_user.instruct.input_sequence && isImpersonate) {
         getMessage = getMessage.replaceAll(power_user.instruct.input_sequence, '');
     }
-    if (power_user.instruct.enabled && power_user.instruct.output_sequence && !isImpersonate) {
+    if (isInstruct && power_user.instruct.output_sequence && !isImpersonate) {
         getMessage = getMessage.replaceAll(power_user.instruct.output_sequence, '');
     }
     // clean-up group message from excessive generations
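cleanUpMessage applies the same rule when post-processing generated text: stop, input, and output sequences are only stripped when instruct mode applies to the current backend. A condensed sketch of that clean-up step, with simplified names:

```js
// Simplified sketch of the clean-up step after the change: instruct stop/input/output
// sequences are only stripped when instruct mode applies to the current backend.
function stripInstructSequences(message, instruct, isInstruct, isImpersonate) {
    if (!isInstruct) {
        return message;
    }
    if (instruct.stop_sequence && message.includes(instruct.stop_sequence)) {
        message = message.substring(0, message.indexOf(instruct.stop_sequence));
    }
    // Impersonation strips the input sequence; normal replies strip the output sequence.
    const sequence = isImpersonate ? instruct.input_sequence : instruct.output_sequence;
    if (sequence) {
        message = message.replaceAll(sequence, '');
    }
    return message;
}
```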
@@ -258,7 +258,7 @@ async function convertSoloToGroupChat() {
         }
 
         // Skip messages we don't care about
-        if (message.is_user || message.is_system) {
+        if (message.is_user || message.is_system || message.extra?.type === system_message_types.NARRATOR || message.force_avatar !== undefined) {
             continue;
         }
 
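In the group chat module, convertSoloToGroupChat now also skips narrator messages and messages with a forced avatar, so they are not reassigned to a group member. The widened predicate as a standalone sketch:

```js
// Sketch of the widened skip condition: user, system, narrator, and forced-avatar
// messages are all left untouched when a solo chat is converted into a group chat.
function shouldSkipMessage(message, narratorType) {
    return Boolean(
        message.is_user ||
        message.is_system ||
        message.extra?.type === narratorType ||
        message.force_avatar !== undefined
    );
}

console.log(shouldSkipMessage({ is_user: false, is_system: false, extra: { type: 'narrator' } }, 'narrator')); // true
```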
@@ -512,7 +512,7 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
         setCharacterId(chId);
         setCharacterName(characters[chId].name)
 
-        Generate(generateType, { automatic_trigger: by_auto_mode, ...(params || {}) });
+        await Generate(generateType, { automatic_trigger: by_auto_mode, ...(params || {}) });
 
         if (type !== "swipe" && type !== "impersonate" && !isMultigenEnabled() && !isStreamingEnabled()) {
             // update indicator and scroll down
@@ -602,6 +602,14 @@ async function generateGroupWrapper(by_auto_mode, type = null, params = {}) {
                     await delay(100);
                 }
             }
+            else if (isStreamingEnabled()) {
+                if (streamingProcessor && !streamingProcessor.isFinished) {
+                    await delay(100);
+                } else {
+                    messagesBefore++;
+                    break;
+                }
+            }
             else {
                 messagesBefore++;
                 break;
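generateGroupWrapper now awaits Generate for each member and gains a dedicated branch for streaming: instead of counting finished chat messages, it polls the streaming processor until the current member's stream is done. A minimal sketch of that waiting logic, assuming a delay(ms) helper like the one the module already calls:

```js
// Minimal sketch of the new waiting branch, assuming a delay(ms) helper that
// resolves after the given time. While streaming, the wrapper waits on the
// streaming processor rather than on the chat message count.
const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function waitForStreamToFinish(streamingProcessor) {
    while (streamingProcessor && !streamingProcessor.isFinished) {
        await delay(100); // keep yielding so the UI can render incoming tokens
    }
}
```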
@@ -56,6 +56,7 @@ export {
 let openai_msgs = [];
 let openai_msgs_example = [];
 let openai_messages_count = 0;
+let openai_narrator_messages_count = 0;
 
 let is_get_status_openai = false;
 let is_api_button_press_openai = false;
@@ -175,6 +176,7 @@ function setOpenAIMessages(chat) {
     let j = 0;
     // clean openai msgs
     openai_msgs = [];
+    openai_narrator_messages_count = 0;
     for (let i = chat.length - 1; i >= 0; i--) {
         let role = chat[j]['is_user'] ? 'user' : 'assistant';
         let content = chat[j]['mes'];
@@ -182,6 +184,7 @@ function setOpenAIMessages(chat) {
         // 100% legal way to send a message as system
         if (chat[j].extra?.type === system_message_types.NARRATOR) {
             role = 'system';
+            openai_narrator_messages_count++;
         }
 
         // for groups or sendas command - prepend a character's name
@@ -457,7 +460,7 @@ async function prepareOpenAIMessages(name2, storyString, worldInfoBefore, worldI
         }
     }
 
-    openai_messages_count = openai_msgs_tosend.filter(x => x.role == "user" || x.role == "assistant").length;
+    openai_messages_count = openai_msgs_tosend.filter(x => x.role == "user" || x.role == "assistant").length + openai_narrator_messages_count;
     // reverse the messages array because we had the newest at the top to remove the oldest,
     // now we want proper order
     openai_msgs_tosend.reverse();
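Finally, in the OpenAI prompt preparation, narrator messages are sent with the system role, so the plain user/assistant filter used to undercount them; setOpenAIMessages now tracks them in openai_narrator_messages_count and prepareOpenAIMessages adds that count back into openai_messages_count. A small sketch of the adjusted counting:

```js
// Sketch of the adjusted counting: narrator messages go out with the 'system' role,
// so a plain user/assistant filter misses them; the separate counter is added back in.
function countPromptMessages(messages, narratorMessageCount) {
    const chatMessages = messages.filter((m) => m.role === 'user' || m.role === 'assistant').length;
    return chatMessages + narratorMessageCount;
}

console.log(countPromptMessages(
    [{ role: 'user' }, { role: 'assistant' }, { role: 'system' }],
    1,
)); // 3
```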