Fix system messages using incorrect timezone

Cohee
2023-08-22 18:13:03 +03:00
parent 7f4a47c1b0
commit ea4d4a8fd6
5 changed files with 29 additions and 27 deletions
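
The change is mechanical: every call site that stamped a chat message's send_date with humanizedDateTime() or a raw Date.now() epoch value now uses the shared getMessageTimeStamp() helper from RossAscends-mods.js, so all messages get a timestamp produced the same way, in the user's local timezone. The helper's implementation is not part of this diff; the snippet below is only a minimal sketch of what a getMessageTimeStamp-style formatter could look like. The name with the Sketch suffix and the exact output format are assumptions, not the repository's code.

// Hypothetical sketch only: the real getMessageTimeStamp() lives in RossAscends-mods.js
// and its exact output format is not visible in this diff. The point of the commit is
// that send_date comes from one shared helper using the local timezone instead of
// mixing humanizedDateTime() with raw Date.now() epoch values.
function getMessageTimeStampSketch(date = new Date()) {
    const months = ['January', 'February', 'March', 'April', 'May', 'June',
        'July', 'August', 'September', 'October', 'November', 'December'];
    let hours = date.getHours();            // local time, not UTC
    const meridiem = hours >= 12 ? 'pm' : 'am';
    hours = hours % 12 || 12;
    const minutes = String(date.getMinutes()).padStart(2, '0');
    return `${months[date.getMonth()]} ${date.getDate()}, ${date.getFullYear()} ${hours}:${minutes}${meridiem}`;
}

// Usage: stamp a chat message the way the patched call sites do.
const exampleMessage = { send_date: getMessageTimeStampSketch(), mes: 'Hello' };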

View File

@@ -1910,7 +1910,7 @@ function sendSystemMessage(type, text, extra = {}) {
 return;
 }
-const newMessage = { ...systemMessage, send_date: humanizedDateTime() };
+const newMessage = { ...systemMessage, send_date: getMessageTimeStamp() };
 if (text) {
 newMessage.mes = text;

View File

@@ -1,6 +1,7 @@
 import { getBase64Async } from "../../utils.js";
 import { getContext, getApiUrl, doExtrasFetch, extension_settings } from "../../extensions.js";
 import { callPopup, saveSettingsDebounced } from "../../../script.js";
+import { getMessageTimeStamp } from "../../RossAscends-mods.js";
 export { MODULE_NAME };
 const MODULE_NAME = 'caption';
@@ -52,7 +53,7 @@ async function sendCaptionedMessage(caption, image) {
 name: context.name1,
 is_user: true,
 is_name: true,
-send_date: Date.now(),
+send_date: getMessageTimeStamp(),
 mes: messageText,
 extra: {
 image: image,

View File

@@ -4,11 +4,12 @@ TODO:
 */
 import { saveSettingsDebounced } from "../../../script.js";
-import { getContext, getApiUrl, modules, extension_settings, ModuleWorkerWrapper, doExtrasFetch } from "../../extensions.js";
+import { getContext, extension_settings, ModuleWorkerWrapper } from "../../extensions.js";
 import { VoskSttProvider } from './vosk.js'
 import { WhisperSttProvider } from './whisper.js'
 import { BrowserSttProvider } from './browser.js'
 import { StreamingSttProvider } from './streaming.js'
+import { getMessageTimeStamp } from "../../RossAscends-mods.js";
 export { MODULE_NAME };
 const MODULE_NAME = 'Speech Recognition';
@@ -61,10 +62,10 @@ async function moduleWorker() {
 let messageStart = -1;
 if (extension_settings.speech_recognition.Streaming.triggerWordsEnabled) {
 for (const triggerWord of extension_settings.speech_recognition.Streaming.triggerWords) {
 const triggerPos = userMessageRaw.indexOf(triggerWord.toLowerCase());
 // Trigger word not found or not starting message and just a substring
 if (triggerPos == -1){ // | (triggerPos > 0 & userMessageFormatted[triggerPos-1] != " ")) {
 console.debug(DEBUG_PREFIX+"trigger word not found: ", triggerWord);
@@ -152,12 +153,12 @@ async function processTranscript(transcript) {
 name: context.name1,
 is_user: true,
 is_name: true,
-send_date: Date.now(),
+send_date: getMessageTimeStamp(),
 mes: messageText,
 };
 context.chat.push(message);
 context.addOneMessage(message);
 await context.generate();
 $('#debug_output').text("<SST-module DEBUG>: message sent: \""+ transcriptFormatted +"\"");
@@ -191,10 +192,10 @@ async function processTranscript(transcript) {
 function loadNavigatorAudioRecording() {
 if (navigator.mediaDevices.getUserMedia) {
 console.debug(DEBUG_PREFIX+' getUserMedia supported by browser.');
 let onSuccess = function(stream) {
 const mediaRecorder = new MediaRecorder(stream);
 $("#microphone_button").off('click').on("click", function() {
 if (!audioRecording) {
 mediaRecorder.start();
@@ -211,30 +212,30 @@ function loadNavigatorAudioRecording() {
 $("#microphone_button").toggleClass('fa-microphone fa-microphone-slash');
 }
 });
 mediaRecorder.onstop = async function() {
 console.debug(DEBUG_PREFIX+"data available after MediaRecorder.stop() called: ", audioChunks.length, " chunks");
 const audioBlob = new Blob(audioChunks, { type: "audio/wav; codecs=0" });
 audioChunks = [];
 const transcript = await sttProvider.processAudio(audioBlob);
 // TODO: lock and release recording while processing?
 console.debug(DEBUG_PREFIX+"received transcript:", transcript);
 processTranscript(transcript);
 }
 mediaRecorder.ondataavailable = function(e) {
 audioChunks.push(e.data);
 }
 }
 let onError = function(err) {
 console.debug(DEBUG_PREFIX+"The following error occured: " + err);
 }
 navigator.mediaDevices.getUserMedia(constraints).then(onSuccess, onError);
 } else {
 console.debug(DEBUG_PREFIX+"getUserMedia not supported on your browser!");
 toastr.error("getUserMedia not supported", DEBUG_PREFIX+"not supported for your browser.", { timeOut: 10000, extendedTimeOut: 20000, preventDuplicates: true });
@@ -257,7 +258,7 @@ function loadSttProvider(provider) {
 console.warn(`Provider ${sttProviderName} not in Extension Settings, initiatilizing provider in settings`);
 extension_settings.speech_recognition[sttProviderName] = {};
 }
 $('#speech_recognition_provider').val(sttProviderName);
 if (sttProviderName == "None") {
@@ -287,13 +288,13 @@ function loadSttProvider(provider) {
 loadNavigatorAudioRecording();
 $("#microphone_button").show();
 }
 if (sttProviderName == "Streaming") {
 sttProvider.loadSettings(extension_settings.speech_recognition[sttProviderName]);
 $("#microphone_button").off('click');
 $("#microphone_button").hide();
 }
 }
 function onSttProviderChange() {
@@ -365,7 +366,7 @@ async function onMessageMappingChange() {
 console.debug(DEBUG_PREFIX+"Wrong syntax for message mapping, no '=' found in:", text);
 }
 }
 $("#speech_recognition_message_mapping_status").text("Message mapping updated to: "+JSON.stringify(extension_settings.speech_recognition.messageMapping))
 console.debug(DEBUG_PREFIX+"Updated message mapping", extension_settings.speech_recognition.messageMapping);
 extension_settings.speech_recognition.messageMappingText = $('#speech_recognition_message_mapping').val()
@@ -425,7 +426,7 @@ $(document).ready(function () {
 $('#speech_recognition_message_mode').on('change', onMessageModeChange);
 $('#speech_recognition_message_mapping').on('change', onMessageMappingChange);
 $('#speech_recognition_message_mapping_enabled').on('click', onMessageMappingEnabledClick);
 const $button = $('<div id="microphone_button" class="fa-solid fa-microphone speech-toggle" title="Click to speak"></div>');
 $('#send_but_sheld').prepend($button);

View File

@@ -9,7 +9,7 @@ import {
 saveBase64AsFile,
 PAGINATION_TEMPLATE,
 } from './utils.js';
-import { RA_CountCharTokens, humanizedDateTime, dragElement, favsToHotswap } from "./RossAscends-mods.js";
+import { RA_CountCharTokens, humanizedDateTime, dragElement, favsToHotswap, getMessageTimeStamp } from "./RossAscends-mods.js";
 import { loadMovingUIState, sortEntitiesList } from './power-user.js';
 import {
@@ -202,7 +202,7 @@ function getFirstCharacterMessage(character) {
 mes["is_system"] = false;
 mes["name"] = character.name;
 mes["is_name"] = true;
-mes["send_date"] = humanizedDateTime();
+mes["send_date"] = getMessageTimeStamp();
 mes["original_avatar"] = character.avatar;
 mes["extra"] = { "gen_id": Date.now() * Math.random() * 1000000 };
 mes["mes"] = messageText

View File

@@ -22,7 +22,7 @@ import {
 reloadCurrentChat,
 sendMessageAsUser,
 } from "../script.js";
-import { humanizedDateTime } from "./RossAscends-mods.js";
+import { getMessageTimeStamp } from "./RossAscends-mods.js";
 import { resetSelectedGroup } from "./group-chats.js";
 import { getRegexedString, regex_placement } from "./extensions/regex/engine.js";
 import { chat_styles, power_user } from "./power-user.js";
@@ -327,7 +327,7 @@ async function sendMessageAs(_, text) {
 is_user: false,
 is_name: true,
 is_system: isSystem,
-send_date: humanizedDateTime(),
+send_date: getMessageTimeStamp(),
 mes: substituteParams(mesText),
 force_avatar: force_avatar,
 original_avatar: original_avatar,
@@ -358,7 +358,7 @@ async function sendNarratorMessage(_, text) {
 is_user: false,
 is_name: false,
 is_system: isSystem,
-send_date: humanizedDateTime(),
+send_date: getMessageTimeStamp(),
 mes: substituteParams(text.trim()),
 force_avatar: system_avatar,
 extra: {
@@ -384,7 +384,7 @@ async function sendCommentMessage(_, text) {
 is_user: false,
 is_name: true,
 is_system: true,
-send_date: humanizedDateTime(),
+send_date: getMessageTimeStamp(),
 mes: substituteParams(text.trim()),
 force_avatar: comment_avatar,
 extra: {