Merge branch 'staging' of https://github.com/SillyLossy/TavernAI into staging

Cohee 2023-08-01 11:58:42 +03:00
commit 78d62d7be2
5 changed files with 421 additions and 112 deletions

View File

@@ -9,6 +9,7 @@ const MODULE_NAME = 'expressions';
const UPDATE_INTERVAL = 2000;
const FALLBACK_EXPRESSION = 'joy';
const DEFAULT_EXPRESSIONS = [
"live2d",
"admiration", "admiration",
"amusement", "amusement",
"anger", "anger",
@@ -44,6 +45,9 @@ let lastCharacter = undefined;
let lastMessage = null;
let spriteCache = {};
let inApiCall = false;
let live2d_var = false;
let previousSrc = null;
function isVisualNovelMode() {
return Boolean(!isMobile() && power_user.waifuMode && getContext().groupId);
@@ -392,6 +396,56 @@ function onExpressionsShowDefaultInput() {
}
}
function loadLiveChar(value_name) {
let url = `${getApiUrl()}/api/live2d/load?loadchar=${location.origin}/characters/${value_name}`;
doExtrasFetch(url, {
method: 'GET',
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
'Bypass-Tunnel-Reminder': 'bypass',
},
})
.then(response => response.text())
.then(data => console.log(data))
.catch((error) => console.error('Error:', error));
}
function handleImageChange(isChecked) {
const imgElement = document.querySelector('img#expression-image.expression');
if (!imgElement) {
console.log("Cannot find addExpressionImage()");
return;
}
if (isChecked) {
// Method get IP of endpoint
if (imgElement.src !== getApiUrl() + '/api/live2d/result_feed') {
const expressionListItemElement = document.querySelector('#live2d');
const expressionImageElement = expressionListItemElement.querySelector('.expression_list_image');
const newSrc = expressionImageElement.src;
doExtrasFetch(newSrc, {
method: 'HEAD',
})
.then(response => {
if (response.ok) {
imgElement.src = getApiUrl() + '/api/live2d/result_feed';
}
})
.catch(error => {
console.error(error); // Log the error if necessary
});
} else if (previousSrc) {
imgElement.src = previousSrc; // Revert the src to its previous value
}
} else if (previousSrc !== null) {
imgElement.src = previousSrc; // Revert the src to its previous value
}
live2d_var = isChecked;
}
async function moduleWorker() {
const context = getContext();
@@ -405,6 +459,21 @@ async function moduleWorker() {
if (context.groupId !== lastCharacter && context.characterId !== lastCharacter) {
removeExpression();
spriteCache = {};
previousSrc = null;
//uncheck live image
let checkbox = document.getElementById('image_type_toggle');
if (checkbox.checked) {
checkbox.click();
}
//clear expression
let imgElement = document.getElementById('expression-image');
imgElement.src = "";
//Load new char
loadLiveChar(context.name2 + ".png");
}
const vnMode = isVisualNovelMode();
@@ -654,7 +723,6 @@ async function getSpritesList(name) {
try {
const result = await fetch(`/get_sprites?name=${encodeURIComponent(name)}`);
let sprites = result.ok ? (await result.json()) : [];
return sprites;
}
@@ -697,6 +765,8 @@ async function getExpressionsList() {
}
async function setExpression(character, expression, force) {
if (live2d_var == false) {
console.debug('entered setExpressions');
await validateImages(character);
const img = $('img.expression');
@@ -789,22 +859,32 @@ async function setExpression(character, expression, force) {
setDefault();
}
});
}
} else {
if (extension_settings.expressions.showDefault) {
setDefault();
}
}
}
function setDefault() {
console.debug('setting default');
const defImgUrl = `/img/default-expressions/${expression}.png`;
//console.log(defImgUrl);
img.attr('src', defImgUrl);
img.addClass('default');
}
document.getElementById("expression-holder").style.display = '';
if (live2d_var == true) {
// Find the <img> element with id="expression-image" and class="expression"
const imgElement = document.querySelector('img#expression-image.expression');
//console.log("searching");
if (imgElement) {
console.log("setting value");
imgElement.src = getApiUrl() + '/api/live2d/result_feed';
}
}
}
}
function onClickExpressionImage() {
@@ -1052,7 +1132,6 @@ function setExpressionOverrideHtml(forceClear = false) {
$('body').append(element);
}
function addSettings() { function addSettings() {
const html = `
<div class="expression_settings">
<div class="inline-drawer">
@@ -1060,7 +1139,15 @@ function setExpressionOverrideHtml(forceClear = false) {
<b>Character Expressions</b>
<div class="inline-drawer-icon fa-solid fa-circle-chevron-down down"></div>
</div>
<div class="inline-drawer-content">
<!-- Toggle button for aituber/static images -->
<div class="toggle_button">
<label class="switch">
<input id="image_type_toggle" type="checkbox">
<span class="slider round"></span>
<label for="image_type_toggle">Image Type - Live2d (extras)</label>
</div>
<div class="offline_mode"> <div class="offline_mode">
<small>You are in offline mode. Click on the image below to set the expression.</small> <small>You are in offline mode. Click on the image below to set the expression.</small>
</div> </div>
@@ -1090,6 +1177,7 @@ function setExpressionOverrideHtml(forceClear = false) {
</form>
</div>
`;
$('#extensions_settings').append(html);
$('#expression_override_button').on('click', onClickExpressionOverrideButton);
$('#expressions_show_default').on('input', onExpressionsShowDefaultInput);
@@ -1105,6 +1193,17 @@ function setExpressionOverrideHtml(forceClear = false) {
$(document).on('click', '.expression_list_delete', onClickExpressionDelete);
$(window).on("resize", updateVisualNovelModeDebounced);
$('.expression_settings').hide();
$('#image_type_toggle').on('change', function () {
const isChecked = this.checked;
const inputElement = document.querySelector('input[name="avatar_url"]');
const value_name = inputElement ? inputElement.value : '';
if (isChecked) {
loadLiveChar(value_name);
}
handleImageChange(isChecked);
});
}
addExpressionImage(); addExpressionImage();
@@ -1116,6 +1215,7 @@ function setExpressionOverrideHtml(forceClear = false) {
moduleWorker();
dragElement($("#expression-holder"))
eventSource.on(event_types.CHAT_CHANGED, () => {
//console.log("checked: " + live2d_var);
setExpressionOverrideHtml();
if (isVisualNovelMode()) {

View File

@@ -72,6 +72,9 @@ function getTaskByIdRecurse(taskId, task) {
function substituteParamsPrompts(content) {
content = content.replace(/{{objective}}/gi, currentObjective.description)
content = content.replace(/{{task}}/gi, currentTask.description)
if (currentTask.parent){
content = content.replace(/{{parent}}/gi, currentTask.parent.description)
}
content = substituteParams(content)
return content
}
@@ -97,7 +100,7 @@ async function generateTasks() {
updateUiTaskList();
setCurrentTask();
console.info(`Response for Objective: '${taskTree.description}' was \n'${taskResponse}', \nwhich created tasks \n${JSON.stringify(globalTasks.map(v => {return v.toSaveState()}), null, 2)} `)
toastr.success(`Generated ${taskTree.length} tasks`, 'Done!');
}
// Call Quiet Generate to check if a task is completed
@@ -126,7 +129,7 @@ async function checkTaskCompleted() {
function getNextIncompleteTaskRecurse(task){
if (task.completed === false // Return task if incomplete
&& task.children.length === 0 // Ensure task has no children, it's subtasks will determine completeness
&& task.parentId !== "" // Must have parent id. Only root task will be missing this and we dont want that
){
return task
}

View File

@@ -8,16 +8,21 @@ import { getContext, getApiUrl, modules, extension_settings, ModuleWorkerWrapper
import { VoskSttProvider } from './vosk.js'
import { WhisperSttProvider } from './whisper.js'
import { BrowserSttProvider } from './browser.js'
import { StreamingSttProvider } from './streaming.js'
export { MODULE_NAME };
const MODULE_NAME = 'Speech Recognition';
const DEBUG_PREFIX = "<Speech Recognition module> "
const UPDATE_INTERVAL = 100;
let inApiCall = false;
let sttProviders = {
None: null,
Browser: BrowserSttProvider,
Whisper: WhisperSttProvider,
Vosk: VoskSttProvider,
Streaming: StreamingSttProvider,
}
let sttProvider = null
@@ -27,6 +32,82 @@ let audioRecording = false
const constraints = { audio: { sampleSize: 16, channelCount: 1, sampleRate: 16000 } };
let audioChunks = [];
async function moduleWorker() {
if (sttProviderName != "Streaming") {
return;
}
// API is busy
if (inApiCall) {
return;
}
try {
inApiCall = true;
const userMessageOriginal = await sttProvider.getUserMessage();
let userMessageFormatted = userMessageOriginal.trim();
if (userMessageFormatted.length > 0)
{
console.debug(DEBUG_PREFIX+"recorded transcript: \""+userMessageFormatted+"\"");
let userMessageLower = userMessageFormatted.toLowerCase();
// remove punctuation
let userMessageRaw = userMessageLower.replace(/[^\w\s\']|_/g, "").replace(/\s+/g, " ");
console.debug(DEBUG_PREFIX+"raw transcript:",userMessageRaw);
// Detect trigger words
let messageStart = -1;
if (extension_settings.speech_recognition.Streaming.triggerWordsEnabled) {
for (const triggerWord of extension_settings.speech_recognition.Streaming.triggerWords) {
const triggerPos = userMessageRaw.indexOf(triggerWord.toLowerCase());
// Trigger word not found or not starting message and just a substring
if (triggerPos == -1){ // | (triggerPos > 0 & userMessageFormatted[triggerPos-1] != " ")) {
console.debug(DEBUG_PREFIX+"trigger word not found: ", triggerWord);
}
else {
console.debug(DEBUG_PREFIX+"Found trigger word: ", triggerWord, " at index ", triggerPos);
if (triggerPos < messageStart | messageStart == -1) { // & (triggerPos + triggerWord.length) < userMessageFormatted.length)) {
messageStart = triggerPos; // + triggerWord.length + 1;
}
}
}
} else {
messageStart = 0;
}
if (messageStart == -1) {
console.debug(DEBUG_PREFIX+"message ignored, no trigger word preceding a message. Voice transcript: \""+ userMessageOriginal +"\"");
if (extension_settings.speech_recognition.Streaming.debug) {
toastr.info(
"No trigger word preceding a message. Voice transcript: \""+ userMessageOriginal +"\"",
DEBUG_PREFIX+"message ignored.",
{ timeOut: 10000, extendedTimeOut: 20000, preventDuplicates: true },
);
}
}
else{
userMessageFormatted = userMessageFormatted.substring(messageStart);
processTranscript(userMessageFormatted);
}
}
else
{
console.debug(DEBUG_PREFIX+"Received empty transcript, ignored");
}
}
catch (error) {
console.debug(error);
}
finally {
inApiCall = false;
}
}
async function processTranscript(transcript) {
try {
const transcriptOriginal = transcript;
@@ -198,13 +279,21 @@ function loadSttProvider(provider) {
if (sttProviderName == "Browser") {
sttProvider.processTranscriptFunction = processTranscript;
sttProvider.loadSettings(extension_settings.speech_recognition[sttProviderName]);
$("#microphone_button").show();
}
if (sttProviderName == "Vosk" | sttProviderName == "Whisper") {
sttProvider.loadSettings(extension_settings.speech_recognition[sttProviderName]);
loadNavigatorAudioRecording();
$("#microphone_button").show();
}
if (sttProviderName == "Streaming") {
sttProvider.loadSettings(extension_settings.speech_recognition[sttProviderName]);
$("#microphone_button").off('click');
$("#microphone_button").hide();
}
}
function onSttProviderChange() {
@@ -231,7 +320,7 @@ const defaultSettings = {
messageMode: "append",
messageMappingText: "",
messageMapping: [],
messageMappingEnabled: false,
}
function loadSettings() {
@@ -344,8 +433,7 @@ $(document).ready(function () {
addExtensionControls(); // No init dependencies
loadSettings(); // Depends on Extension Controls and loadTtsProvider
loadSttProvider(extension_settings.speech_recognition.currentProvider); // No dependencies
const wrapper = new ModuleWorkerWrapper(moduleWorker);
setInterval(wrapper.update.bind(wrapper), UPDATE_INTERVAL); // Init depends on all the things
moduleWorker();
})

View File

@@ -0,0 +1,102 @@
import { getApiUrl, doExtrasFetch, modules } from "../../extensions.js";
export { StreamingSttProvider }
const DEBUG_PREFIX = "<Speech Recognition module (streaming)> "
class StreamingSttProvider {
//########//
// Config //
//########//
settings
defaultSettings = {
triggerWordsText: "",
triggerWords : [],
triggerWordsEnabled : false,
debug : false,
}
get settingsHtml() {
let html = '\
<div id="speech_recognition_streaming_trigger_words_div">\
<span>Trigger words</span>\
<textarea id="speech_recognition_streaming_trigger_words" class="text_pole textarea_compact" type="text" rows="4" placeholder="Enter comma separated words that triggers new message, example:\nhey, hey aqua, record, listen"></textarea>\
<label class="checkbox_label" for="speech_recognition_streaming_trigger_words_enabled">\
<input type="checkbox" id="speech_recognition_streaming_trigger_words_enabled" name="speech_recognition_trigger_words_enabled">\
<small>Enable trigger words</small>\
</label>\
<label class="checkbox_label" for="speech_recognition_streaming_debug">\
<input type="checkbox" id="speech_recognition_streaming_debug" name="speech_recognition_streaming_debug">\
<small>Enable debug pop ups</small>\
</label>\
</div>\
'
return html
}
onSettingsChange() {
this.settings.triggerWordsText = $('#speech_recognition_streaming_trigger_words').val();
let array = $('#speech_recognition_streaming_trigger_words').val().split(",");
array = array.map(element => {return element.trim().toLowerCase();});
array = array.filter((str) => str !== '');
this.settings.triggerWords = array;
this.settings.triggerWordsEnabled = $("#speech_recognition_streaming_trigger_words_enabled").is(':checked');
this.settings.debug = $("#speech_recognition_streaming_debug").is(':checked');
console.debug(DEBUG_PREFIX+" Updated settings: ", this.settings);
this.loadSettings(this.settings);
}
loadSettings(settings) {
// Populate Provider UI given input settings
if (Object.keys(settings).length == 0) {
console.debug(DEBUG_PREFIX+"Using default Whisper STT extension settings")
}
// Only accept keys defined in defaultSettings
this.settings = this.defaultSettings
for (const key in settings){
if (key in this.settings){
this.settings[key] = settings[key]
} else {
throw `Invalid setting passed to STT extension: ${key}`
}
}
$("#speech_recognition_streaming_trigger_words").val(this.settings.triggerWordsText);
$("#speech_recognition_streaming_trigger_words_enabled").prop('checked',this.settings.triggerWordsEnabled);
$("#speech_recognition_streaming_debug").prop('checked',this.settings.debug);
console.debug(DEBUG_PREFIX+"streaming STT settings loaded")
}
async getUserMessage() {
// Return if module is not loaded
if (!modules.includes('streaming-stt')) {
console.debug(DEBUG_PREFIX+"Module streaming-stt must be activated in Sillytavern Extras for streaming user voice.")
return "";
}
const url = new URL(getApiUrl());
url.pathname = '/api/speech-recognition/streaming/record-and-transcript';
const apiResult = await doExtrasFetch(url, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Bypass-Tunnel-Reminder': 'bypass',
},
body: JSON.stringify({ text: "" }),
});
if (!apiResult.ok) {
toastr.error(apiResult.statusText, DEBUG_PREFIX+'STT Generation Failed (streaming)', { timeOut: 10000, extendedTimeOut: 20000, preventDuplicates: true });
throw new Error(`HTTP ${apiResult.status}: ${await apiResult.text()}`);
}
const data = await apiResult.json();
return data.transcript;
}
}
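Side note, not part of the commit: a minimal standalone sketch of calling the Extras endpoint that getUserMessage() above relies on, assuming a locally running SillyTavern Extras server. The base URL below is an assumption; the path, headers, request body, and response shape are taken from the code above.
// Minimal sketch: poll the Extras streaming STT endpoint once and print the transcript.
// Assumption: Extras is reachable at http://localhost:5100 (adjust to your setup).
const EXTRAS_BASE_URL = 'http://localhost:5100';

async function fetchTranscriptOnce() {
    const response = await fetch(EXTRAS_BASE_URL + '/api/speech-recognition/streaming/record-and-transcript', {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            'Bypass-Tunnel-Reminder': 'bypass',
        },
        body: JSON.stringify({ text: "" }),
    });

    if (!response.ok) {
        throw new Error(`HTTP ${response.status}: ${await response.text()}`);
    }

    const data = await response.json();
    return data.transcript;
}

fetchTranscriptOnce()
    .then(transcript => console.log('Transcript:', transcript))
    .catch(error => console.error(error));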

View File

@@ -1,5 +1,5 @@
import { callPopup, cancelTtsPlay, eventSource, event_types, isMultigenEnabled, is_send_press, saveSettingsDebounced } from '../../../script.js'
import { ModuleWorkerWrapper, doExtrasFetch, extension_settings, getApiUrl, getContext } from '../../extensions.js'
import { escapeRegex, getStringHash } from '../../utils.js'
import { EdgeTtsProvider } from './edge.js'
import { ElevenLabsTtsProvider } from './elevenlabs.js'
@@ -7,14 +7,13 @@ import { SileroTtsProvider } from './silerotts.js'
import { CoquiTtsProvider } from './coquitts.js'
import { SystemTtsProvider } from './system.js'
import { NovelTtsProvider } from './novel.js'
import { power_user } from '../../power-user.js'
const UPDATE_INTERVAL = 1000
let voiceMap = {} // {charName:voiceid, charName2:voiceid2}
let audioControl
let storedvalue = false;
let lastCharacterId = null
let lastGroupId = null
let lastChatId = null
@@ -164,6 +163,20 @@ async function moduleWorker() {
ttsJobQueue.push(message)
}
function talkingAnimation(switchValue) {
const apiUrl = getApiUrl();
const animationType = switchValue ? "start" : "stop";
if (switchValue !== storedvalue) {
try {
console.log(animationType + " Talking Animation");
doExtrasFetch(`${apiUrl}/api/live2d/${animationType}_talking`);
storedvalue = switchValue; // Update the storedvalue to the current switchValue
} catch (error) {
// Handle the error here or simply ignore it to prevent logging
}
}
}
function resetTtsPlayback() {
// Stop system TTS utterance
@@ -291,8 +304,10 @@ function updateUiAudioPlayState() {
// Give user feedback that TTS is active by setting the stop icon if processing or playing
if (!audioElement.paused || isTtsProcessing()) {
img = 'fa-solid fa-stop-circle extensionsMenuExtensionButton'
talkingAnimation(true)
} else {
img = 'fa-solid fa-circle-play extensionsMenuExtensionButton'
talkingAnimation(false)
}
$('#tts_media_control').attr('class', img);
} else {
@@ -354,6 +369,7 @@ async function processAudioJobQueue() {
audioQueueProcessorReady = false
currentAudioJob = audioJobQueue.pop()
playAudioData(currentAudioJob)
talkingAnimation(true)
} catch (error) {
console.error(error)
audioQueueProcessorReady = true