Add typedefs for TTS audioJob

parent f3fc3f12a7
commit e2becdf7a9

@@ -298,7 +298,7 @@ function debugTtsPlayback() {
         },
     ));
 }
-window.debugTtsPlayback = debugTtsPlayback;
+window['debugTtsPlayback'] = debugTtsPlayback;
 
 //##################//
 // Audio Control //
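
Aside: the switch from dot access to bracket access on window here, and for playFullConversation further down, is presumably about keeping JSDoc/TypeScript checking quiet, since Window declares no such properties. A minimal sketch of the difference, with a made-up helper name:

// @ts-check
function debugHelper() {
    console.log('debug helper attached');
}

// window.debugHelper = debugHelper;   // with checkJs this is typically flagged:
//                                     // "Property 'debugHelper' does not exist on type 'Window & typeof globalThis'"
window['debugHelper'] = debugHelper;   // element access sidesteps the declared-property check under default settings
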
@@ -308,13 +308,25 @@ let audioElement = new Audio();
 audioElement.id = 'tts_audio';
 audioElement.autoplay = true;
 
+/**
+ * @type {AudioJob[]} Audio job queue
+ * @typedef {{audioBlob: Blob | string, char: string}} AudioJob Audio job object
+ */
 let audioJobQueue = [];
+/**
+ * @type {AudioJob} Current audio job
+ */
 let currentAudioJob;
 let audioPaused = false;
 let audioQueueProcessorReady = true;
 
+/**
+ * Play audio data from audio job object.
+ * @param {AudioJob} audioJob Audio job object
+ * @returns {Promise<void>} Promise that resolves when audio playback is started
+ */
 async function playAudioData(audioJob) {
-    const audioBlob = audioJob["audioBlob"];
+    const { audioBlob, char } = audioJob;
     // Since current audio job can be cancelled, don't playback if it is null
     if (currentAudioJob == null) {
         console.log('Cancelled TTS playback because currentAudioJob was null');
@@ -324,7 +336,7 @@ async function playAudioData(audioJob) {
 
     // VRM lip sync
     if (extension_settings.vrm?.enabled && typeof window['vrmLipSync'] === 'function') {
-        await window['vrmLipSync'](audioBlob, audioJob["char"]);
+        await window['vrmLipSync'](audioBlob, char);
     }
 
     audioElement.src = srcUrl;
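
For context, a minimal sketch of how the new AudioJob typedef and the destructuring above are meant to fit together; the names queue and describeJob and the sample values are illustrative only, not part of the commit:

/**
 * @typedef {{audioBlob: Blob | string, char: string}} AudioJob
 */

/** @type {AudioJob[]} */
const queue = [];

// Both plain string values and fetched Blobs fit the same shape.
queue.push({ audioBlob: 'https://example.com/line1.mp3', char: 'Alice' });
queue.push({ audioBlob: new Blob([], { type: 'audio/mpeg' }), char: 'Bob' });

/**
 * Illustrative consumer: destructuring is safe because every entry is an AudioJob.
 * @param {AudioJob} job
 */
function describeJob(job) {
    const { audioBlob, char } = job;
    const kind = audioBlob instanceof Blob ? audioBlob.type : audioBlob;
    console.log(`Queued audio for ${char}: ${kind}`);
}

queue.forEach(describeJob);
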
@@ -343,7 +355,7 @@ async function playAudioData(audioJob) {
 window['tts_preview'] = function (id) {
     const audio = document.getElementById(id);
 
-    if (audio && !$(audio).data('disabled')) {
+    if (audio instanceof HTMLAudioElement && !$(audio).data('disabled')) {
         audio.play();
     }
     else {
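
The instanceof HTMLAudioElement guard above tightens the old truthiness check: document.getElementById is typed as HTMLElement | null, so narrowing to HTMLAudioElement is what lets audio.play() type-check, and it also ignores any non-audio element that happens to share the id. A small sketch of the same pattern, with a placeholder wrapper function:

/**
 * Plays an <audio> element by id, ignoring anything that is not an audio element.
 * @param {string} id DOM id of the element to play
 */
function playById(id) {
    const el = document.getElementById(id); // HTMLElement | null
    if (el instanceof HTMLAudioElement) {
        // Narrowed: play() exists on HTMLMediaElement and its subclasses.
        el.play();
    } else {
        console.warn(`No audio element found for id "${id}"`);
    }
}

playById('tts_audio');
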
@@ -429,13 +441,13 @@ function completeCurrentAudioJob() {
  */
 async function addAudioJob(response, char) {
     if (typeof response === 'string') {
-        audioJobQueue.push({"audioBlob":response, "char":char});
+        audioJobQueue.push({ audioBlob: response, char: char });
     } else {
         const audioData = await response.blob();
         if (!audioData.type.startsWith('audio/')) {
             throw `TTS received HTTP response with invalid data format. Expecting audio/*, got ${audioData.type}`;
         }
-        audioJobQueue.push({"audioBlob":audioData, "char":char});
+        audioJobQueue.push({ audioBlob: audioData, char: char });
     }
     console.debug('Pushed audio job to queue.');
 }
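
For reference, addAudioJob accepts either a plain string response or a fetch Response whose body must have an audio/* MIME type. A usage sketch; the character name and URLs below are placeholders:

// Illustrative only; must run in an async context.
async function queueExamples() {
    // String form: queued directly under the AudioJob shape.
    await addAudioJob('/user/audio/cached_line.mp3', 'Alice');

    // Response form: the body is read as a Blob and rejected
    // unless its MIME type starts with 'audio/'.
    const response = await fetch('https://tts.example.invalid/generate?text=hello');
    await addAudioJob(response, 'Alice');
}
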
@@ -576,7 +588,7 @@ async function playFullConversation() {
     const chat = context.chat;
     ttsJobQueue = chat;
 }
-window.playFullConversation = playFullConversation;
+window['playFullConversation'] = playFullConversation;
 
 //#############################//
 // Extension UI and Settings //
@@ -108,7 +108,7 @@ router.post('/get', jsonParser, async (_, response) => {
 
             // VRM assets
             if (folder == 'vrm') {
-                output[folder] = {'model':[], 'animation':[]};
+                output[folder] = { 'model': [], 'animation': [] };
                 // Extract models
                 const vrm_model_folder = path.normalize(path.join(folderPath, 'vrm', 'model'));
                 let files = getFiles(vrm_model_folder);