2010-04-06 18:57:02 +02:00
|
|
|
/***************************************************************************
|
|
|
|
* Copyright (C) 2003-2005 by Mark Kretschmann <markey@web.de> *
|
|
|
|
* Copyright (C) 2005 by Jakub Stachowski <qbast@go2.pl> *
|
|
|
|
* Copyright (C) 2006 Paul Cifarelli <paul@cifarelli.net> *
|
|
|
|
* *
|
|
|
|
* This program is free software; you can redistribute it and/or modify *
|
|
|
|
* it under the terms of the GNU General Public License as published by *
|
|
|
|
* the Free Software Foundation; either version 2 of the License, or *
|
|
|
|
* (at your option) any later version. *
|
|
|
|
* *
|
|
|
|
* This program is distributed in the hope that it will be useful, *
|
|
|
|
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
|
|
|
|
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
|
|
|
|
* GNU General Public License for more details. *
|
|
|
|
* *
|
|
|
|
* You should have received a copy of the GNU General Public License *
|
|
|
|
* along with this program; if not, write to the *
|
|
|
|
* Free Software Foundation, Inc., *
|
|
|
|
* 51 Franklin Steet, Fifth Floor, Boston, MA 02111-1307, USA. *
|
|
|
|
***************************************************************************/
|
|
|
|
|
|
|
|
#define DEBUG_PREFIX "Gst-Engine"
|
|
|
|
|
|
|
|
#include "gstengine.h"
|
2010-04-07 18:26:04 +02:00
|
|
|
#include "gstequalizer.h"
|
2010-04-11 21:47:21 +02:00
|
|
|
#include "gstenginepipeline.h"
|
2010-04-06 18:57:02 +02:00
|
|
|
|
|
|
|
#include <math.h>
|
|
|
|
#include <unistd.h>
|
|
|
|
#include <vector>
|
|
|
|
|
|
|
|
#include <QTimer>
|
|
|
|
#include <QRegExp>
|
|
|
|
#include <QFile>
|
2010-04-07 15:51:14 +02:00
|
|
|
#include <QSettings>
|
2010-04-06 18:57:02 +02:00
|
|
|
#include <QtDebug>
|
2010-04-11 16:26:30 +02:00
|
|
|
#include <QCoreApplication>
|
2010-04-11 23:40:26 +02:00
|
|
|
#include <QTimeLine>
|
2010-04-06 18:57:02 +02:00
|
|
|
|
|
|
|
#include <gst/gst.h>
|
|
|
|
#include <iostream>
|
|
|
|
|
|
|
|
|
|
|
|
using std::vector;
|
2010-04-11 21:47:21 +02:00
|
|
|
using boost::shared_ptr;
|
2010-04-06 18:57:02 +02:00
|
|
|
|
2010-04-07 15:51:14 +02:00
|
|
|
const char* GstEngine::kSettingsGroup = "GstEngine";
|
|
|
|
const char* GstEngine::kAutoSink = "autoaudiosink";
|
2010-04-06 18:57:02 +02:00
|
|
|
|
|
|
|
|
|
|
|
// Constructs the engine: allocates the scope delay queue and loads the
// user's sink/device settings.  No GStreamer state is created here -
// that happens later in Init().
GstEngine::GstEngine()
  : Engine::Base(),
    delayq_(g_queue_new()),       // queue of GstBuffers awaiting the scope
    current_sample_(0),           // write position into current_scope_
    equalizer_enabled_(false)
{
  ReloadSettings();
}
|
|
|
|
|
2010-04-07 00:58:41 +02:00
|
|
|
// Tears the engine down: drops the playing pipeline, frees every buffer
// still queued for the scope, frees the queue itself, and shuts down
// the GStreamer library.
GstEngine::~GstEngine() {
  current_pipeline_.reset();

  // Destroy scope delay queue
  ClearScopeBuffers();
  g_queue_free(delayq_);

  // Save configuration
  gst_deinit();
}
|
|
|
|
|
|
|
|
|
2010-04-12 01:24:03 +02:00
|
|
|
bool GstEngine::Init() {
|
2010-04-07 00:58:41 +02:00
|
|
|
// GStreamer initialization
|
|
|
|
GError *err;
|
|
|
|
if ( !gst_init_check( NULL, NULL, &err ) ) {
|
|
|
|
qWarning("GStreamer could not be initialized");
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2010-04-11 16:26:30 +02:00
|
|
|
#ifdef Q_OS_WIN32
|
|
|
|
// Set the plugin path on windows
|
|
|
|
GstRegistry* registry = gst_registry_get_default();
|
|
|
|
gst_registry_add_path(registry, QString(
|
|
|
|
QCoreApplication::applicationDirPath() + "/gstreamer-plugins").toLocal8Bit().constData());
|
|
|
|
#endif
|
2010-04-06 18:57:02 +02:00
|
|
|
|
2010-04-07 00:58:41 +02:00
|
|
|
return true;
|
|
|
|
}
|
2010-04-06 18:57:02 +02:00
|
|
|
|
2010-04-07 15:51:14 +02:00
|
|
|
void GstEngine::ReloadSettings() {
|
2010-04-12 01:03:39 +02:00
|
|
|
Engine::Base::ReloadSettings();
|
|
|
|
|
2010-04-07 15:51:14 +02:00
|
|
|
QSettings s;
|
|
|
|
s.beginGroup(kSettingsGroup);
|
|
|
|
|
|
|
|
sink_ = s.value("sink", kAutoSink).toString();
|
2010-04-08 22:14:11 +02:00
|
|
|
device_ = s.value("device").toString();
|
2010-04-07 15:51:14 +02:00
|
|
|
}
|
|
|
|
|
2010-04-06 18:57:02 +02:00
|
|
|
|
2010-04-12 01:24:03 +02:00
|
|
|
bool GstEngine::CanDecode(const QUrl &url) {
|
2010-04-07 00:58:41 +02:00
|
|
|
// We had some bug reports claiming that video files cause crashes in canDecode(),
|
|
|
|
// so don't try to decode them
|
|
|
|
if ( url.path().toLower().endsWith( ".mov" ) ||
|
|
|
|
url.path().toLower().endsWith( ".avi" ) ||
|
|
|
|
url.path().toLower().endsWith( ".wmv" ) )
|
|
|
|
return false;
|
2010-04-06 18:57:02 +02:00
|
|
|
|
2010-04-07 00:58:41 +02:00
|
|
|
can_decode_success_ = false;
|
|
|
|
can_decode_last_ = false;
|
2010-04-06 18:57:02 +02:00
|
|
|
|
2010-04-07 14:56:05 +02:00
|
|
|
// Create the pipeline
|
2010-04-21 00:00:02 +02:00
|
|
|
GstElement* pipeline = CreateElement("pipeline");
|
|
|
|
GstElement* src = CreateElement("giosrc", pipeline);
|
|
|
|
GstElement* bin = CreateElement("decodebin", pipeline);
|
|
|
|
|
|
|
|
gst_element_link(src, bin);
|
|
|
|
g_signal_connect(G_OBJECT(bin), "new-decoded-pad", G_CALLBACK(CanDecodeNewPadCallback), this);
|
|
|
|
g_signal_connect(G_OBJECT(bin), "no-more-pads", G_CALLBACK(CanDecodeLastCallback), this);
|
2010-04-06 18:57:02 +02:00
|
|
|
|
2010-04-07 14:56:05 +02:00
|
|
|
// Set the file we're testing
|
2010-04-21 00:00:02 +02:00
|
|
|
g_object_set(G_OBJECT(src), "location", url.toEncoded().constData(), NULL);
|
2010-04-06 18:57:02 +02:00
|
|
|
|
2010-04-07 14:56:05 +02:00
|
|
|
// Start the pipeline playing
|
2010-04-21 00:00:02 +02:00
|
|
|
gst_element_set_state(pipeline, GST_STATE_PLAYING);
|
2010-04-06 18:57:02 +02:00
|
|
|
|
2010-04-07 00:58:41 +02:00
|
|
|
// Wait until found audio stream
|
2010-04-07 14:56:05 +02:00
|
|
|
int count = 0;
|
|
|
|
while (!can_decode_success_ && !can_decode_last_ && count < 100) {
|
2010-04-07 00:58:41 +02:00
|
|
|
count++;
|
|
|
|
usleep(1000);
|
|
|
|
}
|
2010-04-06 18:57:02 +02:00
|
|
|
|
2010-04-07 14:56:05 +02:00
|
|
|
// Stop playing
|
2010-04-21 00:00:02 +02:00
|
|
|
gst_element_set_state(pipeline, GST_STATE_NULL);
|
|
|
|
gst_object_unref(pipeline);
|
2010-04-07 00:58:41 +02:00
|
|
|
|
|
|
|
return can_decode_success_;
|
|
|
|
}
|
2010-04-06 18:57:02 +02:00
|
|
|
|
2010-04-11 21:47:21 +02:00
|
|
|
// decodebin "new-decoded-pad" callback used by CanDecode(): marks the
// probe successful if the newly exposed pad carries audio caps.
void GstEngine::CanDecodeNewPadCallback(GstElement*, GstPad* pad, gboolean, gpointer self) {
  GstEngine* engine = reinterpret_cast<GstEngine*>(self);

  GstCaps* caps = gst_pad_get_caps(pad);
  if (gst_caps_get_size(caps) > 0) {
    GstStructure* structure = gst_caps_get_structure(caps, 0);
    const bool is_audio = g_strrstr(gst_structure_get_name(structure), "audio") != NULL;
    if (is_audio)
      engine->can_decode_success_ = true;
  }
  gst_caps_unref(caps);
}
|
|
|
|
|
2010-04-11 21:47:21 +02:00
|
|
|
// decodebin "no-more-pads" callback used by CanDecode(): signals the
// waiting loop that no further pads will appear.
void GstEngine::CanDecodeLastCallback(GstElement*, gpointer self) {
  reinterpret_cast<GstEngine*>(self)->can_decode_last_ = true;
}
|
2010-04-06 18:57:02 +02:00
|
|
|
|
|
|
|
|
2010-04-11 21:47:21 +02:00
|
|
|
// Current playback position in milliseconds, or 0 when no pipeline is
// loaded.
uint GstEngine::position() const {
  return current_pipeline_
      ? uint(current_pipeline_->position() / GST_MSECOND)
      : 0;
}
|
|
|
|
|
2010-04-11 21:47:21 +02:00
|
|
|
// Total track length in milliseconds, or 0 when no pipeline is loaded.
uint GstEngine::length() const {
  return current_pipeline_
      ? uint(current_pipeline_->length() / GST_MSECOND)
      : 0;
}
|
2010-04-07 00:58:41 +02:00
|
|
|
|
2010-04-06 18:57:02 +02:00
|
|
|
|
2010-04-11 21:47:21 +02:00
|
|
|
// Maps the pipeline's GStreamer state onto the Engine state enum.
// Without a pipeline: Empty when no URL is loaded, Idle otherwise.
Engine::State GstEngine::state() const {
  if (!current_pipeline_)
    return url_.isEmpty() ? Engine::Empty : Engine::Idle;

  const GstState pipeline_state = current_pipeline_->state();
  if (pipeline_state == GST_STATE_PLAYING)
    return Engine::Playing;
  if (pipeline_state == GST_STATE_PAUSED)
    return Engine::Paused;
  if (pipeline_state == GST_STATE_READY)
    return Engine::Idle;
  // GST_STATE_NULL and anything unexpected map to Empty.
  return Engine::Empty;
}
|
|
|
|
|
2010-04-12 02:20:52 +02:00
|
|
|
// Slot receiving audio buffers from a pipeline for the visual scope.
// Buffers from anything other than the current pipeline (e.g. one that
// is fading out) are dropped and unreffed.
void GstEngine::AddBufferToScope(GstBuffer* buf) {
  GstEnginePipeline* pipeline = qobject_cast<GstEnginePipeline*>(sender());
  const bool from_current = pipeline && pipeline == current_pipeline_.get();
  if (!from_current) {
    gst_buffer_unref(buf);
    return;
  }

  g_queue_push_tail(delayq_, buf);
}
|
2010-04-06 18:57:02 +02:00
|
|
|
|
2010-04-07 00:58:41 +02:00
|
|
|
// Returns the scope data for visualisations.  Pulls fresh samples from
// the delay queue and, once a full window of kScopeSize samples has
// accumulated, publishes it into scope_.
const Engine::Scope& GstEngine::scope() {
  UpdateScope();

  if (current_sample_ >= kScopeSize) {
    // A complete window is ready - hand it over to the scope.
    int i = 0;
    while (i < kScopeSize) {
      scope_[i] = current_scope_[i];
      ++i;
    }
    current_sample_ = 0;
  }

  return scope_;
}
|
|
|
|
|
2010-04-07 00:58:41 +02:00
|
|
|
// Fills current_scope_ with the samples the audio device is playing
// right now.  Works through the delay queue of GstBuffers: finds the
// buffer whose timestamp range covers the device's playback position,
// computes the frame offset inside it, and copies interleaved samples
// until the scope window is full or the queue runs dry.
void GstEngine::UpdateScope() {
  // Samples are read as signed 16-bit; see the NOTE(review) below about
  // the stale "32 bit" comment.
  typedef int16_t sampletype;

  // prune the scope and get the current pos of the audio device
  quint64 pos = PruneScope();

  // head of the delay queue is the most delayed, so we work with that one
  GstBuffer *buf = reinterpret_cast<GstBuffer *>( g_queue_peek_head(delayq_) );
  if (!buf)
    return;

  // start time for this buffer
  quint64 stime = GST_BUFFER_TIMESTAMP(buf);
  // duration of the buffer...
  quint64 dur = GST_BUFFER_DURATION(buf);
  // therefore we can calculate the end time for the buffer
  quint64 etime = stime + dur;

  // determine the number of channels
  GstStructure* structure = gst_caps_get_structure ( GST_BUFFER_CAPS( buf ), 0);
  int channels = 2;  // default if the caps carry no "channels" field
  gst_structure_get_int (structure, "channels", &channels);

  // scope does not support >2 channels
  if (channels > 2)
    return;

  // only proceed if the audio device is playing this buffer now
  if (pos <= stime || pos >= etime)
    return;

  // calculate the number of samples in the buffer
  int sz = GST_BUFFER_SIZE(buf) / sizeof(sampletype);
  // number of frames is the number of samples in each channel (frames like in the alsa sense)
  int frames = sz / channels;

  // find the offset into the buffer to the sample closest to where the audio device is playing
  // it is the (time into the buffer cooresponding to the audio device pos) / (the sample rate)
  // sample rate = duration of the buffer / number of frames in the buffer
  // then we multiply by the number of channels to find the offset of the left channel sample
  // of the frame in the buffer
  int off = channels * (pos - stime) / (dur / frames);

  // NOTE(review): this comment predates sampletype being int16_t - the
  // code actually treats samples as 16-bit; confirm and update.
  // note that we are assuming 32 bit samples, but this should probably be generalized...
  sampletype* data = reinterpret_cast<sampletype *>(GST_BUFFER_DATA(buf));
  if (off >= sz) // better be...
    return;

  int i = off; // starting at offset

  // loop while we fill the current buffer. If we need another buffer and one is available,
  // get it and keep filling. If there are no more buffers available (not too likely)
  // then leave everything in this state and wait until the next time the scope updates
  while (buf && current_sample_ < kScopeSize && i < sz) {
    // copy one frame (all channels) into the scope window
    for (int j = 0; j < channels && current_sample_ < kScopeSize; j++) {
      current_scope_[current_sample_ ++] = data[i + j];
    }
    i+=channels; // advance to the next frame

    if (i >= sz - 1) {
      // here we are out of samples in the current buffer, so we get another one
      buf = reinterpret_cast<GstBuffer *>( g_queue_pop_head(delayq_) );
      gst_buffer_unref(buf);
      buf = reinterpret_cast<GstBuffer *>( g_queue_peek_head(delayq_) );
      if (buf) {
        // restart with the timing and size of the next buffer
        stime = GST_BUFFER_TIMESTAMP(buf);
        dur = GST_BUFFER_DURATION(buf);
        etime = stime + dur;
        i = 0;
        sz = GST_BUFFER_SIZE(buf) / sizeof(sampletype);
        data = reinterpret_cast<sampletype *>(GST_BUFFER_DATA(buf));
      }
    }
  }
}
|
|
|
|
|
2010-04-21 15:55:30 +02:00
|
|
|
// Builds a pipeline for |url| ahead of time and leaves it PAUSED, so a
// later Load() of the same URL can start playing without setup delay.
void GstEngine::StartPreloading(const QUrl& url) {
  preload_pipeline_ = CreatePipeline(url);
  if (preload_pipeline_) {
    preloaded_url_ = url;
    preload_pipeline_->SetState(GST_STATE_PAUSED);
  }
}
|
|
|
|
|
2010-04-12 03:59:21 +02:00
|
|
|
// Loads |url| into a new (or preloaded) pipeline, optionally
// crossfading out of the previous track.  Returns false only if a
// pipeline could not be created.
bool GstEngine::Load(const QUrl& url, Engine::TrackChangeType change) {
  Engine::Base::Load(url, change);

  // Crossfade only when something is already playing and the user has
  // enabled fading for this kind of change (manual vs automatic).
  const bool crossfade = current_pipeline_ &&
                         ((crossfade_enabled_ && change == Engine::Manual) ||
                          (autocrossfade_enabled_ && change == Engine::Auto));

  // Reuse the preloaded pipeline if it was built for this exact URL,
  // otherwise build a fresh one.
  shared_ptr<GstEnginePipeline> pipeline;
  if (preload_pipeline_ && preloaded_url_ == url) {
    pipeline = preload_pipeline_;
  } else {
    pipeline = CreatePipeline(url);
    if (!pipeline)
      return false;
  }

  // Start fading the old pipeline out before we replace it.
  if (crossfade)
    StartFadeout();

  current_pipeline_ = pipeline;
  preload_pipeline_.reset();

  // Re-apply engine-wide settings to the freshly attached pipeline.
  SetVolume(volume_);
  SetEqualizerEnabled(equalizer_enabled_);
  SetEqualizerParameters(equalizer_preamp_, equalizer_gains_);

  // Maybe fade in this track
  if (crossfade)
    current_pipeline_->StartFader(fadeout_duration_, QTimeLine::Forward);

  return true;
}
|
|
|
|
|
2010-04-12 01:03:39 +02:00
|
|
|
// Moves the current pipeline into fadeout_pipeline_ and fades its
// volume down; FadeoutFinished() releases it when the fade completes.
void GstEngine::StartFadeout() {
  fadeout_pipeline_ = current_pipeline_;
  // Detach every slot of ours: the fading pipeline must no longer feed
  // the scope or drive engine state.
  disconnect(fadeout_pipeline_.get(), 0, 0, 0);
  fadeout_pipeline_->set_forwards_buffers(false);
  // Its queued scope buffers are stale now.
  ClearScopeBuffers();

  fadeout_pipeline_->StartFader(fadeout_duration_, QTimeLine::Backward);
  connect(fadeout_pipeline_.get(), SIGNAL(FaderFinished()), SLOT(FadeoutFinished()));
}
|
|
|
|
|
2010-04-06 18:57:02 +02:00
|
|
|
|
2010-04-12 01:24:03 +02:00
|
|
|
// Starts playback of the current pipeline, optionally seeking to
// |offset| milliseconds first (used by "resume playback on start").
// Returns false if there is no pipeline or it refuses to play.
bool GstEngine::Play( uint offset ) {
  // Load() may have failed or Stop() may have run: don't dereference a
  // null pipeline (the original crashed here).
  if (!current_pipeline_)
    return false;

  // Try to play input pipeline; if fails, destroy input bin
  if (!current_pipeline_->SetState(GST_STATE_PLAYING)) {
    qWarning() << "Could not set thread to PLAYING.";
    current_pipeline_.reset();
    return false;
  }

  // If "Resume playback on start" is enabled, we must seek to the last position
  if (offset) Seek(offset);

  current_sample_ = 0;
  startTimer(kTimerInterval);
  emit StateChanged(Engine::Playing);
  return true;
}
|
|
|
|
|
|
|
|
|
2010-04-12 01:24:03 +02:00
|
|
|
// Stops playback, optionally fading the current track out first.
void GstEngine::Stop() {
  url_ = QUrl(); // To ensure we return Empty from state()

  if (fadeout_enabled_ && current_pipeline_)
    StartFadeout();

  // Dropping our reference is safe even while fading out:
  // fadeout_pipeline_ keeps the pipeline alive until the fade ends.
  current_pipeline_.reset();
  emit StateChanged(Engine::Empty);
}
|
|
|
|
|
2010-04-11 23:40:26 +02:00
|
|
|
// Slot fired when the fadeout fader completes: releases the pipeline
// that was fading out.
void GstEngine::FadeoutFinished() {
  fadeout_pipeline_.reset();
}
|
|
|
|
|
2010-04-12 01:24:03 +02:00
|
|
|
void GstEngine::Pause() {
|
2010-04-11 21:47:21 +02:00
|
|
|
if (!current_pipeline_)
|
|
|
|
return;
|
2010-04-06 18:57:02 +02:00
|
|
|
|
2010-04-11 21:47:21 +02:00
|
|
|
if ( current_pipeline_->state() == GST_STATE_PLAYING ) {
|
|
|
|
current_pipeline_->SetState(GST_STATE_PAUSED);
|
2010-04-12 01:24:03 +02:00
|
|
|
emit StateChanged(Engine::Paused);
|
2010-04-07 00:58:41 +02:00
|
|
|
}
|
2010-04-06 18:57:02 +02:00
|
|
|
}
|
|
|
|
|
2010-04-12 01:24:03 +02:00
|
|
|
void GstEngine::Unpause() {
|
2010-04-11 21:47:21 +02:00
|
|
|
if (!current_pipeline_)
|
|
|
|
return;
|
2010-04-06 18:57:02 +02:00
|
|
|
|
2010-04-11 21:47:21 +02:00
|
|
|
if ( current_pipeline_->state() == GST_STATE_PAUSED ) {
|
|
|
|
current_pipeline_->SetState(GST_STATE_PLAYING);
|
2010-04-12 01:24:03 +02:00
|
|
|
emit StateChanged(Engine::Playing);
|
2010-04-07 00:58:41 +02:00
|
|
|
}
|
|
|
|
}
|
2010-04-06 18:57:02 +02:00
|
|
|
|
2010-04-12 01:24:03 +02:00
|
|
|
// Seeks the current pipeline to |ms| milliseconds.  On success the
// queued scope buffers describe the old position, so they are dropped.
void GstEngine::Seek(uint ms) {
  if (!current_pipeline_)
    return;

  const bool seeked = current_pipeline_->Seek(ms * GST_MSECOND);
  if (!seeked) {
    qDebug() << "Seek failed";
    return;
  }
  ClearScopeBuffers();
}
|
|
|
|
|
2010-04-12 01:24:03 +02:00
|
|
|
void GstEngine::SetEqualizerEnabled(bool enabled) {
|
2010-04-07 00:58:41 +02:00
|
|
|
equalizer_enabled_= enabled;
|
2010-04-06 18:57:02 +02:00
|
|
|
|
2010-04-11 21:47:21 +02:00
|
|
|
if (current_pipeline_)
|
|
|
|
current_pipeline_->SetEqualizerEnabled(enabled);
|
2010-04-06 18:57:02 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2010-04-12 22:33:59 +02:00
|
|
|
void GstEngine::SetEqualizerParameters(int preamp, const QList<int>& band_gains) {
|
2010-04-07 00:58:41 +02:00
|
|
|
equalizer_preamp_ = preamp;
|
2010-04-07 18:26:04 +02:00
|
|
|
equalizer_gains_ = band_gains;
|
2010-04-06 18:57:02 +02:00
|
|
|
|
2010-04-11 21:47:21 +02:00
|
|
|
if (current_pipeline_)
|
|
|
|
current_pipeline_->SetEqualizerParams(preamp, band_gains);
|
2010-04-06 18:57:02 +02:00
|
|
|
}
|
|
|
|
|
2010-04-12 01:24:03 +02:00
|
|
|
// Applies a software volume (0-100 percent) to the current pipeline,
// if any.
void GstEngine::SetVolumeSW( uint percent ) {
  if (!current_pipeline_)
    return;
  current_pipeline_->SetVolume(percent);
}
|
|
|
|
|
|
|
|
|
2010-04-07 00:58:41 +02:00
|
|
|
// Periodic housekeeping while playing: prunes already-played scope
// buffers and emits TrackAboutToEnd shortly before the track finishes
// so the next track can be preloaded or crossfaded in.
void GstEngine::timerEvent( QTimerEvent* ) {
  // keep the scope from building while we are not visible
  // this is why the timer must run as long as we are playing, and not just when
  // we are fading
  PruneScope();

  // Emit TrackAboutToEnd when we're a few seconds away from finishing
  if (current_pipeline_) {
    const qint64 nanosec_position = current_pipeline_->position();
    const qint64 nanosec_length = current_pipeline_->length();
    // remaining time left in the track, converted to milliseconds
    const qint64 remaining = (nanosec_length - nanosec_position) / 1000000;
    const qint64 fudge = 100; // Mmm fudge
    // When autocrossfading, fire early enough to overlap the fade.
    const qint64 gap = autocrossfade_enabled_ ? fadeout_duration_ : kPreloadGap;

    if (nanosec_length > 0 && remaining < gap + fudge)
      EmitAboutToEnd();
  }
}
|
|
|
|
|
2010-04-11 21:47:21 +02:00
|
|
|
// Slot for fatal pipeline errors: logs the message, tears the pipeline
// down, and notifies listeners of both the error and the Empty state.
void GstEngine::HandlePipelineError(const QString& message) {
  qWarning() << "Gstreamer error:" << message;

  current_pipeline_.reset();
  emit Error(message);
  emit StateChanged(Engine::Empty);
}
|
|
|
|
|
|
|
|
|
2010-04-07 00:58:41 +02:00
|
|
|
// Slot for the pipeline's end-of-stream: drops the pipeline and tells
// listeners the track finished naturally.
void GstEngine::EndOfStreamReached() {
  current_pipeline_.reset();
  emit TrackEnded();
}
|
|
|
|
|
2010-04-11 21:47:21 +02:00
|
|
|
// Forwards metadata discovered by the pipeline (e.g. stream tags) to
// this engine's listeners.
void GstEngine::NewMetaData(const Engine::SimpleMetaBundle& bundle) {
  emit MetaData(bundle);
}
|
|
|
|
|
2010-04-07 00:58:41 +02:00
|
|
|
// Creates a GStreamer element from |factoryName| (named |name|, or the
// factory name when |name| is null) and adds it to |bin| when one is
// given.  On failure emits an Error, unrefs |bin| (so callers can just
// bail out), and returns NULL.
GstElement* GstEngine::CreateElement(
    const QString& factoryName, GstElement* bin, const QString& name ) {
  GstElement* element =
      gst_element_factory_make(
          factoryName.toAscii().constData(),
          name.isNull() ? factoryName.toAscii().constData() : name.toAscii().constData() );

  if ( element ) {
    if ( bin ) gst_bin_add( GST_BIN( bin ), element );
  } else {
    emit Error(QString("GStreamer could not create the element: %1. "
        "Please make sure that you have installed all necessary GStreamer plugins (e.g. OGG and MP3)").arg( factoryName ) );
    // Only unref the bin when there is one; the original called
    // gst_object_unref(NULL) whenever a top-level element failed.
    if ( bin ) gst_object_unref( GST_OBJECT( bin ) );
  }

  return element;
}
|
|
|
|
|
|
|
|
|
2010-04-07 15:51:14 +02:00
|
|
|
// Returns details of every registered GStreamer element factory whose
// class string contains |classname| (e.g. "Sink/Audio").
GstEngine::PluginDetailsList
    GstEngine::GetPluginList(const QString& classname) const {
  PluginDetailsList ret;

  GstRegistry* registry = gst_registry_get_default();
  GList* features =
      gst_registry_get_feature_list(registry, GST_TYPE_ELEMENT_FACTORY);

  // Iterate with a separate cursor and keep |features| pointing at the
  // head: the original advanced |features| itself to NULL and then
  // freed NULL, leaking the entire feature list.
  for (GList* it = features; it; it = g_list_next(it)) {
    GstElementFactory* factory = GST_ELEMENT_FACTORY(it->data);
    if (QString(factory->details.klass).contains(classname)) {
      PluginDetails details;
      details.name = QString::fromUtf8(GST_PLUGIN_FEATURE_NAME(it->data));
      details.long_name = QString::fromUtf8(factory->details.longname);
      details.description = QString::fromUtf8(factory->details.description);
      details.author = QString::fromUtf8(factory->details.author);
      ret << details;
    }
  }

  gst_plugin_feature_list_free(features);
  return ret;
}
|
|
|
|
|
2010-04-11 21:47:21 +02:00
|
|
|
// Builds a pipeline for |url|, wires its signals to this engine's
// slots, and returns it.  Returns a null pointer when the pipeline
// fails to initialise.
shared_ptr<GstEnginePipeline> GstEngine::CreatePipeline(const QUrl& url) {
  shared_ptr<GstEnginePipeline> ret(new GstEnginePipeline(this));
  ret->set_forwards_buffers(true);
  ret->set_output_device(sink_, device_);

  connect(ret.get(), SIGNAL(EndOfStreamReached()), SLOT(EndOfStreamReached()));
  connect(ret.get(), SIGNAL(BufferFound(GstBuffer*)), SLOT(AddBufferToScope(GstBuffer*)));
  connect(ret.get(), SIGNAL(Error(QString)), SLOT(HandlePipelineError(QString)));
  connect(ret.get(), SIGNAL(MetadataFound(Engine::SimpleMetaBundle)),
          SLOT(NewMetaData(Engine::SimpleMetaBundle)));
  // When a pipeline is destroyed its queued scope buffers are stale.
  connect(ret.get(), SIGNAL(destroyed()), SLOT(ClearScopeBuffers()));

  if (!ret->Init(url))
    ret.reset();

  return ret;
}
|
|
|
|
|
2010-04-07 00:58:41 +02:00
|
|
|
// Drops every delay-queue buffer the audio device has already played
// past, and returns the device's current playback position in
// nanoseconds (0 when no pipeline is loaded).
qint64 GstEngine::PruneScope() {
  if (!current_pipeline_)
    return 0;

  // get the position playing in the audio device
  qint64 pos = current_pipeline_->position();

  GstBuffer *buf = 0;
  quint64 etime;  // end time of the buffer being examined

  // free up the buffers that the audio device has advanced past already
  do {
    // most delayed buffers are at the head of the queue
    buf = reinterpret_cast<GstBuffer *>( g_queue_peek_head(delayq_) );
    if (buf) {
      // the start time of the buffer
      quint64 stime = GST_BUFFER_TIMESTAMP(buf);
      // the duration of the buffer
      quint64 dur = GST_BUFFER_DURATION(buf);
      // therefore we can calculate the end time of the buffer
      etime = stime + dur;

      // purge this buffer if the pos is past the end time of the buffer
      if (pos > qint64(etime)) {
        g_queue_pop_head(delayq_);
        gst_buffer_unref(buf);
      }
    }
    // stop at the first buffer that is still (partly) unplayed, or when
    // the queue is empty
  } while (buf && pos > qint64(etime));

  return pos;
}
|
2010-04-06 18:57:02 +02:00
|
|
|
|
2010-04-12 02:20:52 +02:00
|
|
|
void GstEngine::ClearScopeBuffers() {
|
2010-04-07 00:58:41 +02:00
|
|
|
// just free them all
|
|
|
|
while (g_queue_get_length(delayq_)) {
|
|
|
|
GstBuffer* buf = reinterpret_cast<GstBuffer *>( g_queue_pop_head(delayq_) );
|
|
|
|
gst_buffer_unref(buf);
|
|
|
|
}
|
2010-04-06 18:57:02 +02:00
|
|
|
}
|
2010-04-08 22:14:11 +02:00
|
|
|
|
2010-04-08 22:17:57 +02:00
|
|
|
bool GstEngine::DoesThisSinkSupportChangingTheOutputDeviceToAUserEditableString(const QString &name) {
|
2010-04-08 22:14:11 +02:00
|
|
|
return (name == "alsasink" || name == "osssink" || name == "pulsesink");
|
|
|
|
}
|