Use the segment start time when calculating the timestamp of data buffers in the scope. Fixes a memory leak and makes the scope work again when playing ogg streams. Fixes issue #495.
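The scope compares GST_BUFFER_TIMESTAMP() values against the position reported by the audio device, but buffer timestamps are in stream time and, for Ogg streams, can be offset by a non-zero segment start; without the correction old buffers apparently never fell behind the device position, so the delay queue grew without bound (the leak) and the scope rendered the wrong data. A minimal sketch of the adjustment in isolation, assuming the GStreamer 0.10 API this code uses; ScopeTime() is a hypothetical helper name, not something added by this commit:

// Sketch only, not part of this commit.  Buffer timestamps are in stream
// time; subtracting the start of the current segment makes them comparable
// with the position reported by the audio device.
#include <gst/gst.h>

static inline gint64 ScopeTime(GstBuffer* buf, gint64 segment_start) {
  return static_cast<gint64>(GST_BUFFER_TIMESTAMP(buf)) - segment_start;
}

The diff below applies the same subtraction in UpdateScope() and PruneScope(), and caches the segment start in GstEnginePipeline via a new event probe.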

David Sansome 2010-07-11 13:31:03 +00:00
parent 0c123d58e8
commit b62263540f
3 changed files with 25 additions and 4 deletions


@@ -308,7 +308,8 @@ void GstEngine::UpdateScope() {
   typedef int16_t sampletype;

   // prune the scope and get the current pos of the audio device
-  quint64 pos = PruneScope();
+  const quint64 pos = PruneScope();
+  const quint64 segment_start = current_pipeline_->segment_start();

   // head of the delay queue is the most delayed, so we work with that one
   GstBuffer *buf = reinterpret_cast<GstBuffer *>( g_queue_peek_head(delayq_) );
@@ -316,7 +317,7 @@ void GstEngine::UpdateScope() {
     return;

   // start time for this buffer
-  quint64 stime = GST_BUFFER_TIMESTAMP(buf);
+  quint64 stime = GST_BUFFER_TIMESTAMP(buf) - segment_start;
   // duration of the buffer...
   quint64 dur = GST_BUFFER_DURATION(buf);
   // therefore we can calculate the end time for the buffer
@@ -716,7 +717,8 @@ qint64 GstEngine::PruneScope() {
     return 0;

   // get the position playing in the audio device
-  qint64 pos = current_pipeline_->position();
+  const qint64 pos = current_pipeline_->position();
+  const qint64 segment_start = current_pipeline_->segment_start();

   GstBuffer *buf = 0;
   quint64 etime = 0;
@@ -727,7 +729,7 @@
     buf = reinterpret_cast<GstBuffer *>( g_queue_peek_head(delayq_) );
     if (buf) {
       // the start time of the buffer
-      quint64 stime = GST_BUFFER_TIMESTAMP(buf);
+      quint64 stime = GST_BUFFER_TIMESTAMP(buf) - segment_start;
       // the duration of the buffer
       quint64 dur = GST_BUFFER_DURATION(buf);
       // therefore we can calculate the end time of the buffer


@@ -29,6 +29,7 @@ GstEnginePipeline::GstEnginePipeline(GstEngine* engine)
     engine_(engine),
     valid_(false),
     sink_(GstEngine::kAutoSink),
+    segment_start_(0),
     rg_enabled_(false),
     rg_mode_(0),
     rg_preamp_(0.0),
@@ -145,6 +146,7 @@ bool GstEnginePipeline::Init(const QUrl &url) {
   // so that our visualization are not affected by them
   pad = gst_element_get_pad(scope_element, "src");
   gst_pad_add_buffer_probe(pad, G_CALLBACK(HandoffCallback), this);
+  gst_pad_add_event_probe(pad, G_CALLBACK(EventHandoffCallback), this);
   gst_object_unref (pad);

   // Ensure we get the right type out of audioconvert for our scope
@@ -321,6 +323,20 @@ bool GstEnginePipeline::HandoffCallback(GstPad*, GstBuffer* buf, gpointer self)
   return true;
 }

+bool GstEnginePipeline::EventHandoffCallback(GstPad*, GstEvent* e, gpointer self) {
+  GstEnginePipeline* instance = reinterpret_cast<GstEnginePipeline*>(self);
+
+  if (GST_EVENT_TYPE(e) == GST_EVENT_NEWSEGMENT) {
+    // The segment start time is used to calculate the proper offset of data
+    // buffers from the start of the stream
+    gint64 start = 0;
+    gst_event_parse_new_segment(e, NULL, NULL, NULL, &start, NULL, NULL);
+    instance->segment_start_ = start;
+  }
+
+  return true;
+}
+
 void GstEnginePipeline::SourceDrainedCallback(GstURIDecodeBin* bin, gpointer self) {
   GstEnginePipeline* instance = reinterpret_cast<GstEnginePipeline*>(self);


@@ -72,6 +72,7 @@ class GstEnginePipeline : public QObject {
   qint64 position() const;
   qint64 length() const;
   GstState state() const;
+  qint64 segment_start() const { return segment_start_; }

   QUrl redirect_url() const { return redirect_url_; }
@@ -94,6 +95,7 @@
   static gboolean BusCallback(GstBus*, GstMessage*, gpointer);
   static void NewPadCallback(GstElement*, GstPad*, gpointer);
   static bool HandoffCallback(GstPad*, GstBuffer*, gpointer);
+  static bool EventHandoffCallback(GstPad*, GstEvent*, gpointer);
   static void SourceDrainedCallback(GstURIDecodeBin*, gpointer);
   static bool StopUriDecodeBin(gpointer bin);
   void TagMessageReceived(GstMessage*);
@@ -121,6 +123,7 @@
   // These get called when there is a new audio buffer available
   QList<BufferConsumer*> buffer_consumers_;
   QMutex buffer_consumers_mutex_;
+  qint64 segment_start_;

   // ReplayGain
   bool rg_enabled_;
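
For reference, the newsegment handling added above can be exercised outside Clementine with a throwaway GStreamer 0.10 pipeline. Everything in this sketch is a placeholder (the pipeline string, the chained.ogg path, the OnEvent name); it only demonstrates the gst_pad_add_event_probe() / gst_event_parse_new_segment() calls this commit relies on:

// Standalone sketch, not Clementine code: print the newsegment start values
// seen on a sink pad.  Assumes GStreamer 0.10.
#include <gst/gst.h>

static gboolean OnEvent(GstPad*, GstEvent* e, gpointer) {
  if (GST_EVENT_TYPE(e) == GST_EVENT_NEWSEGMENT) {
    gint64 start = 0;
    gst_event_parse_new_segment(e, NULL, NULL, NULL, &start, NULL, NULL);
    g_print("newsegment start: %" G_GINT64_FORMAT "\n", start);
  }
  return TRUE;  // let the event continue downstream
}

int main(int argc, char** argv) {
  gst_init(&argc, &argv);

  // Placeholder pipeline: decode an Ogg file and discard the audio.
  GError* error = NULL;
  GstElement* pipe = gst_parse_launch(
      "filesrc location=chained.ogg ! oggdemux ! vorbisdec ! audioconvert "
      "! fakesink name=sink", &error);
  if (!pipe) {
    g_printerr("failed to build pipeline: %s\n", error ? error->message : "unknown");
    return 1;
  }

  GstElement* sink = gst_bin_get_by_name(GST_BIN(pipe), "sink");
  GstPad* pad = gst_element_get_pad(sink, "sink");
  gst_pad_add_event_probe(pad, G_CALLBACK(OnEvent), NULL);
  gst_object_unref(pad);
  gst_object_unref(sink);

  gst_element_set_state(pipe, GST_STATE_PLAYING);
  g_main_loop_run(g_main_loop_new(NULL, FALSE));
  return 0;
}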