From 8ecdb3fdc1a35a2a4f04883d4ba0dd80fe5a3c14 Mon Sep 17 00:00:00 2001
From: f4exb
Date: Mon, 5 Apr 2021 13:13:57 +0200
Subject: [PATCH] DATV demod: player cleanup (4)

---
 plugins/channelrx/demoddatv/datvdemodgui.cpp |   4 +-
 .../channelrx/demoddatv/datvideorender.cpp   | 151 +++++++++---------
 plugins/channelrx/demoddatv/datvideorender.h |  31 ++--
 .../channelrx/demoddatv/datvideostream.cpp   | 143 +++++++++--------
 plugins/channelrx/demoddatv/datvideostream.h |  31 ++--
 5 files changed, 178 insertions(+), 182 deletions(-)

diff --git a/plugins/channelrx/demoddatv/datvdemodgui.cpp b/plugins/channelrx/demoddatv/datvdemodgui.cpp
index e6670bbf4..e9ff3eb1a 100644
--- a/plugins/channelrx/demoddatv/datvdemodgui.cpp
+++ b/plugins/channelrx/demoddatv/datvdemodgui.cpp
@@ -219,7 +219,7 @@ DATVDemodGUI::DATVDemodGUI(PluginAPI* objPluginAPI, DeviceUISet *deviceUISet, Ba
     m_objDATVDemod->setCNRMeter(ui->cnrMeter);
     m_objDATVDemod->SetVideoRender(ui->screenTV_2);

-    connect(m_objDATVDemod->getVideoStream(), &DATVideostream::onDataPackets, this, &DATVDemodGUI::on_StreamDataAvailable);
+    connect(m_objDATVDemod->getVideoStream(), &DATVideostream::fifoData, this, &DATVDemodGUI::on_StreamDataAvailable);
     connect(ui->screenTV_2, &DATVideoRender::onMetaDataChanged, this, &DATVDemodGUI::on_StreamMetaDataChanged);

     m_intPreviousDecodedData=0;
@@ -704,7 +704,7 @@ void DATVDemodGUI::on_chkAllowDrift_clicked()

 void DATVDemodGUI::on_fullScreen_clicked()
 {
-    ui->screenTV_2->SetFullScreen(true);
+    ui->screenTV_2->setFullScreen(true);
 }

 void DATVDemodGUI::on_mouseEvent(QMouseEvent* obj)
diff --git a/plugins/channelrx/demoddatv/datvideorender.cpp b/plugins/channelrx/demoddatv/datvideorender.cpp
index 077e138be..664060f72 100644
--- a/plugins/channelrx/demoddatv/datvideorender.cpp
+++ b/plugins/channelrx/demoddatv/datvideorender.cpp
@@ -75,7 +75,7 @@ bool DATVideoRender::eventFilter(QObject *obj, QEvent *event)
 {
     if (event->type() == QEvent::MouseButtonRelease)
     {
-        SetFullScreen(false);
+        setFullScreen(false);
         return true;
     }
     else
@@ -85,7 +85,7 @@ bool DATVideoRender::eventFilter(QObject *obj, QEvent *event)
     }
 }

-void DATVideoRender::SetFullScreen(bool fullScreen)
+void DATVideoRender::setFullScreen(bool fullScreen)
 {
     if (m_isFullScreen == fullScreen)
     {
@@ -94,7 +94,7 @@
     if (fullScreen == true)
     {
-        qDebug("DATVideoRender::SetFullScreen: go to fullscreen");
+        qDebug("DATVideoRender::setFullScreen: go to fullscreen");
         // m_originalWindowFlags = this->windowFlags();
         // m_originalSize = this->size();
         // m_parentWidget->layout()->removeWidget(this);
@@ -108,7 +108,7 @@
     }
     else
     {
-        qDebug("DATVideoRender::SetFullScreen: come back from fullscreen");
+        qDebug("DATVideoRender::setFullScreen: come back from fullscreen");
         // //this->setParent(m_parentWidget);
         // this->resize(m_originalSize);
         // this->overrideWindowFlags(m_originalWindowFlags);
@@ -122,16 +122,16 @@
     }
 }

-static int ReadFunction(void *opaque, uint8_t *buf, int buf_size)
+int DATVideoRender::ReadFunction(void *opaque, uint8_t *buf, int buf_size)
 {
-    QIODevice *stream = reinterpret_cast<QIODevice *>(opaque);
+    DATVideostream *stream = reinterpret_cast<DATVideostream *>(opaque);
     int nbBytes = stream->read((char *)buf, buf_size);
     return nbBytes;
 }

-static int64_t SeekFunction(void *opaque, int64_t offset, int whence)
+int64_t DATVideoRender::SeekFunction(void *opaque, int64_t offset, int whence)
 {
-    QIODevice *stream = reinterpret_cast<QIODevice *>(opaque);
+    DATVideostream *stream = reinterpret_cast<DATVideostream *>(opaque);
     if (whence == AVSEEK_SIZE)
     {
@@ -151,13 +151,13 @@ static int64_t SeekFunction(void *opaque, int64_t offset, int whence)
     return stream->pos();
 }

-void DATVideoRender::ResetMetaData()
+void DATVideoRender::resetMetaData()
 {
     m_metaData.reset();
     emit onMetaDataChanged(new DataTSMetaData2(m_metaData));
 }

-bool DATVideoRender::PreprocessStream()
+bool DATVideoRender::preprocessStream()
 {
     AVDictionary *opts = nullptr;
     AVCodec *videoCodec = nullptr;
@@ -166,15 +166,14 @@
     int intRet = -1;
     char *buffer = nullptr;

-    ResetMetaData();
+    resetMetaData();

     //Identify stream

     if (avformat_find_stream_info(m_formatCtx, nullptr) < 0)
     {
         avformat_close_input(&m_formatCtx);
-        m_formatCtx = nullptr;
-        qDebug() << "DATVideoRender::PreprocessStream cannot find stream info";
+        qDebug() << "DATVideoRender::preprocessStream cannot find stream info";
         return false;
     }
@@ -184,8 +183,7 @@
     if (intRet < 0)
     {
         avformat_close_input(&m_formatCtx);
-        m_formatCtx = nullptr;
-        qDebug() << "DATVideoRender::PreprocessStream cannot find video stream";
+        qDebug() << "DATVideoRender::preprocessStream cannot find video stream";
         return false;
     }
@@ -195,7 +193,7 @@
     intRet = av_find_best_stream(m_formatCtx, AVMEDIA_TYPE_AUDIO, -1, -1, nullptr, 0);

     if (intRet < 0)
     {
-        qDebug() << "DATVideoRender::PreprocessStream cannot find audio stream";
+        qDebug() << "DATVideoRender::preprocessStream cannot find audio stream";
     }

     m_audioStreamIndex = intRet;
@@ -211,8 +209,6 @@
     m_videoDecoderCtx = avcodec_alloc_context3(nullptr);
     avcodec_parameters_to_context(m_videoDecoderCtx, parms);

-    // m_videoDecoderCtx = m_formatCtx->streams[m_videoStreamIndex]->codec; // old style
-
     //Meta Data
     m_metaData.PID = m_formatCtx->streams[m_videoStreamIndex]->id;
@@ -249,13 +245,12 @@
     if (videoCodec == nullptr)
     {
         avformat_close_input(&m_formatCtx);
-        m_formatCtx = nullptr;
-        qDebug() << "DATVideoRender::PreprocessStream cannot find associated video CODEC";
+        qDebug() << "DATVideoRender::preprocessStream cannot find associated video CODEC";
         return false;
     }
     else
     {
-        qDebug() << "DATVideoRender::PreprocessStream: video CODEC found: " << videoCodec->name;
+        qDebug() << "DATVideoRender::preprocessStream: video CODEC found: " << videoCodec->name;
     }

     av_dict_set(&opts, "refcounted_frames", "1", 0);
@@ -263,8 +258,7 @@
     if (avcodec_open2(m_videoDecoderCtx, videoCodec, &opts) < 0)
     {
         avformat_close_input(&m_formatCtx);
-        m_formatCtx = nullptr;
-        qDebug() << "DATVideoRender::PreprocessStream cannot open associated video CODEC";
+        qDebug() << "DATVideoRender::preprocessStream cannot open associated video CODEC";
         return false;
     }
@@ -274,8 +268,7 @@
     if (!m_frame)
     {
         avformat_close_input(&m_formatCtx);
-        m_formatCtx = nullptr;
-        qDebug() << "DATVideoRender::PreprocessStream cannot allocate frame";
+        qDebug() << "DATVideoRender::preprocessStream cannot allocate frame";
         return false;
     }
@@ -289,7 +282,7 @@
     QString metaStr;
     m_metaData.formatString(metaStr);

-    qDebug() << "DATVideoRender::PreprocessStream: video: " << metaStr;
+    qDebug() << "DATVideoRender::preprocessStream: video: " << metaStr;

     emit onMetaDataChanged(new DataTSMetaData2(m_metaData));

@@ -308,7 +301,7 @@
     //m_audioDecoderCtx = m_formatCtx->streams[m_audioStreamIndex]->codec; // old style
-    qDebug() << "DATVideoRender::PreprocessStream: audio: "
+    qDebug() << "DATVideoRender::preprocessStream: audio: "
         << " channels: " << m_audioDecoderCtx->channels
         << " channel_layout: " << m_audioDecoderCtx->channel_layout
         << " sample_rate: " << m_audioDecoderCtx->sample_rate
@@ -319,16 +312,16 @@
     if (audioCodec == nullptr)
     {
-        qDebug() << "DATVideoRender::PreprocessStream cannot find associated audio CODEC";
+        qDebug() << "DATVideoRender::preprocessStream cannot find associated audio CODEC";
         m_audioStreamIndex = -1; // invalidate audio
     }
     else
     {
-        qDebug() << "DATVideoRender::PreprocessStream: audio CODEC found: " << audioCodec->name;
+        qDebug() << "DATVideoRender::preprocessStream: audio CODEC found: " << audioCodec->name;

         if (avcodec_open2(m_audioDecoderCtx, audioCodec, nullptr) < 0)
         {
-            qDebug() << "DATVideoRender::PreprocessStream cannot open associated audio CODEC";
+            qDebug() << "DATVideoRender::preprocessStream cannot open associated audio CODEC";
             m_audioStreamIndex = -1; // invalidate audio
         }
         else
@@ -341,7 +334,7 @@
     return true;
 }

-bool DATVideoRender::OpenStream(DATVideostream *device)
+bool DATVideoRender::openStream(DATVideostream *device)
 {
     int ioBufferSize = DATVideostream::m_defaultMemoryLimit;
     unsigned char *ptrIOBuffer = nullptr;
@@ -349,13 +342,13 @@
     if (device == nullptr)
     {
-        qDebug() << "DATVideoRender::OpenStream QIODevice is nullptr";
+        qDebug() << "DATVideoRender::openStream QIODevice is nullptr";
         return false;
     }

     if (m_isOpen)
     {
-        qDebug() << "DATVideoRender::OpenStream already open";
+        qDebug() << "DATVideoRender::openStream already open";
         return false;
     }
@@ -363,7 +356,7 @@
     if (device->bytesAvailable() <= 0)
     {
-        qDebug() << "DATVideoRender::OpenStream no data available";
+        qDebug() << "DATVideoRender::openStream no data available";
         m_metaData.OK_Data = false;
         emit onMetaDataChanged(new DataTSMetaData2(m_metaData));
         return false;
     }
@@ -374,7 +367,7 @@
     if (!device->open(QIODevice::ReadOnly))
     {
-        qDebug() << "DATVideoRender::OpenStream cannot open QIODevice";
+        qDebug() << "DATVideoRender::openStream cannot open QIODevice";
         return false;
     }
@@ -384,7 +377,7 @@
     if (m_formatCtx == nullptr)
     {
-        qDebug() << "DATVideoRender::OpenStream cannot alloc format FFMPEG context";
+        qDebug() << "DATVideoRender::openStream cannot alloc format FFMPEG context";
         return false;
     }
@@ -395,9 +388,9 @@
         ioBufferSize,
         0,
         reinterpret_cast<void *>(device),
-        &ReadFunction,
+        &DATVideoRender::ReadFunction,
         nullptr,
-        &SeekFunction
+        &DATVideoRender::SeekFunction
     );

     m_formatCtx->pb = ioCtx;
@@ -405,22 +398,22 @@
     if (avformat_open_input(&m_formatCtx, nullptr, nullptr, nullptr) < 0)
     {
-        qDebug() << "DATVideoRender::OpenStream cannot open stream";
+        qDebug() << "DATVideoRender::openStream cannot open stream";
         return false;
     }

-    if (!PreprocessStream())
+    if (!preprocessStream())
     {
         return false;
     }

-    qDebug("DATVideoRender::OpenStream: successful");
+    qDebug("DATVideoRender::openStream: successful");
     m_isOpen = true;

     return true;
 }

-bool DATVideoRender::RenderStream()
+bool DATVideoRender::renderStream()
 {
     AVPacket packet;
     int gotFrame;
@@ -428,17 +421,24 @@ bool DATVideoRender::RenderStream()
     if (!m_isOpen)
     {
-        qDebug() << "DATVideoRender::RenderStream Stream not open";
+        qDebug() << "DATVideoRender::renderStream Stream not open";
         return false;
     }

     //********** Rendering **********

     if (av_read_frame(m_formatCtx, &packet) < 0)
     {
-        qDebug() << "DATVideoRender::RenderStream reading packet error";
+        qDebug() << "DATVideoRender::renderStream reading packet error";
         return false;
     }

+    if (packet.size == 0)
+    {
+        qDebug() << "DATVideoRender::renderStream packet empty";
+        av_packet_unref(&packet);
+        return true;
+    }
+
     //Video channel
     if ((packet.stream_index == m_videoStreamIndex) && (!m_videoMute))
     {
@@ -447,7 +447,7 @@
         gotFrame = 0;

-        if (new_decode(m_videoDecoderCtx, m_frame, &gotFrame, &packet) >= 0)
+        if (newDecode(m_videoDecoderCtx, m_frame, &gotFrame, &packet) >= 0)
         {
             m_videoDecodeOK = true;
@@ -485,22 +485,24 @@
                 if (sws_init_context(m_swsCtx, nullptr, nullptr) < 0)
                 {
-                    qDebug() << "DATVideoRender::RenderStream cannont init video data converter";
+                    qDebug() << "DATVideoRender::renderStream cannont init video data converter";
                     m_swsCtx = nullptr;
+                    av_packet_unref(&packet);
                     return false;
                 }

                 if ((m_currentRenderHeight > 0) && (m_currentRenderWidth > 0))
                 {
-                    //av_freep(&m_pbytDecodedData[0]);
-                    //av_freep(&m_pintDecodedLineSize[0]);
+                    //av_freep(&m_decodedData[0]);
+                    //av_freep(&m_decodedLineSize[0]);
                 }

-                if (av_image_alloc(m_pbytDecodedData, m_pintDecodedLineSize, m_frame->width, m_frame->height, AV_PIX_FMT_RGB24, 1) < 0)
+                if (av_image_alloc(m_decodedData, m_decodedLineSize, m_frame->width, m_frame->height, AV_PIX_FMT_RGB24, 1) < 0)
                 {
-                    qDebug() << "DATVideoRender::RenderStream cannont init video image buffer";
+                    qDebug() << "DATVideoRender::renderStream cannont init video image buffer";
                     sws_freeContext(m_swsCtx);
                     m_swsCtx = nullptr;
+                    av_packet_unref(&packet);
                     return false;
                 }
@@ -521,13 +523,14 @@
                 //Frame rendering

-                if (sws_scale(m_swsCtx, m_frame->data, m_frame->linesize, 0, m_frame->height, m_pbytDecodedData, m_pintDecodedLineSize) < 0)
+                if (sws_scale(m_swsCtx, m_frame->data, m_frame->linesize, 0, m_frame->height, m_decodedData, m_decodedLineSize) < 0)
                 {
-                    qDebug() << "DATVideoRender::RenderStream error converting video frame to RGB";
+                    qDebug() << "DATVideoRender::renderStream error converting video frame to RGB";
+                    av_packet_unref(&packet);
                     return false;
                 }

-                renderImage(m_pbytDecodedData[0]);
+                renderImage(m_decodedData[0]);
                 av_frame_unref(m_frame);
                 m_frameCount++;
             }
@@ -535,7 +538,7 @@
         else
         {
             m_videoDecodeOK = false;
-            // qDebug() << "DATVideoRender::RenderStream video decode error";
+            // qDebug() << "DATVideoRender::renderStream video decode error";
         }
     }
     // Audio channel
@@ -545,7 +548,7 @@
         av_frame_unref(m_frame);
         gotFrame = 0;

-        if (new_decode(m_audioDecoderCtx, m_frame, &gotFrame, &packet) >= 0)
+        if (newDecode(m_audioDecoderCtx, m_frame, &gotFrame, &packet) >= 0)
         {
             m_audioDecodeOK = true;
@@ -555,7 +558,7 @@
             av_samples_alloc((uint8_t**) &audioBuffer, nullptr, 2, m_frame->nb_samples, AV_SAMPLE_FMT_S16, 0);
             int samples_per_channel = swr_convert(m_audioSWR, (uint8_t**) &audioBuffer, m_frame->nb_samples, (const uint8_t**) m_frame->data, m_frame->nb_samples);
             if (samples_per_channel < m_frame->nb_samples) {
-                qDebug("DATVideoRender::RenderStream: converted samples missing %d/%d returned", samples_per_channel, m_frame->nb_samples);
+                qDebug("DATVideoRender::renderStream: converted samples missing %d/%d returned", samples_per_channel, m_frame->nb_samples);
             }

             // direct writing:
@@ -564,7 +567,7 @@
             });
             int ret = m_audioFifo->write((const quint8*) &audioBuffer[0], samples_per_channel);
             if (ret < samples_per_channel) {
-                // qDebug("DATVideoRender::RenderStream: audio samples missing %d/%d written", ret, samples_per_channel);
+                // qDebug("DATVideoRender::renderStream: audio samples missing %d/%d written", ret, samples_per_channel);
             }

             // buffered writing:
@@ -582,7 +585,7 @@
             // });
             // int ret = m_audioFifo->write((const quint8*) &m_audioFifoBuffer[0], m_audioFifoBufferSize);
             // if (ret < m_audioFifoBufferSize) {
-            //     qDebug("DATVideoRender::RenderStream: audio samples missing %d/%d written", ret, m_audioFifoBufferSize);
+            //     qDebug("DATVideoRender::renderStream: audio samples missing %d/%d written", ret, m_audioFifoBufferSize);
             // }
             // std::copy(&audioBuffer[2*remainder], &audioBuffer[2*samples_per_channel], &m_audioFifoBuffer[0]);
             // m_audioFifoBufferIndex = samples_per_channel - remainder;
@@ -594,7 +597,7 @@
         else
         {
             m_audioDecodeOK = false;
-            // qDebug("DATVideoRender::RenderStream: audio decode error");
+            // qDebug("DATVideoRender::renderStream: audio decode error");
         }
     }
@@ -639,48 +642,40 @@ void DATVideoRender::setResampler()
         << " out_sample_fmt: " << AV_SAMPLE_FMT_S16;
 }

-bool DATVideoRender::CloseStream(QIODevice *device)
+bool DATVideoRender::closeStream(QIODevice *device)
 {
-    qDebug("DATVideoRender::CloseStream");
+    qDebug("DATVideoRender::closeStream");

     if (!device)
     {
-        qDebug() << "DATVideoRender::CloseStream QIODevice is nullptr";
+        qDebug() << "DATVideoRender::closeStream QIODevice is nullptr";
         return false;
     }

     if (!m_isOpen)
     {
-        qDebug() << "DATVideoRender::CloseStream Stream not open";
+        qDebug() << "DATVideoRender::closeStream Stream not open";
         return false;
     }

     if (!m_formatCtx)
     {
-        qDebug() << "DATVideoRender::CloseStream FFMEG Context is not initialized";
+        qDebug() << "DATVideoRender::closeStream FFMEG Context is not initialized";
         return false;
     }

     avformat_close_input(&m_formatCtx);
-    m_formatCtx=nullptr;

-    if (m_videoDecoderCtx)
-    {
-        avcodec_close(m_videoDecoderCtx);
-        m_videoDecoderCtx = nullptr;
+    if (m_videoDecoderCtx) {
+        avcodec_free_context(&m_videoDecoderCtx);
     }

-    if (m_audioDecoderCtx)
-    {
+    if (m_audioDecoderCtx) {
         avcodec_free_context(&m_audioDecoderCtx);
-        avcodec_close(m_audioDecoderCtx);
-        m_audioDecoderCtx = nullptr;
     }

-    if (m_audioSWR)
-    {
+    if (m_audioSWR) {
         swr_free(&m_audioSWR);
-        m_audioSWR = nullptr;
     }

     if (m_frame)
@@ -700,7 +695,7 @@ bool DATVideoRender::CloseStream(QIODevice *device)
     m_currentRenderWidth = -1;
     m_currentRenderHeight = -1;

-    ResetMetaData();
+    resetMetaData();

     return true;
 }
@@ -709,7 +704,7 @@
  * Replacement of deprecated avcodec_decode_video2 with the same signature
  * https://blogs.gentoo.org/lu_zero/2016/03/29/new-avcodec-api/
  */
-int DATVideoRender::new_decode(AVCodecContext *avctx, AVFrame *frame, int *got_frame, AVPacket *pkt)
+int DATVideoRender::newDecode(AVCodecContext *avctx, AVFrame *frame, int *got_frame, AVPacket *pkt)
 {
     int ret;
diff --git a/plugins/channelrx/demoddatv/datvideorender.h b/plugins/channelrx/demoddatv/datvideorender.h
index 2446c4057..84e797bce 100644
--- a/plugins/channelrx/demoddatv/datvideorender.h
+++ b/plugins/channelrx/demoddatv/datvideorender.h
@@ -110,12 +110,12 @@ class DATVideoRender : public TVScreen
     explicit DATVideoRender(QWidget *parent);
     ~DATVideoRender();

-    void SetFullScreen(bool blnFullScreen);
+    void setFullScreen(bool blnFullScreen);

     void setAudioFIFO(AudioFifo *fifo) { m_audioFifo = fifo; }

-    bool OpenStream(DATVideostream *objDevice);
-    bool RenderStream();
-    bool CloseStream(QIODevice *objDevice);
+    bool openStream(DATVideostream *objDevice);
+    bool renderStream();
+    bool closeStream(QIODevice *objDevice);

     int getVideoStreamIndex() const { return m_videoStreamIndex; }
     int getAudioStreamIndex() const { return m_audioStreamIndex; }
@@ -128,16 +128,12 @@ class DATVideoRender : public TVScreen
     bool getVideoDecodeOK() const { return m_videoDecodeOK; }

 private:
-
     struct DataTSMetaData2 m_metaData;
     QWidget *m_parentWidget;
     Qt::WindowFlags m_originalWindowFlags;
     QSize m_originalSize;
-
     bool m_isFullScreen;
-
     bool m_isOpen;
-
     SwsContext *m_swsCtx;
     AVFormatContext *m_formatCtx;
     AVCodecContext *m_videoDecoderCtx;
@@ -153,8 +149,8 @@ class DATVideoRender : public TVScreen
     bool m_videoMute;
     float m_audioVolume;

-    uint8_t *m_pbytDecodedData[4];
-    int m_pintDecodedLineSize[4];
+    uint8_t *m_decodedData[4];
+    int m_decodedLineSize[4];

     int m_frameCount;
     int m_videoStreamIndex;
@@ -166,10 +162,13 @@ class DATVideoRender : public TVScreen
     bool m_audioDecodeOK;
     bool m_videoDecodeOK;

-    bool PreprocessStream();
-    void ResetMetaData();
+    static int ReadFunction(void *opaque, uint8_t *buf, int buf_size);
+    static int64_t SeekFunction(void *opaque, int64_t offset, int whence);

-    int new_decode(AVCodecContext *avctx, AVFrame *frame, int *got_frame, AVPacket *pkt);
+    bool preprocessStream();
+    void resetMetaData();
+
+    int newDecode(AVCodecContext *avctx, AVFrame *frame, int *got_frame, AVPacket *pkt);
     void setResampler();

 protected:
@@ -214,7 +213,7 @@ class DATVideoRenderThread : public QThread
             return;
         }

-        m_renderingVideo = m_renderer->OpenStream(m_stream);
+        m_renderingVideo = m_renderer->openStream(m_stream);

         if (!m_renderingVideo) {
             return;
@@ -222,12 +221,12 @@ class DATVideoRenderThread : public QThread
         while ((m_renderingVideo == true) && (m_renderer))
         {
-            if (!m_renderer->RenderStream()) {
+            if (!m_renderer->renderStream()) {
                 break;
             }
         }

-        m_renderer->CloseStream(m_stream);
+        m_renderer->closeStream(m_stream);
         m_renderingVideo = false;
     }
diff --git a/plugins/channelrx/demoddatv/datvideostream.cpp b/plugins/channelrx/demoddatv/datvideostream.cpp
index 4eba45b4a..de268e945 100644
--- a/plugins/channelrx/demoddatv/datvideostream.cpp
+++ b/plugins/channelrx/demoddatv/datvideostream.cpp
@@ -20,51 +20,51 @@
 #include

 DATVideostream::DATVideostream():
-    m_objMutex(QMutex::NonRecursive)
+    m_mutex(QMutex::NonRecursive)
 {
     cleanUp();
-    m_intTotalReceived = 0;
-    m_intPacketReceived = 0;
-    m_intMemoryLimit = m_defaultMemoryLimit;
+    m_totalReceived = 0;
+    m_packetReceived = 0;
+    m_memoryLimit = m_defaultMemoryLimit;
     m_multiThreaded = false;
     m_threadTimeout = -1;

-    m_objeventLoop.connect(this,SIGNAL(onDataAvailable()), &m_objeventLoop, SLOT(quit()),Qt::QueuedConnection);
+    m_eventLoop.connect(this, SIGNAL(dataAvailable()), &m_eventLoop, SLOT(quit()), Qt::QueuedConnection);
 }

 DATVideostream::~DATVideostream()
 {
-    m_objeventLoop.disconnect(this,SIGNAL(onDataAvailable()), &m_objeventLoop, SLOT(quit()));
+    m_eventLoop.disconnect(this, SIGNAL(dataAvailable()), &m_eventLoop, SLOT(quit()));
     cleanUp();
 }

 void DATVideostream::cleanUp()
 {
-    if (m_objFIFO.size() > 0) {
-        m_objFIFO.clear();
+    if (m_fifo.size() > 0) {
+        m_fifo.clear();
     }

-    if (m_objeventLoop.isRunning()) {
-        m_objeventLoop.exit();
+    if (m_eventLoop.isRunning()) {
+        m_eventLoop.exit();
     }

-    m_intBytesAvailable = 0;
-    m_intBytesWaiting = 0;
-    m_intPercentBuffer = 0;
+    m_bytesAvailable = 0;
+    m_bytesWaiting = 0;
+    m_percentBuffer = 0;
 }

 void DATVideostream::resetTotalReceived()
 {
-    m_intTotalReceived = 0;
-    emit onDataPackets(&m_intBytesWaiting, &m_intPercentBuffer, &m_intTotalReceived);
+    m_totalReceived = 0;
+    emit fifoData(&m_bytesWaiting, &m_percentBuffer, &m_totalReceived);
 }

 void DATVideostream::setMultiThreaded(bool multiThreaded)
 {
     if (multiThreaded)
     {
-        if (m_objeventLoop.isRunning()) {
-            m_objeventLoop.exit();
+        if (m_eventLoop.isRunning()) {
+            m_eventLoop.exit();
         }
     }
@@ -77,34 +77,30 @@ int DATVideostream::pushData(const char * chrData, int intSize)
         return 0;
     }

-    m_objMutex.lock();
+    m_mutex.lock();

-    m_intPacketReceived++;
-    m_intBytesWaiting += intSize;
+    m_packetReceived++;
+    m_bytesWaiting += intSize;

-    if (m_intBytesWaiting > m_intMemoryLimit) {
-        m_intBytesWaiting -= m_objFIFO.dequeue().size();
+    if (m_bytesWaiting > m_memoryLimit) {
+        m_bytesWaiting -= m_fifo.dequeue().size();
     }

-    m_objFIFO.enqueue(QByteArray(chrData,intSize));
-    m_intBytesAvailable = m_objFIFO.head().size();
-    m_intTotalReceived += intSize;
+    m_fifo.enqueue(QByteArray(chrData,intSize));
+    m_bytesAvailable = m_fifo.head().size();
+    m_totalReceived += intSize;

-    m_objMutex.unlock();
+    m_mutex.unlock();

-    if (m_objeventLoop.isRunning()) {
-        emit onDataAvailable();
+    if (m_eventLoop.isRunning()) {
+        emit dataAvailable();
     }

-    if (m_intPacketReceived % m_minStackSize == 1)
-    {
-        m_intPercentBuffer = (100*m_intBytesWaiting)/m_intMemoryLimit;
+    m_percentBuffer = (100*m_bytesWaiting) / m_memoryLimit;
+    m_percentBuffer = m_percentBuffer > 100 ? 100 : m_percentBuffer;

-        if (m_intPercentBuffer > 100) {
-            m_intPercentBuffer = 100;
-        }
-
-        emit onDataPackets(&m_intBytesWaiting, &m_intPercentBuffer, &m_intTotalReceived);
+    if (m_packetReceived % 10 == 1) {
+        emit fifoData(&m_bytesWaiting, &m_percentBuffer, &m_totalReceived);
     }

     return intSize;
@@ -117,7 +113,7 @@ bool DATVideostream::isSequential() const

 qint64 DATVideostream::bytesAvailable() const
 {
-    return m_intBytesAvailable;
+    return m_bytesAvailable;
 }

 void DATVideostream::close()
@@ -136,40 +132,38 @@ bool DATVideostream::open(OpenMode mode)

 qint64 DATVideostream::readData(char *data, qint64 len)
 {
-    QByteArray objCurrentArray;
-    int intEffectiveLen = 0;
-    int intExpectedLen = 0;
-    int intThreadLoop = 0;
+    QByteArray currentArray;
+    int effectiveLen = 0;
+    int expectedLen = (int) len;
+    int threadLoop = 0;

-    intExpectedLen = (int) len;
-
-    if (intExpectedLen <= 0) {
+    if (expectedLen <= 0) {
         return 0;
     }

-    if (m_objeventLoop.isRunning()) {
+    if (m_eventLoop.isRunning()) {
         return 0;
     }

-    m_objMutex.lock();
+    m_mutex.lock();

     //DATA in FIFO ? -> Waiting for DATA
-    if ((m_objFIFO.isEmpty()) || (m_objFIFO.count() < m_minStackSize))
+    if ((m_fifo.isEmpty()) || (m_fifo.count() < m_minStackSize))
     {
-        m_objMutex.unlock();
+        m_mutex.unlock();

         if (m_multiThreaded == true)
         {
-            intThreadLoop=0;
+            threadLoop = 0;

-            while ((m_objFIFO.isEmpty()) || (m_objFIFO.count() < m_minStackSize))
+            while ((m_fifo.isEmpty()) || (m_fifo.count() < m_minStackSize))
             {
                 QThread::msleep(5);
-                intThreadLoop++;
+                threadLoop++;

                 if (m_threadTimeout >= 0)
                 {
-                    if (intThreadLoop*5 > m_threadTimeout) {
+                    if (threadLoop*5 > m_threadTimeout) {
                         return -1;
                     }
                 }
@@ -177,42 +171,53 @@ qint64 DATVideostream::readData(char *data, qint64 len)
         }
         else
         {
-            m_objeventLoop.exec();
+            m_eventLoop.exec();
         }

-        m_objMutex.lock();
+        m_mutex.lock();
     }

     //Read DATA
-    intEffectiveLen = m_objFIFO.head().size();
+    effectiveLen = m_fifo.head().size();

-    if (intExpectedLen < intEffectiveLen)
+    if (expectedLen < effectiveLen)
     {
         //Partial Read
-        objCurrentArray = m_objFIFO.head();
-        memcpy((void *)data,objCurrentArray.constData(),intExpectedLen);
-        m_objFIFO.head().remove(0,intExpectedLen);
-        intEffectiveLen = intExpectedLen;
-        m_intBytesWaiting -= intExpectedLen;
+        currentArray = m_fifo.head();
+        std::copy(
+            currentArray.constData(),
+            currentArray.constData() + expectedLen,
+            data
+        );
+        m_fifo.head().remove(0, expectedLen);
+        effectiveLen = expectedLen;
+        m_bytesWaiting -= expectedLen;
     }
     else
     {
         //Complete Read
-        objCurrentArray = m_objFIFO.dequeue();
-        memcpy((void *)data,objCurrentArray.constData(),intEffectiveLen);
-        m_intBytesWaiting -= intEffectiveLen;
+        currentArray = m_fifo.dequeue();
+        std::copy(
+            currentArray.constData(),
+            currentArray.constData() + effectiveLen,
+            data
+        );
+        m_bytesWaiting -= effectiveLen;
     }

-    m_intPercentBuffer = (100*m_intBytesWaiting) / m_intMemoryLimit;
+    m_percentBuffer = (100*m_bytesWaiting) / m_memoryLimit;
+    m_percentBuffer = m_percentBuffer > 100 ? 100 : m_percentBuffer;
-    emit onDataPackets(&m_intBytesWaiting, &m_intPercentBuffer, &m_intTotalReceived);
+    if (m_packetReceived % 10 == 0) {
+        emit fifoData(&m_bytesWaiting, &m_percentBuffer, &m_totalReceived);
+    }

     //Next available DATA
-    m_intBytesAvailable = m_objFIFO.head().size();
+    m_bytesAvailable = m_fifo.head().size();

-    m_objMutex.unlock();
+    m_mutex.unlock();

-    return (qint64)intEffectiveLen;
+    return (qint64) effectiveLen;
 }

 qint64 DATVideostream::writeData(const char *data, qint64 len)
diff --git a/plugins/channelrx/demoddatv/datvideostream.h b/plugins/channelrx/demoddatv/datvideostream.h
index 3436ec2b4..4d8cbc3ab 100644
--- a/plugins/channelrx/demoddatv/datvideostream.h
+++ b/plugins/channelrx/demoddatv/datvideostream.h
@@ -32,10 +32,7 @@ class DATVideostream : public QIODevice
 public:
     DATVideostream();
-    ~DATVideostream();
-
-    static const int m_defaultMemoryLimit = 2820000;
-    static const int m_minStackSize = 4;
+    virtual ~DATVideostream();

     int pushData(const char * chrData, int intSize);
     void resetTotalReceived();
@@ -48,31 +45,31 @@ public:
     virtual void close();
     virtual bool open(OpenMode mode);

-    QQueue<QByteArray> m_objFIFO;
+    static const int m_defaultMemoryLimit = 2820000;
+    static const int m_minStackSize = 4;

 signals:
-
-    void onDataAvailable();
-    void onDataPackets(int *intDataBytes, int *intPercentBuffer,qint64 *intTotalReceived);
+    void dataAvailable();
+    void fifoData(int *intDataBytes, int *intPercentBuffer, qint64 *intTotalReceived);

 protected:
-
     virtual qint64 readData(char *data, qint64 len);
     virtual qint64 writeData(const char *data, qint64 len);
     virtual qint64 readLineData(char *data, qint64 maxSize);

 private:
+    QQueue<QByteArray> m_fifo;
     bool m_multiThreaded;
     int m_threadTimeout;

-    QEventLoop m_objeventLoop;
-    QMutex m_objMutex;
-    int m_intMemoryLimit;
-    int m_intBytesAvailable;
-    int m_intBytesWaiting;
-    int m_intPercentBuffer;
-    qint64 m_intTotalReceived;
-    qint64 m_intPacketReceived;
+    QEventLoop m_eventLoop;
+    QMutex m_mutex;
+    int m_memoryLimit;
+    int m_bytesAvailable;
+    int m_bytesWaiting;
+    int m_percentBuffer;
+    qint64 m_totalReceived;
+    qint64 m_packetReceived;
 };