refactor: move VideoBuffer to Decoder

Barry 2022-04-09 10:12:34 +08:00
parent 32cebd45aa
commit b83c8cbe6f
8 changed files with 129 additions and 89 deletions
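In short: the Decoder now creates and owns its VideoBuffer and hands decoded frames to a callback, instead of the caller sharing a VideoBuffer between the Decoder and the UI. A minimal sketch of the new call site (illustrative only; the parent pointer is a placeholder, and the real call site is in device.cpp below):
Decoder *decoder = new Decoder([](int width, int height,
                                  uint8_t *dataY, uint8_t *dataU, uint8_t *dataV,
                                  int linesizeY, int linesizeU, int linesizeV) {
    // hand the YUV planes to the renderer (VideoForm::updateRender in this project)
}, parent);
decoder->open();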

decoder.cpp

@@ -4,9 +4,20 @@
#include "decoder.h"
#include "videobuffer.h"
Decoder::Decoder(VideoBuffer *vb, QObject *parent) : QObject(parent), m_vb(vb) {}
Decoder::Decoder(std::function<void(int, int, uint8_t*, uint8_t*, uint8_t*, int, int, int)> onFrame, QObject *parent)
: QObject(parent)
, m_vb(new VideoBuffer())
, m_onFrame(onFrame)
{
m_vb->init();
connect(this, &Decoder::newFrame, this, &Decoder::onNewFrame, Qt::QueuedConnection);
connect(m_vb, &VideoBuffer::updateFPS, this, &Decoder::updateFPS);
}
Decoder::~Decoder() {}
Decoder::~Decoder() {
m_vb->deInit();
delete m_vb;
}
bool Decoder::open()
{
@@ -110,6 +121,14 @@ bool Decoder::push(const AVPacket *packet)
return true;
}
void Decoder::peekFrame(std::function<void (int, int, uint8_t *)> onFrame)
{
if (!m_vb) {
return;
}
m_vb->peekRenderedFrame(onFrame);
}
void Decoder::pushFrame()
{
if (!m_vb) {
@@ -121,5 +140,16 @@ void Decoder::pushFrame()
// the previous newFrame will consume this frame
return;
}
emit onNewFrame();
emit newFrame();
}
void Decoder::onNewFrame() {
if (!m_onFrame) {
return;
}
m_vb->lock();
const AVFrame *frame = m_vb->consumeRenderedFrame();
m_onFrame(frame->width, frame->height, frame->data[0], frame->data[1], frame->data[2], frame->linesize[0], frame->linesize[1], frame->linesize[2]);
m_vb->unLock();
}
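A note on the Qt::QueuedConnection in the constructor above: assuming push() is driven from the stream/decode thread while the Decoder object lives in the GUI thread (which is how device.cpp constructs it), the queued newFrame -> onNewFrame hop is what moves frame delivery onto the Decoder's thread, so the onFrame callback can safely touch widgets. A sketch of a callback that checks this (Q_ASSERT, QThread and qApp come from <QtGlobal>, <QThread> and <QCoreApplication>):
// Sketch only: with the queued connection, this lambda runs on the thread that
// owns the Decoder (the GUI thread here), not on the thread that called push().
auto onFrame = [](int width, int height,
                  uint8_t *dataY, uint8_t *dataU, uint8_t *dataV,
                  int linesizeY, int linesizeU, int linesizeV) {
    Q_ASSERT(QThread::currentThread() == qApp->thread());
    // ... update the renderer here ...
};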

decoder.h

@@ -12,23 +12,31 @@ class Decoder : public QObject
{
Q_OBJECT
public:
Decoder(VideoBuffer *vb, QObject *parent = Q_NULLPTR);
Decoder(std::function<void(int width, int height, uint8_t* dataY, uint8_t* dataU, uint8_t* dataV, int linesizeY, int linesizeU, int linesizeV)> onFrame, QObject *parent = Q_NULLPTR);
virtual ~Decoder();
bool open();
void close();
bool push(const AVPacket *packet);
void peekFrame(std::function<void(int width, int height, uint8_t* dataRGB32)> onFrame);
signals:
void updateFPS(quint32 fps);
private slots:
void onNewFrame();
protected:
signals:
void newFrame();
private:
void pushFrame();
private:
VideoBuffer *m_vb = Q_NULLPTR;
AVCodecContext *m_codecCtx = Q_NULLPTR;
bool m_isCodecCtxOpen = false;
std::function<void(int, int, uint8_t*, uint8_t*, uint8_t*, int, int, int)> m_onFrame = Q_NULLPTR;
};
#endif // DECODER_H

videobuffer.cpp

@@ -1,17 +1,20 @@
#include "videobuffer.h"
#include "avframeconvert.h"
extern "C"
{
#include "libavformat/avformat.h"
#include "libavutil/avutil.h"
#include "libavutil/imgutils.h"
}
VideoBuffer::VideoBuffer() {}
VideoBuffer::VideoBuffer(QObject *parent) : QObject(parent) {
connect(&m_fpsCounter, &FpsCounter::updateFPS, this, &VideoBuffer::updateFPS);
}
VideoBuffer::~VideoBuffer() {}
bool VideoBuffer::init(bool renderExpiredFrames)
bool VideoBuffer::init()
{
m_renderExpiredFrames = renderExpiredFrames;
m_decodingFrame = av_frame_alloc();
if (!m_decodingFrame) {
goto error;
@@ -57,6 +60,11 @@ void VideoBuffer::unLock()
m_mutex.unlock();
}
void VideoBuffer::setRenderExpiredFrames(bool renderExpiredFrames)
{
m_renderExpiredFrames = renderExpiredFrames;
}
AVFrame *VideoBuffer::decodingFrame()
{
return m_decodingFrame;
@@ -99,9 +107,51 @@ const AVFrame *VideoBuffer::consumeRenderedFrame()
return m_renderingframe;
}
const AVFrame *VideoBuffer::peekRenderedFrame()
void VideoBuffer::peekRenderedFrame(std::function<void(int width, int height, uint8_t* dataRGB32)> onFrame)
{
return m_renderingframe;
if (!onFrame) {
return;
}
lock();
auto frame = m_renderingframe;
int width = frame->width;
int height = frame->height;
// create buffer
uint8_t* rgbBuffer = new uint8_t[width * height * 4];
AVFrame *rgbFrame = av_frame_alloc();
if (!rgbFrame) {
delete [] rgbBuffer;
return;
}
// bind buffer to AVFrame
av_image_fill_arrays(rgbFrame->data, rgbFrame->linesize, rgbBuffer, AV_PIX_FMT_RGB32, width, height, 4);
// convert
AVFrameConvert convert;
convert.setSrcFrameInfo(width, height, AV_PIX_FMT_YUV420P);
convert.setDstFrameInfo(width, height, AV_PIX_FMT_RGB32);
bool ret = false;
ret = convert.init();
if (!ret) {
delete [] rgbBuffer;
av_free(rgbFrame);
return;
}
ret = convert.convert(frame, rgbFrame);
if (!ret) {
delete [] rgbBuffer;
av_free(rgbFrame);
return;
}
convert.deInit();
av_free(rgbFrame);
unLock();
onFrame(width, height, rgbBuffer);
delete [] rgbBuffer;
}
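Buffer lifetime note for the peekRenderedFrame() overload above: rgbBuffer is deleted as soon as onFrame returns, so a caller that wants to keep the pixels must deep-copy them inside the callback. A minimal sketch, going through Decoder::peekFrame(), which simply forwards here; the file name is a placeholder:
decoder->peekFrame([](int width, int height, uint8_t *dataRGB32) {
    // QImage wraps the buffer without copying, so take a copy before the buffer is freed
    QImage snapshot = QImage(dataRGB32, width, height, QImage::Format_RGB32).copy();
    snapshot.save("screenshot.png", "PNG");
});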
void VideoBuffer::interrupt()
@@ -115,11 +165,6 @@ void VideoBuffer::interrupt()
}
}
FpsCounter *VideoBuffer::getFPSCounter()
{
return &m_fpsCounter;
}
void VideoBuffer::swap()
{
AVFrame *tmp = m_decodingFrame;

videobuffer.h

@@ -3,22 +3,25 @@
#include <QMutex>
#include <QWaitCondition>
#include <QObject>
#include "fpscounter.h"
// forward declarations
typedef struct AVFrame AVFrame;
class VideoBuffer
class VideoBuffer : public QObject
{
Q_OBJECT
public:
VideoBuffer();
VideoBuffer(QObject *parent = Q_NULLPTR);
virtual ~VideoBuffer();
bool init(bool renderExpiredFrames = false);
bool init();
void deInit();
void lock();
void unLock();
void setRenderExpiredFrames(bool renderExpiredFrames);
AVFrame *decodingFrame();
// set the decoder frame as ready for rendering
@@ -32,12 +35,13 @@ public:
// unlocking m_mutex
const AVFrame *consumeRenderedFrame();
const AVFrame *peekRenderedFrame();
void peekRenderedFrame(std::function<void(int width, int height, uint8_t* dataRGB32)> onFrame);
// wake up and avoid any blocking call
void interrupt();
FpsCounter *getFPSCounter();
signals:
void updateFPS(quint32 fps);
private:
void swap();

device.cpp

@@ -2,7 +2,6 @@
#include <QMessageBox>
#include <QTimer>
#include "avframeconvert.h"
#include "config.h"
#include "controller.h"
#include "devicemsg.h"
@@ -13,12 +12,7 @@
#include "recorder.h"
#include "server.h"
#include "stream.h"
#include "videobuffer.h"
#include "videoform.h"
extern "C"
{
#include "libavutil/imgutils.h"
}
Device::Device(DeviceParams params, QObject *parent) : QObject(parent), m_params(params)
{
@@ -29,9 +23,12 @@ Device::Device(DeviceParams params, QObject *parent) : QObject(parent), m_params
}
if (params.display) {
m_vb = new VideoBuffer();
m_vb->init(params.renderExpiredFrames);
m_decoder = new Decoder(m_vb, this);
m_decoder = new Decoder([this](int width, int height, uint8_t* dataY, uint8_t* dataU, uint8_t* dataV, int linesizeY, int linesizeU, int linesizeV) {
if (m_videoForm) {
m_videoForm->updateRender(width, height, dataY, dataU, dataV, linesizeY, linesizeU, linesizeV);
}
}, this);
m_fileHandler = new FileHandler(this);
m_controller = new Controller([this](const QByteArray& buffer) -> qint64 {
if (!m_server || !m_server->getControlSocket()) {
@@ -84,10 +81,6 @@ Device::~Device()
m_recorder->close();
delete m_recorder;
}
if (m_vb) {
m_vb->deInit();
delete m_vb;
}
if (m_videoForm) {
m_videoForm->close();
delete m_videoForm;
@@ -128,14 +121,14 @@ void Device::updateScript(QString script)
void Device::onScreenshot()
{
if (!m_vb) {
if (!m_decoder) {
return;
}
m_vb->lock();
// screenshot
saveFrame(m_vb->peekRenderedFrame());
m_vb->unLock();
m_decoder->peekFrame([this](int width, int height, uint8_t* dataRGB32) {
saveFrame(width, height, dataRGB32);
});
}
void Device::onShowTouch(bool show)
@@ -332,22 +325,8 @@ void Device::initSignals()
}, Qt::DirectConnection);
}
if (m_decoder && m_vb) {
// must be Qt::QueuedConnection, ui update must be main thread
connect(
m_decoder,
&Decoder::onNewFrame,
this,
[this]() {
m_vb->lock();
const AVFrame *frame = m_vb->consumeRenderedFrame();
if (m_videoForm) {
m_videoForm->updateRender(frame);
}
m_vb->unLock();
},
Qt::QueuedConnection);
connect(m_vb->getFPSCounter(), &::FpsCounter::updateFPS, m_videoForm, &VideoForm::updateFPS);
if (m_decoder) {
connect(m_decoder, &Decoder::updateFPS, m_videoForm, &VideoForm::updateFPS);
}
}
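For reference, the FPS path after this refactor, as read from the constructors above: FpsCounter::updateFPS is forwarded by VideoBuffer::updateFPS and then by Decoder::updateFPS, so Device only wires the final hop (a sketch of the chain; the connect line is the one added above):
// FpsCounter::updateFPS -> VideoBuffer::updateFPS -> Decoder::updateFPS -> VideoForm::updateFPS
// The first two connects live in the VideoBuffer and Decoder constructors; Device adds only:
connect(m_decoder, &Decoder::updateFPS, m_videoForm, &VideoForm::updateFPS);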
@@ -412,37 +391,13 @@ bool Device::isCurrentCustomKeymap()
return m_controller->isCurrentCustomKeymap();
}
bool Device::saveFrame(const AVFrame *frame)
bool Device::saveFrame(int width, int height, uint8_t* dataRGB32)
{
if (!frame) {
if (!dataRGB32) {
return false;
}
// create buffer
QImage rgbImage(frame->width, frame->height, QImage::Format_RGB32);
AVFrame *rgbFrame = av_frame_alloc();
if (!rgbFrame) {
return false;
}
// bind buffer to AVFrame
av_image_fill_arrays(rgbFrame->data, rgbFrame->linesize, rgbImage.bits(), AV_PIX_FMT_RGB32, frame->width, frame->height, 4);
// convert
AVFrameConvert convert;
convert.setSrcFrameInfo(frame->width, frame->height, AV_PIX_FMT_YUV420P);
convert.setDstFrameInfo(frame->width, frame->height, AV_PIX_FMT_RGB32);
bool ret = false;
ret = convert.init();
if (!ret) {
return false;
}
ret = convert.convert(frame, rgbFrame);
if (!ret) {
return false;
}
convert.deInit();
av_free(rgbFrame);
QImage rgbImage(dataRGB32, width, height, QImage::Format_RGB32);
// save
QString absFilePath;
@@ -456,7 +411,7 @@ bool Device::saveFrame(const AVFrame *frame)
fileName = Config::getInstance().getTitle() + fileName + ".png";
QDir dir(fileDir);
absFilePath = dir.absoluteFilePath(fileName);
ret = rgbImage.save(absFilePath, "PNG", 100);
int ret = rgbImage.save(absFilePath, "PNG", 100);
if (!ret) {
return false;
}

device.h

@@ -108,7 +108,7 @@ public slots:
private:
void initSignals();
void startServer();
bool saveFrame(const AVFrame *frame);
bool saveFrame(int width, int height, uint8_t* dataRGB32);
private:
// server relevant
@@ -117,7 +117,6 @@ private:
QPointer<Controller> m_controller;
QPointer<FileHandler> m_fileHandler;
QPointer<Stream> m_stream;
VideoBuffer *m_vb = Q_NULLPTR;
Recorder *m_recorder = Q_NULLPTR;
// ui

videoform.cpp

@@ -148,7 +148,7 @@ void VideoForm::showFPS(bool show)
m_fpsLabel->setVisible(show);
}
void VideoForm::updateRender(const AVFrame *frame)
void VideoForm::updateRender(int width, int height, uint8_t* dataY, uint8_t* dataU, uint8_t* dataV, int linesizeY, int linesizeU, int linesizeV)
{
if (m_videoWidget->isHidden()) {
if (m_loadingWidget) {
@@ -157,9 +157,9 @@ void VideoForm::updateRender(const AVFrame *frame)
m_videoWidget->show();
}
updateShowSize(QSize(frame->width, frame->height));
m_videoWidget->setFrameSize(QSize(frame->width, frame->height));
m_videoWidget->updateTextures(frame->data[0], frame->data[1], frame->data[2], frame->linesize[0], frame->linesize[1], frame->linesize[2]);
updateShowSize(QSize(width, height));
m_videoWidget->setFrameSize(QSize(width, height));
m_videoWidget->updateTextures(dataY, dataU, dataV, linesizeY, linesizeU, linesizeV);
}
void VideoForm::showToolForm(bool show)

videoform.h

@@ -9,7 +9,6 @@ namespace Ui
class videoForm;
}
struct AVFrame;
class ToolForm;
class Device;
class FileHandler;
@@ -24,7 +23,7 @@
void staysOnTop(bool top = true);
void updateShowSize(const QSize &newSize);
void updateRender(const AVFrame *frame);
void updateRender(int width, int height, uint8_t* dataY, uint8_t* dataU, uint8_t* dataV, int linesizeY, int linesizeU, int linesizeV);
void setDevice(Device *device);
QRect getGrabCursorRect();
const QSize &frameSize();