Preliminary video display complete

rankun 2018-10-25 01:59:46 +08:00
commit 9b92e79cfe
10 changed files with 250 additions and 53 deletions

qmake project file (.pro)

@ -31,7 +31,8 @@ SOURCES += \
decoder.cpp \
server.cpp \
convert.cpp \
glyuvwidget.cpp
glyuvwidget.cpp \
frames.cpp
HEADERS += \
dialog.h \
@ -39,7 +40,8 @@ HEADERS += \
decoder.h \
server.h \
convert.h \
glyuvwidget.h
glyuvwidget.h \
frames.h
FORMS += \
dialog.ui

src/convert.h

@ -25,7 +25,6 @@ public:
bool isInit();
void deInit();
bool convert(AVFrame* srcFrame, AVFrame* dstFrame);
//int srcW, int srcH, enum AVPixelFormat srcFormat,int dstW, int dstH, enum AVPixelFormat dstFormat,
private:
quint32 m_srcWidth = 0;

src/decoder.cpp

@ -1,6 +1,7 @@
#include <QDebug>
#include "decoder.h"
#include "frames.h"
#define BUFSIZE 0x10000
@ -30,6 +31,11 @@ void Decoder::deInit()
avformat_network_deinit(); // ignore failure
}
void Decoder::setFrames(Frames *frames)
{
m_frames = frames;
}
static qint32 readPacket(void *opaque, quint8 *buf, qint32 bufSize) {
Decoder *decoder = (Decoder*)opaque;
if (decoder) {
@ -77,6 +83,9 @@ bool Decoder::startDecode()
void Decoder::stopDecode()
{
m_quit = true;
if (m_frames) {
m_frames->stop();
}
wait();
}
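The ordering in stopDecode() matters: in the non-SKIP_FRAMES build, offerDecodedFrame() can block the decoder thread on the consumed-frame condition, so Frames::stop() must wake it before wait() tries to join. A sketch of the same shutdown sequence with the reasoning spelled out in comments (using the names this commit introduces):

    void Decoder::stopDecode()
    {
        m_quit = true;         // ask run() to leave its packet loop
        if (m_frames) {
            m_frames->stop();  // sets m_stopped and wakes a decoder blocked
                               // in offerDecodedFrame(); without this, the
                               // following wait() could deadlock
        }
        wait();                // join the decoder thread
    }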
@ -87,13 +96,6 @@ void Decoder::run()
AVFormatContext *formatCtx = Q_NULLPTR;
AVCodec *codec = Q_NULLPTR;
AVCodecContext *codecCtx = Q_NULLPTR;
// frame is stand alone
AVFrame* yuvDecoderFrame = Q_NULLPTR;
AVFrame* rgbDecoderFrame = Q_NULLPTR;
yuvDecoderFrame = av_frame_alloc();
rgbDecoderFrame = av_frame_alloc();
bool isFormatCtxOpen = false;
bool isCodecCtxOpen = false;
@ -152,6 +154,7 @@ void Decoder::run()
packet.size = 0;
while (!m_quit && !av_read_frame(formatCtx, &packet)) {
AVFrame* decodingFrame = m_frames->decodingFrame();
// the new decoding/encoding API has been introduced by:
// <http://git.videolan.org/?p=ffmpeg.git;a=commitdiff;h=7fc329e2dd6226dfecaa4a1d7adf353bf2773726>
#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(57, 37, 0)
@ -160,18 +163,21 @@ void Decoder::run()
qCritical("Could not send video packet: %d", ret);
goto runQuit;
}
ret = avcodec_receive_frame(codecCtx, yuvDecoderFrame);
if (decodingFrame) {
ret = avcodec_receive_frame(codecCtx, decodingFrame);
}
if (!ret) {
// a frame was received
emit getOneFrame(yuvDecoderFrame->data[0], yuvDecoderFrame->data[1], yuvDecoderFrame->data[2],
yuvDecoderFrame->linesize[0], yuvDecoderFrame->linesize[1], yuvDecoderFrame->linesize[2]);
qDebug() << "emit getOneFrame";
pushFrame();
//emit getOneFrame(yuvDecoderFrame->data[0], yuvDecoderFrame->data[1], yuvDecoderFrame->data[2],
// yuvDecoderFrame->linesize[0], yuvDecoderFrame->linesize[1], yuvDecoderFrame->linesize[2]);
/*
// m_conver converted YUV to RGB on the CPU, which used too much CPU, so we render YUV with OpenGL instead
// QImage::copy() also uses a lot of memory, so that approach was ruled out
if (!m_conver.isInit()) {
qDebug() << "decoder frame format" << yuvDecoderFrame->format;
qDebug() << "decoder frame format" << decodingFrame->format;
m_conver.setSrcFrameInfo(codecCtx->width, codecCtx->height, AV_PIX_FMT_YUV420P);
m_conver.setDstFrameInfo(codecCtx->width, codecCtx->height, AV_PIX_FMT_RGB32);
m_conver.init();
@ -180,7 +186,7 @@ void Decoder::run()
outBuffer=new quint8[avpicture_get_size(AV_PIX_FMT_RGB32, codecCtx->width, codecCtx->height)];
avpicture_fill((AVPicture *)rgbDecoderFrame, outBuffer, AV_PIX_FMT_RGB32, codecCtx->width, codecCtx->height);
}
m_conver.convert(yuvDecoderFrame, rgbDecoderFrame);
m_conver.convert(decodingFrame, rgbDecoderFrame);
//QImage tmpImg((uchar *)outBuffer, codecCtx->width, codecCtx->height, QImage::Format_RGB32);
//QImage image = tmpImg.copy();
//emit getOneImage(image);
@ -193,13 +199,16 @@ void Decoder::run()
#else
while (packet.size > 0) {
int gotPicture = 0;
int len = avcodec_decode_video2(codecCtx, yuvDecoderFrame, &gotpicture, &packet);
int len = -1;
if (decodingFrame) {
len = avcodec_decode_video2(codecCtx, decodingFrame, &gotPicture, &packet);
}
if (len < 0) {
qCritical("Could not decode video packet: %d", len);
goto runQuit;
}
if (gotPicture) {
//push_frame(decoder);
pushFrame();
}
packet.size -= len;
packet.data += len;
@ -230,17 +239,18 @@ runQuit:
avcodec_free_context(&codecCtx);
}
if (yuvDecoderFrame) {
av_free(yuvDecoderFrame);
}
if (rgbDecoderFrame) {
av_free(rgbDecoderFrame);
}
m_conver.deInit();
if (m_deviceSocket) {
m_deviceSocket->disconnectFromHost();
delete m_deviceSocket;
}
//notify_stopped();
}
void Decoder::pushFrame()
{
bool previousFrameConsumed = m_frames->offerDecodedFrame();
if (!previousFrameConsumed) {
// the handler of the previously emitted newFrame will consume this frame
return;
}
emit newFrame();
}
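pushFrame() is the producer half of the handoff: offerDecodedFrame() swaps the buffers, and newFrame is emitted only if the previous frame had already been consumed, so at most one newFrame event is ever pending in the GUI queue. With SKIP_FRAMES defined this becomes a latest-frame-wins mailbox; a minimal hypothetical distillation of that pattern, independent of FFmpeg (all names below are invented for illustration):

    #include <QMutex>

    // "Latest value wins": the producer always overwrites; a notification
    // is due only when the consumer had already taken the previous value.
    template <typename T>
    class Mailbox
    {
    public:
        bool offer(const T &value)          // returns: should we notify?
        {
            QMutexLocker locker(&m_mutex);
            bool consumed = m_consumed;
            m_value = value;
            m_consumed = false;
            return consumed;
        }
        T take()
        {
            QMutexLocker locker(&m_mutex);
            m_consumed = true;
            return m_value;
        }
    private:
        QMutex m_mutex;
        T m_value {};
        bool m_consumed = true;             // empty counts as consumed
    };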

src/decoder.h

@ -4,15 +4,18 @@
#include <QThread>
#include <QTcpSocket>
#include <QPointer>
#include <QImage>
#include <QMutex>
//#include "convert.h"
#include "convert.h"
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
}
class Frames;
class Decoder : public QThread
{
Q_OBJECT
@ -24,21 +27,24 @@ public:
static bool init();
static void deInit();
void setFrames(Frames* frames);
void setDeviceSocket(QTcpSocket* deviceSocket);
qint32 recvData(quint8* buf, qint32 bufSize);
bool startDecode();
void stopDecode();
signals:
void getOneFrame(quint8* bufferY, quint8* bufferU, quint8* bufferV, quint32 linesizeY, quint32 linesizeU, quint32 linesizeV);
void newFrame();
protected:
void run();
void pushFrame();
private:
QPointer<QTcpSocket> m_deviceSocket = Q_NULLPTR;
QMutex m_mutex;
bool m_quit = false;
Convert m_conver;
Frames* m_frames = Q_NULLPTR;
};
#endif // DECODER_H

src/dialog.cpp

@ -10,11 +10,15 @@ Dialog::Dialog(QWidget *parent) :
{
ui->setupUi(this);
GLYuvWidget* w = new GLYuvWidget(this);
w = new GLYuvWidget(this);
w->resize(ui->imgLabel->size());
w->move(230, 20);
Decoder::init();
frames.init();
decoder.setFrames(&frames);
server = new Server();
connect(server, &Server::serverStartResult, this, [this](bool success){
if (success) {
@ -30,28 +34,25 @@ Dialog::Dialog(QWidget *parent) :
});
// must be Qt::QueuedConnection, ui update must be main thread
QObject::connect(&decoder, &Decoder::getOneFrame,w,&GLYuvWidget::slotShowYuv,
Qt::QueuedConnection);
/*
// must be Qt::QueuedConnection, ui update must be main thread
connect(&decoder, &Decoder::getOneImage, this, [this](QImage img){
qDebug() << "getOneImage";
return;
//18% cpu
// scale the image to the same size as the window
QImage img2 = img.scaled(ui->imgLabel->size(), Qt::IgnoreAspectRatio);
ui->imgLabel->setPixmap(QPixmap::fromImage(img2));
//24% cpu
}, Qt::QueuedConnection);
*/
QObject::connect(&decoder, &Decoder::newFrame, this, [this](){
frames.lock();
const AVFrame *frame = frames.consumeRenderedFrame();
w->setVideoSize(frame->width, frame->height);
/*
if (!prepare_for_frame(screen, new_frame_size)) {
mutex_unlock(frames->mutex);
return SDL_FALSE;
}
*/
w->updateTexture(frame->data[0], frame->data[1], frame->data[2], frame->linesize[0], frame->linesize[1], frame->linesize[2]);
frames.unLock();
},Qt::QueuedConnection);
}
Dialog::~Dialog()
{
Decoder::deInit();
frames.deInit();
delete ui;
}
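The connect() calls here pass Qt::QueuedConnection because Decoder emits from its worker thread while GLYuvWidget must only be touched from the GUI thread: a queued connection posts the slot invocation into the receiver thread's event loop instead of invoking it synchronously. A hypothetical sketch of the general pattern (Worker and refreshUi are invented names):

    // The lambda runs later, in the thread that owns `dialog` (the GUI
    // thread), so touching widgets inside it is safe.
    QObject::connect(&worker, &Worker::finished, dialog, [dialog]() {
        dialog->refreshUi();
    }, Qt::QueuedConnection);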

src/dialog.h

@ -5,11 +5,13 @@
#include "server.h"
#include "decoder.h"
#include "frames.h"
namespace Ui {
class Dialog;
}
class GLYuvWidget;
class Dialog : public QDialog
{
Q_OBJECT
@ -29,6 +31,8 @@ private:
Ui::Dialog *ui;
Server* server;
Decoder decoder;
Frames frames;
GLYuvWidget* w;
};
#endif // DIALOG_H

src/frames.cpp (new file, 117 lines)

@ -0,0 +1,117 @@
#include "frames.h"
extern "C"
{
#include "libavutil/avutil.h"
#include "libavformat/avformat.h"
}
Frames::Frames()
{
}
Frames::~Frames()
{
}
bool Frames::init()
{
m_decodingFrame = av_frame_alloc();
if (!m_decodingFrame) {
goto error;
}
m_renderingFrame = av_frame_alloc();
if (!m_renderingFrame) {
goto error;
}
// there is initially no rendering frame, so consider it has already been
// consumed
m_renderingFrameConsumed = true;
return true;
error:
deInit();
return false;
}
void Frames::deInit()
{
if (m_decodingFrame) {
av_frame_free(&m_decodingFrame);
m_decodingFrame = Q_NULLPTR;
}
if (m_renderingFrame) {
av_frame_free(&m_renderingFrame);
m_renderingFrame = Q_NULLPTR;
}
}
void Frames::lock()
{
m_mutex.lock();
}
void Frames::unLock()
{
m_mutex.unlock();
}
AVFrame *Frames::decodingFrame()
{
return m_decodingFrame;
}
bool Frames::offerDecodedFrame()
{
m_mutex.lock();
#ifndef SKIP_FRAMES
// if SKIP_FRAMES is disabled, then the decoder must wait for the current
// frame to be consumed
while (!m_renderingFrameConsumed && !m_stopped) {
m_renderingFrameConsumedCond.wait(&m_mutex);
}
#endif
swap();
bool previousFrameConsumed = m_renderingFrameConsumed;
m_renderingFrameConsumed = false;
m_mutex.unlock();
return previousFrameConsumed;
}
const AVFrame *Frames::consumeRenderedFrame()
{
Q_ASSERT(!m_renderingFrameConsumed);
m_renderingFrameConsumed = true;
#ifndef SKIP_FRAMES
// if SKIP_FRAMES is disabled, then notify the decoder the current frame is
// consumed, so that it may push a new one
m_renderingFrameConsumedCond.wakeOne();
#endif
return m_renderingFrame;
}
void Frames::stop()
{
#ifndef SKIP_FRAMES
m_mutex.lock();
m_stopped = true;
m_mutex.unlock();
// wake up blocking wait
m_renderingFrameConsumedCond.wakeOne();
#endif
}
void Frames::swap()
{
AVFrame *tmp = m_decodingFrame;
m_decodingFrame = m_renderingFrame;
m_renderingFrame = tmp;
}
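Frames is a two-slot double buffer: the decoder always writes into m_decodingFrame, and offerDecodedFrame() swaps the two pointers under the mutex, so the renderer only ever sees a complete frame and no pixel data is copied. A usage sketch of the protocol; the driver functions are hypothetical, only the Frames, GLYuvWidget and FFmpeg calls come from this commit:

    #include "frames.h"
    #include "glyuvwidget.h"
    extern "C" {
    #include "libavcodec/avcodec.h"
    }

    // decoder thread side (hypothetical driver)
    void decodeOne(Frames &frames, AVCodecContext *codecCtx)
    {
        AVFrame *dst = frames.decodingFrame();   // the back buffer
        if (avcodec_receive_frame(codecCtx, dst) == 0) {
            if (frames.offerDecodedFrame()) {    // swap back/front
                // previous frame was consumed: emit newFrame() once
            }
        }
    }

    // GUI thread side, inside the newFrame handler (hypothetical driver)
    void renderOne(Frames &frames, GLYuvWidget *w)
    {
        frames.lock();                           // blocks the next swap
        const AVFrame *frame = frames.consumeRenderedFrame();
        w->setVideoSize(frame->width, frame->height);
        w->updateTexture(frame->data[0], frame->data[1], frame->data[2],
                         frame->linesize[0], frame->linesize[1], frame->linesize[2]);
        frames.unLock();                         // decoder may swap again
    }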

src/frames.h (new file, 50 lines)

@ -0,0 +1,50 @@
#ifndef FRAMES_H
#define FRAMES_H
#include <QMutex>
#include <QWaitCondition>
// forward declarations
typedef struct AVFrame AVFrame;
class Frames
{
public:
Frames();
virtual ~Frames();
bool init();
void deInit();
void lock();
void unLock();
AVFrame* decodingFrame();
// set the decoder frame as ready for rendering
// this function locks m_mutex during its execution
// returns true if the previous frame had been consumed
bool offerDecodedFrame();
// mark the rendering frame as consumed and return it
// MUST be called with m_mutex locked!!!
// the caller is expected to render the returned frame to some texture before
// unlocking m_mutex
const AVFrame* consumeRenderedFrame();
// wake up and avoid any blocking call
void stop();
private:
void swap();
private:
AVFrame* m_decodingFrame = Q_NULLPTR;
AVFrame* m_renderingFrame = Q_NULLPTR;
QMutex m_mutex;
bool m_renderingFrameConsumed = true;
#ifndef SKIP_FRAMES
QWaitCondition m_renderingFrameConsumedCond;
bool m_stopped = false;
#endif
};
#endif // FRAMES_H

src/glyuvwidget.cpp

@ -22,7 +22,13 @@ GLYuvWidget::~GLYuvWidget()
doneCurrent();
}
void GLYuvWidget::slotShowYuv(quint8* bufferY, quint8* bufferU, quint8* bufferV, quint32 linesizeY, quint32 linesizeU, quint32 linesizeV)
void GLYuvWidget::setVideoSize(quint32 videoWidth, quint32 videoHeight)
{
m_videoWidth = videoWidth;
m_videoHeight = videoHeight;
}
void GLYuvWidget::updateTexture(quint8* bufferY, quint8* bufferU, quint8* bufferV, quint32 linesizeY, quint32 linesizeU, quint32 linesizeV)
{
qDebug() << "updateTexture";
m_bufferY = bufferY;
@ -34,6 +40,7 @@ void GLYuvWidget::slotShowYuv(quint8* bufferY, quint8* bufferU, quint8* bufferV,
//update(); // not real-time enough
//repaint(); // same as above
paintEvent(nullptr); // the most immediate approach
update();
}
void GLYuvWidget::initializeGL()
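Assuming GLYuvWidget derives from QOpenGLWidget (it overrides initializeGL()), calling paintEvent(nullptr) by hand bypasses Qt's paint machinery: the GL context is not guaranteed to be current and the backing framebuffer may not be bound. The update() added here schedules a coalesced repaint instead, after which Qt makes the context current and calls paintGL(). One caveat against the Frames contract above: with a deferred repaint, the stored plane pointers must still be valid when paintGL() finally runs, even though the mutex has been released by then. A sketch of the conventional update path (the m_linesize* members are assumed to mirror m_bufferY etc.):

    void GLYuvWidget::updateTexture(quint8 *bufferY, quint8 *bufferU, quint8 *bufferV,
                                    quint32 linesizeY, quint32 linesizeU, quint32 linesizeV)
    {
        m_bufferY = bufferY;        // remember the Y/U/V plane pointers
        m_bufferU = bufferU;
        m_bufferV = bufferV;
        m_linesizeY = linesizeY;
        m_linesizeU = linesizeU;
        m_linesizeV = linesizeV;
        update();                   // schedule a repaint; Qt calls paintGL()
                                    // later with the context made current
    }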

src/glyuvwidget.h

@ -16,9 +16,10 @@ public:
GLYuvWidget(QWidget *parent = 0);
~GLYuvWidget();
public slots:
public:
void setVideoSize(quint32 videoWidth, quint32 videoHeight);
// display one YUV frame
void slotShowYuv(quint8* bufferY, quint8* bufferU, quint8* bufferV, quint32 linesizeY, quint32 linesizeU, quint32 linesizeV);
void updateTexture(quint8* bufferY, quint8* bufferU, quint8* bufferV, quint32 linesizeY, quint32 linesizeU, quint32 linesizeV);
protected:
void initializeGL() Q_DECL_OVERRIDE;