diff --git a/src/convert.cpp b/src/convert.cpp
index f63d369..ef5ede5 100644
--- a/src/convert.cpp
+++ b/src/convert.cpp
@@ -72,8 +72,7 @@ bool Convert::convert(AVFrame* srcFrame, AVFrame* dstFrame)
     if(!m_convertCtx || !srcFrame || !dstFrame) {
         return false;
     }
-    qint32 ret = sws_scale(m_convertCtx, (const uint8_t* const*)srcFrame->data, srcFrame->linesize, 0, m_srcHeight, dstFrame->data, dstFrame->linesize);
-    qDebug() << "Convert::convert sws_scale return " << ret;
+    qint32 ret = sws_scale(m_convertCtx, (const uint8_t* const*)srcFrame->data, srcFrame->linesize, 0, m_srcHeight, dstFrame->data, dstFrame->linesize);
     if (0 == ret) {
         return false;
     }
diff --git a/src/convert.h b/src/convert.h
index 552dd08..36a4044 100644
--- a/src/convert.h
+++ b/src/convert.h
@@ -4,6 +4,7 @@
 
 extern "C"
 {
+#include "libavcodec/avcodec.h"
 #include "libswscale/swscale.h"
 #include "libavutil/frame.h"
 }
diff --git a/src/decoder.cpp b/src/decoder.cpp
index 5218781..55053d3 100644
--- a/src/decoder.cpp
+++ b/src/decoder.cpp
@@ -93,6 +93,7 @@ void Decoder::run()
     AVFrame* rgbDecoderFrame = Q_NULLPTR;
     yuvDecoderFrame = av_frame_alloc();
     rgbDecoderFrame = av_frame_alloc();
+    quint8 *outBuffer = Q_NULLPTR;
 
     bool isFormatCtxOpen = false;
     bool isCodecCtxOpen = false;
@@ -164,12 +165,19 @@ void Decoder::run()
             if (!ret) {
                 // a frame was received
                 if (!m_conver.isInit()) {
+                    qDebug() << "decoder frame format" << yuvDecoderFrame->format;
                     m_conver.setSrcFrameInfo(codecCtx->width, codecCtx->height, AV_PIX_FMT_YUV420P);
-                    m_conver.setDstFrameInfo(codecCtx->width, codecCtx->height, AV_PIX_FMT_RGB24);
+                    m_conver.setDstFrameInfo(codecCtx->width, codecCtx->height, AV_PIX_FMT_RGB32);
                     m_conver.init();
                 }
+                if (!outBuffer) {
+                    outBuffer=new quint8[avpicture_get_size(AV_PIX_FMT_RGB32, codecCtx->width, codecCtx->height)];
+                    avpicture_fill((AVPicture *)rgbDecoderFrame, outBuffer, AV_PIX_FMT_RGB32, codecCtx->width, codecCtx->height);
+                }
                 m_conver.convert(yuvDecoderFrame, rgbDecoderFrame);
-                //push_frame(decoder);
+                QImage tmpImg((uchar *)outBuffer, codecCtx->width, codecCtx->height, QImage::Format_RGB32);
+                QImage image = tmpImg.copy(); //把图像复制一份 传递给界面显示
+                emit getOneImage(image);
             } else if (ret != AVERROR(EAGAIN)) {
                 qCritical("Could not receive video frame: %d", ret);
                 av_packet_unref(&packet);
@@ -215,6 +223,9 @@ runQuit:
         avcodec_free_context(&codecCtx);
     }
 
+    if (outBuffer) {
+        delete[] outBuffer;
+    }
     if (yuvDecoderFrame) {
         av_free(yuvDecoderFrame);
     }
diff --git a/src/decoder.h b/src/decoder.h
index 05a9ede..8aff8c6 100644
--- a/src/decoder.h
+++ b/src/decoder.h
@@ -4,6 +4,7 @@
 #include <QThread>
 #include <QMutex>
 #include <QWaitCondition>
+#include <QImage>
 #include "convert.h"
 
 extern "C"
@@ -28,6 +29,8 @@ public:
     bool startDecode();
     void stopDecode();
 
+signals:
+    void getOneImage(QImage img);
 protected:
     void run();
 
diff --git a/src/dialog.cpp b/src/dialog.cpp
index 2ce2cf3..12ebfa9 100644
--- a/src/dialog.cpp
+++ b/src/dialog.cpp
@@ -23,6 +23,14 @@ Dialog::Dialog(QWidget *parent) :
             decoder.startDecode();
         }
     });
+
+    // must be Qt::QueuedConnection, ui update must be main thread
+    connect(&decoder, &Decoder::getOneImage, this, [this](QImage img){
+        // 将图像按比例缩放成和窗口一样大小
+        QImage img2 = img.scaled(ui->imgLabel->size(), Qt::IgnoreAspectRatio);
+        ui->imgLabel->setPixmap(QPixmap::fromImage(img2));
+        qDebug() << "getOneImage";
+    }, Qt::QueuedConnection);
 }
 
 Dialog::~Dialog()
diff --git a/src/dialog.ui b/src/dialog.ui
index 8fda0b6..2b74cd0 100644
--- a/src/dialog.ui
+++ b/src/dialog.ui
@@ -6,8 +6,8 @@
    <rect>
     <x>0</x>
     <y>0</y>
-    <width>400</width>
-    <height>300</height>
+    <width>716</width>
+    <height>757</height>
    </rect>
   </property>
   <property name="windowTitle">
@@ -52,6 +52,22 @@
     <string>stopServer</string>
    </property>
   </widget>
+  <widget class="QLabel" name="imgLabel">
+   <property name="geometry">
+    <rect>
+     <x>230</x>
+     <y>20</y>
+     <width>451</width>
+     <height>701</height>
+    </rect>
+   </property>
+   <property name="styleSheet">
+    <string notr="true">background-color: rgb(0, 0, 0);</string>
+   </property>
+   <property name="text">
+    <string/>
+   </property>
+  </widget>
  </widget>
  <resources/>
  <connections/>