diff --git a/src/QtScrcpy.pro b/src/QtScrcpy.pro
index 1e052cc..e812aa4 100644
--- a/src/QtScrcpy.pro
+++ b/src/QtScrcpy.pro
@@ -30,14 +30,16 @@ SOURCES += \
     adbprocess.cpp \
     decoder.cpp \
     server.cpp \
-    convert.cpp
+    convert.cpp \
+    glyuvwidget.cpp
 
 HEADERS += \
     dialog.h \
     adbprocess.h \
     decoder.h \
     server.h \
-    convert.h
+    convert.h \
+    glyuvwidget.h
 
 FORMS += \
     dialog.ui
diff --git a/src/decoder.cpp b/src/decoder.cpp
index 55053d3..09f8566 100644
--- a/src/decoder.cpp
+++ b/src/decoder.cpp
@@ -80,6 +80,31 @@ void Decoder::stopDecode()
     wait();
 }
 
+// copy the Y, U and V planes of a YUV420P frame into one contiguous buffer
+static void copyYuv420PFrameToBuffer(const AVFrame *pFrame, QByteArray &buffer)
+{
+    int frameWidth = pFrame->width;
+    int frameHeight = pFrame->height;
+    int yRowBytes = pFrame->linesize[0];
+    int uRowBytes = pFrame->linesize[1];
+    int vRowBytes = pFrame->linesize[2];
+
+    // Y plane: frameWidth bytes per row (linesize may contain row padding)
+    for (int i = 0; i < frameHeight; i++) {
+        buffer.append((char *)(pFrame->data[0] + i * yRowBytes), frameWidth);
+    }
+
+    // U plane: half resolution in both dimensions
+    for (int i = 0; i < frameHeight / 2; i++) {
+        buffer.append((char *)(pFrame->data[1] + i * uRowBytes), frameWidth / 2);
+    }
+
+    // V plane: half resolution in both dimensions
+    for (int i = 0; i < frameHeight / 2; i++) {
+        buffer.append((char *)(pFrame->data[2] + i * vRowBytes), frameWidth / 2);
+    }
+}
+
 void Decoder::run()
 {
     unsigned char *decoderBuffer = Q_NULLPTR;
@@ -162,8 +187,12 @@
             goto runQuit;
         }
 
         ret = avcodec_receive_frame(codecCtx, yuvDecoderFrame);
-        if (!ret) {
+        if (!ret) { // a frame was received
+            QByteArray buffer;
+            copyYuv420PFrameToBuffer(yuvDecoderFrame, buffer);
+            emit getOneFrame(buffer, codecCtx->width, codecCtx->height);
+            /*
             if (!m_conver.isInit()) {
                 qDebug() << "decoder frame format" << yuvDecoderFrame->format;
                 m_conver.setSrcFrameInfo(codecCtx->width, codecCtx->height, AV_PIX_FMT_YUV420P);
@@ -174,10 +203,16 @@
                 outBuffer=new quint8[avpicture_get_size(AV_PIX_FMT_RGB32, codecCtx->width, codecCtx->height)];
                 avpicture_fill((AVPicture *)rgbDecoderFrame, outBuffer, AV_PIX_FMT_RGB32, codecCtx->width, codecCtx->height);
             }
-            m_conver.convert(yuvDecoderFrame, rgbDecoderFrame);
-            QImage tmpImg((uchar *)outBuffer, codecCtx->width, codecCtx->height, QImage::Format_RGB32);
-            QImage image = tmpImg.copy(); //把图像复制一份 传递给界面显示
-            emit getOneImage(image);
+            // tc games: 3% cpu ???
+            // scrcpy: 7% cpu
+            // 5% cpu
+            //m_conver.convert(yuvDecoderFrame, rgbDecoderFrame);
+            // 8% cpu
+            //QImage tmpImg((uchar *)outBuffer, codecCtx->width, codecCtx->height, QImage::Format_RGB32);
+            //QImage image = tmpImg.copy(); // copy the image and hand it to the UI for display
+            // 16% cpu
+            //emit getOneImage(image);
+            */
         } else if (ret != AVERROR(EAGAIN)) {
             qCritical("Could not receive video frame: %d", ret);
             av_packet_unref(&packet);
diff --git a/src/decoder.h b/src/decoder.h
index 8aff8c6..aa98336 100644
--- a/src/decoder.h
+++ b/src/decoder.h
@@ -30,7 +30,7 @@ public:
     void stopDecode();
 
 signals:
-    void getOneImage(QImage img);
+    void getOneFrame(QByteArray buffer, quint32 width, quint32 height);
 
 protected:
     void run();
diff --git a/src/dialog.cpp b/src/dialog.cpp
index 12ebfa9..ed7ef5e 100644
--- a/src/dialog.cpp
+++ b/src/dialog.cpp
@@ -1,6 +1,7 @@
 #include "dialog.h"
 #include "ui_dialog.h"
 #include "adbprocess.h"
+#include "glyuvwidget.h"
 
 
 Dialog::Dialog(QWidget *parent) :
@@ -8,6 +9,10 @@ Dialog::Dialog(QWidget *parent) :
     ui(new Ui::Dialog)
 {
     ui->setupUi(this);
+
+    GLYuvWidget* w = new GLYuvWidget(this);
+    w->resize(ui->imgLabel->size());
+    w->move(230, 20);
 
     Decoder::init();
     server = new Server();
@@ -24,13 +29,24 @@
         }
     });
 
+    // must be Qt::QueuedConnection: the decoder thread must not touch the UI directly
+    QObject::connect(&decoder, &Decoder::getOneFrame, w, &GLYuvWidget::slotShowYuv,
+                     Qt::QueuedConnection);
+
+    /*
     // must be Qt::QueuedConnection, ui update must be main thread
     connect(&decoder, &Decoder::getOneImage, this, [this](QImage img){
+        qDebug() << "getOneImage";
+
+        return;
+        // 18% cpu
         // 将图像按比例缩放成和窗口一样大小
         QImage img2 = img.scaled(ui->imgLabel->size(), Qt::IgnoreAspectRatio);
         ui->imgLabel->setPixmap(QPixmap::fromImage(img2));
-        qDebug() << "getOneImage";
+        // 24% cpu
+
     }, Qt::QueuedConnection);
+    */
 }
 
 Dialog::~Dialog()
diff --git a/src/glyuvwidget.cpp b/src/glyuvwidget.cpp
new file mode 100644
index 0000000..ef23a5c
--- /dev/null
+++ b/src/glyuvwidget.cpp
@@ -0,0 +1,187 @@
+#include "glyuvwidget.h"
+#include <QOpenGLShaderProgram>
+#include <QOpenGLTexture>
+#include <QOpenGLBuffer>
+#include <QFile>
+#define VERTEXIN 0
+#define TEXTUREIN 1
+
+GLYuvWidget::GLYuvWidget(QWidget *parent):
+    QOpenGLWidget(parent)
+{
+}
+
+GLYuvWidget::~GLYuvWidget()
+{
+    makeCurrent();
+    m_vbo.destroy();
+    m_textureY->destroy();
+    m_textureU->destroy();
+    m_textureV->destroy();
+    doneCurrent();
+}
+
+void GLYuvWidget::slotShowYuv(QByteArray buffer, uint width, uint height)
+{
+    m_videoWidth = width;
+    m_videoHeight = height;
+
+    // debug only: append every raw frame to android.yuv
+    QFile file("android.yuv");
+    bool ok = file.open(QIODevice::WriteOnly | QIODevice::Append);
+    if (ok) {
+        // write the raw bytes directly; QDataStream's operator<<
+        // would prepend a length header to every frame
+        file.write(buffer);
+        file.close();
+    }
+
+    // keep an implicitly shared copy so the data stays valid until the next frame (setRawData on the slot's argument would dangle)
+    m_buffer = buffer;
+    m_yuvPtr = m_buffer.constData();
+
+    update();
+}
+
+void GLYuvWidget::initializeGL()
+{
+    initializeOpenGLFunctions();
+    glDisable(GL_DEPTH_TEST); // depth testing is not needed for 2D rendering
+
+    static const GLfloat vertices[]{
+        // vertex coordinates, range (-1.0, 1.0)
+        -1.0f, -1.0f,
+        -1.0f, +1.0f,
+        +1.0f, +1.0f,
+        +1.0f, -1.0f,
+        // texture coordinates, range (0.0, 1.0)
+        0.0f, 1.0f,
+        0.0f, 0.0f,
+        1.0f, 0.0f,
+        1.0f, 1.0f,
+    };
+
+    // initialize the vertex buffer object
+    m_vbo.create();
+    m_vbo.bind();
+    m_vbo.allocate(vertices, sizeof(vertices));
+
+    // vertex shader
+    QOpenGLShader *vShader = new QOpenGLShader(QOpenGLShader::Vertex, this);
+    // vertex shader source
+    const char *vertexSrc =
+        "attribute vec4 vertexIn;    \
+         attribute vec2 textureIn;   \
+         varying vec2 textureOut;    \
+         void main(void)             \
+         {                           \
+             gl_Position = vertexIn; \
+             textureOut = textureIn; \
+         }";
+    vShader->compileSourceCode(vertexSrc);
+
+    // fragment shader
+    QOpenGLShader *fShader = new QOpenGLShader(QOpenGLShader::Fragment, this);
+    // fragment shader source: sample the three planes and convert YUV to RGB
+    const char *fragmentSrc =
+        "varying vec2 textureOut; \
+         uniform sampler2D tex_y; \
+         uniform sampler2D tex_u; \
+         uniform sampler2D tex_v; \
+         void main(void) \
+         { \
+             vec3 yuv; \
+             vec3 rgb; \
+             yuv.x = texture2D(tex_y, textureOut).r; \
+             yuv.y = texture2D(tex_u, textureOut).r - 0.5; \
+             yuv.z = texture2D(tex_v, textureOut).r - 0.5; \
+             rgb = mat3( 1,       1,        1,       \
+                         0,       -0.39465, 2.03211, \
+                         1.13983, -0.58060, 0) * yuv; \
+             gl_FragColor = vec4(rgb, 1); \
+         }";
+    fShader->compileSourceCode(fragmentSrc);
+
+    // shader program
+    m_shaderProgram = new QOpenGLShaderProgram(this);
+    // attach the shaders
+    m_shaderProgram->addShader(vShader);
+    m_shaderProgram->addShader(fShader);
+    m_shaderProgram->bindAttributeLocation("vertexIn", VERTEXIN);
+    m_shaderProgram->bindAttributeLocation("textureIn", TEXTUREIN);
+    // link the shader program
+    m_shaderProgram->link();
+    m_shaderProgram->bind();
+    m_shaderProgram->enableAttributeArray(VERTEXIN);
+    m_shaderProgram->enableAttributeArray(TEXTUREIN);
+    m_shaderProgram->setAttributeBuffer(VERTEXIN, GL_FLOAT, 0, 2, 2 * sizeof(GLfloat));
+    m_shaderProgram->setAttributeBuffer(TEXTUREIN, GL_FLOAT, 8 * sizeof(GLfloat), 2, 2 * sizeof(GLfloat));
+
+    // query the locations of the yuv texture uniforms in the shader program
+    m_textureUniformY = m_shaderProgram->uniformLocation("tex_y");
+    m_textureUniformU = m_shaderProgram->uniformLocation("tex_u");
+    m_textureUniformV = m_shaderProgram->uniformLocation("tex_v");
+
+    m_textureY = new QOpenGLTexture(QOpenGLTexture::Target2D);
+    m_textureU = new QOpenGLTexture(QOpenGLTexture::Target2D);
+    m_textureV = new QOpenGLTexture(QOpenGLTexture::Target2D);
+    m_textureY->create();
+    m_textureU->create();
+    m_textureV->create();
+    m_idY = m_textureY->textureId();
+    m_idU = m_textureU->textureId();
+    m_idV = m_textureV->textureId();
+    glClearColor(0.0, 0.0, 0.0, 0.0);
+}
+
+void GLYuvWidget::inittexture()
+{
+    //QMatrix4x4 m;
+    //m.perspective(60.0f, 4.0f/3.0f, 0.1f, 100.0f); // with a perspective matrix the geometry scales with distance; the viewpoint sits at the center of the screen plane, so the geometry has to be pushed some distance into the screen
+
+    // the near clipping plane is a rectangle whose bottom-left corner is (left,bottom,-near) and top-right corner is (right,top,-near), hence the negative sign, so z is at most 10;
+    // the far clipping plane is a rectangle whose bottom-left corner is (left,bottom,-far) and top-right corner is (right,top,-far), hence the positive sign, so z is at least -10;
+    // the coordinate origin stays at the center of the screen plane, only the extents in each direction are bounded.
+    //m.ortho(-2,+2,-2,+2,-10,10);
+
+    glActiveTexture(GL_TEXTURE0); // activate texture unit GL_TEXTURE0
+    glBindTexture(GL_TEXTURE_2D, m_idY); // bind the Y-plane texture object to the active unit
+    // upload the Y-plane data from memory into the texture
+    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, m_videoWidth, m_videoHeight, 0, GL_RED, GL_UNSIGNED_BYTE, m_yuvPtr);
+    // https://blog.csdn.net/xipiaoyouzi/article/details/53584798 explains these texture parameters
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+
+    glActiveTexture(GL_TEXTURE1); // activate texture unit GL_TEXTURE1
+    glBindTexture(GL_TEXTURE_2D, m_idU); // the target must be GL_TEXTURE_2D, not the unit enum GL_TEXTURE1
+    // upload the U-plane data (half resolution), which follows the Y plane in the buffer
+    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, m_videoWidth >> 1, m_videoHeight >> 1, 0, GL_RED, GL_UNSIGNED_BYTE, m_yuvPtr + m_videoWidth * m_videoHeight);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+
+    glActiveTexture(GL_TEXTURE2); // activate texture unit GL_TEXTURE2
+    glBindTexture(GL_TEXTURE_2D, m_idV);
+    // upload the V-plane data (half resolution), located at offset width * height * 5 / 4 in the buffer
+    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, m_videoWidth >> 1, m_videoHeight >> 1, 0, GL_RED, GL_UNSIGNED_BYTE, m_yuvPtr + m_videoWidth * m_videoHeight * 5 / 4);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+}
+
+void GLYuvWidget::paintGL()
+{
+    inittexture();
+    // point the y sampler at texture unit 0
+    glUniform1i(m_textureUniformY, 0);
+    // point the u sampler at texture unit 1
+    glUniform1i(m_textureUniformU, 1);
+    // point the v sampler at texture unit 2
+    glUniform1i(m_textureUniformV, 2);
+    // draw the quad from the vertex array as a triangle fan
+    glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
+}
diff --git a/src/glyuvwidget.h b/src/glyuvwidget.h
new file mode 100644
index 0000000..c40b2e5
--- /dev/null
+++ b/src/glyuvwidget.h
@@ -0,0 +1,55 @@
+#ifndef GLYUVWIDGET_H
+#define GLYUVWIDGET_H
+
+#include <QOpenGLWidget>
+#include <QOpenGLFunctions>
+#include <QOpenGLBuffer>
+#include <QByteArray>
+
+QT_FORWARD_DECLARE_CLASS(QOpenGLShaderProgram)
+QT_FORWARD_DECLARE_CLASS(QOpenGLTexture)
+
+class GLYuvWidget : public QOpenGLWidget, protected QOpenGLFunctions
+{
+    Q_OBJECT
+public:
+    GLYuvWidget(QWidget *parent = 0);
+    ~GLYuvWidget();
+
+public slots:
+    // display one frame of packed YUV420P data
+    void slotShowYuv(QByteArray buffer, uint width, uint height);
+
+protected:
+    void initializeGL() Q_DECL_OVERRIDE;
+    void paintGL() Q_DECL_OVERRIDE;
+    void inittexture();
+
+private:
+    QOpenGLShaderProgram *m_shaderProgram = Q_NULLPTR; // shader program
+    QOpenGLBuffer m_vbo; // vertex buffer object
+
+    // locations of the yuv texture uniforms in the shader program
+    GLuint m_textureUniformY = 0;
+    GLuint m_textureUniformU = 0;
+    GLuint m_textureUniformV = 0;
+
+    // yuv texture objects
+    QOpenGLTexture *m_textureY = Q_NULLPTR;
+    QOpenGLTexture *m_textureU = Q_NULLPTR;
+    QOpenGLTexture *m_textureV = Q_NULLPTR;
+
+    // texture object ids
+    GLuint m_idY = 0;
+    GLuint m_idU = 0;
+    GLuint m_idV = 0;
+
+    // video width and height
+    quint32 m_videoWidth = 0;
+    quint32 m_videoHeight = 0;
+
+    const char *m_yuvPtr = Q_NULLPTR;
+    QByteArray m_buffer;
+};
+
+#endif // GLYUVWIDGET_H
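
For reference, the QByteArray emitted by Decoder::getOneFrame is a tightly packed
YUV420P frame: the full-resolution Y plane first, then the quarter-resolution U and
V planes, for width * height * 3 / 2 bytes in total. The sketch below shows the
plane arithmetic that GLYuvWidget::inittexture() relies on for its glTexImage2D
uploads (Y at offset 0, U at width * height, V at width * height * 5 / 4).
Yuv420PView and mapYuv420P are illustrative names, not part of this patch.

    #include <QByteArray>
    #include <QtGlobal>

    // View onto a packed YUV420P buffer; the pointers stay valid only as long as
    // the QByteArray they were taken from is alive and unmodified.
    struct Yuv420PView
    {
        const char *y; // width x height luma samples
        const char *u; // (width / 2) x (height / 2) chroma samples
        const char *v; // (width / 2) x (height / 2) chroma samples
    };

    static Yuv420PView mapYuv420P(const QByteArray &buffer, quint32 width, quint32 height)
    {
        // YUV420P is 12 bits per pixel, so the buffer holds width * height * 3 / 2 bytes
        Q_ASSERT(buffer.size() >= int(width * height * 3 / 2));

        const char *base = buffer.constData();
        Yuv420PView view;
        view.y = base;                          // Y plane
        view.u = base + width * height;         // U plane directly follows Y
        view.v = base + width * height * 5 / 4; // V plane directly follows U
        return view;
    }

Because the rows are appended tightly packed with no per-row padding, a frame width
that is not a multiple of 8 would leave the chroma rows (width / 2 bytes each) out of
step with OpenGL's default 4-byte unpack alignment, and glPixelStorei(GL_UNPACK_ALIGNMENT, 1)
would then be needed before the glTexImage2D calls; this is a general OpenGL caveat,
not something the patch currently handles.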