Merge pull request #600 from barry-ran/dev

feat: adjust ui
Barry 2022-04-09 17:35:25 +08:00 committed by GitHub
commit bb43261872
30 changed files with 1364 additions and 1129 deletions


@ -7,7 +7,9 @@
#include "receiver.h"
#include "videosocket.h"
Controller::Controller(QString gameScript, QObject *parent) : QObject(parent)
Controller::Controller(std::function<qint64(const QByteArray&)> sendData, QString gameScript, QObject *parent)
: QObject(parent)
, m_sendData(sendData)
{
m_receiver = new Receiver(this);
Q_ASSERT(m_receiver);
@ -17,15 +19,6 @@ Controller::Controller(QString gameScript, QObject *parent) : QObject(parent)
Controller::~Controller() {}
void Controller::setControlSocket(QTcpSocket *controlSocket)
{
if (m_controlSocket || !controlSocket) {
return;
}
m_controlSocket = controlSocket;
m_receiver->setControlSocket(controlSocket);
}
void Controller::postControlMsg(ControlMsg *controlMsg)
{
if (controlMsg) {
@ -33,6 +26,15 @@ void Controller::postControlMsg(ControlMsg *controlMsg)
}
}
void Controller::recvDeviceMsg(DeviceMsg *deviceMsg)
{
if (!m_receiver) {
return;
}
m_receiver->recvDeviceMsg(deviceMsg);
}
void Controller::test(QRect rc)
{
ControlMsg *controlMsg = new ControlMsg(ControlMsg::CMT_INJECT_TOUCH);
@ -236,8 +238,8 @@ bool Controller::sendControl(const QByteArray &buffer)
return false;
}
qint32 len = 0;
if (m_controlSocket) {
len = static_cast<qint32>(m_controlSocket->write(buffer.data(), buffer.length()));
if (m_sendData) {
len = static_cast<qint32>(m_sendData(buffer));
}
return len == buffer.length() ? true : false;
}


@ -1,3 +1,4 @@
#ifndef CONTROLLER_H
#define CONTROLLER_H
@ -9,15 +10,16 @@
class QTcpSocket;
class Receiver;
class InputConvertBase;
class DeviceMsg;
class Controller : public QObject
{
Q_OBJECT
public:
Controller(QString gameScript = "", QObject *parent = Q_NULLPTR);
Controller(std::function<qint64(const QByteArray&)> sendData, QString gameScript = "", QObject *parent = Q_NULLPTR);
virtual ~Controller();
void setControlSocket(QTcpSocket *controlSocket);
void postControlMsg(ControlMsg *controlMsg);
void recvDeviceMsg(DeviceMsg *deviceMsg);
void test(QRect rc);
void updateScript(QString gameScript = "");
@ -62,9 +64,9 @@ private:
void postKeyCodeClick(AndroidKeycode keycode);
private:
QPointer<QTcpSocket> m_controlSocket;
QPointer<Receiver> m_receiver;
QPointer<InputConvertBase> m_inputConvert;
std::function<qint64(const QByteArray&)> m_sendData = Q_NULLPTR;
};
#endif // CONTROLLER_H
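
For reference, a minimal sketch of how the new callback-based constructor above can be wired by an owner object. The makeController helper and the way the socket is obtained are assumptions for illustration only; the actual wiring in this commit is the lambda Device installs in device.cpp further down.

#include <QTcpSocket>
#include "controller.h"

// Illustrative helper (not part of this commit): forward Controller's serialized
// control messages to a socket owned by the caller.
Controller *makeController(QTcpSocket *controlSocket, QObject *parent)
{
    return new Controller(
        [controlSocket](const QByteArray &buffer) -> qint64 {
            if (!controlSocket) {
                return 0;
            }
            return controlSocket->write(buffer.data(), buffer.length());
        },
        "", parent);
}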


@ -1,6 +1,5 @@
#include <QApplication>
#include <QClipboard>
#include <QTcpSocket>
#include "devicemsg.h"
#include "receiver.h"
@ -9,34 +8,7 @@ Receiver::Receiver(QObject *parent) : QObject(parent) {}
Receiver::~Receiver() {}
void Receiver::setControlSocket(QTcpSocket *controlSocket)
{
if (m_controlSocket || !controlSocket) {
return;
}
m_controlSocket = controlSocket;
connect(controlSocket, &QTcpSocket::readyRead, this, &Receiver::onReadyRead);
}
void Receiver::onReadyRead()
{
if (!m_controlSocket) {
return;
}
while (m_controlSocket->bytesAvailable()) {
QByteArray byteArray = m_controlSocket->peek(m_controlSocket->bytesAvailable());
DeviceMsg deviceMsg;
qint32 consume = deviceMsg.deserialize(byteArray);
if (0 >= consume) {
break;
}
m_controlSocket->read(consume);
processMsg(&deviceMsg);
}
}
void Receiver::processMsg(DeviceMsg *deviceMsg)
void Receiver::recvDeviceMsg(DeviceMsg *deviceMsg)
{
switch (deviceMsg->type()) {
case DeviceMsg::DMT_GET_CLIPBOARD: {


@ -3,7 +3,6 @@
#include <QPointer>
class QTcpSocket;
class DeviceMsg;
class Receiver : public QObject
{
@ -12,16 +11,7 @@ public:
explicit Receiver(QObject *parent = Q_NULLPTR);
virtual ~Receiver();
void setControlSocket(QTcpSocket *controlSocket);
public slots:
void onReadyRead();
protected:
void processMsg(DeviceMsg *deviceMsg);
private:
QPointer<QTcpSocket> m_controlSocket;
void recvDeviceMsg(DeviceMsg *deviceMsg);
};
#endif // RECEIVER_H


@ -4,12 +4,31 @@
#include "decoder.h"
#include "videobuffer.h"
Decoder::Decoder(VideoBuffer *vb, QObject *parent) : QObject(parent), m_vb(vb) {}
Decoder::~Decoder() {}
bool Decoder::open(const AVCodec *codec)
Decoder::Decoder(std::function<void(int, int, uint8_t*, uint8_t*, uint8_t*, int, int, int)> onFrame, QObject *parent)
: QObject(parent)
, m_vb(new VideoBuffer())
, m_onFrame(onFrame)
{
m_vb->init();
connect(this, &Decoder::newFrame, this, &Decoder::onNewFrame, Qt::QueuedConnection);
connect(m_vb, &VideoBuffer::updateFPS, this, &Decoder::updateFPS);
}
Decoder::~Decoder() {
m_vb->deInit();
delete m_vb;
}
bool Decoder::open()
{
// codec
AVCodec *codec = Q_NULLPTR;
codec = avcodec_find_decoder(AV_CODEC_ID_H264);
if (!codec) {
qCritical("H.264 decoder not found");
return false;
}
// codec context
m_codecCtx = avcodec_alloc_context3(codec);
if (!m_codecCtx) {
@ -26,6 +45,10 @@ bool Decoder::open(const AVCodec *codec)
void Decoder::close()
{
if (m_vb) {
m_vb->interrupt();
}
if (!m_codecCtx) {
return;
}
@ -98,11 +121,12 @@ bool Decoder::push(const AVPacket *packet)
return true;
}
void Decoder::interrupt()
void Decoder::peekFrame(std::function<void (int, int, uint8_t *)> onFrame)
{
if (m_vb) {
m_vb->interrupt();
if (!m_vb) {
return;
}
m_vb->peekRenderedFrame(onFrame);
}
void Decoder::pushFrame()
@ -116,5 +140,16 @@ void Decoder::pushFrame()
// the previous newFrame will consume this frame
return;
}
emit onNewFrame();
emit newFrame();
}
void Decoder::onNewFrame() {
if (!m_onFrame) {
return;
}
m_vb->lock();
const AVFrame *frame = m_vb->consumeRenderedFrame();
m_onFrame(frame->width, frame->height, frame->data[0], frame->data[1], frame->data[2], frame->linesize[0], frame->linesize[1], frame->linesize[2]);
m_vb->unLock();
}


@ -12,24 +12,31 @@ class Decoder : public QObject
{
Q_OBJECT
public:
Decoder(VideoBuffer *vb, QObject *parent = Q_NULLPTR);
Decoder(std::function<void(int width, int height, uint8_t* dataY, uint8_t* dataU, uint8_t* dataV, int linesizeY, int linesizeU, int linesizeV)> onFrame, QObject *parent = Q_NULLPTR);
virtual ~Decoder();
bool open(const AVCodec *codec);
bool open();
void close();
bool push(const AVPacket *packet);
void interrupt();
void peekFrame(std::function<void(int width, int height, uint8_t* dataRGB32)> onFrame);
signals:
void updateFPS(quint32 fps);
private slots:
void onNewFrame();
protected:
signals:
void newFrame();
private:
void pushFrame();
private:
VideoBuffer *m_vb = Q_NULLPTR;
AVCodecContext *m_codecCtx = Q_NULLPTR;
bool m_isCodecCtxOpen = false;
std::function<void(int, int, uint8_t*, uint8_t*, uint8_t*, int, int, int)> m_onFrame = Q_NULLPTR;
};
#endif // DECODER_H
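
A hedged sketch of the Decoder usage implied by this header: frames are delivered through the constructor callback and open() locates the H.264 codec itself. The makeDecoder helper and the qDebug logging are assumptions for illustration; in this commit the callback forwards the planes to VideoForm::updateRender (see device.cpp below).

#include <cstdint>
#include <QDebug>
#include "decoder.h"

// Illustrative helper (not part of this commit).
Decoder *makeDecoder(QObject *parent)
{
    auto decoder = new Decoder(
        [](int width, int height,
           uint8_t *dataY, uint8_t *dataU, uint8_t *dataV,
           int linesizeY, int linesizeU, int linesizeV) {
            // A real consumer would hand the YUV planes to a renderer here.
            qDebug() << "frame" << width << "x" << height
                     << "linesize" << linesizeY << linesizeU << linesizeV;
            Q_UNUSED(dataY);
            Q_UNUSED(dataU);
            Q_UNUSED(dataV);
        },
        parent);
    if (!decoder->open()) { // finds the H.264 decoder internally now
        qWarning("could not open decoder");
    }
    return decoder;
}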


@ -1,17 +1,20 @@
#include "videobuffer.h"
#include "avframeconvert.h"
extern "C"
{
#include "libavformat/avformat.h"
#include "libavutil/avutil.h"
#include "libavutil/imgutils.h"
}
VideoBuffer::VideoBuffer() {}
VideoBuffer::VideoBuffer(QObject *parent) : QObject(parent) {
connect(&m_fpsCounter, &FpsCounter::updateFPS, this, &VideoBuffer::updateFPS);
}
VideoBuffer::~VideoBuffer() {}
bool VideoBuffer::init(bool renderExpiredFrames)
bool VideoBuffer::init()
{
m_renderExpiredFrames = renderExpiredFrames;
m_decodingFrame = av_frame_alloc();
if (!m_decodingFrame) {
goto error;
@ -57,6 +60,11 @@ void VideoBuffer::unLock()
m_mutex.unlock();
}
void VideoBuffer::setRenderExpiredFrames(bool renderExpiredFrames)
{
m_renderExpiredFrames = renderExpiredFrames;
}
AVFrame *VideoBuffer::decodingFrame()
{
return m_decodingFrame;
@ -99,9 +107,51 @@ const AVFrame *VideoBuffer::consumeRenderedFrame()
return m_renderingframe;
}
const AVFrame *VideoBuffer::peekRenderedFrame()
void VideoBuffer::peekRenderedFrame(std::function<void(int width, int height, uint8_t* dataRGB32)> onFrame)
{
return m_renderingframe;
if (!onFrame) {
return;
}
lock();
auto frame = m_renderingframe;
int width = frame->width;
int height = frame->height;
// create buffer
uint8_t* rgbBuffer = new uint8_t[width * height * 4];
AVFrame *rgbFrame = av_frame_alloc();
if (!rgbFrame) {
delete [] rgbBuffer;
return;
}
// bind buffer to AVFrame
av_image_fill_arrays(rgbFrame->data, rgbFrame->linesize, rgbBuffer, AV_PIX_FMT_RGB32, width, height, 4);
// convert
AVFrameConvert convert;
convert.setSrcFrameInfo(width, height, AV_PIX_FMT_YUV420P);
convert.setDstFrameInfo(width, height, AV_PIX_FMT_RGB32);
bool ret = false;
ret = convert.init();
if (!ret) {
delete [] rgbBuffer;
av_free(rgbFrame);
return;
}
ret = convert.convert(frame, rgbFrame);
if (!ret) {
delete [] rgbBuffer;
av_free(rgbFrame);
return;
}
convert.deInit();
av_free(rgbFrame);
unLock();
onFrame(width, height, rgbBuffer);
delete [] rgbBuffer;
}
void VideoBuffer::interrupt()
@ -115,11 +165,6 @@ void VideoBuffer::interrupt()
}
}
FpsCounter *VideoBuffer::getFPSCounter()
{
return &m_fpsCounter;
}
void VideoBuffer::swap()
{
AVFrame *tmp = m_decodingFrame;


@ -3,22 +3,25 @@
#include <QMutex>
#include <QWaitCondition>
#include <QObject>
#include "fpscounter.h"
// forward declarations
typedef struct AVFrame AVFrame;
class VideoBuffer
class VideoBuffer : public QObject
{
Q_OBJECT
public:
VideoBuffer();
VideoBuffer(QObject *parent = Q_NULLPTR);
virtual ~VideoBuffer();
bool init(bool renderExpiredFrames = false);
bool init();
void deInit();
void lock();
void unLock();
void setRenderExpiredFrames(bool renderExpiredFrames);
AVFrame *decodingFrame();
// set the decoder frame as ready for rendering
@ -32,12 +35,13 @@ public:
// unlocking m_mutex
const AVFrame *consumeRenderedFrame();
const AVFrame *peekRenderedFrame();
void peekRenderedFrame(std::function<void(int width, int height, uint8_t* dataRGB32)> onFrame);
// wake up and avoid any blocking call
void interrupt();
FpsCounter *getFPSCounter();
signals:
void updateFPS(quint32 fps);
private:
void swap();
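
A hedged usage sketch for the reworked peekRenderedFrame above: the caller now receives a temporary RGB32 buffer instead of a raw AVFrame. The savePeekedFrame helper and the copy-before-save step are assumptions for illustration (the buffer is released when the callback returns); in this commit Device::onScreenshot goes through Decoder::peekFrame instead.

#include <QImage>
#include <QString>
#include "videobuffer.h"

// Illustrative helper (not part of this commit).
bool savePeekedFrame(VideoBuffer *vb, const QString &path)
{
    bool saved = false;
    vb->peekRenderedFrame([&saved, &path](int width, int height, uint8_t *dataRGB32) {
        QImage image(dataRGB32, width, height, QImage::Format_RGB32);
        // Deep-copy because dataRGB32 is freed once this callback returns.
        saved = image.copy().save(path, "PNG");
    });
    return saved;
}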


@ -2,9 +2,9 @@
#include <QMessageBox>
#include <QTimer>
#include "avframeconvert.h"
#include "config.h"
#include "controller.h"
#include "devicemsg.h"
#include "decoder.h"
#include "device.h"
#include "filehandler.h"
@ -12,12 +12,7 @@
#include "recorder.h"
#include "server.h"
#include "stream.h"
#include "videobuffer.h"
#include "videoform.h"
extern "C"
{
#include "libavutil/imgutils.h"
}
Device::Device(DeviceParams params, QObject *parent) : QObject(parent), m_params(params)
{
@ -28,23 +23,36 @@ Device::Device(DeviceParams params, QObject *parent) : QObject(parent), m_params
}
if (params.display) {
m_vb = new VideoBuffer();
m_vb->init(params.renderExpiredFrames);
m_decoder = new Decoder(m_vb, this);
m_decoder = new Decoder([this](int width, int height, uint8_t* dataY, uint8_t* dataU, uint8_t* dataV, int linesizeY, int linesizeU, int linesizeV) {
if (m_videoForm) {
m_videoForm->updateRender(width, height, dataY, dataU, dataV, linesizeY, linesizeU, linesizeV);
}
}, this);
m_fileHandler = new FileHandler(this);
m_controller = new Controller(params.gameScript, this);
m_controller = new Controller([this](const QByteArray& buffer) -> qint64 {
if (!m_server || !m_server->getControlSocket()) {
return 0;
}
return m_server->getControlSocket()->write(buffer.data(), buffer.length());
}, params.gameScript, this);
m_videoForm = new VideoForm(params.framelessWindow, Config::getInstance().getSkin());
m_videoForm->setDevice(this);
}
m_stream = new Stream(this);
if (m_decoder) {
m_stream->setDecoder(m_decoder);
}
m_stream = new Stream([this](quint8 *buf, qint32 bufSize) -> qint32 {
auto videoSocket = m_server->getVideoSocket();
if (!videoSocket) {
return 0;
}
return videoSocket->subThreadRecvData(buf, bufSize);
}, this);
m_server = new Server(this);
if (!m_params.recordFileName.trimmed().isEmpty()) {
m_recorder = new Recorder(m_params.recordFileName);
m_stream->setRecoder(m_recorder);
}
initSignals();
startServer();
@ -55,17 +63,23 @@ Device::~Device()
if (m_server) {
m_server->stop();
}
// server must stop before decoder, because decoder block main thread
if (m_stream) {
m_stream->stopDecode();
}
if (m_recorder) {
delete m_recorder;
// server must stop before decoder, because decoder block main thread
if (m_decoder) {
m_decoder->close();
}
if (m_vb) {
m_vb->deInit();
delete m_vb;
if (m_recorder) {
if (m_recorder->isRunning()) {
m_recorder->stopRecorder();
m_recorder->wait();
}
m_recorder->close();
delete m_recorder;
}
if (m_videoForm) {
m_videoForm->close();
@ -107,14 +121,14 @@ void Device::updateScript(QString script)
void Device::onScreenshot()
{
if (!m_vb) {
if (!m_decoder) {
return;
}
m_vb->lock();
// screenshot
saveFrame(m_vb->peekRenderedFrame());
m_vb->unLock();
m_decoder->peekFrame([this](int width, int height, uint8_t* dataRGB32) {
saveFrame(width, height, dataRGB32);
});
}
void Device::onShowTouch(bool show)
@ -242,16 +256,41 @@ void Device::initSignals()
// init recorder
if (m_recorder) {
m_recorder->setFrameSize(size);
if (!m_recorder->open()) {
qCritical("Could not open recorder");
}
if (!m_recorder->startRecorder()) {
qCritical("Could not start recorder");
}
}
// init decoder
if (m_decoder) {
m_decoder->open();
}
// init decoder
m_stream->setVideoSocket(m_server->getVideoSocket());
m_stream->startDecode();
// init controller
if (m_controller) {
m_controller->setControlSocket(m_server->getControlSocket());
}
// recv device msg
connect(m_server->getControlSocket(), &QTcpSocket::readyRead, this, [this](){
if (!m_controller) {
return;
}
auto controlSocket = m_server->getControlSocket();
while (controlSocket->bytesAvailable()) {
QByteArray byteArray = controlSocket->peek(controlSocket->bytesAvailable());
DeviceMsg deviceMsg;
qint32 consume = deviceMsg.deserialize(byteArray);
if (0 >= consume) {
break;
}
controlSocket->read(consume);
m_controller->recvDeviceMsg(&deviceMsg);
}
});
// Auto-blank the device screen only when the UI is displayed (m_params.display)
if (m_params.closeScreen && m_params.display && m_controller) {
@ -270,24 +309,24 @@ void Device::initSignals()
deleteLater();
qDebug() << "stream thread stop";
});
connect(m_stream, &Stream::getFrame, this, [this](AVPacket *packet) {
if (m_decoder && !m_decoder->push(packet)) {
qCritical("Could not send packet to decoder");
}
if (m_recorder && !m_recorder->push(packet)) {
qCritical("Could not send packet to recorder");
}
}, Qt::DirectConnection);
connect(m_stream, &Stream::getConfigFrame, this, [this](AVPacket *packet) {
if (m_recorder && !m_recorder->push(packet)) {
qCritical("Could not send config packet to recorder");
}
}, Qt::DirectConnection);
}
if (m_decoder && m_vb) {
// must be Qt::QueuedConnection, ui update must be main thread
connect(
m_decoder,
&Decoder::onNewFrame,
this,
[this]() {
m_vb->lock();
const AVFrame *frame = m_vb->consumeRenderedFrame();
if (m_videoForm) {
m_videoForm->updateRender(frame);
}
m_vb->unLock();
},
Qt::QueuedConnection);
connect(m_vb->getFPSCounter(), &::FpsCounter::updateFPS, m_videoForm, &VideoForm::updateFPS);
if (m_decoder) {
connect(m_decoder, &Decoder::updateFPS, m_videoForm, &VideoForm::updateFPS);
}
}
@ -352,37 +391,13 @@ bool Device::isCurrentCustomKeymap()
return m_controller->isCurrentCustomKeymap();
}
bool Device::saveFrame(const AVFrame *frame)
bool Device::saveFrame(int width, int height, uint8_t* dataRGB32)
{
if (!frame) {
if (!dataRGB32) {
return false;
}
// create buffer
QImage rgbImage(frame->width, frame->height, QImage::Format_RGB32);
AVFrame *rgbFrame = av_frame_alloc();
if (!rgbFrame) {
return false;
}
// bind buffer to AVFrame
av_image_fill_arrays(rgbFrame->data, rgbFrame->linesize, rgbImage.bits(), AV_PIX_FMT_RGB32, frame->width, frame->height, 4);
// convert
AVFrameConvert convert;
convert.setSrcFrameInfo(frame->width, frame->height, AV_PIX_FMT_YUV420P);
convert.setDstFrameInfo(frame->width, frame->height, AV_PIX_FMT_RGB32);
bool ret = false;
ret = convert.init();
if (!ret) {
return false;
}
ret = convert.convert(frame, rgbFrame);
if (!ret) {
return false;
}
convert.deInit();
av_free(rgbFrame);
QImage rgbImage(dataRGB32, width, height, QImage::Format_RGB32);
// save
QString absFilePath;
@ -396,7 +411,7 @@ bool Device::saveFrame(const AVFrame *frame)
fileName = Config::getInstance().getTitle() + fileName + ".png";
QDir dir(fileDir);
absFilePath = dir.absoluteFilePath(fileName);
ret = rgbImage.save(absFilePath, "PNG", 100);
int ret = rgbImage.save(absFilePath, "PNG", 100);
if (!ret) {
return false;
}


@ -108,7 +108,7 @@ public slots:
private:
void initSignals();
void startServer();
bool saveFrame(const AVFrame *frame);
bool saveFrame(int width, int height, uint8_t* dataRGB32);
private:
// server relevant
@ -117,7 +117,6 @@ private:
QPointer<Controller> m_controller;
QPointer<FileHandler> m_fileHandler;
QPointer<Stream> m_stream;
VideoBuffer *m_vb = Q_NULLPTR;
Recorder *m_recorder = Q_NULLPTR;
// ui


@ -51,8 +51,15 @@ void Recorder::setFormat(Recorder::RecorderFormat format)
m_format = format;
}
bool Recorder::open(const AVCodec *inputCodec)
bool Recorder::open()
{
// codec
AVCodec* inputCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
if (!inputCodec) {
qCritical("H.264 decoder not found");
return false;
}
QString formatName = recorderGetFormatName(m_format);
Q_ASSERT(!formatName.isEmpty());
const AVOutputFormat *format = findMuxer(formatName.toUtf8());


@ -28,7 +28,7 @@ public:
void setFrameSize(const QSize &declaredFrameSize);
void setFormat(Recorder::RecorderFormat format);
bool open(const AVCodec *inputCodec);
bool open();
void close();
bool write(AVPacket *packet);
bool startRecorder();
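
A hedged sketch of the recorder lifecycle implied by these signatures: open() now locates the H.264 codec internally, and (per the device.cpp changes above) the owner drives start/stop rather than Stream. The startRecording and finishRecording helpers are assumptions for illustration.

#include <QSize>
#include "recorder.h"

// Illustrative helpers (not part of this commit).
bool startRecording(Recorder *recorder, const QSize &frameSize)
{
    recorder->setFrameSize(frameSize);
    if (!recorder->open()) { // finds the H.264 decoder internally now
        return false;
    }
    return recorder->startRecorder();
}

void finishRecording(Recorder *recorder)
{
    if (recorder->isRunning()) {
        recorder->stopRecorder();
        recorder->wait();
    }
    recorder->close();
    delete recorder;
}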


@ -2,10 +2,7 @@
#include <QTime>
#include "compat.h"
#include "decoder.h"
#include "recorder.h"
#include "stream.h"
#include "videosocket.h"
#define BUFSIZE 0x10000
#define HEADER_SIZE 12
@ -13,7 +10,10 @@
typedef qint32 (*ReadPacketFunc)(void *, quint8 *, qint32);
Stream::Stream(QObject *parent) : QThread(parent) {}
Stream::Stream(std::function<qint32(quint8*, qint32)> recvData, QObject *parent)
: QThread(parent)
, m_recvData(recvData)
{}
Stream::~Stream() {}
@ -64,11 +64,6 @@ void Stream::deInit()
avformat_network_deinit(); // ignore failure
}
void Stream::setDecoder(Decoder *decoder)
{
m_decoder = decoder;
}
static quint32 bufferRead32be(quint8 *buf)
{
return static_cast<quint32>((buf[0] << 24) | (buf[1] << 16) | (buf[2] << 8) | buf[3]);
@ -81,31 +76,19 @@ static quint64 bufferRead64be(quint8 *buf)
return (static_cast<quint64>(msb) << 32) | lsb;
}
void Stream::setVideoSocket(VideoSocket *videoSocket)
{
m_videoSocket = videoSocket;
}
void Stream::setRecoder(Recorder *recorder)
{
m_recorder = recorder;
}
qint32 Stream::recvData(quint8 *buf, qint32 bufSize)
{
if (!buf) {
if (!buf || !m_recvData) {
return 0;
}
if (m_videoSocket) {
qint32 len = m_videoSocket->subThreadRecvData(buf, bufSize);
return len;
}
return 0;
qint32 len = m_recvData(buf, bufSize);
return len;
}
bool Stream::startDecode()
{
if (!m_videoSocket) {
if (!m_recvData) {
return false;
}
start();
@ -114,9 +97,6 @@ bool Stream::startDecode()
void Stream::stopDecode()
{
if (m_decoder) {
m_decoder->interrupt();
}
wait();
}
@ -140,23 +120,6 @@ void Stream::run()
goto runQuit;
}
if (m_decoder && !m_decoder->open(codec)) {
qCritical("Could not open m_decoder");
goto runQuit;
}
if (m_recorder) {
if (!m_recorder->open(codec)) {
qCritical("Could not open recorder");
goto runQuit;
}
if (!m_recorder->startRecorder()) {
qCritical("Could not start recorder");
goto runQuit;
}
}
m_parser = av_parser_init(AV_CODEC_ID_H264);
if (!m_parser) {
qCritical("Could not initialize parser");
@ -192,16 +155,6 @@ void Stream::run()
av_parser_close(m_parser);
runQuit:
if (m_recorder) {
if (m_recorder->isRunning()) {
m_recorder->stopRecorder();
m_recorder->wait();
}
m_recorder->close();
}
if (m_decoder) {
m_decoder->close();
}
if (m_codecCtx) {
avcodec_free_context(&m_codecCtx);
}
@ -309,10 +262,7 @@ bool Stream::pushPacket(AVPacket *packet)
bool Stream::processConfigPacket(AVPacket *packet)
{
if (m_recorder && !m_recorder->push(packet)) {
qCritical("Could not send config packet to recorder");
return false;
}
emit getConfigFrame(packet);
return true;
}
@ -344,18 +294,7 @@ bool Stream::parse(AVPacket *packet)
bool Stream::processFrame(AVPacket *packet)
{
if (m_decoder && !m_decoder->push(packet)) {
return false;
}
if (m_recorder) {
packet->dts = packet->pts;
if (!m_recorder->push(packet)) {
qCritical("Could not send packet to recorder");
return false;
}
}
packet->dts = packet->pts;
emit getFrame(packet);
return true;
}


@ -10,29 +10,24 @@ extern "C"
#include "libavformat/avformat.h"
}
class VideoSocket;
class Recorder;
class Decoder;
class Stream : public QThread
{
Q_OBJECT
public:
Stream(QObject *parent = Q_NULLPTR);
Stream(std::function<qint32(quint8*, qint32)> recvData, QObject *parent = Q_NULLPTR);
virtual ~Stream();
public:
static bool init();
static void deInit();
void setDecoder(Decoder *decoder);
void setRecoder(Recorder *recorder);
void setVideoSocket(VideoSocket *deviceSocket);
qint32 recvData(quint8 *buf, qint32 bufSize);
bool startDecode();
void stopDecode();
signals:
void onStreamStop();
void getFrame(AVPacket* packet);
void getConfigFrame(AVPacket* packet);
protected:
void run();
@ -41,12 +36,10 @@ protected:
bool processConfigPacket(AVPacket *packet);
bool parse(AVPacket *packet);
bool processFrame(AVPacket *packet);
qint32 recvData(quint8 *buf, qint32 bufSize);
private:
QPointer<VideoSocket> m_videoSocket;
// for recorder
Recorder *m_recorder = Q_NULLPTR;
Decoder *m_decoder = Q_NULLPTR;
std::function<qint32(quint8*, qint32)> m_recvData = nullptr;
AVCodecContext *m_codecCtx = Q_NULLPTR;
AVCodecParserContext *m_parser = Q_NULLPTR;
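
A hedged sketch of constructing the reworked Stream: raw H.264 bytes are pulled through the recvData callback, so Stream no longer needs setVideoSocket/setDecoder/setRecoder. makeStream is an assumed helper; the VideoSocket::subThreadRecvData call mirrors the lambda Device installs in device.cpp above.

#include "stream.h"
#include "videosocket.h"

// Illustrative helper (not part of this commit).
Stream *makeStream(VideoSocket *videoSocket, QObject *parent)
{
    return new Stream(
        [videoSocket](quint8 *buf, qint32 bufSize) -> qint32 {
            if (!videoSocket) {
                return 0;
            }
            // Blocking receive executed on the stream's worker thread.
            return videoSocket->subThreadRecvData(buf, bufSize);
        },
        parent);
}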


@ -41,6 +41,7 @@ void ToolForm::initStyle()
IconHelper::Instance()->SetIcon(ui->appSwitchBtn, QChar(0xf24d), 15);
IconHelper::Instance()->SetIcon(ui->volumeUpBtn, QChar(0xf028), 15);
IconHelper::Instance()->SetIcon(ui->volumeDownBtn, QChar(0xf027), 15);
IconHelper::Instance()->SetIcon(ui->openScreenBtn, QChar(0xf06e), 15);
IconHelper::Instance()->SetIcon(ui->closeScreenBtn, QChar(0xf070), 15);
IconHelper::Instance()->SetIcon(ui->powerBtn, QChar(0xf011), 15);
IconHelper::Instance()->SetIcon(ui->expandNotifyBtn, QChar(0xf103), 15);
@ -220,3 +221,11 @@ void ToolForm::onControlStateChange(Device *device, Device::GroupControlState ol
Q_UNUSED(newState)
updateGroupControl();
}
void ToolForm::on_openScreenBtn_clicked()
{
if (!m_device) {
return;
}
emit m_device->setScreenPowerMode(ControlMsg::SPM_NORMAL);
}


@ -48,6 +48,8 @@ private slots:
void onControlStateChange(Device *device, Device::GroupControlState oldState, Device::GroupControlState newState);
void on_openScreenBtn_clicked();
private:
void initStyle();
void updateGroupControl();


@ -70,6 +70,16 @@
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="openScreenBtn">
<property name="toolTip">
<string>open screen</string>
</property>
<property name="text">
<string/>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="closeScreenBtn">
<property name="toolTip">


@ -90,8 +90,8 @@ QRect VideoForm::getGrabCursorRect()
#if defined(Q_OS_WIN32)
rc = QRect(ui->keepRatioWidget->mapToGlobal(m_videoWidget->pos()), m_videoWidget->size());
// high dpi support
rc.setTopLeft(rc.topLeft() * m_videoWidget->devicePixelRatio());
rc.setBottomRight(rc.bottomRight() * m_videoWidget->devicePixelRatio());
rc.setTopLeft(rc.topLeft() * m_videoWidget->devicePixelRatioF());
rc.setBottomRight(rc.bottomRight() * m_videoWidget->devicePixelRatioF());
rc.setX(rc.x() + 10);
rc.setY(rc.y() + 10);
@ -109,8 +109,8 @@ QRect VideoForm::getGrabCursorRect()
#elif defined(Q_OS_LINUX)
rc = QRect(ui->keepRatioWidget->mapToGlobal(m_videoWidget->pos()), m_videoWidget->size());
// high dpi support -- taken from the WIN32 section and untested
rc.setTopLeft(rc.topLeft() * m_videoWidget->devicePixelRatio());
rc.setBottomRight(rc.bottomRight() * m_videoWidget->devicePixelRatio());
rc.setTopLeft(rc.topLeft() * m_videoWidget->devicePixelRatioF());
rc.setBottomRight(rc.bottomRight() * m_videoWidget->devicePixelRatioF());
rc.setX(rc.x() + 10);
rc.setY(rc.y() + 10);
@ -148,7 +148,7 @@ void VideoForm::showFPS(bool show)
m_fpsLabel->setVisible(show);
}
void VideoForm::updateRender(const AVFrame *frame)
void VideoForm::updateRender(int width, int height, uint8_t* dataY, uint8_t* dataU, uint8_t* dataV, int linesizeY, int linesizeU, int linesizeV)
{
if (m_videoWidget->isHidden()) {
if (m_loadingWidget) {
@ -157,9 +157,9 @@ void VideoForm::updateRender(const AVFrame *frame)
m_videoWidget->show();
}
updateShowSize(QSize(frame->width, frame->height));
m_videoWidget->setFrameSize(QSize(frame->width, frame->height));
m_videoWidget->updateTextures(frame->data[0], frame->data[1], frame->data[2], frame->linesize[0], frame->linesize[1], frame->linesize[2]);
updateShowSize(QSize(width, height));
m_videoWidget->setFrameSize(QSize(width, height));
m_videoWidget->updateTextures(dataY, dataU, dataV, linesizeY, linesizeU, linesizeV);
}
void VideoForm::showToolForm(bool show)


@ -9,7 +9,6 @@ namespace Ui
class videoForm;
}
struct AVFrame;
class ToolForm;
class Device;
class FileHandler;
@ -24,7 +23,7 @@ public:
void staysOnTop(bool top = true);
void updateShowSize(const QSize &newSize);
void updateRender(const AVFrame *frame);
void updateRender(int width, int height, uint8_t* dataY, uint8_t* dataU, uint8_t* dataV, int linesizeY, int linesizeU, int linesizeV);
void setDevice(Device *device);
QRect getGrabCursorRect();
const QSize &frameSize();


@ -135,16 +135,6 @@ void Dialog::initUI()
on_useSingleModeCheck_clicked();
on_updateDevice_clicked();
#ifdef Q_OS_OSX
// mac need more width
setFixedWidth(550);
#endif
#ifdef Q_OS_LINUX
// linux need more width
setFixedWidth(520);
#endif
}
void Dialog::updateBootConfig(bool toView)
@ -603,19 +593,13 @@ void Dialog::on_updateNameBtn_clicked()
void Dialog::on_useSingleModeCheck_clicked()
{
if (ui->useSingleModeCheck->isChecked()) {
ui->configGroupBox->hide();
ui->adbGroupBox->hide();
ui->wirelessGroupBox->hide();
ui->usbGroupBox->hide();
ui->rightWidget->hide();
} else {
ui->configGroupBox->show();
ui->adbGroupBox->show();
ui->wirelessGroupBox->show();
ui->usbGroupBox->show();
ui->rightWidget->show();
}
QTimer::singleShot(0, this, [this]() {
resize(width(), layout()->sizeHint().height());
resize(layout()->sizeHint().width(), height());
});
}

File diff suppressed because it is too large


@ -135,6 +135,7 @@ void installTranslator()
case QLocale::English:
default:
languagePath += "en_US.qm";
break;
}
translator.load(languagePath);

Binary file not shown.


@ -376,6 +376,10 @@ You can download it at the following address:</source>
<source>screen shot</source>
<translation>screen shot</translation>
</message>
<message>
<source>open screen</source>
<translation>open screen</translation>
</message>
</context>
<context>
<name>VideoForm</name>

Binary file not shown.


@ -362,6 +362,10 @@
<source>screen shot</source>
<translation></translation>
</message>
<message>
<source>open screen</source>
<translation></translation>
</message>
</context>
<context>
<name>VideoForm</name>


@ -171,7 +171,7 @@ Note: it is not necessary to keep your Android device connected via USB after you
- Screen recording
- Screenshot to png
- Wireless connection
- Supports up to 16 device connections (the number can be higher if your PC performance allows. You need to compile it by yourself)
- Supports multiple device connections
- Full-screen display
- Display on the top
- Install apk: drag and drop apk to the video window to install


@ -173,7 +173,7 @@ On the Mac OS platform you can directly use the executable I have compiled:
- Screen recording
- Screenshot to png
- Wireless connection
- Supports up to 16 device connections (the number can be higher if your PC performance allows; you need to compile it yourself)
- Supports multiple device connections
- Full-screen display
- Display on the top
- Install apk: drag and drop apk to the video window to install


@ -44,7 +44,7 @@ if "%1"=="MinSizeRel" (
if "%1"=="RelWithDebInfo" (
goto build_mode_ok
)
echo error: unkonow build mode -- %1
echo error: unknown build mode -- %1
goto return
:build_mode_ok
@ -111,4 +111,4 @@ set errno=0
:return
cd %old_cd%
exit /B %errno%
exit /B %errno%


@ -1,5 +1,3 @@
Last synced with scrcpy 08baaf4b575aef7ee56d14683be3f4e3a86d39aa
# TODO
## Low priority
- text conversion https://github.com/Genymobile/scrcpy/commit/c916af0984f72a60301d13fa8ef9a85112f54202?tdsourcetag=s_pctim_aiomsg
@ -24,6 +22,9 @@
## ffmpeg
[ffmpeg build options explained](https://www.cnblogs.com/wainiwann/p/4204230.html)
## fontawesome
[fontawesome online search](http://www.fontawesome.com.cn/cheatsheet/)
## adb
Below are the official Google download links for ADB and Fastboot: