diff --git a/QtScrcpy/QtScrcpy.pro b/QtScrcpy/QtScrcpy.pro
index 001fc79..e69303f 100644
--- a/QtScrcpy/QtScrcpy.pro
+++ b/QtScrcpy/QtScrcpy.pro
@@ -53,6 +53,7 @@ include ($$PWD/inputcontrol/inputcontrol.pri)
 include ($$PWD/uibase/uibase.pri)
 include ($$PWD/fontawesome/fontawesome.pri)
 include ($$PWD/filehandler/filehandler.pri)
+include ($$PWD/recorder/recorder.pri)
 
 # additional include paths
 INCLUDEPATH += \
@@ -66,6 +67,7 @@ INCLUDEPATH += \
     $$PWD/inputcontrol \
     $$PWD/uibase \
     $$PWD/filehandler \
+    $$PWD/recorder \
     $$PWD/fontawesome
 
diff --git a/QtScrcpy/decoder/decoder.cpp b/QtScrcpy/decoder/decoder.cpp
index 29912fd..45c397d 100644
--- a/QtScrcpy/decoder/decoder.cpp
+++ b/QtScrcpy/decoder/decoder.cpp
@@ -4,8 +4,13 @@
 #include "decoder.h"
 #include "frames.h"
 #include "devicesocket.h"
+#include "recorder.h"
 
 #define BUFSIZE 0x10000
+#define HEADER_SIZE 12
+#define NO_PTS UINT64_C(-1)
+
+typedef qint32 (*ReadPacketFunc)(void*, quint8*, qint32);
 
 Decoder::Decoder()
 {
@@ -38,7 +43,116 @@ void Decoder::setFrames(Frames *frames)
     m_frames = frames;
 }
 
-static qint32 readPacket(void *opaque, quint8 *buf, qint32 bufSize) {
+static quint32 bufferRead32be(quint8* buf) {
+    return (buf[0] << 24) | (buf[1] << 16) | (buf[2] << 8) | buf[3];
+}
+
+static quint64 bufferRead64be(quint8* buf) {
+    quint32 msb = bufferRead32be(buf);
+    quint32 lsb = bufferRead32be(&buf[4]);
+    return ((quint64) msb << 32) | lsb;
+}
+
+static Decoder::FrameMeta* frameMetaNew(quint64 pts) {
+    Decoder::FrameMeta* meta = new Decoder::FrameMeta;
+    if (!meta) {
+        return meta;
+    }
+    meta->pts = pts;
+    meta->next = Q_NULLPTR;
+    return meta;
+}
+
+static void frameMetaDelete(Decoder::FrameMeta* frameMeta) {
+    if (frameMeta) {
+        delete frameMeta;
+    }
+}
+
+static bool receiverStatePushMeta(Decoder::ReceiverState* state, quint64 pts) {
+    Decoder::FrameMeta* frameMeta = frameMetaNew(pts);
+    if (!frameMeta) {
+        return false;
+    }
+
+    // append to the list
+    // (iterate to find the last item, in practice the list should be tiny)
+    Decoder::FrameMeta **p = &state->frameMetaQueue;
+    while (*p) {
+        p = &(*p)->next;
+    }
+    *p = frameMeta;
+    return true;
+}
+
+static quint64 receiverStateTakeMeta(Decoder::ReceiverState* state) {
+    Decoder::FrameMeta *frameMeta = state->frameMetaQueue; // first item
+    Q_ASSERT(frameMeta); // must not be empty
+    quint64 pts = frameMeta->pts;
+    state->frameMetaQueue = frameMeta->next; // remove the item
+    frameMetaDelete(frameMeta);
+    return pts;
+}
+
+static qint32 readPacketWithMeta(void *opaque, quint8 *buf, qint32 bufSize) {
+    Decoder* decoder = (Decoder*)opaque;
+    Decoder::ReceiverState* state = decoder->getReceiverState();
+
+    // The video stream contains raw packets, without time information. When we
+    // record, we retrieve the timestamps separately, from a "meta" header
+    // added by the server before each raw packet.
+    //
+    // The "meta" header length is 12 bytes:
+    // [. . . . . . . .|. . . .]. . . . . . . . . . . . . . . ...
+    //  <-------------> <-----> <-----------------------------...
+    //        PTS        packet        raw packet
+    //                    size
+    //
+    // It is followed by <packet_size> bytes containing the packet/frame.
+
+    if (!state->remaining) {
+        quint8 header[HEADER_SIZE];
+        qint32 r = decoder->recvData(header, HEADER_SIZE);
+        if (r == -1) {
+            return AVERROR(errno);
+        }
+        if (r == 0) {
+            return AVERROR_EOF;
+        }
+        // no partial read (net_recv_all())
+        Q_ASSERT(r == HEADER_SIZE);
+
+        quint64 pts = bufferRead64be(header);
+        state->remaining = bufferRead32be(&header[8]);
+
+        if (pts != NO_PTS && !receiverStatePushMeta(state, pts)) {
+            qCritical("Could not store PTS for recording");
+            // we cannot save the PTS, the recording would be broken
+            return AVERROR(ENOMEM);
+        }
+    }
+
+    Q_ASSERT(state->remaining);
+
+    if (bufSize > state->remaining) {
+        bufSize = state->remaining;
+    }
+
+    qint32 r = decoder->recvData(buf, bufSize);
+    if (r == -1) {
+        return AVERROR(errno);
+    }
+    if (r == 0) {
+        return AVERROR_EOF;
+    }
+
+    Q_ASSERT(state->remaining >= r);
+    state->remaining -= r;
+    return r;
+}
+
+static qint32 readRawPacket(void *opaque, quint8 *buf, qint32 bufSize) {
     Decoder *decoder = (Decoder*)opaque;
     if (decoder) {
         return decoder->recvData(buf, bufSize);
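
Note: the 12-byte meta header described in the comment above can be unpacked on its own. The sketch below is purely illustrative (FrameMetaHeader and parseFrameMetaHeader are made-up names, not part of this patch); it simply mirrors what bufferRead64be()/bufferRead32be() do.

    #include <cstdint>

    struct FrameMetaHeader {
        uint64_t pts;        // microseconds; UINT64_C(-1) (NO_PTS) marks a config packet
        uint32_t packetSize; // number of raw packet bytes that follow the header
    };

    // buf must contain the 12 header bytes received from the device socket
    static FrameMetaHeader parseFrameMetaHeader(const uint8_t* buf) {
        FrameMetaHeader h{};
        for (int i = 0; i < 8; ++i) {
            h.pts = (h.pts << 8) | buf[i];                           // bytes 0..7: big-endian PTS
        }
        h.packetSize = (uint32_t(buf[8]) << 24) | (uint32_t(buf[9]) << 16)
                     | (uint32_t(buf[10]) << 8) | uint32_t(buf[11]); // bytes 8..11: big-endian size
        return h;
    }
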
@@ -51,6 +165,11 @@ void Decoder::setDeviceSocket(DeviceSocket* deviceSocket)
     m_deviceSocket = deviceSocket;
 }
 
+void Decoder::setRecorder(Recorder *recorder)
+{
+    m_recorder = recorder;
+}
+
 qint32 Decoder::recvData(quint8* buf, qint32 bufSize)
 {
     if (!buf) {
@@ -85,7 +204,12 @@ void Decoder::stopDecode()
     if (m_frames) {
         m_frames->stop();
    }
-    wait();
+    // do not wait() here; the caller now waits for the thread (see VideoForm::~VideoForm)
+    //wait();
+}
+
+Decoder::ReceiverState *Decoder::getReceiverState()
+{
+    return &m_receiverState;
 }
 
 void Decoder::run()
@@ -105,6 +229,13 @@ void Decoder::run()
         goto runQuit;
     }
 
+    // initialize the receiver state
+    m_receiverState.frameMetaQueue = Q_NULLPTR;
+    m_receiverState.remaining = 0;
+
+    // if recording is enabled, a "header" is sent between raw packets
+    ReadPacketFunc readPacket = m_recorder ? readPacketWithMeta : readRawPacket;
+
     // io context
     avioCtx = avio_alloc_context(decoderBuffer, BUFSIZE, 0, this, readPacket, NULL, NULL);
     if (!avioCtx) {
@@ -147,6 +278,11 @@ void Decoder::run()
     }
     isCodecCtxOpen = true;
 
+    if (m_recorder && !m_recorder->open(codec)) {
+        qCritical("Could not open recorder");
+        goto runQuit;
+    }
+
     AVPacket packet;
     av_init_packet(&packet);
     packet.data = Q_NULLPTR;
@@ -214,6 +350,21 @@ void Decoder::run()
             packet.data += len;
         }
 #endif
+        if (m_recorder) {
+            // we retrieve the PTS in the order they were received, so they
+            // will be assigned to the correct frame
+            quint64 pts = receiverStateTakeMeta(&m_receiverState);
+            packet.pts = pts;
+            packet.dts = pts;
+            // no need to rescale with av_packet_rescale_ts(), the timestamps
+            // are in microseconds both in input and output
+            if (!m_recorder->write(&packet)) {
+                qCritical("Could not write frame to output file");
+                av_packet_unref(&packet);
+                goto runQuit;
+            }
+        }
+
         av_packet_unref(&packet);
 
         if (avioCtx->eof_reached) {
@@ -223,23 +374,26 @@
     qDebug() << "End of frames";
 
 runQuit:
-    if (avioCtx) {
-        av_freep(&avioCtx);
-    }
-    if (formatCtx && isFormatCtxOpen) {
-        avformat_close_input(&formatCtx);
-    }
-    if (formatCtx) {
-        avformat_free_context(formatCtx);
-    }
-    if (codecCtx && isCodecCtxOpen) {
-        avcodec_close(codecCtx);
-    }
-    if (codecCtx) {
-        avcodec_free_context(&codecCtx);
-    }
+    if (m_recorder) {
+        m_recorder->close();
+    }
+    if (avioCtx) {
+        av_freep(&avioCtx);
+    }
+    if (formatCtx && isFormatCtxOpen) {
+        avformat_close_input(&formatCtx);
+    }
+    if (formatCtx) {
+        avformat_free_context(formatCtx);
+    }
+    if (codecCtx && isCodecCtxOpen) {
+        avcodec_close(codecCtx);
+    }
+    if (codecCtx) {
+        avcodec_free_context(&codecCtx);
+    }
 
-    emit onDecodeStop();
+    emit onDecodeStop();
 }
 
 void Decoder::pushFrame()
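
Note: the hunk above relies on FFmpeg's custom-AVIO mechanism: avio_alloc_context() registers a read callback, and the demuxer pulls compressed data through it instead of reading a file. A minimal standalone sketch of that pattern (MySource, readCallback and makeAvioContext are made-up names, not from this patch):

    extern "C" {
    #include "libavformat/avformat.h"
    }

    // a device-socket wrapper would live here; empty for the sketch
    struct MySource { /* ... */ };

    // FFmpeg calls this whenever it needs more compressed data
    static int readCallback(void* opaque, uint8_t* buf, int bufSize) {
        MySource* src = static_cast<MySource*>(opaque);
        (void)src; (void)buf; (void)bufSize;
        // return the number of bytes copied into buf, AVERROR_EOF at end of
        // stream, or a negative AVERROR code on failure
        return AVERROR_EOF;
    }

    static AVIOContext* makeAvioContext(MySource* src) {
        const int bufSize = 0x10000;
        unsigned char* buffer = (unsigned char*)av_malloc(bufSize);
        // write_flag = 0 (read-only), opaque = src, no write/seek callbacks
        return avio_alloc_context(buffer, bufSize, 0, src, readCallback, NULL, NULL);
    }
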
diff --git a/QtScrcpy/decoder/decoder.h b/QtScrcpy/decoder/decoder.h
index 80c9812..a483b31 100644
--- a/QtScrcpy/decoder/decoder.h
+++ b/QtScrcpy/decoder/decoder.h
@@ -13,10 +13,22 @@ extern "C"
 class Frames;
 class DeviceSocket;
+class Recorder;
 
 class Decoder : public QThread
 {
     Q_OBJECT
 public:
+    typedef struct FrameMeta {
+        quint64 pts;
+        struct FrameMeta* next;
+    } FrameMeta;
+
+    typedef struct ReceiverState {
+        // meta (in order) for frames not consumed yet
+        FrameMeta* frameMetaQueue;
+        qint32 remaining; // remaining bytes to receive for the current frame
+    } ReceiverState;
+
     Decoder();
     virtual ~Decoder();
 
@@ -26,9 +38,11 @@ public:
     void setFrames(Frames* frames);
     void setDeviceSocket(DeviceSocket* deviceSocket);
+    void setRecorder(Recorder* recorder);
     qint32 recvData(quint8* buf, qint32 bufSize);
     bool startDecode();
     void stopDecode();
+    ReceiverState* getReceiverState();
 
 signals:
     void onNewFrame();
@@ -43,6 +57,10 @@ private:
     QMutex m_mutex;
     bool m_quit = false;
     Frames* m_frames;
+
+    // for recorder
+    Recorder* m_recorder = Q_NULLPTR;
+    ReceiverState m_receiverState;
 };
 
 #endif // DECODER_H
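
Note: the FrameMeta singly linked list mirrors the C implementation in upstream scrcpy. Since this code base is C++/Qt, the same FIFO could also be expressed with a QQueue; the sketch below is an illustrative alternative (ReceiverStateAlt is a made-up name), not part of this patch.

    #include <QQueue>

    class ReceiverStateAlt {
    public:
        void pushPts(quint64 pts) { m_ptsQueue.enqueue(pts); }
        quint64 takePts() {
            Q_ASSERT(!m_ptsQueue.isEmpty()); // one PTS was pushed per frame header
            return m_ptsQueue.dequeue();
        }
    private:
        QQueue<quint64> m_ptsQueue; // PTS values in reception order
        qint32 m_remaining = 0;     // bytes left to receive for the current frame
    };
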
diff --git a/QtScrcpy/recorder/recorder.cpp b/QtScrcpy/recorder/recorder.cpp
new file mode 100644
index 0000000..20888d6
--- /dev/null
+++ b/QtScrcpy/recorder/recorder.cpp
@@ -0,0 +1,117 @@
+#include <QDebug>
+
+#include "recorder.h"
+
+Recorder::Recorder(const QString& fileName, const QSize& declaredFrameSize)
+    : m_fileName(fileName)
+    , m_declaredFrameSize(declaredFrameSize)
+{
+
+}
+
+Recorder::~Recorder()
+{
+
+}
+
+bool Recorder::open(AVCodec *inputCodec)
+{
+    const AVOutputFormat* mp4 = findMp4Muxer();
+    if (!mp4) {
+        qCritical("Could not find mp4 muxer");
+        return false;
+    }
+
+    m_formatCtx = avformat_alloc_context();
+    if (!m_formatCtx) {
+        qCritical("Could not allocate output context");
+        return false;
+    }
+
+    // contrary to the deprecated API (av_oformat_next()), av_muxer_iterate()
+    // returns (on purpose) a pointer-to-const, but AVFormatContext.oformat
+    // still expects a pointer-to-non-const (it has not been updated accordingly)
+    //
+    m_formatCtx->oformat = (AVOutputFormat*)mp4;
+
+    AVStream* outStream = avformat_new_stream(m_formatCtx, inputCodec);
+    if (!outStream) {
+        avformat_free_context(m_formatCtx);
+        m_formatCtx = Q_NULLPTR;
+        return false;
+    }
+
+    // In ffmpeg/doc/APIchanges:
+    // 2016-04-11 - 6f69f7a / 9200514 - lavf 57.33.100 / 57.5.0 - avformat.h
+    //   Add AVStream.codecpar, deprecate AVStream.codec.
+#if (LIBAVFORMAT_VERSION_MICRO >= 100 /* FFmpeg */ && \
+     LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(57, 33, 100)) \
+    || (LIBAVFORMAT_VERSION_MICRO < 100 && /* Libav */ \
+        LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(57, 5, 0))
+    outStream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
+    outStream->codecpar->codec_id = inputCodec->id;
+    outStream->codecpar->format = AV_PIX_FMT_YUV420P;
+    outStream->codecpar->width = m_declaredFrameSize.width();
+    outStream->codecpar->height = m_declaredFrameSize.height();
+#else
+    outStream->codec->codec_type = AVMEDIA_TYPE_VIDEO;
+    outStream->codec->codec_id = inputCodec->id;
+    outStream->codec->pix_fmt = AV_PIX_FMT_YUV420P;
+    outStream->codec->width = m_declaredFrameSize.width();
+    outStream->codec->height = m_declaredFrameSize.height();
+#endif
+    // timestamps in us
+    outStream->time_base.num = 1;
+    outStream->time_base.den = 1000000;
+
+    int ret = avio_open(&m_formatCtx->pb, m_fileName.toUtf8().constData(),
+                        AVIO_FLAG_WRITE);
+    if (ret < 0) {
+        qCritical("Failed to open output file: %s", m_fileName.toUtf8().constData());
+        // ostream will be cleaned up during context cleaning
+        avformat_free_context(m_formatCtx);
+        m_formatCtx = Q_NULLPTR;
+        return false;
+    }
+
+    ret = avformat_write_header(m_formatCtx, Q_NULLPTR);
+    if (ret < 0) {
+        qCritical("Failed to write header to %s", m_fileName.toUtf8().constData());
+        avio_closep(&m_formatCtx->pb);
+        avformat_free_context(m_formatCtx);
+        m_formatCtx = Q_NULLPTR;
+        return false;
+    }
+
+    return true;
+}
+
+void Recorder::close()
+{
+    if (!m_formatCtx) {
+        // open() failed or was never called
+        return;
+    }
+    int ret = av_write_trailer(m_formatCtx);
+    if (ret < 0) {
+        qCritical("Failed to write trailer to %s", m_fileName.toUtf8().constData());
+    }
+    avio_close(m_formatCtx->pb);
+    avformat_free_context(m_formatCtx);
+    m_formatCtx = Q_NULLPTR;
+}
+
+bool Recorder::write(AVPacket *packet)
+{
+    return av_write_frame(m_formatCtx, packet) >= 0;
+}
+
+const AVOutputFormat *Recorder::findMp4Muxer()
+{
+#if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(58, 9, 100)
+    void* opaque = Q_NULLPTR;
+#endif
+    const AVOutputFormat* outFormat = Q_NULLPTR;
+    do {
+#if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(58, 9, 100)
+        outFormat = av_muxer_iterate(&opaque);
+#else
+        outFormat = av_oformat_next(outFormat);
+#endif
+        // until null or with name "mp4"
+    } while (outFormat && strcmp(outFormat->name, "mp4"));
+    return outFormat;
+}
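
Note: iterating all muxers keeps findMp4Muxer() compatible with both the old av_oformat_next() and the new av_muxer_iterate() APIs. For reference only (not part of this patch, findMp4MuxerByGuess is a made-up name), FFmpeg can also resolve a muxer directly by its short name:

    extern "C" {
    #include "libavformat/avformat.h"
    }

    static const AVOutputFormat* findMp4MuxerByGuess() {
        // no filename / mime-type hints, resolve purely by the short name "mp4"
        return av_guess_format("mp4", nullptr, nullptr);
    }
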
diff --git a/QtScrcpy/recorder/recorder.h b/QtScrcpy/recorder/recorder.h
new file mode 100644
index 0000000..ceec666
--- /dev/null
+++ b/QtScrcpy/recorder/recorder.h
@@ -0,0 +1,30 @@
+#ifndef RECORDER_H
+#define RECORDER_H
+#include <QString>
+#include <QSize>
+
+extern "C"
+{
+#include "libavformat/avformat.h"
+}
+
+class Recorder
+{
+public:
+    Recorder(const QString& fileName, const QSize& declaredFrameSize);
+    virtual ~Recorder();
+
+    bool open(AVCodec* inputCodec);
+    void close();
+    bool write(AVPacket* packet);
+
+private:
+    const AVOutputFormat* findMp4Muxer();
+
+private:
+    QString m_fileName = "";
+    AVFormatContext* m_formatCtx = Q_NULLPTR;
+    QSize m_declaredFrameSize;
+};
+
+#endif // RECORDER_H
diff --git a/QtScrcpy/recorder/recorder.pri b/QtScrcpy/recorder/recorder.pri
new file mode 100644
index 0000000..ec33f7e
--- /dev/null
+++ b/QtScrcpy/recorder/recorder.pri
@@ -0,0 +1,5 @@
+HEADERS += \
+    $$PWD/recorder.h
+
+SOURCES += \
+    $$PWD/recorder.cpp
diff --git a/QtScrcpy/server/server.cpp b/QtScrcpy/server/server.cpp
index 372fc3a..f8547de 100644
--- a/QtScrcpy/server/server.cpp
+++ b/QtScrcpy/server/server.cpp
@@ -131,7 +131,7 @@ bool Server::execute()
     } else {
         args << m_crop;
     }
-
+    args << (m_sendFrameMeta ? "true" : "false");
     // adb -s P7C0218510000537 shell CLASSPATH=/data/local/tmp/scrcpy-server.jar app_process / com.genymobile.scrcpy.Server 0 8000000 false
     // mark: crop input format: "width:height:x:y" or - for no crop, for example: "100:200:0:0"
     // this adb command runs blocking; the m_serverProcess process will not exit
@@ -139,13 +139,14 @@ bool Server::execute()
     return true;
 }
 
-bool Server::start(const QString& serial, quint16 localPort, quint16 maxSize, quint32 bitRate, const QString& crop)
+bool Server::start(const QString& serial, quint16 localPort, quint16 maxSize, quint32 bitRate, const QString& crop, bool sendFrameMeta)
 {
     m_serial = serial;
     m_localPort = localPort;
     m_maxSize = maxSize;
     m_bitRate = bitRate;
     m_crop = crop;
+    m_sendFrameMeta = sendFrameMeta;
 
     m_serverStartStep = SSS_PUSH;
     return startServerByStep();
diff --git a/QtScrcpy/server/server.h b/QtScrcpy/server/server.h
index 6643a2c..8a9f4be 100644
--- a/QtScrcpy/server/server.h
+++ b/QtScrcpy/server/server.h
@@ -24,7 +24,7 @@ public:
     explicit Server(QObject *parent = nullptr);
     virtual ~Server();
 
-    bool start(const QString& serial, quint16 localPort, quint16 maxSize, quint32 bitRate, const QString& crop);
+    bool start(const QString& serial, quint16 localPort, quint16 maxSize, quint32 bitRate, const QString& crop, bool sendFrameMeta = false);
     bool connectTo();
 
     DeviceSocket* getDeviceSocket();
@@ -66,6 +66,7 @@ private:
     quint16 m_localPort = 0;
     bool m_tunnelEnabled = false;
    bool m_tunnelForward = false; // use "adb forward" instead of "adb reverse"
+    bool m_sendFrameMeta = false;
     quint16 m_maxSize = 0;
     quint32 m_bitRate = 0;
     QString m_crop = "";
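
Note: with the new argument, the server now receives five positional parameters, parsed in this order by Server.createOptions(): maxSize, bitRate, tunnelForward, crop, sendFrameMeta. A hypothetical helper mirroring how Server::execute() assembles them (buildServerArgs is a made-up name, shown only to make the order explicit):

    #include <QStringList>

    static QStringList buildServerArgs(quint16 maxSize, quint32 bitRate,
                                       bool tunnelForward, const QString& crop,
                                       bool sendFrameMeta) {
        QStringList args;
        args << QString::number(maxSize);               // 0 means "no limit"
        args << QString::number(bitRate);               // e.g. 8000000
        args << (tunnelForward ? "true" : "false");
        args << (crop.isEmpty() ? "-" : crop);          // "width:height:x:y" or "-"
        args << (sendFrameMeta ? "true" : "false");     // new: frame meta for recording
        return args;
    }
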
diff --git a/QtScrcpy/videoform.cpp b/QtScrcpy/videoform.cpp
index fb9e08a..f2b1a3d 100644
--- a/QtScrcpy/videoform.cpp
+++ b/QtScrcpy/videoform.cpp
@@ -17,6 +17,7 @@
 #include "iconhelper.h"
 #include "toolform.h"
 #include "controlevent.h"
+#include "recorder.h"
 
 VideoForm::VideoForm(const QString& serial, quint16 maxSize, quint32 bitRate, QWidget *parent) :
     QWidget(parent),
@@ -29,8 +30,10 @@ VideoForm::VideoForm(const QString& serial, quint16 maxSize, quint32 bitRate, QWidget *parent) :
     initUI();
 
     m_server = new Server();
+    m_recorder = new Recorder("./test.mp4", QSize(600, 300));
     m_frames.init();
     m_decoder.setFrames(&m_frames);
+    m_decoder.setRecorder(m_recorder);
 
     initSignals();
 
@@ -41,7 +44,8 @@ VideoForm::VideoForm(const QString& serial, quint16 maxSize, quint32 bitRate, QWidget *parent) :
         //m_server->start("192.168.0.174:5555", 27183, m_maxSize, m_bitRate, "");
         // only one device, serial can be null
        // mark: crop input format: "width:height:x:y" or - for no crop, for example: "100:200:0:0"
-        m_server->start(m_serial, 27183, m_maxSize, m_bitRate, "-");
+        // sendFrameMeta is enabled so the stream can be recorded to mp4
+        m_server->start(m_serial, 27183, m_maxSize, m_bitRate, "-", true);
     });
 
     updateShowSize(size());
@@ -52,9 +56,11 @@ VideoForm::VideoForm(const QString& serial, quint16 maxSize, quint32 bitRate, QWidget *parent) :
 
 VideoForm::~VideoForm()
 {
-    m_server->stop();
     m_decoder.stopDecode();
+    m_server->stop();
+    m_decoder.wait();
     delete m_server;
+    delete m_recorder;
     m_frames.deInit();
     delete ui;
 }
diff --git a/QtScrcpy/videoform.h b/QtScrcpy/videoform.h
index 5c1e021..53e7105 100644
--- a/QtScrcpy/videoform.h
+++ b/QtScrcpy/videoform.h
@@ -16,6 +16,7 @@ class videoForm;
 }
 
 class ToolForm;
+class Recorder;
 class VideoForm : public QWidget
 {
     Q_OBJECT
@@ -75,6 +76,7 @@ private:
     QPoint m_dragPosition;
     float m_widthHeightRatio = 0.5f;
     QPointer<ToolForm> m_toolForm;
+    Recorder* m_recorder = Q_NULLPTR;
 };
 
 #endif // VIDEOFORM_H
diff --git a/TODO.txt b/TODO.txt
index a8f8ff8..b5bd9df 100644
--- a/TODO.txt
+++ b/TODO.txt
@@ -1,5 +1,5 @@
-ȫ״ֹ̬
-mp4¼
+mp4¼α֤ȷյ
+
 չģָȣ
 ģָ(ע⣺Ͷû)
diff --git a/server/src/main/java/com/genymobile/scrcpy/Options.java b/server/src/main/java/com/genymobile/scrcpy/Options.java
index 93df896..0d02451 100644
--- a/server/src/main/java/com/genymobile/scrcpy/Options.java
+++ b/server/src/main/java/com/genymobile/scrcpy/Options.java
@@ -7,6 +7,7 @@ public class Options {
     private int bitRate;
     private boolean tunnelForward;
     private Rect crop;
+    private boolean sendFrameMeta;
 
     public int getMaxSize() {
         return maxSize;
@@ -39,4 +40,12 @@ public class Options {
     public void setCrop(Rect crop) {
         this.crop = crop;
     }
+
+    public boolean getSendFrameMeta() {
+        return sendFrameMeta;
+    }
+
+    public void setSendFrameMeta(boolean sendFrameMeta) {
+        this.sendFrameMeta = sendFrameMeta;
+    }
 }
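
Note: the next file is the server-side writer of the frame meta header. Its ByteBuffer defaults to big-endian, which matches the client-side reads shown earlier. A writer-side counterpart of the earlier parsing sketch (writeFrameMetaHeader is a made-up name, shown only to make the wire layout explicit):

    #include <cstdint>

    static void writeFrameMetaHeader(uint8_t out[12], uint64_t pts, uint32_t packetSize) {
        for (int i = 0; i < 8; ++i) {
            out[i] = uint8_t(pts >> (8 * (7 - i)));            // bytes 0..7: PTS, big-endian
        }
        for (int i = 0; i < 4; ++i) {
            out[8 + i] = uint8_t(packetSize >> (8 * (3 - i))); // bytes 8..11: packet size
        }
    }
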
diff --git a/server/src/main/java/com/genymobile/scrcpy/ScreenEncoder.java b/server/src/main/java/com/genymobile/scrcpy/ScreenEncoder.java
index 636bbb0..ec63e81 100644
--- a/server/src/main/java/com/genymobile/scrcpy/ScreenEncoder.java
+++ b/server/src/main/java/com/genymobile/scrcpy/ScreenEncoder.java
@@ -3,6 +3,7 @@ package com.genymobile.scrcpy;
 import com.genymobile.scrcpy.wrappers.SurfaceControl;
 
 import android.graphics.Rect;
+import android.media.MediaMuxer;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
 import android.media.MediaFormat;
@@ -22,21 +23,26 @@ public class ScreenEncoder implements Device.RotationListener {
 
     private static final int REPEAT_FRAME_DELAY = 6; // repeat after 6 frames
     private static final int MICROSECONDS_IN_ONE_SECOND = 1_000_000;
+    private static final long NO_PTS = -1;
 
     private final AtomicBoolean rotationChanged = new AtomicBoolean();
+    private final ByteBuffer headerBuffer = ByteBuffer.allocate(12);
 
     private int bitRate;
     private int frameRate;
     private int iFrameInterval;
+    private boolean sendFrameMeta;
+    private long ptsOrigin;
 
-    public ScreenEncoder(int bitRate, int frameRate, int iFrameInterval) {
+    public ScreenEncoder(boolean sendFrameMeta, int bitRate, int frameRate, int iFrameInterval) {
+        this.sendFrameMeta = sendFrameMeta;
         this.bitRate = bitRate;
         this.frameRate = frameRate;
         this.iFrameInterval = iFrameInterval;
     }
 
-    public ScreenEncoder(int bitRate) {
-        this(bitRate, DEFAULT_FRAME_RATE, DEFAULT_I_FRAME_INTERVAL);
+    public ScreenEncoder(boolean sendFrameMeta, int bitRate) {
+        this(sendFrameMeta, bitRate, DEFAULT_FRAME_RATE, DEFAULT_I_FRAME_INTERVAL);
     }
 
     @Override
@@ -80,6 +86,8 @@
     private boolean encode(MediaCodec codec, FileDescriptor fd) throws IOException {
         boolean eof = false;
         MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
+
+
         while (!consumeRotationChange() && !eof) {
             int outputBufferId = codec.dequeueOutputBuffer(bufferInfo, -1);
             eof = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
@@ -90,6 +98,11 @@
                 }
                 if (outputBufferId >= 0) {
                     ByteBuffer codecBuffer = codec.getOutputBuffer(outputBufferId);
+
+                    if (sendFrameMeta) {
+                        writeFrameMeta(fd, bufferInfo, codecBuffer.remaining());
+                    }
+
                     IO.writeFully(fd, codecBuffer);
                 }
             } finally {
@@ -102,6 +115,25 @@
         return !eof;
     }
 
+    private void writeFrameMeta(FileDescriptor fd, MediaCodec.BufferInfo bufferInfo, int packetSize) throws IOException {
+        headerBuffer.clear();
+
+        long pts;
+        if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
+            pts = NO_PTS; // non-media data packet
+        } else {
+            if (ptsOrigin == 0) {
+                ptsOrigin = bufferInfo.presentationTimeUs;
+            }
+            pts = bufferInfo.presentationTimeUs - ptsOrigin;
+        }
+
+        headerBuffer.putLong(pts);
+        headerBuffer.putInt(packetSize);
+        headerBuffer.flip();
+        IO.writeFully(fd, headerBuffer);
+    }
+
     private static MediaCodec createCodec() throws IOException {
         return MediaCodec.createEncoderByType("video/avc");
     }
@@ -119,7 +151,7 @@
     }
 
     private static IBinder createDisplay() {
-        return SurfaceControl.createDisplay("scrcpy", false);
+        return SurfaceControl.createDisplay("scrcpy", true);
     }
 
     private static void configure(MediaCodec codec, MediaFormat format) {
diff --git a/server/src/main/java/com/genymobile/scrcpy/Server.java b/server/src/main/java/com/genymobile/scrcpy/Server.java
index daf9c77..cf9931d 100644
--- a/server/src/main/java/com/genymobile/scrcpy/Server.java
+++ b/server/src/main/java/com/genymobile/scrcpy/Server.java
@@ -18,7 +18,7 @@ public final class Server {
         final Device device = new Device(options);
         boolean tunnelForward = options.isTunnelForward();
         try (DesktopConnection connection = DesktopConnection.open(device, tunnelForward)) {
-            ScreenEncoder screenEncoder = new ScreenEncoder(options.getBitRate());
+            ScreenEncoder screenEncoder = new ScreenEncoder(options.getSendFrameMeta(), options.getBitRate());
 
             // asynchronous
             startEventController(device, connection);
@@ -49,8 +49,8 @@ public final class Server {
 
     @SuppressWarnings("checkstyle:MagicNumber")
     private static Options createOptions(String... args) {
-        if (args.length != 4)
-            throw new IllegalArgumentException("Expecting 4 parameters");
+        if (args.length != 5)
+            throw new IllegalArgumentException("Expecting 5 parameters");
 
         Options options = new Options();
 
@@ -67,6 +67,9 @@ public final class Server {
         Rect crop = parseCrop(args[3]);
         options.setCrop(crop);
 
+        boolean sendFrameMeta = Boolean.parseBoolean(args[4]);
+        options.setSendFrameMeta(sendFrameMeta);
+
         return options;
     }
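
Note: writeFrameMeta() above normalizes timestamps so that the first encoded frame starts at zero; config packets get NO_PTS and are skipped by the client's PTS queue. A client-agnostic restatement of that normalization, for illustration only (PtsNormalizer is a made-up name; it uses an explicit flag instead of treating 0 as "origin not set yet"):

    #include <cstdint>

    class PtsNormalizer {
    public:
        uint64_t normalize(uint64_t presentationTimeUs) {
            if (!m_hasOrigin) {
                m_origin = presentationTimeUs; // first frame becomes t = 0
                m_hasOrigin = true;
            }
            return presentationTimeUs - m_origin;
        }
    private:
        uint64_t m_origin = 0;
        bool m_hasOrigin = false;
    };
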