Merge branch 'master' of https://github.com/barry-ran/QtScrcpy into dev

# Conflicts:
#	QtScrcpy/dialog.ui
#	README.md
This commit is contained in:
Tian Zhou 2020-01-18 00:10:14 -05:00
commit bcfb3d6599
58 changed files with 1800 additions and 750 deletions

2
.gitignore vendored
View file

@ -9,5 +9,5 @@
/server/gradlew
/server/gradlew.bat
/server/local.properties
build
build-*
*.DS_Store

View file

@ -58,8 +58,8 @@ INCLUDEPATH += \
# 统一版本号入口,只修改这一个地方即可
VERSION_MAJOR = 1
VERSION_MINOR = 0
VERSION_PATCH = 4
VERSION_MINOR = 1
VERSION_PATCH = 0
# qmake变量的方式定义版本号
VERSION = $${VERSION_MAJOR}.$${VERSION_MINOR}.$${VERSION_PATCH}
@ -140,7 +140,7 @@ macos {
-L$$PWD/../third_party/ffmpeg/lib -lswscale.5
# mac bundle file
APP_SCRCPY_SERVER.files = $$files($$PWD/../third_party/scrcpy-server.jar)
APP_SCRCPY_SERVER.files = $$files($$PWD/../third_party/scrcpy-server)
APP_SCRCPY_SERVER.path = Contents/MacOS
QMAKE_BUNDLE_DATA += APP_SCRCPY_SERVER
@ -152,6 +152,10 @@ macos {
APP_FFMPEG.path = Contents/MacOS
QMAKE_BUNDLE_DATA += APP_FFMPEG
APP_CONFIG.files = $$files($$PWD/../config/config.ini)
APP_CONFIG.path = Contents/MacOS
QMAKE_BUNDLE_DATA += APP_CONFIG
# mac application icon
ICON = $$PWD/res/QtScrcpy.icns
QMAKE_INFO_PLIST = $$PWD/res/Info_mac.plist

View file

@ -40,8 +40,8 @@ void Controller::postControlMsg(ControlMsg *controlMsg)
void Controller::test(QRect rc)
{
ControlMsg* controlMsg = new ControlMsg(ControlMsg::CMT_INJECT_MOUSE);
controlMsg->setInjectMouseMsgData(AMOTION_EVENT_ACTION_DOWN, AMOTION_EVENT_BUTTON_PRIMARY, rc);
ControlMsg* controlMsg = new ControlMsg(ControlMsg::CMT_INJECT_TOUCH);
controlMsg->setInjectTouchMsgData(POINTER_ID_MOUSE, AMOTION_EVENT_ACTION_DOWN, AMOTION_EVENT_BUTTON_PRIMARY, rc, 1.0f);
postControlMsg(controlMsg);
}

View file

@ -42,18 +42,13 @@ void ControlMsg::setInjectTextMsgData(QString& text)
m_data.injectText.text[tmp.length()] = '\0';
}
void ControlMsg::setInjectMouseMsgData(AndroidMotioneventAction action, AndroidMotioneventButtons buttons, QRect position)
void ControlMsg::setInjectTouchMsgData(quint64 id, AndroidMotioneventAction action, AndroidMotioneventButtons buttons, QRect position, float pressure)
{
m_data.injectMouse.action = action;
m_data.injectMouse.buttons = buttons;
m_data.injectMouse.position = position;
}
void ControlMsg::setInjectTouchMsgData(quint32 id, AndroidMotioneventAction action, QRect position)
{
m_data.injectTouch.action = action;
m_data.injectTouch.id = id;
m_data.injectTouch.action = action;
m_data.injectTouch.buttons = buttons;
m_data.injectTouch.position = position;
m_data.injectTouch.pressure = pressure;
}
void ControlMsg::setInjectScrollMsgData(QRect position, qint32 hScroll, qint32 vScroll)
@ -85,12 +80,22 @@ void ControlMsg::setSetScreenPowerModeData(ControlMsg::ScreenPowerMode mode)
void ControlMsg::writePosition(QBuffer &buffer, const QRect& value)
{
BufferUtil::write16(buffer, value.left());
BufferUtil::write16(buffer, value.top());
BufferUtil::write32(buffer, value.left());
BufferUtil::write32(buffer, value.top());
BufferUtil::write16(buffer, value.width());
BufferUtil::write16(buffer, value.height());
}
quint16 ControlMsg::toFixedPoint16(float f)
{
assert(f >= 0.0f && f <= 1.0f);
quint32 u = f * 0x1p16f; // 2^16
if (u >= 0xffff) {
u = 0xffff;
}
return (quint16) u;
}
QByteArray ControlMsg::serializeData()
{
QByteArray byteArray;
@ -108,15 +113,15 @@ QByteArray ControlMsg::serializeData()
BufferUtil::write16(buffer, strlen(m_data.injectText.text));
buffer.write(m_data.injectText.text, strlen(m_data.injectText.text));
break;
case CMT_INJECT_MOUSE:
buffer.putChar(m_data.injectMouse.action);
BufferUtil::write32(buffer, m_data.injectMouse.buttons);
writePosition(buffer, m_data.injectMouse.position);
break;
case CMT_INJECT_TOUCH:
buffer.putChar(m_data.injectTouch.id);
{
buffer.putChar(m_data.injectTouch.action);
BufferUtil::write64(buffer, m_data.injectTouch.id);
writePosition(buffer, m_data.injectTouch.position);
quint16 pressure = toFixedPoint16(m_data.injectTouch.pressure);
BufferUtil::write16(buffer, pressure);
BufferUtil::write32(buffer, m_data.injectTouch.buttons);
}
break;
case CMT_INJECT_SCROLL:
writePosition(buffer, m_data.injectScroll.position);

View file

@ -11,6 +11,7 @@
#define CONTROL_MSG_TEXT_MAX_LENGTH 300
#define CONTROL_MSG_CLIPBOARD_TEXT_MAX_LENGTH 4093
#define POINTER_ID_MOUSE UINT64_C(-1)
// ControlMsg
class ControlMsg : public QScrcpyEvent
{
@ -19,16 +20,14 @@ public:
CMT_NULL = -1,
CMT_INJECT_KEYCODE = 0,
CMT_INJECT_TEXT,
CMT_INJECT_MOUSE,
CMT_INJECT_TOUCH,
CMT_INJECT_SCROLL,
CMT_BACK_OR_SCREEN_ON,
CMT_EXPAND_NOTIFICATION_PANEL,
CMT_COLLAPSE_NOTIFICATION_PANEL,
CMT_GET_CLIPBOARD,
CMT_SET_CLIPBOARD,
CMT_SET_SCREEN_POWER_MODE,
CMT_INJECT_TOUCH,
CMT_SET_SCREEN_POWER_MODE
};
enum ScreenPowerMode {
@ -42,11 +41,10 @@ public:
void setInjectKeycodeMsgData(AndroidKeyeventAction action, AndroidKeycode keycode, AndroidMetastate metastate);
void setInjectTextMsgData(QString& text);
void setInjectMouseMsgData(AndroidMotioneventAction action, AndroidMotioneventButtons buttons, QRect position);
// id 代表一个触摸点最多支持10个触摸点[0,9]
// action 只能是AMOTION_EVENT_ACTION_DOWNAMOTION_EVENT_ACTION_UPAMOTION_EVENT_ACTION_MOVE
// position action动作对应的位置
void setInjectTouchMsgData(quint32 id, AndroidMotioneventAction action, QRect position);
void setInjectTouchMsgData(quint64 id, AndroidMotioneventAction action, AndroidMotioneventButtons buttons, QRect position, float pressure);
void setInjectScrollMsgData(QRect position, qint32 hScroll, qint32 vScroll);
void setSetClipboardMsgData(QString& text);
void setSetScreenPowerModeData(ControlMsg::ScreenPowerMode mode);
@ -55,6 +53,7 @@ public:
private:
void writePosition(QBuffer& buffer, const QRect& value);
quint16 toFixedPoint16(float f);
private:
struct ControlMsgData {
@ -69,14 +68,11 @@ private:
char* text = Q_NULLPTR;
} injectText;
struct {
quint64 id;
AndroidMotioneventAction action;
AndroidMotioneventButtons buttons;
QRect position;
} injectMouse;
struct {
quint32 id;
AndroidMotioneventAction action;
QRect position;
float pressure;
} injectTouch;
struct {
QRect position;

View file

@ -185,7 +185,7 @@ void InputConvertGame::sendTouchEvent(int id, QPointF pos, AndroidMotioneventAct
if (!controlMsg) {
return;
}
controlMsg->setInjectTouchMsgData(id, action, QRect(calcFrameAbsolutePos(pos).toPoint(), m_frameSize));
controlMsg->setInjectTouchMsgData(id, action, (AndroidMotioneventButtons)0, QRect(calcFrameAbsolutePos(pos).toPoint(), m_frameSize), 1.0f);
sendControlMsg(controlMsg);
}

View file

@ -40,11 +40,11 @@ void InputConvertNormal::mouseEvent(const QMouseEvent* from, const QSize& frameS
pos.setY(pos.y() * frameSize.height() / showSize.height());
// set data
ControlMsg* controlMsg = new ControlMsg(ControlMsg::CMT_INJECT_MOUSE);
ControlMsg* controlMsg = new ControlMsg(ControlMsg::CMT_INJECT_TOUCH);
if (!controlMsg) {
return;
}
controlMsg->setInjectMouseMsgData(action, convertMouseButtons(from->buttons()), QRect(pos.toPoint(), frameSize));
controlMsg->setInjectTouchMsgData(POINTER_ID_MOUSE, action, convertMouseButtons(from->buttons()), QRect(pos.toPoint(), frameSize), 1.0f);
sendControlMsg(controlMsg);
}

View file

@ -10,6 +10,7 @@
#include "stream.h"
#include "videoform.h"
#include "controller.h"
#include "config.h"
Device::Device(DeviceParams params, QObject *parent)
: QObject(parent)
@ -27,8 +28,7 @@ Device::Device(DeviceParams params, QObject *parent)
m_decoder = new Decoder(m_vb, this);
m_fileHandler = new FileHandler(this);
m_controller = new Controller(params.gameScript, this);
//m_videoForm = new VideoForm(false);
m_videoForm = new VideoForm();
m_videoForm = new VideoForm(Config::getInstance().getSkin());
m_videoForm->setSerial(m_params.serial);
if (m_controller) {
m_videoForm->setController(m_controller);
@ -204,14 +204,13 @@ void Device::startServer()
//m_server->start("192.168.0.174:5555", 27183, m_maxSize, m_bitRate, "");
// only one devices, serial can be null
// mark: crop input format: "width:height:x:y" or - for no crop, for example: "100:200:0:0"
// sendFrameMeta for recorder mp4
Server::ServerParams params;
params.serial = m_params.serial;
params.localPort = m_params.localPort;
params.maxSize = m_params.maxSize;
params.bitRate = m_params.bitRate;
params.maxFps = m_params.maxFps;
params.crop = "-";
params.sendFrameMeta = m_recorder ? true : false;
params.control = true;
params.useReverse = m_params.useReverse;
m_server->start(params);

View file

@ -22,6 +22,7 @@ public:
quint16 localPort = 27183; // reverse时本地监听端口
quint16 maxSize = 720; // 视频分辨率
quint32 bitRate = 8000000; // 视频比特率
quint32 maxFps = 60; // 视频最大帧率
bool closeScreen = false; // 启动时自动息屏
bool useReverse = true; // true:先使用adb reverse失败后自动使用adb forwardfalse:直接使用adb forward
bool display = true; // 是否显示画面(或者仅仅后台录制)

View file

@ -1,21 +1,49 @@
#include <QDebug>
#include <QFileInfo>
#include <QCoreApplication>
#include "compat.h"
#include "recorder.h"
static const AVRational SCRCPY_TIME_BASE = {1, 1000000}; // timestamps in us
Recorder::Recorder(const QString& fileName)
: m_fileName(fileName)
Recorder::Recorder(const QString& fileName, QObject* parent)
: QThread(parent)
, m_fileName(fileName)
, m_format(guessRecordFormat(fileName))
{
}
Recorder::~Recorder()
{
}
AVPacket* Recorder::packetNew(const AVPacket *packet) {
AVPacket* rec = new AVPacket;
if (!rec) {
return Q_NULLPTR;
}
// av_packet_ref() does not initialize all fields in old FFmpeg versions
av_init_packet(rec);
if (av_packet_ref(rec, packet)) {
delete rec;
return Q_NULLPTR;
}
return rec;
}
void Recorder::packetDelete(AVPacket* packet) {
av_packet_unref(packet);
delete packet;
}
void Recorder::queueClear()
{
while (!m_queue.isEmpty()) {
packetDelete(m_queue.dequeue());
}
}
void Recorder::setFrameSize(const QSize &declaredFrameSize)
@ -51,6 +79,10 @@ bool Recorder::open(const AVCodec* inputCodec)
m_formatCtx->oformat = (AVOutputFormat*)format;
QString comment = "Recorded by QtScrcpy " + QCoreApplication::applicationVersion();
av_dict_set(&m_formatCtx->metadata, "comment",
comment.toUtf8(), 0);
AVStream* outStream = avformat_new_stream(m_formatCtx, inputCodec);
if (!outStream) {
avformat_free_context(m_formatCtx);
@ -82,7 +114,7 @@ bool Recorder::open(const AVCodec* inputCodec)
avformat_free_context(m_formatCtx);
m_formatCtx = Q_NULLPTR;
return false;
}
}
return true;
}
@ -90,11 +122,17 @@ bool Recorder::open(const AVCodec* inputCodec)
void Recorder::close()
{
if (Q_NULLPTR != m_formatCtx) {
int ret = av_write_trailer(m_formatCtx);
if (ret < 0) {
qCritical(QString("Failed to write trailer to %1").arg(m_fileName).toUtf8().toStdString().c_str());
if (m_headerWritten) {
int ret = av_write_trailer(m_formatCtx);
if (ret < 0) {
qCritical(QString("Failed to write trailer to %1").arg(m_fileName).toUtf8().toStdString().c_str());
m_failed = true;
} else {
qInfo(QString("success record %1").arg(m_fileName).toStdString().c_str());
}
} else {
qInfo(QString("success record %1").arg(m_fileName).toStdString().c_str());
// the recorded file is empty
m_failed = true;
}
avio_close(m_formatCtx->pb);
avformat_free_context(m_formatCtx);
@ -105,12 +143,23 @@ void Recorder::close()
bool Recorder::write(AVPacket *packet)
{
if (!m_headerWritten) {
if (packet->pts != AV_NOPTS_VALUE) {
qCritical("The first packet is not a config packet");
return false;
}
bool ok = recorderWriteHeader(packet);
if (!ok) {
return false;
}
m_headerWritten = true;
return true;
}
if (packet->pts == AV_NOPTS_VALUE) {
// ignore config packets
return true;
}
recorderRescalePacket(packet);
return av_write_frame(m_formatCtx, packet) >= 0;
}
@ -154,9 +203,6 @@ bool Recorder::recorderWriteHeader(const AVPacket* packet)
int ret = avformat_write_header(m_formatCtx, NULL);
if (ret < 0) {
qCritical("Failed to write header recorder file");
free(extradata);
avio_close(m_formatCtx->pb);
avformat_free_context(m_formatCtx);
return false;
}
return true;
@ -193,3 +239,93 @@ Recorder::RecorderFormat Recorder::guessRecordFormat(const QString &fileName)
return Recorder::RECORDER_FORMAT_NULL;
}
void Recorder::run() {
for (;;) {
AVPacket *rec = Q_NULLPTR;
{
QMutexLocker locker(&m_mutex);
while (!m_stopped && m_queue.isEmpty()) {
m_recvDataCond.wait(&m_mutex);
}
// if stopped is set, continue to process the remaining events (to
// finish the recording) before actually stopping
if (m_stopped && m_queue.isEmpty()) {
AVPacket* last = m_previous;
if (last) {
// assign an arbitrary duration to the last packet
last->duration = 100000;
bool ok = write(last);
if (!ok) {
// failing to write the last frame is not very serious, no
// future frame may depend on it, so the resulting file
// will still be valid
qWarning("Could not record last packet");
}
packetDelete(last);
}
break;
}
rec = m_queue.dequeue();
}
// recorder->previous is only written from this thread, no need to lock
AVPacket* previous = m_previous;
m_previous = rec;
if (!previous) {
// we just received the first packet
continue;
}
// config packets have no PTS, we must ignore them
if (rec->pts != AV_NOPTS_VALUE
&& previous->pts != AV_NOPTS_VALUE) {
// we now know the duration of the previous packet
previous->duration = rec->pts - previous->pts;
}
bool ok = write(previous);
packetDelete(previous);
if (!ok) {
qCritical("Could not record packet");
QMutexLocker locker(&m_mutex);
m_failed = true;
// discard pending packets
queueClear();
break;
}
}
qDebug("Recorder thread ended");
}
bool Recorder::startRecorder() {
start();
return true;
}
void Recorder::stopRecorder() {
QMutexLocker locker(&m_mutex);
m_stopped = true;
m_recvDataCond.wakeOne();
}
bool Recorder::push(const AVPacket *packet) {
QMutexLocker locker(&m_mutex);
assert(!m_stopped);
if (m_failed) {
// reject any new packet (this will stop the stream)
return false;
}
AVPacket* rec = packetNew(packet);
if (rec) {
m_queue.enqueue(rec);
m_recvDataCond.wakeOne();
}
return rec != Q_NULLPTR;
}

View file

@ -2,14 +2,19 @@
#define RECORDER_H
#include <QString>
#include <QSize>
#include <QThread>
#include <QMutex>
#include <QWaitCondition>
#include <QQueue>
extern "C"
{
#include "libavformat/avformat.h"
}
class Recorder
class Recorder : public QThread
{
Q_OBJECT
public:
enum RecorderFormat {
RECORDER_FORMAT_NULL = 0,
@ -17,7 +22,7 @@ public:
RECORDER_FORMAT_MKV,
};
Recorder(const QString& fileName);
Recorder(const QString& fileName, QObject *parent = Q_NULLPTR);
virtual ~Recorder();
void setFrameSize(const QSize& declaredFrameSize);
@ -25,6 +30,9 @@ public:
bool open(const AVCodec* inputCodec);
void close();
bool write(AVPacket* packet);
bool startRecorder();
void stopRecorder();
bool push(const AVPacket *packet);
private:
const AVOutputFormat* findMuxer(const char* name);
@ -33,12 +41,30 @@ private:
QString recorderGetFormatName(Recorder::RecorderFormat format);
RecorderFormat guessRecordFormat(const QString& fileName);
private:
AVPacket* packetNew(const AVPacket *packet);
void packetDelete(AVPacket* packet);
void queueClear();
protected:
void run();
private:
QString m_fileName = "";
AVFormatContext* m_formatCtx = Q_NULLPTR;
QSize m_declaredFrameSize;
bool m_headerWritten = false;
RecorderFormat m_format = RECORDER_FORMAT_NULL;
QMutex m_mutex;
QWaitCondition m_recvDataCond;
bool m_stopped = false; // set on recorder_stop() by the stream reader
bool m_failed = false; // set on packet write failure
QQueue<AVPacket*> m_queue;
// we can write a packet only once we received the next one so that we can
// set its duration (next_pts - current_pts)
// "previous" is only accessed from the recorder thread, so it does not
// need to be protected by the mutex
AVPacket* m_previous = Q_NULLPTR;
};
#endif // RECORDER_H

View file

@ -1,5 +1,6 @@
#include <QCoreApplication>
#include <QOpenGLTexture>
#include <QSurfaceFormat>
#include "qyuvopenglwidget.h"
@ -64,7 +65,14 @@ static QString s_fragShader = R"(
QYUVOpenGLWidget::QYUVOpenGLWidget(QWidget *parent) : QOpenGLWidget(parent)
{
/*
QSurfaceFormat format = QSurfaceFormat::defaultFormat();
format.setColorSpace(QSurfaceFormat::sRGBColorSpace);
format.setProfile(QSurfaceFormat::CompatibilityProfile);
format.setMajorVersion(3);
format.setMinorVersion(2);
QSurfaceFormat::setDefaultFormat(format);
*/
}
QYUVOpenGLWidget::~QYUVOpenGLWidget()

View file

@ -6,10 +6,11 @@
#include <QFileInfo>
#include "server.h"
#include "config.h"
#define DEVICE_SERVER_PATH "/data/local/tmp/scrcpy-server.jar"
#define DEVICE_NAME_FIELD_LENGTH 64
#define SOCKET_NAME "qtscrcpy"
#define SOCKET_NAME "scrcpy"
#define MAX_CONNECT_COUNT 30
#define MAX_RESTART_COUNT 1
@ -56,7 +57,7 @@ const QString& Server::getServerPath()
m_serverPath = QString::fromLocal8Bit(qgetenv("QTSCRCPY_SERVER_PATH"));
QFileInfo fileInfo(m_serverPath);
if (m_serverPath.isEmpty() || !fileInfo.isFile()) {
m_serverPath = QCoreApplication::applicationDirPath() + "/scrcpy-server.jar";
m_serverPath = QCoreApplication::applicationDirPath() + "/scrcpy-server";
}
}
return m_serverPath;
@ -129,18 +130,20 @@ bool Server::execute()
args << "app_process";
args << "/"; // unused;
args << "com.genymobile.scrcpy.Server";
args << Config::getInstance().getServerVersion();
args << QString::number(m_params.maxSize);
args << QString::number(m_params.bitRate);
args << QString::number(m_params.maxFps);
args << (m_tunnelForward ? "true" : "false");
if (m_params.crop.isEmpty()) {
args << "-";
} else {
args << m_params.crop;
}
args << (m_params.sendFrameMeta ? "true" : "false");
args << "true"; // always send frame meta (packet boundaries + timestamp)
args << (m_params.control ? "true" : "false");
// adb -s P7C0218510000537 shell CLASSPATH=/data/local/tmp/scrcpy-server.jar app_process / com.genymobile.scrcpy.Server 0 8000000 false
// adb -s P7C0218510000537 shell CLASSPATH=/data/local/tmp/scrcpy-server app_process / com.genymobile.scrcpy.Server 0 8000000 false
// mark: crop input format: "width:height:x:y" or - for no crop, for example: "100:200:0:0"
// 这条adb命令是阻塞运行的m_serverProcess进程不会退出了
m_serverProcess.execute(m_params.serial, args);

View file

@ -27,8 +27,8 @@ public:
quint16 localPort = 27183; // reverse时本地监听端口
quint16 maxSize = 720; // 视频分辨率
quint32 bitRate = 8000000; // 视频比特率
quint32 maxFps = 60; // 视频最大帧率
QString crop = "-"; // 视频裁剪
bool sendFrameMeta = false; // 是否发送mp4帧数据
bool control = true; // 安卓端是否接收键鼠控制
bool useReverse = true; // true:先使用adb reverse失败后自动使用adb forwardfalse:直接使用adb forward
};

View file

@ -55,7 +55,7 @@ bool VideoSocket::event(QEvent *event)
void VideoSocket::onReadyRead()
{
QMutexLocker locker(&m_mutex);
if (m_buffer && 0 < bytesAvailable()) {
if (m_buffer && m_bufferSize <= bytesAvailable()) {
// recv data
qint64 readSize = qMin(bytesAvailable(), (qint64)m_bufferSize);
m_dataSize = read((char*)m_buffer, readSize);

View file

@ -16,12 +16,10 @@ typedef qint32 (*ReadPacketFunc)(void*, quint8*, qint32);
Stream::Stream(QObject *parent)
: QThread(parent)
{
m_quit = false;
}
Stream::~Stream()
{
}
static void avLogCallback(void *avcl, int level, const char *fmt, va_list vl) {
@ -85,121 +83,6 @@ static quint64 bufferRead64be(quint8* buf) {
return ((quint64) msb << 32) | lsb;
}
static Stream::FrameMeta* frameMetaNew(quint64 pts) {
Stream::FrameMeta* meta = new Stream::FrameMeta;
if (!meta) {
return meta;
}
meta->pts = pts;
meta->next = Q_NULLPTR;
return meta;
}
static void frameMetaDelete(Stream::FrameMeta* frameMeta) {
if (frameMeta) {
delete frameMeta;
}
}
static bool receiverStatePushMeta(Stream::ReceiverState* state, quint64 pts) {
Stream::FrameMeta* frameMeta = frameMetaNew(pts);
if (!frameMeta) {
return false;
}
// append to the list
// (iterate to find the last item, in practice the list should be tiny)
Stream::FrameMeta **p = &state->frameMetaQueue;
while (*p) {
p = &(*p)->next;
}
*p = frameMeta;
return true;
}
static quint64 receiverStateTakeMeta(Stream::ReceiverState* state) {
Stream::FrameMeta *frameMeta = state->frameMetaQueue; // first item
Q_ASSERT(frameMeta); // must not be empty
quint64 pts = frameMeta->pts;
state->frameMetaQueue = frameMeta->next; // remove the item
frameMetaDelete(frameMeta);
return pts;
}
static qint32 readPacketWithMeta(void *opaque, uint8_t *buf, int bufSize) {
Stream* stream = (Stream*)opaque;
Stream::ReceiverState* state = stream->getReceiverState();
// The video stream contains raw packets, without time information. When we
// record, we retrieve the timestamps separately, from a "meta" header
// added by the server before each raw packet.
//
// The "meta" header length is 12 bytes:
// [. . . . . . . .|. . . .]. . . . . . . . . . . . . . . ...
// <-------------> <-----> <-----------------------------...
// PTS packet raw packet
// size
//
// It is followed by <packet_size> bytes containing the packet/frame.
if (!state->remaining) {
quint8 header[HEADER_SIZE];
qint32 r = stream->recvData(header, HEADER_SIZE);
if (r == -1) {
return errno ? AVERROR(errno) : AVERROR_EOF;
}
if (r == 0) {
return AVERROR_EOF;
}
// no partial read (net_recv_all())
if (r != HEADER_SIZE) {
return AVERROR(ENOMEM);
}
uint64_t pts = bufferRead64be(header);
state->remaining = bufferRead32be(&header[8]);
if (pts != NO_PTS && !receiverStatePushMeta(state, pts)) {
qCritical("Could not store PTS for recording");
// we cannot save the PTS, the recording would be broken
return AVERROR(ENOMEM);
}
}
Q_ASSERT(state->remaining);
if (bufSize > state->remaining) {
bufSize = state->remaining;
}
qint32 r = stream->recvData(buf, bufSize);
if (r == -1) {
return errno ? AVERROR(errno) : AVERROR_EOF;
}
if (r == 0) {
return AVERROR_EOF;
}
Q_ASSERT(state->remaining >= r);
state->remaining -= r;
return r;
}
static qint32 readRawPacket(void *opaque, quint8 *buf, qint32 bufSize) {
Stream *stream = (Stream*)opaque;
if (stream) {
qint32 len = stream->recvData(buf, bufSize);
if (len == -1) {
return AVERROR(errno);
}
if (len == 0) {
return AVERROR_EOF;
}
return len;
}
return AVERROR_EOF;
}
void Stream::setVideoSocket(VideoSocket* videoSocket)
{
m_videoSocket = videoSocket;
@ -227,71 +110,23 @@ bool Stream::startDecode()
if (!m_videoSocket) {
return false;
}
m_quit = false;
start();
return true;
}
void Stream::stopDecode()
{
m_quit = true;
if (m_decoder) {
m_decoder->interrupt();
}
wait();
}
Stream::ReceiverState *Stream::getReceiverState()
{
return &m_receiverState;
}
void Stream::run()
{
unsigned char *decoderBuffer = Q_NULLPTR;
AVIOContext *avioCtx = Q_NULLPTR;
AVFormatContext *formatCtx = Q_NULLPTR;
AVCodec *codec = Q_NULLPTR;
AVCodecContext *codecCtx = Q_NULLPTR;
ReadPacketFunc readPacket = Q_NULLPTR;
bool isFormatCtxOpen = false;
// decoder buffer
decoderBuffer = (unsigned char*)av_malloc(BUFSIZE);
if (!decoderBuffer) {
qCritical("Could not allocate buffer");
goto runQuit;
}
// initialize the receiver state
m_receiverState.frameMetaQueue = Q_NULLPTR;
m_receiverState.remaining = 0;
// if recording is enabled, a "header" is sent between raw packets
readPacket = m_recorder ? readPacketWithMeta: readRawPacket;
// io context
avioCtx = avio_alloc_context(decoderBuffer, BUFSIZE, 0, this, readPacket, NULL, NULL);
if (!avioCtx) {
qCritical("Could not allocate avio context");
// avformat_open_input takes ownership of 'decoderBuffer'
// so only free the buffer before avformat_open_input()
av_free(decoderBuffer);
goto runQuit;
}
// format context
formatCtx = avformat_alloc_context();
if (!formatCtx) {
qCritical("Could not allocate format context");
goto runQuit;
}
formatCtx->pb = avioCtx;
if (avformat_open_input(&formatCtx, NULL, NULL, NULL) < 0) {
qCritical("Could not open video stream");
goto runQuit;
}
isFormatCtxOpen = true;
m_codecCtx = Q_NULLPTR;
m_parser = Q_NULLPTR;
// codec
codec = avcodec_find_decoder(AV_CODEC_ID_H264);
@ -300,76 +135,231 @@ void Stream::run()
goto runQuit;
}
// codeCtx
m_codecCtx = avcodec_alloc_context3(codec);
if (!m_codecCtx) {
qCritical("Could not allocate codec context");
goto runQuit;
}
if (m_decoder && !m_decoder->open(codec)) {
qCritical("Could not open m_decoder");
goto runQuit;
}
if (m_recorder && !m_recorder->open(codec)) {
qCritical("Could not open recorder");
goto runQuit;
}
if (m_recorder) {
if (!m_recorder->open(codec)) {
qCritical("Could not open recorder");
goto runQuit;
}
AVPacket packet;
av_init_packet(&packet);
packet.data = Q_NULLPTR;
packet.size = 0;
while (!av_read_frame(formatCtx, &packet)) {
if (m_quit) {
// if the stream is stopped, the socket had been shutdown, so the
// last packet is probably corrupted (but not detected as such by
// FFmpeg) and will not be decoded correctly
av_packet_unref(&packet);
goto runQuit;
}
if (m_decoder && !m_decoder->push(&packet)) {
av_packet_unref(&packet);
goto runQuit;
}
if (m_recorder) {
// we retrieve the PTS in order they were received, so they will
// be assigned to the correct frame
quint64 pts = receiverStateTakeMeta(&m_receiverState);
packet.pts = pts;
packet.dts = pts;
// no need to rescale with av_packet_rescale_ts(), the timestamps
// are in microseconds both in input and output
if (!m_recorder->write(&packet)) {
qCritical("Could not write frame to output file");
av_packet_unref(&packet);
if (!m_recorder->startRecorder()) {
qCritical("Could not start recorder");
goto runQuit;
}
}
av_packet_unref(&packet);
m_parser = av_parser_init(AV_CODEC_ID_H264);
if (!m_parser) {
qCritical("Could not initialize parser");
goto runQuit;
}
if (avioCtx->eof_reached) {
// We must only pass complete frames to av_parser_parse2()!
// It's more complicated, but this allows to reduce the latency by 1 frame!
m_parser->flags |= PARSER_FLAG_COMPLETE_FRAMES;
for (;;) {
AVPacket packet;
bool ok = recvPacket(&packet);
if (!ok) {
// end of stream
break;
}
ok = pushPacket(&packet);
av_packet_unref(&packet);
if (!ok) {
// cannot process packet (error already logged)
break;
}
}
qDebug() << "End of frames";
qDebug("End of frames");
if (m_hasPending) {
av_packet_unref(&m_pending);
}
av_parser_close(m_parser);
runQuit:
if (m_recorder) {
if (m_recorder->isRunning()) {
m_recorder->stopRecorder();
m_recorder->wait();
}
m_recorder->close();
}
if (avioCtx) {
av_free(avioCtx->buffer);
av_freep(&avioCtx);
}
if (formatCtx && isFormatCtxOpen) {
avformat_close_input(&formatCtx);
}
if (formatCtx) {
avformat_free_context(formatCtx);
}
if (m_decoder) {
m_decoder->close();
}
if (codecCtx) {
avcodec_free_context(&codecCtx);
if (m_codecCtx) {
avcodec_free_context(&m_codecCtx);
}
emit onStreamStop();
}
bool Stream::recvPacket(AVPacket *packet)
{
// The video stream contains raw packets, without time information. When we
// record, we retrieve the timestamps separately, from a "meta" header
// added by the server before each raw packet.
//
// The "meta" header length is 12 bytes:
// [. . . . . . . .|. . . .]. . . . . . . . . . . . . . . ...
// <-------------> <-----> <-----------------------------...
// PTS packet raw packet
// size
//
// It is followed by <packet_size> bytes containing the packet/frame.
quint8 header[HEADER_SIZE];
qint32 r = recvData(header, HEADER_SIZE);
if (r < HEADER_SIZE) {
return false;
}
quint64 pts = bufferRead64be(header);
quint32 len = bufferRead32be(&header[8]);
assert(pts == NO_PTS || (pts & 0x8000000000000000) == 0);
assert(len);
if (av_new_packet(packet, len)) {
qCritical("Could not allocate packet");
return false;
}
r = recvData(packet->data, len);
if (r < 0 || ((uint32_t) r) < len) {
av_packet_unref(packet);
return false;
}
packet->pts = pts != NO_PTS ? (int64_t) pts : AV_NOPTS_VALUE;
return true;
}
bool Stream::pushPacket(AVPacket *packet)
{
bool isConfig = packet->pts == AV_NOPTS_VALUE;
// A config packet must not be decoded immetiately (it contains no
// frame); instead, it must be concatenated with the future data packet.
if (m_hasPending || isConfig) {
qint32 offset;
if (m_hasPending) {
offset = m_pending.size;
if (av_grow_packet(&m_pending, packet->size)) {
qCritical("Could not grow packet");
return false;
}
} else {
offset = 0;
if (av_new_packet(&m_pending, packet->size)) {
qCritical("Could not create packet");
return false;
}
m_hasPending = true;
}
memcpy(m_pending.data + offset, packet->data, packet->size);
if (!isConfig) {
// prepare the concat packet to send to the decoder
m_pending.pts = packet->pts;
m_pending.dts = packet->dts;
m_pending.flags = packet->flags;
packet = &m_pending;
}
}
if (isConfig) {
// config packet
bool ok = processConfigPacket(packet);
if (!ok) {
return false;
}
} else {
// data packet
bool ok = parse(packet);
if (m_hasPending) {
// the pending packet must be discarded (consumed or error)
m_hasPending = false;
av_packet_unref(&m_pending);
}
if (!ok) {
return false;
}
}
return true;
}
bool Stream::processConfigPacket(AVPacket *packet)
{
if (m_recorder && !m_recorder->push(packet)) {
qCritical("Could not send config packet to recorder");
return false;
}
return true;
}
bool Stream::parse(AVPacket *packet)
{
quint8 *inData = packet->data;
int inLen = packet->size;
quint8 *outData = Q_NULLPTR;
int outLen = 0;
int r = av_parser_parse2(m_parser, m_codecCtx,
&outData, &outLen, inData, inLen,
AV_NOPTS_VALUE, AV_NOPTS_VALUE, -1);
// PARSER_FLAG_COMPLETE_FRAMES is set
assert(r == inLen);
(void) r;
assert(outLen == inLen);
if (m_parser->key_frame == 1) {
packet->flags |= AV_PKT_FLAG_KEY;
}
bool ok = processFrame(packet);
if (!ok) {
qCritical("Could not process frame");
return false;
}
return true;
}
bool Stream::processFrame(AVPacket *packet)
{
if (m_decoder && !m_decoder->push(packet)) {
return false;
}
if (m_recorder) {
packet->dts = packet->pts;
if (!m_recorder->push(packet)) {
qCritical("Could not send packet to recorder");
return false;
}
}
return true;
}

View file

@ -3,7 +3,6 @@
#include <QThread>
#include <QPointer>
#include <QMutex>
extern "C"
{
@ -18,17 +17,6 @@ class Stream : public QThread
{
Q_OBJECT
public:
typedef struct FrameMeta {
quint64 pts;
struct FrameMeta* next;
} FrameMeta;
typedef struct ReceiverState {
// meta (in order) for frames not consumed yet
FrameMeta* frameMetaQueue;
qint32 remaining; // remaining bytes to receive for the current frame
} ReceiverState;
Stream(QObject *parent = Q_NULLPTR);
virtual ~Stream();
@ -36,28 +24,36 @@ public:
static bool init();
static void deInit();
void setDecoder(Decoder* vb);
void setVideoSocket(VideoSocket* deviceSocket);
void setDecoder(Decoder* decoder);
void setRecoder(Recorder* recorder);
void setVideoSocket(VideoSocket* deviceSocket);
qint32 recvData(quint8* buf, qint32 bufSize);
bool startDecode();
void stopDecode();
ReceiverState* getReceiverState();
signals:
void onStreamStop();
protected:
void run();
bool recvPacket(AVPacket* packet);
bool pushPacket(AVPacket* packet);
bool processConfigPacket(AVPacket *packet);
bool parse(AVPacket *packet);
bool processFrame(AVPacket *packet);
private:
QPointer<VideoSocket> m_videoSocket;
std::atomic_bool m_quit;
// for recorder
Recorder* m_recorder = Q_NULLPTR;
ReceiverState m_receiverState;
Decoder* m_decoder = Q_NULLPTR;
AVCodecContext* m_codecCtx = Q_NULLPTR;
AVCodecParserContext *m_parser = Q_NULLPTR;
// successive packets may need to be concatenated, until a non-config
// packet is available
bool m_hasPending = false;
AVPacket m_pending;
};
#endif // STREAM_H

View file

@ -64,7 +64,7 @@ bool DeviceManage::disconnectDevice(const QString &serial)
if (!serial.isEmpty() && m_devices.contains(serial)) {
auto it = m_devices.find(serial);
if (it->data()) {
it->data()->deleteLater();
delete it->data();
ret = true;
}
}
@ -77,7 +77,7 @@ void DeviceManage::disconnectAllDevice()
while (i.hasNext()) {
i.next();
if (i.value()) {
i.value()->deleteLater();
delete i.value();
}
}
}

View file

@ -9,6 +9,7 @@
#include "device.h"
#include "videoform.h"
#include "keymap.h"
#include "config.h"
Dialog::Dialog(QWidget *parent) :
QDialog(parent),
@ -63,8 +64,8 @@ Dialog::Dialog(QWidget *parent) :
}
Dialog::~Dialog()
{
on_stopServerBtn_clicked();
{
m_deviceManage.disconnectAllDevice();
delete ui;
}
@ -92,6 +93,8 @@ void Dialog::initUI()
// game only windows
ui->gameCheck->setEnabled(false);
#endif
ui->recordPathEdt->setText(Config::getInstance().getRecordPath());
}
void Dialog::execAdbCmd()
@ -149,6 +152,8 @@ void Dialog::on_startServerBtn_clicked()
params.serial = ui->serialBox->currentText().trimmed();
params.maxSize = videoSize;
params.bitRate = bitRate;
// on devices with Android >= 10, the capture frame rate can be limited
params.maxFps = Config::getInstance().getMaxFps();
params.recordFileName = absFilePath;
params.closeScreen = ui->closeScreenCheck->isChecked();
params.useReverse = ui->useReverseCheck->isChecked();
@ -285,6 +290,7 @@ void Dialog::on_selectRecordPathBtn_clicked()
void Dialog::on_recordPathEdt_textChanged(const QString &arg1)
{
Config::getInstance().setRecordPath(arg1);
ui->recordPathEdt->setToolTip(arg1.trimmed());
ui->notDisplayCheck->setCheckable(!arg1.trimmed().isEmpty());
}

View file

@ -6,8 +6,8 @@
<rect>
<x>0</x>
<y>0</y>
<width>600</width>
<height>723</height>
<width>513</width>
<height>637</height>
</rect>
</property>
<property name="maximumSize">
@ -187,7 +187,7 @@
<property name="minimumSize">
<size>
<width>0</width>
<height>240</height>
<height>200</height>
</size>
</property>
<property name="focusPolicy">

View file

@ -8,6 +8,7 @@
#include "dialog.h"
#include "stream.h"
#include "mousetap/mousetap.h"
#include "config.h"
Dialog* g_mainDlg = Q_NULLPTR;
@ -17,9 +18,7 @@ void installTranslator();
int main(int argc, char *argv[])
{
//QApplication::setAttribute(Qt::AA_UseDesktopOpenGL);
//QApplication::setAttribute(Qt::AA_UseOpenGLES);
//QApplication::setAttribute(Qt::AA_UseSoftwareOpenGL);
QCoreApplication::setAttribute(Qt::AA_EnableHighDpiScaling);
g_oldMessageHandler = qInstallMessageHandler(myMessageOutput);
Stream::init();
@ -30,6 +29,11 @@ int main(int argc, char *argv[])
qDebug() << a.applicationVersion();
qDebug() << a.applicationName();
//update version
QStringList versionList = QCoreApplication::applicationVersion().split(".");
QString version = versionList[0] + "." + versionList[1] + "." + versionList[2];
a.setApplicationVersion(version);
installTranslator();
#if defined(Q_OS_WIN32) || defined(Q_OS_OSX)
MouseTap::getInstance()->initMouseEventTap();
@ -37,13 +41,15 @@ int main(int argc, char *argv[])
#ifdef Q_OS_WIN32
qputenv("QTSCRCPY_ADB_PATH", "../../../../third_party/adb/win/adb.exe");
qputenv("QTSCRCPY_SERVER_PATH", "../../../../third_party/scrcpy-server.jar");
qputenv("QTSCRCPY_SERVER_PATH", "../../../../third_party/scrcpy-server");
qputenv("QTSCRCPY_KEYMAP_PATH", "../../../../keymap");
qputenv("QTSCRCPY_CONFIG_PATH", "../../../../config/config.ini");
#endif
#ifdef Q_OS_LINUX
qputenv("QTSCRCPY_ADB_PATH", "../../../third_party/adb/linux/adb");
qputenv("QTSCRCPY_SERVER_PATH", "../../../third_party/scrcpy-server.jar");
qputenv("QTSCRCPY_SERVER_PATH", "../../../third_party/scrcpy-server");
qputenv("QTSCRCPY_CONFIG_PATH", "../../../config/config.ini");
#endif
//加载样式表
@ -56,9 +62,21 @@ int main(int argc, char *argv[])
file.close();
}
int opengl = Config::getInstance().getDesktopOpenGL();
if (0 == opengl) {
QApplication::setAttribute(Qt::AA_UseSoftwareOpenGL);
} else if (1 == opengl){
QApplication::setAttribute(Qt::AA_UseOpenGLES);
} else if (2 == opengl) {
QApplication::setAttribute(Qt::AA_UseDesktopOpenGL);
}
g_mainDlg = new Dialog;
g_mainDlg->setWindowTitle(Config::getInstance().getTitle());
g_mainDlg->show();
qInfo(QString("QtScrcpy %1 <https://github.com/barry-ran/QtScrcpy>").arg(QCoreApplication::applicationVersion()).toUtf8());
int ret = a.exec();
#if defined(Q_OS_WIN32) || defined(Q_OS_OSX)
@ -94,7 +112,7 @@ void myMessageOutput(QtMsgType type, const QMessageLogContext &context, const QS
}
if (QtDebugMsg < type) {
if (g_mainDlg && !msg.contains("app_proces")) {
if (g_mainDlg && g_mainDlg->isVisible() && !msg.contains("app_proces")) {
g_mainDlg->outLog(msg);
}
}

View file

@ -8,6 +8,12 @@ void BufferUtil::write32(QBuffer &buffer, quint32 value)
buffer.putChar(value);
}
// Append a 64-bit value to the buffer in big-endian byte order,
// expressed as two big-endian 32-bit writes.
void BufferUtil::write64(QBuffer &buffer, quint64 value)
{
    const quint32 high = static_cast<quint32>(value >> 32);
    const quint32 low  = static_cast<quint32>(value);
    write32(buffer, high);
    write32(buffer, low);
}
void BufferUtil::write16(QBuffer &buffer, quint32 value)
{
buffer.putChar(value >> 8);

View file

@ -4,9 +4,10 @@
class BufferUtil
{
public:
static void write32(QBuffer& buffer, quint32 value);
public:
static void write16(QBuffer& buffer, quint32 value);
static void write32(QBuffer& buffer, quint32 value);
static void write64(QBuffer& buffer, quint64 value);
static quint16 read16(QBuffer& buffer);
static quint32 read32(QBuffer& buffer);
static quint64 read64(QBuffer& buffer);

115
QtScrcpy/util/config.cpp Normal file
View file

@ -0,0 +1,115 @@
#include <QSettings>
#include <QCoreApplication>
#include <QFileInfo>
#include "config.h"
#define GROUP_COMMON "common"
#define COMMON_TITLE_KEY "WindowTitle"
#define COMMON_TITLE_DEF QCoreApplication::applicationName()
#define COMMON_RECORD_KEY "RecordPath"
#define COMMON_RECORD_DEF ""
#define COMMON_SERVER_VERSION_KEY "ServerVersion"
#define COMMON_SERVER_VERSION_DEF "1.12.1"
#define COMMON_MAX_FPS_KEY "MaxFps"
#define COMMON_MAX_FPS_DEF 60
#define COMMON_DESKTOP_OPENGL_KEY "UseDesktopOpenGL"
#define COMMON_DESKTOP_OPENGL_DEF -1
#define COMMON_SKIN_KEY "UseSkin"
#define COMMON_SKIN_DEF 1
QString Config::s_configPath = "";
// Construct the singleton configuration object.
//
// The QSettings instance is parented to this Config so Qt's object tree
// destroys it together with the singleton; previously it was allocated
// without a parent and never deleted (the QPointer member does not own it).
Config::Config(QObject *parent) : QObject(parent)
{
    m_settings = new QSettings(getConfigPath(), QSettings::IniFormat, this);
    // Config files may contain non-ASCII text (e.g. a Chinese window title).
    m_settings->setIniCodec("UTF-8");
}
// Access the process-wide Config singleton.
// Meyers singleton: constructed on first use, thread-safe since C++11.
Config &Config::getInstance()
{
    static Config instance;
    return instance;
}
// Resolve the config file location once and cache it in s_configPath.
// The QTSCRCPY_CONFIG_PATH environment variable wins; otherwise fall back
// to <applicationDir>/config/config.ini.
const QString& Config::getConfigPath()
{
    if (!s_configPath.isEmpty()) {
        // Already resolved on a previous call.
        return s_configPath;
    }
    s_configPath = QString::fromLocal8Bit(qgetenv("QTSCRCPY_CONFIG_PATH"));
    if (s_configPath.isEmpty() || !QFileInfo(s_configPath).isFile()) {
        // Env var unset or not pointing at a regular file: use the default.
        s_configPath = QCoreApplication::applicationDirPath() + "/config/config.ini";
    }
    return s_configPath;
}
// Directory where screen recordings are saved; empty when not configured.
// Uses QSettings' slash-separated "group/key" form, equivalent to
// beginGroup/endGroup around the lookup.
QString Config::getRecordPath()
{
    return m_settings->value(GROUP_COMMON "/" COMMON_RECORD_KEY, COMMON_RECORD_DEF).toString();
}
// Persist the recording directory under common/RecordPath.
// The slash-separated key form is equivalent to beginGroup/endGroup.
void Config::setRecordPath(const QString &path)
{
    m_settings->setValue(GROUP_COMMON "/" COMMON_RECORD_KEY, path);
}
// Version string reported for the bundled scrcpy-server
// (common/ServerVersion; defaults to COMMON_SERVER_VERSION_DEF).
QString Config::getServerVersion()
{
    return m_settings->value(GROUP_COMMON "/" COMMON_SERVER_VERSION_KEY,
                             COMMON_SERVER_VERSION_DEF).toString();
}
// Capture frame-rate cap (common/MaxFps, default 60).
// Per dialog.cpp, the limit is only honored on devices running Android >= 10.
int Config::getMaxFps()
{
    return m_settings->value(GROUP_COMMON "/" COMMON_MAX_FPS_KEY, COMMON_MAX_FPS_DEF).toInt();
}
// Renderer selection (common/UseDesktopOpenGL, default -1).
// main.cpp maps: 0 -> software OpenGL, 1 -> OpenGL ES, 2 -> desktop OpenGL,
// anything else (e.g. -1) -> let Qt decide.
int Config::getDesktopOpenGL()
{
    return m_settings->value(GROUP_COMMON "/" COMMON_DESKTOP_OPENGL_KEY,
                             COMMON_DESKTOP_OPENGL_DEF).toInt();
}
// Whether to draw the phone skin around the video (common/UseSkin,
// default 1; 0 disables the skin per config.ini).
int Config::getSkin()
{
    return m_settings->value(GROUP_COMMON "/" COMMON_SKIN_KEY, COMMON_SKIN_DEF).toInt();
}
// Main-window title (common/WindowTitle; defaults to the application name).
QString Config::getTitle()
{
    return m_settings->value(GROUP_COMMON "/" COMMON_TITLE_KEY, COMMON_TITLE_DEF).toString();
}

30
QtScrcpy/util/config.h Normal file
View file

@ -0,0 +1,30 @@
#ifndef CONFIG_H
#define CONFIG_H
#include <QObject>
#include <QPointer>
class QSettings;
// Singleton wrapper around the application's INI configuration
// (config.ini, located via QTSCRCPY_CONFIG_PATH or the application dir).
class Config : public QObject
{
    Q_OBJECT
public:
    // Process-wide instance; constructed on first use.
    static Config& getInstance();
    // Main-window title (common/WindowTitle).
    QString getTitle();
    // Directory for screen recordings (common/RecordPath); empty if unset.
    QString getRecordPath();
    void setRecordPath(const QString& path);
    // Version string of the bundled scrcpy-server (common/ServerVersion).
    QString getServerVersion();
    // Capture frame-rate cap (common/MaxFps).
    int getMaxFps();
    // Renderer selection (common/UseDesktopOpenGL): 0 software, 1 GLES,
    // 2 desktop OpenGL; other values leave the choice to Qt.
    int getDesktopOpenGL();
    // Non-zero to draw the phone skin around the video (common/UseSkin).
    int getSkin();
private:
    explicit Config(QObject *parent = nullptr);
    // Resolves and caches the config file path in s_configPath.
    const QString& getConfigPath();
private:
    // Cached config file path shared by all calls.
    static QString s_configPath;
    // Backing store; non-owning pointer — see constructor for allocation.
    QPointer<QSettings> m_settings;
};
#endif // CONFIG_H

View file

@ -2,7 +2,9 @@ include ($$PWD/mousetap/mousetap.pri)
HEADERS += \
$$PWD/compat.h \
$$PWD/bufferutil.h
$$PWD/bufferutil.h \
$$PWD/config.h
SOURCES += \
$$PWD/bufferutil.cpp
$$PWD/bufferutil.cpp \
$$PWD/config.cpp

View file

@ -12,6 +12,17 @@ A single instance supports up to 16 Android device connections at the same time.
It supports three major platforms: GNU/Linux, Windows and MacOS.
It focuses on:
- **lightness** (native, displays only the device screen)
- **performance** (30~60fps)
- **quality** (1920×1080 or above)
- **low latency** ([35~70ms][lowlatency])
- **low startup time** (~1 second to display the first image)
- **non-intrusiveness** (nothing is left installed on the device)
[lowlatency]: https://github.com/Genymobile/scrcpy/pull/646
![win](screenshot/win.png)
![mac](screenshot/mac.jpg)
@ -153,8 +164,17 @@ Note: it is not necessary to keep you Android device connected via USB after you
- Full-screen display
- Display on the top
- Install apk: drag and drop apk to the video window to install
- Transfer files: drag files to the video window to send files to Android devices
- Background recording: record only, no screen display
- Transfer files: Drag files to the video window to send files to Android devices
- Background recording: record only, no display interface
- Copy-paste
It is possible to synchronize clipboards between the computer and the device, in
both directions:
- `Ctrl`+`c` copies the device clipboard to the computer clipboard;
- `Ctrl`+`Shift`+`v` copies the computer clipboard to the device clipboard;
- `Ctrl`+`v` _pastes_ the computer clipboard as a sequence of text events (but
breaks non-ASCII characters).
## TODO
[TODO](docs/TODO.md)
@ -162,6 +182,9 @@ Note: it is not necessary to keep you Android device connected via USB after you
## FAQ
[FAQ](docs/FAQ.md)
## DEVELOP
[DEVELOP](docs/DEVELOP.md)
## Why develop QtScrcpy?
There are several reasons listed as below according to importance (high to low).
1. In the process of learning Qt, I need a real project to try
@ -180,10 +203,12 @@ All the dependencies are provided and it is easy to compile.
4. Compile and run
### Android (If you do not have special requirements, you can directly use the built-in scrcpy-server.jar)
1. Set up an Android development environment on the target platform
2. Open the server project in the project root directory using Android Studio
3. Build it
4. After compiling apk, rename it to scrcpy-server.jar and replace third_party/scrcpy-server.jar.
2. Open server project in project root with Android Studio
3. The first time you open it, if you do not have the corresponding version of gradle, you will be prompted to find gradle, whether to upgrade gradle and create it. Select Cancel. After canceling, you will be prompted to select the location of the existing gradle. You can also cancel it (it will download automatically).
4. Edit the code as needed, but of course you do not need to.
5. After compiling the apk, rename it to scrcpy-server and replace third_party/scrcpy-server.
## Licence
Since it is based on scrcpy, respect its Licence

View file

@ -12,6 +12,18 @@ QtScrcpy可以通过USB(或通过TCP/IP)连接Android设备并进行显示和
同时支持GNU/LinuxWindows和MacOS三大主流桌面平台
它专注于:
- **精致** (仅显示设备屏幕)
- **性能** (30~60fps)
- **质量** (1920×1080以上)
- **低延迟** ([35~70ms][低延迟])
- **快速启动** (1s内就可以看到第一帧图像)
- **非侵入性** (不在设备上安装任何软件)
[低延迟]: https://github.com/Genymobile/scrcpy/pull/646
![win](screenshot/win.png)
![mac](screenshot/mac.jpg)
@ -157,6 +169,12 @@ Mac OS平台你可以直接使用我编译好的可执行程序:
- 安装apk拖拽apk到视频窗口即可安装
- 传输文件拖拽文件到视频窗口即可发送文件到Android设备
- 后台录制:只录制,不显示界面
- 复制粘贴
在计算机和设备之间双向同步剪贴板:
- `Ctrl` + `c`将设备剪贴板复制到计算机剪贴板;
- `Ctrl` + `Shift` + `v`将计算机剪贴板复制到设备剪贴板;
- `Ctrl` +`v` 将计算机剪贴板作为一系列文本事件发送到设备不支持非ASCII字符
## TODO
[后期计划](docs/TODO.md)
@ -164,6 +182,9 @@ Mac OS平台你可以直接使用我编译好的可执行程序:
## FAQ
[常见问题说明](docs/FAQ.md)
## 开发者
[开发者相关](docs/DEVELOP.md)
## 为什么开发QtScrcpy
综合起来有以下几个原因,比重从大到小排列:
1. 学习Qt的过程中需要一个项目实战一下
@ -176,16 +197,17 @@ Mac OS平台你可以直接使用我编译好的可执行程序:
尽量提供了所有依赖资源,方便傻瓜式编译。
### PC端
1. 目标平台上搭建Qt开发环境(Qt >= 5.9.7, vs >= 2015 (不支持mingw))
1. 目标平台上搭建Qt开发环境(Qt >= 5.9.7, vs >= 2015 (**不支持mingw**))
2. 克隆该项目
3. 使用QtCreator打开项目根目录all.pro
4. 编译,运行即可
### Android端 没有修改需求的话直接使用自带的scrcpy-server.jar即可)
### Android端 没有修改需求的话直接使用自带的scrcpy-server即可
1. 目标平台上搭建Android开发环境
2. 使用Android Studio打开项目根目录中的server项目
3. 编辑即可
4. 编译出apk以后改名为scrcpy-server.jar并替换third_party/scrcpy-server.jar即可
3. 第一次打开如果你没有对应版本的gradle会提示找不到gradle是否升级gradle并创建选择取消取消后会弹出选择已有gradle的位置同样取消即可会自动下载
4. 按需编辑代码即可,当然也可以不编辑
5. 编译出apk以后改名为scrcpy-server并替换third_party/scrcpy-server即可
## Licence
由于是复刻的scrcpy尊重它的Licence

13
config/config.ini Normal file
View file

@ -0,0 +1,13 @@
[common]
# 窗口标题
WindowTitle=QtScrcpy
# 录制文件路径
RecordPath=
# 最大fps仅支持Android 10以上
MaxFps=60
# scrcpy-server的版本号不要修改
ServerVersion=1.12.1
# 是否显示手机皮肤0不显示
UseSkin=1
# 视频解码方式:-1 自动0 软解1 dx硬解2 opengl硬解
UseDesktopOpenGL=-1

300
docs/DEVELOP.md Normal file
View file

@ -0,0 +1,300 @@
# scrcpy for developers
## Overview
This application is composed of two parts:
- the server (`scrcpy-server`), to be executed on the device,
- the client (the `scrcpy` binary), executed on the host computer.
The client is responsible to push the server to the device and start its
execution.
Once the client and the server are connected to each other, the server initially
sends device information (name and initial screen dimensions), then starts to
send a raw H.264 video stream of the device screen. The client decodes the video
frames, and displays them as soon as possible, without buffering, to minimize
latency. The client is not aware of the device rotation (which is handled by the
server), it just knows the dimensions of the video frames.
The client captures relevant keyboard and mouse events, that it transmits to the
server, which injects them to the device.
## Server
### Privileges
Capturing the screen requires some privileges, which are granted to `shell`.
The server is a Java application (with a [`public static void main(String...
args)`][main] method), compiled against the Android framework, and executed as
`shell` on the Android device.
[main]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/java/com/genymobile/scrcpy/Server.java#L123
To run such a Java application, the classes must be [_dexed_][dex] (typically,
to `classes.dex`). If `my.package.MainClass` is the main class, compiled to
`classes.dex`, pushed to the device in `/data/local/tmp`, then it can be run
with:
adb shell CLASSPATH=/data/local/tmp/classes.dex \
app_process / my.package.MainClass
_The path `/data/local/tmp` is a good candidate to push the server, since it's
readable and writable by `shell`, but not world-writable, so a malicious
application may not replace the server just before the client executes it._
Instead of a raw _dex_ file, `app_process` accepts a _jar_ containing
`classes.dex` (e.g. an [APK]). For simplicity, and to benefit from the gradle
build system, the server is built to an (unsigned) APK (renamed to
`scrcpy-server`).
[dex]: https://en.wikipedia.org/wiki/Dalvik_(software)
[apk]: https://en.wikipedia.org/wiki/Android_application_package
### Hidden methods
Although compiled against the Android framework, [hidden] methods and classes are
not directly accessible (and they may differ from one Android version to
another).
They can be called using reflection though. The communication with hidden
components is provided by [_wrappers_ classes][wrappers] and [aidl].
[hidden]: https://stackoverflow.com/a/31908373/1987178
[wrappers]: https://github.com/Genymobile/scrcpy/tree/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/java/com/genymobile/scrcpy/wrappers
[aidl]: https://github.com/Genymobile/scrcpy/tree/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/aidl/android/view
### Threading
The server uses 3 threads:
- the **main** thread, encoding and streaming the video to the client;
- the **controller** thread, listening for _control messages_ (typically,
keyboard and mouse events) from the client;
- the **receiver** thread (managed by the controller), sending _device messages_
to the client (currently, it is only used to send the device clipboard
content).
Since the video encoding is typically hardware, there would be no benefit in
encoding and streaming in two different threads.
### Screen video encoding
The encoding is managed by [`ScreenEncoder`].
The video is encoded using the [`MediaCodec`] API. The codec takes its input
from a [surface] associated to the display, and writes the resulting H.264
stream to the provided output stream (the socket connected to the client).
[`ScreenEncoder`]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/java/com/genymobile/scrcpy/ScreenEncoder.java
[`MediaCodec`]: https://developer.android.com/reference/android/media/MediaCodec.html
[surface]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/java/com/genymobile/scrcpy/ScreenEncoder.java#L68-L69
On device [rotation], the codec, surface and display are reinitialized, and a
new video stream is produced.
New frames are produced only when changes occur on the surface. This is good
because it avoids to send unnecessary frames, but there are drawbacks:
- it does not send any frame on start if the device screen does not change,
- after fast motion changes, the last frame may have poor quality.
Both problems are [solved][repeat] by the flag
[`KEY_REPEAT_PREVIOUS_FRAME_AFTER`][repeat-flag].
[rotation]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/java/com/genymobile/scrcpy/ScreenEncoder.java#L90
[repeat]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/java/com/genymobile/scrcpy/ScreenEncoder.java#L147-L148
[repeat-flag]: https://developer.android.com/reference/android/media/MediaFormat.html#KEY_REPEAT_PREVIOUS_FRAME_AFTER
### Input events injection
_Control messages_ are received from the client by the [`Controller`] (run in a
separate thread). There are several types of input events:
- keycode (cf [`KeyEvent`]),
- text (special characters may not be handled by keycodes directly),
- mouse motion/click,
- mouse scroll,
- other commands (e.g. to switch the screen on or to copy the clipboard).
Some of them need to inject input events to the system. To do so, they use the
_hidden_ method [`InputManager.injectInputEvent`] (exposed by our
[`InputManager` wrapper][inject-wrapper]).
[`Controller`]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/java/com/genymobile/scrcpy/Controller.java#L81
[`KeyEvent`]: https://developer.android.com/reference/android/view/KeyEvent.html
[`MotionEvent`]: https://developer.android.com/reference/android/view/MotionEvent.html
[`InputManager.injectInputEvent`]: https://android.googlesource.com/platform/frameworks/base/+/oreo-release/core/java/android/hardware/input/InputManager.java#857
[inject-wrapper]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/server/src/main/java/com/genymobile/scrcpy/wrappers/InputManager.java#L27
## Client
The client relies on [SDL], which provides cross-platform API for UI, input
events, threading, etc.
The video stream is decoded by [libav] (FFmpeg).
[SDL]: https://www.libsdl.org
[libav]: https://www.libav.org/
### Initialization
On startup, in addition to _libav_ and _SDL_ initialization, the client must
push and start the server on the device, and open two sockets (one for the video
stream, one for control) so that they may communicate.
Note that the client-server roles are expressed at the application level:
- the server _serves_ video stream and handle requests from the client,
- the client _controls_ the device through the server.
However, the roles are reversed at the network level:
- the client opens a server socket and listen on a port before starting the
server,
- the server connects to the client.
This role inversion guarantees that the connection will not fail due to race
conditions, and avoids polling.
_(Note that over TCP/IP, the roles are not reversed, due to a bug in `adb
reverse`. See commit [1038bad] and [issue #5].)_
Once the server is connected, it sends the device information (name and initial
screen dimensions). Thus, the client may init the window and renderer, before
the first frame is available.
To minimize startup time, SDL initialization is performed while listening for
the connection from the server (see commit [90a46b4]).
[1038bad]: https://github.com/Genymobile/scrcpy/commit/1038bad3850f18717a048a4d5c0f8110e54ee172
[issue #5]: https://github.com/Genymobile/scrcpy/issues/5
[90a46b4]: https://github.com/Genymobile/scrcpy/commit/90a46b4c45637d083e877020d85ade52a9a5fa8e
### Threading
The client uses 4 threads:
- the **main** thread, executing the SDL event loop,
- the **stream** thread, receiving the video and used for decoding and
recording,
- the **controller** thread, sending _control messages_ to the server,
- the **receiver** thread (managed by the controller), receiving _device
messages_ from the device.
In addition, another thread can be started if necessary to handle APK
installation or file push requests (via drag&drop on the main window) or to
print the framerate regularly in the console.
### Stream
The video [stream] is received from the socket (connected to the server on the
device) in a separate thread.
If a [decoder] is present (i.e. `--no-display` is not set), then it uses _libav_
to decode the H.264 stream from the socket, and notifies the main thread when a
new frame is available.
There are two [frames][video_buffer] simultaneously in memory:
- the **decoding** frame, written by the decoder from the decoder thread,
- the **rendering** frame, rendered in a texture from the main thread.
When a new decoded frame is available, the decoder _swaps_ the decoding and
rendering frame (with proper synchronization). Thus, it immediately starts
to decode a new frame while the main thread renders the last one.
If a [recorder] is present (i.e. `--record` is enabled), then it muxes the raw
H.264 packet to the output video file.
[stream]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/stream.h
[decoder]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/decoder.h
[video_buffer]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/video_buffer.h
[recorder]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/recorder.h
```
+----------+ +----------+
---> | decoder | ---> | screen |
+---------+ / +----------+ +----------+
socket ---> | stream | ----
+---------+ \ +----------+
---> | recorder |
+----------+
```
### Controller
The [controller] is responsible to send _control messages_ to the device. It
runs in a separate thread, to avoid I/O on the main thread.
On SDL event, received on the main thread, the [input manager][inputmanager]
creates appropriate [_control messages_][controlmsg]. It is responsible to
convert SDL events to Android events (using [convert]). It pushes the _control
messages_ to a queue held by the controller. On its own thread, the controller
takes messages from the queue, that it serializes and sends to the device.
[controller]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/controller.h
[controlmsg]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/control_msg.h
[inputmanager]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/input_manager.h
[convert]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/convert.h
### UI and event loop
Initialization, input events and rendering are all [managed][scrcpy] in the main
thread.
Events are handled in the [event loop], which either updates the [screen] or
delegates to the [input manager][inputmanager].
[scrcpy]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/scrcpy.c
[event loop]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/scrcpy.c#L201
[screen]: https://github.com/Genymobile/scrcpy/blob/ffe0417228fb78ab45b7ee4e202fc06fc8875bf3/app/src/screen.h
## Hack
For more details, go read the code!
If you find a bug, or have an awesome idea to implement, please discuss and
contribute ;-)
### Debug the server
The server is pushed to the device by the client on startup.
To debug it, enable the server debugger during configuration:
```bash
meson x -Dserver_debugger=true
# or, if x is already configured
meson configure x -Dserver_debugger=true
```
Then recompile.
When you start scrcpy, it will start a debugger on port 5005 on the device.
Redirect that port to the computer:
```bash
adb forward tcp:5005 tcp:5005
```
In Android Studio, _Run_ > _Debug_ > _Edit configurations..._ On the left, click on
`+`, _Remote_, and fill the form:
- Host: `localhost`
- Port: `5005`
Then click on _Debug_.

View file

@ -1,9 +1,18 @@
# 可以看到画面,但无法控制
# Frequently Asked Questions
一些经常问的问题
## 小米手机
检查是否USB调试里打开了允许模拟点击
如果在此文档没有解决你的问题描述你的问题截图软件控制台中打印的日志一起发到QQ群里提问。
## 无法输入中文
安装搜狗输入法/QQ输入法就可以支持输入中文了
## 可以看到画面,但无法控制
有些手机(小米等手机)需要额外打开控制权限检查是否USB调试里打开了允许模拟点击
![image](image/USB调试(安全设置).jpg)
## 错误信息Could not open video stream
导致这个错误的原因有很多,最简单的解决方法是在分辨率设置中,选择一个较低的分辨率
## 声音
[关于转发安卓声音到PC的讨论](https://github.com/Genymobile/scrcpy/issues/14#issuecomment-543204526)

View file

@ -1,33 +1,26 @@
最后同步scrcpy b91ecf52256da73f5c8dca04fb82c13ec826cbd7
最后同步scrcpy 31bd95022bc525be42ca273d59a3211d964d278b
# TODO
## 低优先级
- 中文输入server需要改为apk作为一个输入法暂不实现或者有其他方式案件注入方式例如搜狗手机输入法可以监听当前注入
- 鼠标事件相关系列 b35733edb6df2a00b6af9b1c98627d344c377963
- [跳过帧改为动态配置,而不是静态编译](https://github.com/Genymobile/scrcpy/commit/ebccb9f6cc111e8acfbe10d656cac5c1f1b744a0)
- [单独线程统计帧率](https://github.com/Genymobile/scrcpy/commit/e2a272bf99ecf48fcb050177113f903b3fb323c4)
- text转换 https://github.com/Genymobile/scrcpy/commit/c916af0984f72a60301d13fa8ef9a85112f54202?tdsourcetag=s_pctim_aiomsg
- ui提供show touch设置
- 隐藏手机皮肤开关
## 中优先级
- [截屏保存为jpg](https://blog.csdn.net/m0_37684310/article/details/77950390)
- 版本号升级优化
- linux打包以及版本号
- 自动打包脚本
- 按键映射可配置
- 脚本
- 群控
- 配置文件
- 软硬解配置,去皮肤配置
- 窗口可改变大小
- 竖屏全屏不拉伸画面
- 分辨率码率可自定义
- opengles 3.0
## 高优先级
- 同步延迟优化
- linux打包以及版本号
# BUG
1. 魅族手机提示cant open video stream解决方法 https://dim.red/2019/03/03/scrcpy_usage/
# mark
[ffmpeg编译参数详解](https://www.cnblogs.com/wainiwann/p/4204230.html)

View file

@ -19,8 +19,9 @@ if /i "%1"=="x64" (
:: 뻔쓱긴좆<EAB8B4>
set adb_path=%script_path%third_party\adb\win\*.*
set jar_path=%script_path%third_party\scrcpy-server.jar
set jar_path=%script_path%third_party\scrcpy-server
set keymap_path=%script_path%keymap
set config_path=%script_path%config
if /i %cpu_mode% == x86 (
set publish_path=%script_path%QtScrcpy-win32\
@ -42,6 +43,7 @@ xcopy %release_path% %publish_path% /E /Y
xcopy %adb_path% %publish_path% /Y
xcopy %jar_path% %publish_path% /Y
xcopy %keymap_path% %publish_path%keymap\ /E /Y
xcopy %config_path% %publish_path%config\ /E /Y
:: 警속qt弩윳관
windeployqt %publish_path%\QtScrcpy.exe

View file

@ -28,7 +28,7 @@ android {
minSdkVersion 21
targetSdkVersion 29
versionCode 5
versionName "1.4"
versionName "1.12.1"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
}
buildTypes {

View file

@ -1,23 +1,21 @@
package com.genymobile.scrcpy;
/**
* Union of all supported msg types, identified by their {@code type}.
* Union of all supported event types, identified by their {@code type}.
*/
public final class ControlMessage {
public static final int TYPE_INJECT_KEYCODE = 0;
public static final int TYPE_INJECT_TEXT = 1;
public static final int TYPE_INJECT_MOUSE = 2;
public static final int TYPE_INJECT_SCROLL = 3;
public static final int TYPE_INJECT_TOUCH_EVENT = 2;
public static final int TYPE_INJECT_SCROLL_EVENT = 3;
public static final int TYPE_BACK_OR_SCREEN_ON = 4;
public static final int TYPE_EXPAND_NOTIFICATION_PANEL = 5;
public static final int TYPE_COLLAPSE_NOTIFICATION_PANEL = 6;
public static final int TYPE_GET_CLIPBOARD = 7;
public static final int TYPE_SET_CLIPBOARD = 8;
public static final int TYPE_SET_SCREEN_POWER_MODE = 9;
public static final int TYPE_INJECT_TOUCH = 10;
public static final int TYPE_ROTATE_DEVICE = 10;
private int type;
private String text;
@ -25,7 +23,8 @@ public final class ControlMessage {
private int action; // KeyEvent.ACTION_* or MotionEvent.ACTION_* or POWER_MODE_*
private int keycode; // KeyEvent.KEYCODE_*
private int buttons; // MotionEvent.BUTTON_*
private int id;
private long pointerId;
private float pressure;
private Position position;
private int hScroll;
private int vScroll;
@ -34,69 +33,62 @@ public final class ControlMessage {
}
public static ControlMessage createInjectKeycode(int action, int keycode, int metaState) {
ControlMessage event = new ControlMessage();
event.type = TYPE_INJECT_KEYCODE;
event.action = action;
event.keycode = keycode;
event.metaState = metaState;
return event;
ControlMessage msg = new ControlMessage();
msg.type = TYPE_INJECT_KEYCODE;
msg.action = action;
msg.keycode = keycode;
msg.metaState = metaState;
return msg;
}
public static ControlMessage createInjectText(String text) {
ControlMessage event = new ControlMessage();
event.type = TYPE_INJECT_TEXT;
event.text = text;
return event;
ControlMessage msg = new ControlMessage();
msg.type = TYPE_INJECT_TEXT;
msg.text = text;
return msg;
}
public static ControlMessage createInjectMotion(int action, int buttons, Position position) {
ControlMessage event = new ControlMessage();
event.type = TYPE_INJECT_MOUSE;
event.action = action;
event.buttons = buttons;
event.position = position;
return event;
/**
 * Build a {@link #TYPE_INJECT_TOUCH_EVENT} message.
 *
 * @param action    MotionEvent.ACTION_* value
 * @param pointerId identifier of the touch pointer
 * @param position  event location together with the reference screen size
 * @param pressure  touch pressure in [0, 1]
 * @param buttons   MotionEvent.BUTTON_* bit mask
 */
public static ControlMessage createInjectTouchEvent(int action, long pointerId, Position position, float pressure, int buttons) {
    ControlMessage msg = new ControlMessage();
    msg.type = TYPE_INJECT_TOUCH_EVENT;
    msg.action = action;
    msg.pointerId = pointerId;
    msg.pressure = pressure;
    msg.position = position;
    msg.buttons = buttons;
    return msg;
}
public static ControlMessage createInjectMotionTouch(int id, int action, Position position) {
ControlMessage event = new ControlMessage();
event.type = TYPE_INJECT_TOUCH;
event.action = action;
event.id = id;
event.position = position;
return event;
}
public static ControlMessage createInjectScroll(Position position, int hScroll, int vScroll) {
ControlMessage event = new ControlMessage();
event.type = TYPE_INJECT_SCROLL;
event.position = position;
event.hScroll = hScroll;
event.vScroll = vScroll;
return event;
/**
 * Build a {@link #TYPE_INJECT_SCROLL_EVENT} message.
 *
 * @param position scroll location together with the reference screen size
 * @param hScroll  horizontal scroll amount
 * @param vScroll  vertical scroll amount
 */
public static ControlMessage createInjectScrollEvent(Position position, int hScroll, int vScroll) {
    ControlMessage msg = new ControlMessage();
    msg.type = TYPE_INJECT_SCROLL_EVENT;
    msg.position = position;
    msg.hScroll = hScroll;
    msg.vScroll = vScroll;
    return msg;
}
public static ControlMessage createSetClipboard(String text) {
ControlMessage event = new ControlMessage();
event.type = TYPE_SET_CLIPBOARD;
event.text = text;
return event;
ControlMessage msg = new ControlMessage();
msg.type = TYPE_SET_CLIPBOARD;
msg.text = text;
return msg;
}
/**
* @param mode one of the {@code Device.SCREEN_POWER_MODE_*} constants
*/
public static ControlMessage createSetScreenPowerMode(int mode) {
ControlMessage event = new ControlMessage();
event.type = TYPE_SET_SCREEN_POWER_MODE;
event.action = mode;
return event;
ControlMessage msg = new ControlMessage();
msg.type = TYPE_SET_SCREEN_POWER_MODE;
msg.action = mode;
return msg;
}
public static ControlMessage createEmpty(int type) {
ControlMessage event = new ControlMessage();
event.type = type;
return event;
ControlMessage msg = new ControlMessage();
msg.type = type;
return msg;
}
public int getType() {
@ -123,8 +115,12 @@ public final class ControlMessage {
return buttons;
}
public int getId() {
return id;
public long getPointerId() {
return pointerId;
}
public float getPressure() {
return pressure;
}
public Position getPosition() {

View file

@ -9,9 +9,9 @@ import java.nio.charset.StandardCharsets;
public class ControlMessageReader {
private static final int INJECT_KEYCODE_PAYLOAD_LENGTH = 9;
private static final int INJECT_MOUSE_PAYLOAD_LENGTH = 13;
private static final int INJECT_SCROLL_PAYLOAD_LENGTH = 16;
private static final int INJECT_TOUCH_PAYLOAD_LENGTH = 10;
private static final int INJECT_MOUSE_EVENT_PAYLOAD_LENGTH = 17;
private static final int INJECT_TOUCH_EVENT_PAYLOAD_LENGTH = 21;
private static final int INJECT_SCROLL_EVENT_PAYLOAD_LENGTH = 20;
private static final int SET_SCREEN_POWER_MODE_PAYLOAD_LENGTH = 1;
public static final int TEXT_MAX_LENGTH = 300;
@ -50,6 +50,7 @@ public class ControlMessageReader {
return null;
}
int savedPosition = buffer.position();
int type = buffer.get();
ControlMessage msg;
switch (type) {
@ -59,14 +60,11 @@ public class ControlMessageReader {
case ControlMessage.TYPE_INJECT_TEXT:
msg = parseInjectText();
break;
case ControlMessage.TYPE_INJECT_MOUSE:
msg = parseInjectMouse();
case ControlMessage.TYPE_INJECT_TOUCH_EVENT:
msg = parseInjectTouchEvent();
break;
case ControlMessage.TYPE_INJECT_TOUCH:
msg = parseInjectMouseTouch();
break;
case ControlMessage.TYPE_INJECT_SCROLL:
msg = parseInjectScroll();
case ControlMessage.TYPE_INJECT_SCROLL_EVENT:
msg = parseInjectScrollEvent();
break;
case ControlMessage.TYPE_SET_CLIPBOARD:
msg = parseSetClipboard();
@ -78,6 +76,7 @@ public class ControlMessageReader {
case ControlMessage.TYPE_EXPAND_NOTIFICATION_PANEL:
case ControlMessage.TYPE_COLLAPSE_NOTIFICATION_PANEL:
case ControlMessage.TYPE_GET_CLIPBOARD:
case ControlMessage.TYPE_ROTATE_DEVICE:
msg = ControlMessage.createEmpty(type);
break;
default:
@ -123,34 +122,30 @@ public class ControlMessageReader {
return ControlMessage.createInjectText(text);
}
private ControlMessage parseInjectMouse() {
if (buffer.remaining() < INJECT_MOUSE_PAYLOAD_LENGTH) {
@SuppressWarnings("checkstyle:MagicNumber")
private ControlMessage parseInjectTouchEvent() {
if (buffer.remaining() < INJECT_TOUCH_EVENT_PAYLOAD_LENGTH) {
return null;
}
int action = toUnsigned(buffer.get());
long pointerId = buffer.getLong();
Position position = readPosition(buffer);
// 16 bits fixed-point
int pressureInt = toUnsigned(buffer.getShort());
// convert it to a float between 0 and 1 (0x1p16f is 2^16 as float)
float pressure = pressureInt == 0xffff ? 1f : (pressureInt / 0x1p16f);
int buttons = buffer.getInt();
Position position = readPosition(buffer);
return ControlMessage.createInjectMotion(action, buttons, position);
return ControlMessage.createInjectTouchEvent(action, pointerId, position, pressure, buttons);
}
private ControlMessage parseInjectMouseTouch() {
if (buffer.remaining() < INJECT_TOUCH_PAYLOAD_LENGTH) {
return null;
}
int id = toUnsigned(buffer.get());
int action = toUnsigned(buffer.get());
Position position = readPosition(buffer);
return ControlMessage.createInjectMotionTouch(id, action, position);
}
private ControlMessage parseInjectScroll() {
if (buffer.remaining() < INJECT_SCROLL_PAYLOAD_LENGTH) {
private ControlMessage parseInjectScrollEvent() {
if (buffer.remaining() < INJECT_SCROLL_EVENT_PAYLOAD_LENGTH) {
return null;
}
Position position = readPosition(buffer);
int hScroll = buffer.getInt();
int vScroll = buffer.getInt();
return ControlMessage.createInjectScroll(position, hScroll, vScroll);
return ControlMessage.createInjectScrollEvent(position, hScroll, vScroll);
}
private ControlMessage parseSetClipboard() {
@ -170,8 +165,8 @@ public class ControlMessageReader {
}
private static Position readPosition(ByteBuffer buffer) {
int x = toUnsigned(buffer.getShort());
int y = toUnsigned(buffer.getShort());
int x = buffer.getInt();
int y = buffer.getInt();
int screenWidth = toUnsigned(buffer.getShort());
int screenHeight = toUnsigned(buffer.getShort());
return new Position(x, y, screenWidth, screenHeight);

View file

@ -2,7 +2,6 @@ package com.genymobile.scrcpy;
import com.genymobile.scrcpy.wrappers.InputManager;
import android.graphics.Point;
import android.os.SystemClock;
import android.view.InputDevice;
import android.view.InputEvent;
@ -11,95 +10,41 @@ import android.view.KeyEvent;
import android.view.MotionEvent;
import java.io.IOException;
import java.util.Vector;
public class Controller {
private static final int DEVICE_ID_VIRTUAL = -1;
private final Device device;
private final DesktopConnection connection;
private final DeviceMessageSender sender;
private final KeyCharacterMap charMap = KeyCharacterMap.load(KeyCharacterMap.VIRTUAL_KEYBOARD);
private long lastMouseDown;
private Vector<MotionEvent.PointerProperties> pointerProperties = new Vector<MotionEvent.PointerProperties>();
private Vector<MotionEvent.PointerCoords> pointerCoords = new Vector<MotionEvent.PointerCoords>();
private long lastTouchDown;
private final PointersState pointersState = new PointersState();
private final MotionEvent.PointerProperties[] pointerProperties = new MotionEvent.PointerProperties[PointersState.MAX_POINTERS];
private final MotionEvent.PointerCoords[] pointerCoords = new MotionEvent.PointerCoords[PointersState.MAX_POINTERS];
public Controller(Device device, DesktopConnection connection) {
this.device = device;
this.connection = connection;
initPointers();
sender = new DeviceMessageSender(connection);
}
private int getPointer(int id) {
for (int i = 0; i < pointerProperties.size(); i++) {
if (id == pointerProperties.get(i).id) {
return i;
}
private void initPointers() {
for (int i = 0; i < PointersState.MAX_POINTERS; ++i) {
MotionEvent.PointerProperties props = new MotionEvent.PointerProperties();
props.toolType = MotionEvent.TOOL_TYPE_FINGER;
MotionEvent.PointerCoords coords = new MotionEvent.PointerCoords();
coords.orientation = 0;
coords.size = 1;
pointerProperties[i] = props;
pointerCoords[i] = coords;
}
MotionEvent.PointerProperties props = new MotionEvent.PointerProperties();
props.id = id;
props.toolType = MotionEvent.TOOL_TYPE_FINGER;
pointerProperties.addElement(props);
MotionEvent.PointerCoords coords = new MotionEvent.PointerCoords();
coords.orientation = 0;
coords.pressure = 1;
coords.size = 1;
pointerCoords.addElement(coords);
return pointerProperties.size() - 1;
}
private void releasePointer(int id) {
int index = -1;
for (int i = 0; i < pointerProperties.size(); i++) {
if (id == pointerProperties.get(i).id) {
index = i;
break;
}
}
if ( -1 != index) {
pointerProperties.remove(index);
pointerCoords.remove(index);
}
}
private void setPointerCoords(int id, Point point) {
int index = -1;
for (int i = 0; i < pointerProperties.size(); i++) {
if (id == pointerProperties.get(i).id) {
index = i;
break;
}
}
if ( -1 != index) {
MotionEvent.PointerCoords coords = pointerCoords.get(index);
coords.x = point.x;
coords.y = point.y;
}
}
private void setScroll(int id, int hScroll, int vScroll) {
int index = -1;
for (int i = 0; i < pointerProperties.size(); i++) {
if (id == pointerProperties.get(i).id) {
index = i;
break;
}
}
if ( -1 != index) {
MotionEvent.PointerCoords coords = pointerCoords.get(index);
coords.setAxisValue(MotionEvent.AXIS_HSCROLL, hScroll);
coords.setAxisValue(MotionEvent.AXIS_VSCROLL, vScroll);
}
}
public DeviceMessageSender getSender() {
return sender;
}
@SuppressWarnings("checkstyle:MagicNumber")
@ -123,6 +68,10 @@ public class Controller {
}
}
public DeviceMessageSender getSender() {
return sender;
}
private void handleEvent() throws IOException {
ControlMessage msg = connection.receiveControlMessage();
switch (msg.getType()) {
@ -132,13 +81,10 @@ public class Controller {
case ControlMessage.TYPE_INJECT_TEXT:
injectText(msg.getText());
break;
case ControlMessage.TYPE_INJECT_MOUSE:
injectMouse(msg.getAction(), msg.getButtons(), msg.getPosition());
case ControlMessage.TYPE_INJECT_TOUCH_EVENT:
injectTouch(msg.getAction(), msg.getPointerId(), msg.getPosition(), msg.getPressure(), msg.getButtons());
break;
case ControlMessage.TYPE_INJECT_TOUCH:
injectTouch(msg.getId(), msg.getAction(), msg.getPosition());
break;
case ControlMessage.TYPE_INJECT_SCROLL:
case ControlMessage.TYPE_INJECT_SCROLL_EVENT:
injectScroll(msg.getPosition(), msg.getHScroll(), msg.getVScroll());
break;
case ControlMessage.TYPE_BACK_OR_SCREEN_ON:
@ -160,6 +106,9 @@ public class Controller {
case ControlMessage.TYPE_SET_SCREEN_POWER_MODE:
device.setScreenPowerMode(msg.getAction());
break;
case ControlMessage.TYPE_ROTATE_DEVICE:
device.rotateDevice();
break;
default:
// do nothing
}
@ -196,87 +145,43 @@ public class Controller {
return successCount;
}
private boolean injectTouch(int id, int action, Position position) {
if (action != MotionEvent.ACTION_DOWN
&& action != MotionEvent.ACTION_UP
&& action != MotionEvent.ACTION_MOVE) {
Ln.w("Unsupported action: " + action);
return false;
}
if (id < 0 || id > 9) {
Ln.w("Unsupported id[0-9]: " + id);
return false;
}
int index = getPointer(id);
int convertAction = action;
switch (action) {
case MotionEvent.ACTION_DOWN:
if (1 != pointerProperties.size()) {
convertAction = (index << 8) | MotionEvent.ACTION_POINTER_DOWN;
}
break;
case MotionEvent.ACTION_MOVE:
if (1 != pointerProperties.size()) {
convertAction = (index << 8) | convertAction;
}
break;
case MotionEvent.ACTION_UP:
if (1 != pointerProperties.size()) {
convertAction = (index << 8) | MotionEvent.ACTION_POINTER_UP;
}
break;
}
Point point = device.getPhysicalPoint(position);
if (point == null) {
// ignore event
return false;
}
if (pointerProperties.isEmpty()) {
// ignore event
return false;
}
setPointerCoords(id, point);
MotionEvent.PointerProperties[] props = pointerProperties.toArray(new MotionEvent.PointerProperties[pointerProperties.size()]);
MotionEvent.PointerCoords[] coords = pointerCoords.toArray(new MotionEvent.PointerCoords[pointerCoords.size()]);
MotionEvent event = MotionEvent.obtain(SystemClock.uptimeMillis(), SystemClock.uptimeMillis(), convertAction,
pointerProperties.size(), props, coords, 0, 0, 1f, 1f, 0, 0,
InputDevice.SOURCE_TOUCHSCREEN, 0);
if (action == MotionEvent.ACTION_UP) {
releasePointer(id);
}
return injectEvent(event);
}
private boolean injectMouse(int action, int buttons, Position position) {
private boolean injectTouch(int action, long pointerId, Position position, float pressure, int buttons) {
long now = SystemClock.uptimeMillis();
if (action == MotionEvent.ACTION_DOWN) {
getPointer(0);
lastMouseDown = now;
}
Point point = device.getPhysicalPoint(position);
if (point == null) {
// ignore event
return false;
}
if (pointerProperties.isEmpty()) {
// ignore event
int pointerIndex = pointersState.getPointerIndex(pointerId);
if (pointerIndex == -1) {
Ln.w("Too many pointers for touch event");
return false;
}
setPointerCoords(0, point);
MotionEvent.PointerProperties[] props = pointerProperties.toArray(new MotionEvent.PointerProperties[pointerProperties.size()]);
MotionEvent.PointerCoords[] coords = pointerCoords.toArray(new MotionEvent.PointerCoords[pointerCoords.size()]);
MotionEvent event = MotionEvent.obtain(lastMouseDown, now, action,
pointerProperties.size(), props, coords, 0, buttons, 1f, 1f, 0, 0,
InputDevice.SOURCE_TOUCHSCREEN, 0);
Pointer pointer = pointersState.get(pointerIndex);
pointer.setPoint(point);
pointer.setPressure(pressure);
pointer.setUp(action == MotionEvent.ACTION_UP);
if (action == MotionEvent.ACTION_UP) {
releasePointer(0);
int pointerCount = pointersState.update(pointerProperties, pointerCoords);
if (pointerCount == 1) {
if (action == MotionEvent.ACTION_DOWN) {
lastTouchDown = now;
}
} else {
// secondary pointers must use ACTION_POINTER_* ORed with the pointerIndex
if (action == MotionEvent.ACTION_UP) {
action = MotionEvent.ACTION_POINTER_UP | (pointerIndex << MotionEvent.ACTION_POINTER_INDEX_SHIFT);
} else if (action == MotionEvent.ACTION_DOWN) {
action = MotionEvent.ACTION_POINTER_DOWN | (pointerIndex << MotionEvent.ACTION_POINTER_INDEX_SHIFT);
}
}
MotionEvent event = MotionEvent
.obtain(lastTouchDown, now, action, pointerCount, pointerProperties, pointerCoords, 0, buttons, 1f, 1f, DEVICE_ID_VIRTUAL, 0,
InputDevice.SOURCE_TOUCHSCREEN, 0);
return injectEvent(event);
}
@ -288,23 +193,18 @@ public class Controller {
return false;
}
// init
MotionEvent.PointerProperties[] props = {new MotionEvent.PointerProperties()};
props[0].id = 0;
props[0].toolType = MotionEvent.TOOL_TYPE_FINGER;
MotionEvent.PointerCoords[] coords = {new MotionEvent.PointerCoords()};
coords[0].orientation = 0;
coords[0].pressure = 1;
coords[0].size = 1;
MotionEvent.PointerProperties props = pointerProperties[0];
props.id = 0;
// set data
coords[0].x = point.x;
coords[0].y = point.y;
coords[0].setAxisValue(MotionEvent.AXIS_HSCROLL, hScroll);
coords[0].setAxisValue(MotionEvent.AXIS_VSCROLL, vScroll);
MotionEvent.PointerCoords coords = pointerCoords[0];
coords.x = point.getX();
coords.y = point.getY();
coords.setAxisValue(MotionEvent.AXIS_HSCROLL, hScroll);
coords.setAxisValue(MotionEvent.AXIS_VSCROLL, vScroll);
MotionEvent event = MotionEvent.obtain(lastMouseDown, now, MotionEvent.ACTION_SCROLL, 1, props, coords, 0, 0, 1f, 1f, 0,
0, InputDevice.SOURCE_MOUSE, 0);
MotionEvent event = MotionEvent
.obtain(lastTouchDown, now, MotionEvent.ACTION_SCROLL, 1, pointerProperties, pointerCoords, 0, 0, 1f, 1f, DEVICE_ID_VIRTUAL, 0,
InputDevice.SOURCE_MOUSE, 0);
return injectEvent(event);
}
@ -316,8 +216,7 @@ public class Controller {
}
private boolean injectKeycode(int keyCode) {
return injectKeyEvent(KeyEvent.ACTION_DOWN, keyCode, 0, 0)
&& injectKeyEvent(KeyEvent.ACTION_UP, keyCode, 0, 0);
return injectKeyEvent(KeyEvent.ACTION_DOWN, keyCode, 0, 0) && injectKeyEvent(KeyEvent.ACTION_UP, keyCode, 0, 0);
}
private boolean injectEvent(InputEvent event) {

View file

@ -15,7 +15,7 @@ public final class DesktopConnection implements Closeable {
private static final int DEVICE_NAME_FIELD_LENGTH = 64;
private static final String SOCKET_NAME = "qtscrcpy";
private static final String SOCKET_NAME = "scrcpy";
private final LocalSocket videoSocket;
private final FileDescriptor videoFd;
@ -24,7 +24,6 @@ public final class DesktopConnection implements Closeable {
private final InputStream controlInputStream;
private final OutputStream controlOutputStream;
private final ControlMessageReader reader = new ControlMessageReader();
private final DeviceMessageWriter writer = new DeviceMessageWriter();
@ -90,7 +89,7 @@ public final class DesktopConnection implements Closeable {
byte[] buffer = new byte[DEVICE_NAME_FIELD_LENGTH + 4];
byte[] deviceNameBytes = deviceName.getBytes(StandardCharsets.UTF_8);
int len = Math.min(DEVICE_NAME_FIELD_LENGTH - 1, deviceNameBytes.length);
int len = StringUtils.getUtf8TruncationIndex(deviceNameBytes, DEVICE_NAME_FIELD_LENGTH - 1);
System.arraycopy(deviceNameBytes, 0, buffer, 0, len);
// byte[] are always 0-initialized in java, no need to set '\0' explicitly

View file

@ -2,8 +2,8 @@ package com.genymobile.scrcpy;
import com.genymobile.scrcpy.wrappers.ServiceManager;
import com.genymobile.scrcpy.wrappers.SurfaceControl;
import com.genymobile.scrcpy.wrappers.WindowManager;
import android.graphics.Point;
import android.graphics.Rect;
import android.os.Build;
import android.os.IBinder;
@ -74,7 +74,6 @@ public final class Device {
@SuppressWarnings("checkstyle:MagicNumber")
private static Size computeVideoSize(int w, int h, int maxSize) {
// Compute the video size and the padding of the content inside this video.
/*
// Principle:
// - scale down the great side of the screen to maxSize (if necessary);
// - scale down the other side so that the aspect ratio is preserved;
@ -97,46 +96,7 @@ public final class Device {
w = portrait ? minor : major;
h = portrait ? major : minor;
}
*/
// Principle:480p/720p/1080p and not larger than device size.
w &= ~7; // in case it's not a multiple of 8
h &= ~7;
boolean vertival = h > w;
boolean validSize = false;
int newWidth = w;
int newHeight = h;
// 480p/720p/1080p
switch (maxSize) {
case 480: // 480p:640x480
newWidth = 640;
newHeight = 480;
validSize = true;
break;
case 720: // 720p:1280x720
newWidth = 1280;
newHeight = 720;
validSize = true;
break;
case 1080: // 1080p:1920x1080
newWidth = 1920;
newHeight = 1080;
validSize = true;
break;
}
// vertival convert
if (validSize && vertival) {
int temp = newWidth;
newWidth = newHeight;
newHeight = temp;
}
// not larger than device size.
if (newWidth > w || newHeight > h) {
newWidth = w;
newHeight = h;
}
return new Size(newWidth, newHeight);
return new Size(w, h);
}
public Point getPhysicalPoint(Position position) {
@ -152,8 +112,8 @@ public final class Device {
}
Rect contentRect = screenInfo.getContentRect();
Point point = position.getPoint();
int scaledX = contentRect.left + point.x * contentRect.width() / videoSize.getWidth();
int scaledY = contentRect.top + point.y * contentRect.height() / videoSize.getHeight();
int scaledX = contentRect.left + point.getX() * contentRect.width() / videoSize.getWidth();
int scaledY = contentRect.top + point.getY() * contentRect.height() / videoSize.getHeight();
return new Point(scaledX, scaledY);
}
@ -202,9 +162,34 @@ public final class Device {
* @param mode one of the {@code SCREEN_POWER_MODE_*} constants
*/
public void setScreenPowerMode(int mode) {
IBinder d = SurfaceControl.getBuiltInDisplay(0);
IBinder d = SurfaceControl.getBuiltInDisplay();
if (d == null) {
Ln.e("Could not get built-in display");
return;
}
SurfaceControl.setDisplayPowerMode(d, mode);
Ln.i("Device screen turned " + (mode == Device.POWER_MODE_OFF ? "off " : "on ") + mode);
Ln.i("Device screen turned " + (mode == Device.POWER_MODE_OFF ? "off" : "on"));
}
/**
 * Disable auto-rotation (if enabled), set the screen rotation and re-enable auto-rotation (if it was enabled).
 */
public void rotateDevice() {
    WindowManager wm = serviceManager.getWindowManager();
    // auto-rotation is considered enabled exactly when rotation is not frozen
    boolean accelerometerRotation = !wm.isRotationFrozen();
    int currentRotation = wm.getRotation();
    // flip between the portrait (even) and landscape (odd) orientation families
    int newRotation = (currentRotation & 1) ^ 1; // 0->1, 1->0, 2->1, 3->0
    String newRotationString = newRotation == 0 ? "portrait" : "landscape";
    Ln.i("Device rotation requested: " + newRotationString);
    // freezing applies the requested rotation immediately
    wm.freezeRotation(newRotation);
    // restore auto-rotate if necessary
    if (accelerometerRotation) {
        wm.thawRotation();
    }
}
static Rect flipRect(Rect crop) {

View file

@ -27,8 +27,8 @@ public final class DeviceMessageSender {
text = clipboardText;
clipboardText = null;
}
DeviceMessage msg = DeviceMessage.createClipboard(text);
connection.sendDeviceMessage(msg);
DeviceMessage event = DeviceMessage.createClipboard(text);
connection.sendDeviceMessage(event);
}
}
}

View file

@ -27,7 +27,7 @@ public class DeviceMessageWriter {
output.write(rawBuffer, 0, buffer.position());
break;
default:
Ln.w("Unknown device msg: " + msg.getType());
Ln.w("Unknown device message: " + msg.getType());
break;
}
}

View file

@ -8,14 +8,11 @@ import android.util.Log;
*/
public final class Ln {
private static final String TAG = "qtscrcpy";
private static final String TAG = "scrcpy";
private static final String PREFIX = "[server] ";
enum Level {
DEBUG,
INFO,
WARN,
ERROR;
DEBUG, INFO, WARN, ERROR
}
private static final Level THRESHOLD = BuildConfig.DEBUG ? Level.DEBUG : Level.INFO;

View file

@ -5,9 +5,10 @@ import android.graphics.Rect;
public class Options {
private int maxSize;
private int bitRate;
private int maxFps;
private boolean tunnelForward;
private Rect crop;
private boolean sendFrameMeta;
private boolean sendFrameMeta; // send PTS so that the client may record properly
private boolean control;
public int getMaxSize() {
@ -26,6 +27,14 @@ public class Options {
this.bitRate = bitRate;
}
/**
 * Maximum capture frame rate requested by the client (0 means unlimited).
 */
public int getMaxFps() {
    return maxFps;
}
/**
 * Sets the maximum capture frame rate (0 means unlimited).
 */
public void setMaxFps(int maxFps) {
    this.maxFps = maxFps;
}
public boolean isTunnelForward() {
return tunnelForward;
}

View file

@ -0,0 +1,43 @@
package com.genymobile.scrcpy;
import java.util.Objects;
/**
 * Immutable 2D point with integer coordinates, used for device positions.
 */
public class Point {

    private final int x;
    private final int y;

    public Point(int x, int y) {
        this.x = x;
        this.y = y;
    }

    /** Horizontal coordinate. */
    public int getX() {
        return x;
    }

    /** Vertical coordinate. */
    public int getY() {
        return y;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || o.getClass() != getClass()) {
            return false;
        }
        Point other = (Point) o;
        return other.x == x && other.y == y;
    }

    @Override
    public int hashCode() {
        return Objects.hash(x, y);
    }

    @Override
    public String toString() {
        return "Point{" + "x=" + x + ", y=" + y + '}';
    }
}

View file

@ -0,0 +1,55 @@
package com.genymobile.scrcpy;
/**
 * State of one active touch pointer: its ids, last known position, pressure,
 * and whether it has been released.
 */
public class Pointer {

    /**
     * Pointer id as received from the client.
     */
    private final long id;

    /**
     * Local pointer id, using the lowest possible values to fill the {@link android.view.MotionEvent.PointerProperties PointerProperties}.
     */
    private final int localId;

    // last known position of this pointer
    private Point point;
    // injected pressure; the message reader produces values typically in [0, 1]
    private float pressure;
    // true once an UP action has been received for this pointer
    private boolean up;

    public Pointer(long id, int localId) {
        this.id = id;
        this.localId = localId;
    }

    public long getId() {
        return id;
    }

    public int getLocalId() {
        return localId;
    }

    public Point getPoint() {
        return point;
    }

    public void setPoint(Point point) {
        this.point = point;
    }

    public float getPressure() {
        return pressure;
    }

    public void setPressure(float pressure) {
        this.pressure = pressure;
    }

    public boolean isUp() {
        return up;
    }

    public void setUp(boolean up) {
        this.up = up;
    }
}

View file

@ -0,0 +1,103 @@
package com.genymobile.scrcpy;
import android.view.MotionEvent;
import java.util.ArrayList;
import java.util.List;
/**
 * Tracks the set of active touch pointers and maps client pointer ids to
 * small local ids suitable for {@code MotionEvent.PointerProperties}.
 */
public class PointersState {

    public static final int MAX_POINTERS = 10;

    private final List<Pointer> pointers = new ArrayList<>();

    /** Index of the pointer with the given client id, or -1 if not tracked. */
    private int indexOf(long id) {
        int index = 0;
        for (Pointer pointer : pointers) {
            if (pointer.getId() == id) {
                return index;
            }
            ++index;
        }
        return -1;
    }

    /** Whether no tracked pointer currently uses this local id. */
    private boolean isLocalIdAvailable(int localId) {
        for (Pointer pointer : pointers) {
            if (pointer.getLocalId() == localId) {
                return false;
            }
        }
        return true;
    }

    /** Lowest local id not currently in use, or -1 if all are taken. */
    private int nextUnusedLocalId() {
        for (int candidate = 0; candidate < MAX_POINTERS; ++candidate) {
            if (isLocalIdAvailable(candidate)) {
                return candidate;
            }
        }
        return -1;
    }

    public Pointer get(int index) {
        return pointers.get(index);
    }

    /**
     * Returns the index of the pointer with the given id, registering a new
     * pointer first if necessary; -1 when MAX_POINTERS are already tracked.
     */
    public int getPointerIndex(long id) {
        int existing = indexOf(id);
        if (existing != -1) {
            // already tracked, reuse it
            return existing;
        }
        if (pointers.size() >= MAX_POINTERS) {
            // no slot left
            return -1;
        }
        int localId = nextUnusedLocalId();
        if (localId == -1) {
            throw new AssertionError("pointers.size() < maxFingers implies that a local id is available");
        }
        pointers.add(new Pointer(id, localId));
        // the freshly added pointer is the last element
        return pointers.size() - 1;
    }

    /**
     * Initialize the motion event parameters.
     *
     * @param props the pointer properties
     * @param coords the pointer coordinates
     * @return The number of items initialized (the number of pointers).
     */
    public int update(MotionEvent.PointerProperties[] props, MotionEvent.PointerCoords[] coords) {
        int count = pointers.size();
        for (int i = 0; i < count; ++i) {
            Pointer pointer = pointers.get(i);
            props[i].id = pointer.getLocalId();
            Point point = pointer.getPoint();
            coords[i].x = point.getX();
            coords[i].y = point.getY();
            coords[i].pressure = pointer.getPressure();
        }
        cleanUp();
        return count;
    }

    /**
     * Remove all pointers which are UP.
     */
    private void cleanUp() {
        pointers.removeIf(Pointer::isUp);
    }
}

View file

@ -1,7 +1,5 @@
package com.genymobile.scrcpy;
import android.graphics.Point;
import java.util.Objects;
public class Position {
@ -34,8 +32,7 @@ public class Position {
return false;
}
Position position = (Position) o;
return Objects.equals(point, position.point)
&& Objects.equals(screenSize, position.screenSize);
return Objects.equals(point, position.point) && Objects.equals(screenSize, position.screenSize);
}
@Override
@ -45,10 +42,7 @@ public class Position {
@Override
public String toString() {
return "Position{"
+ "point=" + point
+ ", screenSize=" + screenSize
+ '}';
return "Position{" + "point=" + point + ", screenSize=" + screenSize + '}';
}
}

View file

@ -6,6 +6,7 @@ import android.graphics.Rect;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Build;
import android.os.IBinder;
import android.view.Surface;
@ -16,32 +17,29 @@ import java.util.concurrent.atomic.AtomicBoolean;
public class ScreenEncoder implements Device.RotationListener {
private static final int DEFAULT_FRAME_RATE = 60; // fps
private static final int DEFAULT_I_FRAME_INTERVAL = 10; // seconds
private static final int REPEAT_FRAME_DELAY_US = 100_000; // repeat after 100ms
private static final int REPEAT_FRAME_DELAY = 6; // repeat after 6 frames
private static final int MICROSECONDS_IN_ONE_SECOND = 1_000_000;
private static final int NO_PTS = -1;
private final AtomicBoolean rotationChanged = new AtomicBoolean();
private final ByteBuffer headerBuffer = ByteBuffer.allocate(12);
private int bitRate;
private int frameRate;
private int maxFps;
private int iFrameInterval;
private boolean sendFrameMeta;
private long ptsOrigin;
public ScreenEncoder(boolean sendFrameMeta, int bitRate, int frameRate, int iFrameInterval) {
public ScreenEncoder(boolean sendFrameMeta, int bitRate, int maxFps, int iFrameInterval) {
this.sendFrameMeta = sendFrameMeta;
this.bitRate = bitRate;
this.frameRate = frameRate;
this.maxFps = maxFps;
this.iFrameInterval = iFrameInterval;
}
public ScreenEncoder(boolean sendFrameMeta, int bitRate) {
this(sendFrameMeta, bitRate, DEFAULT_FRAME_RATE, DEFAULT_I_FRAME_INTERVAL);
public ScreenEncoder(boolean sendFrameMeta, int bitRate, int maxFps) {
this(sendFrameMeta, bitRate, maxFps, DEFAULT_I_FRAME_INTERVAL);
}
@Override
@ -54,7 +52,10 @@ public class ScreenEncoder implements Device.RotationListener {
}
public void streamScreen(Device device, FileDescriptor fd) throws IOException {
MediaFormat format = createFormat(bitRate, frameRate, iFrameInterval);
Workarounds.prepareMainLooper();
Workarounds.fillAppInfo();
MediaFormat format = createFormat(bitRate, maxFps, iFrameInterval);
device.setRotationListener(this);
boolean alive;
try {
@ -87,7 +88,6 @@ public class ScreenEncoder implements Device.RotationListener {
boolean eof = false;
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
while (!consumeRotationChange() && !eof) {
int outputBufferId = codec.dequeueOutputBuffer(bufferInfo, -1);
eof = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
@ -138,15 +138,24 @@ public class ScreenEncoder implements Device.RotationListener {
return MediaCodec.createEncoderByType("video/avc");
}
private static MediaFormat createFormat(int bitRate, int frameRate, int iFrameInterval) throws IOException {
@SuppressWarnings("checkstyle:MagicNumber")
private static MediaFormat createFormat(int bitRate, int maxFps, int iFrameInterval) {
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, "video/avc");
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
format.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
// must be present to configure the encoder, but does not impact the actual frame rate, which is variable
format.setInteger(MediaFormat.KEY_FRAME_RATE, 60);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, iFrameInterval);
// display the very first frame, and recover from bad quality when no new frames
format.setLong(MediaFormat.KEY_REPEAT_PREVIOUS_FRAME_AFTER, MICROSECONDS_IN_ONE_SECOND * REPEAT_FRAME_DELAY / frameRate); // µs
format.setLong(MediaFormat.KEY_REPEAT_PREVIOUS_FRAME_AFTER, REPEAT_FRAME_DELAY_US); // µs
if (maxFps > 0) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
format.setFloat(MediaFormat.KEY_MAX_FPS_TO_ENCODER, maxFps);
} else {
Ln.w("Max FPS is only supported since Android 10, the option has been ignored");
}
}
return format;
}

View file

@ -1,9 +1,10 @@
package com.genymobile.scrcpy;
import android.graphics.Rect;
import android.media.MediaCodec;
import android.os.Build;
import java.io.File;
import java.io.IOException;
public final class Server {
@ -18,7 +19,7 @@ public final class Server {
final Device device = new Device(options);
boolean tunnelForward = options.isTunnelForward();
try (DesktopConnection connection = DesktopConnection.open(device, tunnelForward)) {
ScreenEncoder screenEncoder = new ScreenEncoder(options.getSendFrameMeta(), options.getBitRate());
ScreenEncoder screenEncoder = new ScreenEncoder(options.getSendFrameMeta(), options.getBitRate(), options.getMaxFps());
if (options.getControl()) {
Controller controller = new Controller(device, connection);
@ -60,7 +61,7 @@ public final class Server {
sender.loop();
} catch (IOException | InterruptedException e) {
// this is expected on close
Ln.d("Devide message sender stopped");
Ln.d("Device message sender stopped");
}
}
}).start();
@ -68,29 +69,42 @@ public final class Server {
@SuppressWarnings("checkstyle:MagicNumber")
private static Options createOptions(String... args) {
if (args.length != 6) {
throw new IllegalArgumentException("Expecting 5 parameters");
if (args.length < 1) {
throw new IllegalArgumentException("Missing client version");
}
String clientVersion = args[0];
if (!clientVersion.equals(BuildConfig.VERSION_NAME)) {
throw new IllegalArgumentException(
"The server version (" + clientVersion + ") does not match the client " + "(" + BuildConfig.VERSION_NAME + ")");
}
if (args.length != 8) {
throw new IllegalArgumentException("Expecting 8 parameters");
}
Options options = new Options();
int maxSize = Integer.parseInt(args[0]) & ~7; // multiple of 8
int maxSize = Integer.parseInt(args[1]) & ~7; // multiple of 8
options.setMaxSize(maxSize);
int bitRate = Integer.parseInt(args[1]);
int bitRate = Integer.parseInt(args[2]);
options.setBitRate(bitRate);
int maxFps = Integer.parseInt(args[3]);
options.setMaxFps(maxFps);
// use "adb forward" instead of "adb tunnel"? (so the server must listen)
boolean tunnelForward = Boolean.parseBoolean(args[2]);
boolean tunnelForward = Boolean.parseBoolean(args[4]);
options.setTunnelForward(tunnelForward);
Rect crop = parseCrop(args[3]);
Rect crop = parseCrop(args[5]);
options.setCrop(crop);
boolean sendFrameMeta = Boolean.parseBoolean(args[4]);
boolean sendFrameMeta = Boolean.parseBoolean(args[6]);
options.setSendFrameMeta(sendFrameMeta);
boolean control = Boolean.parseBoolean(args[5]);
boolean control = Boolean.parseBoolean(args[7]);
options.setControl(control);
return options;
@ -117,7 +131,21 @@ public final class Server {
try {
new File(SERVER_PATH).delete();
} catch (Exception e) {
Ln.e("Cannot unlink server", e);
Ln.e("Could not unlink server", e);
}
}
@SuppressWarnings("checkstyle:MagicNumber")
// Print an actionable hint for known encoder failures; no-op for other errors.
private static void suggestFix(Throwable e) {
    // MediaCodec.CodecException.getErrorCode() requires API 23 (M)
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
        if (e instanceof MediaCodec.CodecException) {
            MediaCodec.CodecException mce = (MediaCodec.CodecException) e;
            // this specific code is reported when the encoder cannot handle
            // the requested definition (hence the suggestion printed below)
            if (mce.getErrorCode() == 0xfffffc0e) {
                Ln.e("The hardware encoder is not able to encode at the given definition.");
                Ln.e("Try with a lower definition:");
                Ln.e("    scrcpy -m 1024");
            }
        }
    }
}
@ -126,6 +154,7 @@ public final class Server {
@Override
public void uncaughtException(Thread t, Throwable e) {
Ln.e("Exception on thread " + t, e);
suggestFix(e);
}
});

View file

@ -38,8 +38,7 @@ public final class Size {
return false;
}
Size size = (Size) o;
return width == size.width
&& height == size.height;
return width == size.width && height == size.height;
}
@Override
@ -49,9 +48,6 @@ public final class Size {
@Override
public String toString() {
return "Size{"
+ "width=" + width
+ ", height=" + height
+ '}';
return "Size{" + "width=" + width + ", height=" + height + '}';
}
}

View file

@ -0,0 +1,79 @@
package com.genymobile.scrcpy;
import android.annotation.SuppressLint;
import android.app.Application;
import android.app.Instrumentation;
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.os.Looper;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
public final class Workarounds {
private Workarounds() {
    // not instantiable: static utility class
}
/**
 * Prepares the main {@code Looper} on the current thread before any Surface
 * is created, working around device-specific crashes.
 */
public static void prepareMainLooper() {
    // Some devices internally create a Handler when creating an input Surface, causing an exception:
    //   "Can't create handler inside thread that has not called Looper.prepare()"
    // <https://github.com/Genymobile/scrcpy/issues/240>
    //
    // Use Looper.prepareMainLooper() instead of Looper.prepare() to avoid a NullPointerException:
    //   "Attempt to read from field 'android.os.MessageQueue android.os.Looper.mQueue'
    //    on a null object reference"
    // <https://github.com/Genymobile/scrcpy/issues/921>
    Looper.prepareMainLooper();
}
@SuppressLint("PrivateApi")
public static void fillAppInfo() {
try {
// ActivityThread activityThread = new ActivityThread();
Class<?> activityThreadClass = Class.forName("android.app.ActivityThread");
Constructor<?> activityThreadConstructor = activityThreadClass.getDeclaredConstructor();
activityThreadConstructor.setAccessible(true);
Object activityThread = activityThreadConstructor.newInstance();
// ActivityThread.sCurrentActivityThread = activityThread;
Field sCurrentActivityThreadField = activityThreadClass.getDeclaredField("sCurrentActivityThread");
sCurrentActivityThreadField.setAccessible(true);
sCurrentActivityThreadField.set(null, activityThread);
// ActivityThread.AppBindData appBindData = new ActivityThread.AppBindData();
Class<?> appBindDataClass = Class.forName("android.app.ActivityThread$AppBindData");
Constructor<?> appBindDataConstructor = appBindDataClass.getDeclaredConstructor();
appBindDataConstructor.setAccessible(true);
Object appBindData = appBindDataConstructor.newInstance();
ApplicationInfo applicationInfo = new ApplicationInfo();
applicationInfo.packageName = "com.genymobile.scrcpy";
// appBindData.appInfo = applicationInfo;
Field appInfoField = appBindDataClass.getDeclaredField("appInfo");
appInfoField.setAccessible(true);
appInfoField.set(appBindData, applicationInfo);
// activityThread.mBoundApplication = appBindData;
Field mBoundApplicationField = activityThreadClass.getDeclaredField("mBoundApplication");
mBoundApplicationField.setAccessible(true);
mBoundApplicationField.set(activityThread, appBindData);
// Context ctx = activityThread.getSystemContext();
Method getSystemContextMethod = activityThreadClass.getDeclaredMethod("getSystemContext");
Context ctx = (Context) getSystemContextMethod.invoke(activityThread);
Application app = Instrumentation.newApplication(Application.class, ctx);
// activityThread.mInitialApplication = app;
Field mInitialApplicationField = activityThreadClass.getDeclaredField("mInitialApplication");
mInitialApplicationField.setAccessible(true);
mInitialApplicationField.set(activityThread, app);
} catch (Throwable throwable) {
// this is a workaround, so failing is not an error
Ln.w("Could not fill app info: " + throwable.getMessage());
}
}
}

View file

@ -1,44 +1,86 @@
package com.genymobile.scrcpy.wrappers;
import com.genymobile.scrcpy.Ln;
import android.content.ClipData;
import android.os.Build;
import android.os.IInterface;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
public class ClipboardManager {
private static final String PACKAGE_NAME = "com.android.shell";
private static final int USER_ID = 0;
private final IInterface manager;
private final Method getPrimaryClipMethod;
private final Method setPrimaryClipMethod;
private Method getPrimaryClipMethod;
private Method setPrimaryClipMethod;
public ClipboardManager(IInterface manager) {
this.manager = manager;
try {
getPrimaryClipMethod = manager.getClass().getMethod("getPrimaryClip", String.class);
setPrimaryClipMethod = manager.getClass().getMethod("setPrimaryClip", ClipData.class, String.class);
} catch (NoSuchMethodException e) {
throw new AssertionError(e);
}
private Method getGetPrimaryClipMethod() throws NoSuchMethodException {
if (getPrimaryClipMethod == null) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
getPrimaryClipMethod = manager.getClass().getMethod("getPrimaryClip", String.class);
} else {
getPrimaryClipMethod = manager.getClass().getMethod("getPrimaryClip", String.class, int.class);
}
}
return getPrimaryClipMethod;
}
private Method getSetPrimaryClipMethod() throws NoSuchMethodException {
if (setPrimaryClipMethod == null) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
setPrimaryClipMethod = manager.getClass().getMethod("setPrimaryClip", ClipData.class, String.class);
} else {
setPrimaryClipMethod = manager.getClass().getMethod("setPrimaryClip", ClipData.class, String.class, int.class);
}
}
return setPrimaryClipMethod;
}
private static ClipData getPrimaryClip(Method method, IInterface manager) throws InvocationTargetException, IllegalAccessException {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
return (ClipData) method.invoke(manager, PACKAGE_NAME);
}
return (ClipData) method.invoke(manager, PACKAGE_NAME, USER_ID);
}
private static void setPrimaryClip(Method method, IInterface manager, ClipData clipData)
throws InvocationTargetException, IllegalAccessException {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
method.invoke(manager, clipData, PACKAGE_NAME);
} else {
method.invoke(manager, clipData, PACKAGE_NAME, USER_ID);
}
}
public CharSequence getText() {
try {
ClipData clipData = (ClipData) getPrimaryClipMethod.invoke(manager, "com.android.shell");
Method method = getGetPrimaryClipMethod();
ClipData clipData = getPrimaryClip(method, manager);
if (clipData == null || clipData.getItemCount() == 0) {
return null;
}
return clipData.getItemAt(0).getText();
} catch (InvocationTargetException | IllegalAccessException e) {
throw new AssertionError(e);
} catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException e) {
Ln.e("Could not invoke method", e);
return null;
}
}
public void setText(CharSequence text) {
ClipData clipData = ClipData.newPlainText(null, text);
try {
setPrimaryClipMethod.invoke(manager, clipData, "com.android.shell");
} catch (InvocationTargetException | IllegalAccessException e) {
throw new AssertionError(e);
Method method = getSetPrimaryClipMethod();
ClipData clipData = ClipData.newPlainText(null, text);
setPrimaryClip(method, manager, clipData);
} catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException e) {
Ln.e("Could not invoke method", e);
}
}
}

View file

@ -1,5 +1,7 @@
package com.genymobile.scrcpy.wrappers;
import com.genymobile.scrcpy.Ln;
import android.os.IInterface;
import android.view.InputEvent;
@ -13,22 +15,26 @@ public final class InputManager {
public static final int INJECT_INPUT_EVENT_MODE_WAIT_FOR_FINISH = 2;
private final IInterface manager;
private final Method injectInputEventMethod;
private Method injectInputEventMethod;
public InputManager(IInterface manager) {
this.manager = manager;
try {
}
private Method getInjectInputEventMethod() throws NoSuchMethodException {
if (injectInputEventMethod == null) {
injectInputEventMethod = manager.getClass().getMethod("injectInputEvent", InputEvent.class, int.class);
} catch (NoSuchMethodException e) {
throw new AssertionError(e);
}
return injectInputEventMethod;
}
public boolean injectInputEvent(InputEvent inputEvent, int mode) {
try {
return (Boolean) injectInputEventMethod.invoke(manager, inputEvent, mode);
} catch (InvocationTargetException | IllegalAccessException e) {
throw new AssertionError(e);
Method method = getInjectInputEventMethod();
return (boolean) method.invoke(manager, inputEvent, mode);
} catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException e) {
Ln.e("Could not invoke method", e);
return false;
}
}
}

View file

@ -1,5 +1,7 @@
package com.genymobile.scrcpy.wrappers;
import com.genymobile.scrcpy.Ln;
import android.annotation.SuppressLint;
import android.os.Build;
import android.os.IInterface;
@ -9,24 +11,28 @@ import java.lang.reflect.Method;
public final class PowerManager {
private final IInterface manager;
private final Method isScreenOnMethod;
private Method isScreenOnMethod;
public PowerManager(IInterface manager) {
this.manager = manager;
try {
}
private Method getIsScreenOnMethod() throws NoSuchMethodException {
if (isScreenOnMethod == null) {
@SuppressLint("ObsoleteSdkInt") // we may lower minSdkVersion in the future
String methodName = Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT_WATCH ? "isInteractive" : "isScreenOn";
String methodName = Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT_WATCH ? "isInteractive" : "isScreenOn";
isScreenOnMethod = manager.getClass().getMethod(methodName);
} catch (NoSuchMethodException e) {
throw new AssertionError(e);
}
return isScreenOnMethod;
}
public boolean isScreenOn() {
try {
return (Boolean) isScreenOnMethod.invoke(manager);
} catch (InvocationTargetException | IllegalAccessException e) {
throw new AssertionError(e);
Method method = getIsScreenOnMethod();
return (boolean) method.invoke(manager);
} catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException e) {
Ln.e("Could not invoke method", e);
return false;
}
}
}

View file

@ -17,35 +17,35 @@ public class StatusBarManager {
this.manager = manager;
}
public void expandNotificationsPanel() {
private Method getExpandNotificationsPanelMethod() throws NoSuchMethodException {
if (expandNotificationsPanelMethod == null) {
try {
expandNotificationsPanelMethod = manager.getClass().getMethod("expandNotificationsPanel");
} catch (NoSuchMethodException e) {
Ln.e("ServiceBarManager.expandNotificationsPanel() is not available on this device");
return;
}
expandNotificationsPanelMethod = manager.getClass().getMethod("expandNotificationsPanel");
}
return expandNotificationsPanelMethod;
}
private Method getCollapsePanelsMethod() throws NoSuchMethodException {
if (collapsePanelsMethod == null) {
collapsePanelsMethod = manager.getClass().getMethod("collapsePanels");
}
return collapsePanelsMethod;
}
public void expandNotificationsPanel() {
try {
expandNotificationsPanelMethod.invoke(manager);
} catch (InvocationTargetException | IllegalAccessException e) {
Ln.e("Cannot invoke ServiceBarManager.expandNotificationsPanel()", e);
Method method = getExpandNotificationsPanelMethod();
method.invoke(manager);
} catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException e) {
Ln.e("Could not invoke method", e);
}
}
public void collapsePanels() {
if (collapsePanelsMethod == null) {
try {
collapsePanelsMethod = manager.getClass().getMethod("collapsePanels");
} catch (NoSuchMethodException e) {
Ln.e("ServiceBarManager.collapsePanels() is not available on this device");
return;
}
}
try {
collapsePanelsMethod.invoke(manager);
} catch (InvocationTargetException | IllegalAccessException e) {
Ln.e("Cannot invoke ServiceBarManager.collapsePanels()", e);
Method method = getCollapsePanelsMethod();
method.invoke(manager);
} catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException e) {
Ln.e("Could not invoke method", e);
}
}
}
}

View file

@ -1,11 +1,16 @@
package com.genymobile.scrcpy.wrappers;
import com.genymobile.scrcpy.Ln;
import android.annotation.SuppressLint;
import android.graphics.Rect;
import android.os.Build;
import android.os.IBinder;
import android.view.Surface;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
@SuppressLint("PrivateApi")
public final class SurfaceControl {
@ -23,6 +28,9 @@ public final class SurfaceControl {
}
}
private static Method getBuiltInDisplayMethod;
private static Method setDisplayPowerModeMethod;
private SurfaceControl() {
// only static methods
}
@ -76,24 +84,49 @@ public final class SurfaceControl {
}
}
public static IBinder getBuiltInDisplay(int builtInDisplayId) {
try {
private static Method getGetBuiltInDisplayMethod() throws NoSuchMethodException {
if (getBuiltInDisplayMethod == null) {
// the method signature has changed in Android Q
// <https://github.com/Genymobile/scrcpy/issues/586>
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
return (IBinder) CLASS.getMethod("getBuiltInDisplay", int.class).invoke(null, builtInDisplayId);
getBuiltInDisplayMethod = CLASS.getMethod("getBuiltInDisplay", int.class);
} else {
getBuiltInDisplayMethod = CLASS.getMethod("getInternalDisplayToken");
}
return (IBinder) CLASS.getMethod("getPhysicalDisplayToken", long.class).invoke(null, builtInDisplayId);
} catch (Exception e) {
throw new AssertionError(e);
}
return getBuiltInDisplayMethod;
}
public static IBinder getBuiltInDisplay() {
try {
Method method = getGetBuiltInDisplayMethod();
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
// call getBuiltInDisplay(0)
return (IBinder) method.invoke(null, 0);
}
// call getInternalDisplayToken()
return (IBinder) method.invoke(null);
} catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException e) {
Ln.e("Could not invoke method", e);
return null;
}
}
private static Method getSetDisplayPowerModeMethod() throws NoSuchMethodException {
if (setDisplayPowerModeMethod == null) {
setDisplayPowerModeMethod = CLASS.getMethod("setDisplayPowerMode", IBinder.class, int.class);
}
return setDisplayPowerModeMethod;
}
public static void setDisplayPowerMode(IBinder displayToken, int mode) {
try {
CLASS.getMethod("setDisplayPowerMode", IBinder.class, int.class).invoke(null, displayToken, mode);
} catch (Exception e) {
throw new AssertionError(e);
Method method = getSetDisplayPowerModeMethod();
method.invoke(null, displayToken, mode);
} catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException e) {
Ln.e("Could not invoke method", e);
}
}

View file

@ -1,27 +1,95 @@
package com.genymobile.scrcpy.wrappers;
import com.genymobile.scrcpy.Ln;
import android.os.IInterface;
import android.view.IRotationWatcher;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
public final class WindowManager {
private final IInterface manager;
private Method getRotationMethod;
private Method freezeRotationMethod;
private Method isRotationFrozenMethod;
private Method thawRotationMethod;
public WindowManager(IInterface manager) {
this.manager = manager;
}
public int getRotation() {
try {
private Method getGetRotationMethod() throws NoSuchMethodException {
if (getRotationMethod == null) {
Class<?> cls = manager.getClass();
try {
return (Integer) manager.getClass().getMethod("getRotation").invoke(manager);
} catch (NoSuchMethodException e) {
// method changed since this commit:
// https://android.googlesource.com/platform/frameworks/base/+/8ee7285128c3843401d4c4d0412cd66e86ba49e3%5E%21/#F2
return (Integer) cls.getMethod("getDefaultDisplayRotation").invoke(manager);
getRotationMethod = cls.getMethod("getDefaultDisplayRotation");
} catch (NoSuchMethodException e) {
// old version
getRotationMethod = cls.getMethod("getRotation");
}
} catch (Exception e) {
throw new AssertionError(e);
}
return getRotationMethod;
}
private Method getFreezeRotationMethod() throws NoSuchMethodException {
if (freezeRotationMethod == null) {
freezeRotationMethod = manager.getClass().getMethod("freezeRotation", int.class);
}
return freezeRotationMethod;
}
private Method getIsRotationFrozenMethod() throws NoSuchMethodException {
if (isRotationFrozenMethod == null) {
isRotationFrozenMethod = manager.getClass().getMethod("isRotationFrozen");
}
return isRotationFrozenMethod;
}
private Method getThawRotationMethod() throws NoSuchMethodException {
if (thawRotationMethod == null) {
thawRotationMethod = manager.getClass().getMethod("thawRotation");
}
return thawRotationMethod;
}
public int getRotation() {
try {
Method method = getGetRotationMethod();
return (int) method.invoke(manager);
} catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException e) {
Ln.e("Could not invoke method", e);
return 0;
}
}
public void freezeRotation(int rotation) {
try {
Method method = getFreezeRotationMethod();
method.invoke(manager, rotation);
} catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException e) {
Ln.e("Could not invoke method", e);
}
}
public boolean isRotationFrozen() {
try {
Method method = getIsRotationFrozenMethod();
return (boolean) method.invoke(manager);
} catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException e) {
Ln.e("Could not invoke method", e);
return false;
}
}
public void thawRotation() {
try {
Method method = getThawRotationMethod();
method.invoke(manager);
} catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException e) {
Ln.e("Could not invoke method", e);
}
}
@ -29,11 +97,12 @@ public final class WindowManager {
try {
Class<?> cls = manager.getClass();
try {
cls.getMethod("watchRotation", IRotationWatcher.class).invoke(manager, rotationWatcher);
} catch (NoSuchMethodException e) {
// display parameter added since this commit:
// https://android.googlesource.com/platform/frameworks/base/+/35fa3c26adcb5f6577849fd0df5228b1f67cf2c6%5E%21/#F1
cls.getMethod("watchRotation", IRotationWatcher.class, int.class).invoke(manager, rotationWatcher, 0);
} catch (NoSuchMethodException e) {
// old version
cls.getMethod("watchRotation", IRotationWatcher.class).invoke(manager, rotationWatcher);
}
} catch (Exception e) {
throw new AssertionError(e);

BIN
third_party/scrcpy-server vendored Normal file

Binary file not shown.