chore: remove QtScrcpyCore

Barry 2022-06-08 21:00:14 +08:00
parent ee8be50e33
commit 2a7919850c
162 changed files with 0 additions and 36547 deletions

View file

@@ -1,220 +0,0 @@
set(QSC_PROJECT_NAME "QtScrcpyCore")
set(CMAKE_INCLUDE_CURRENT_DIR ON)
set(CMAKE_AUTOUIC ON)
set(CMAKE_AUTOMOC ON)
set(CMAKE_AUTORCC ON)
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
find_package(QT NAMES Qt6 Qt5 COMPONENTS Widgets Network REQUIRED)
find_package(Qt${QT_VERSION_MAJOR} COMPONENTS Widgets Network REQUIRED)
# check arch
if(CMAKE_SIZEOF_VOID_P EQUAL 8)
set(QSC_CPU_ARCH x64)
else()
set(QSC_CPU_ARCH x86)
endif()
message(STATUS "[${PROJECT_NAME}] QSC_CPU_ARCH:${QSC_CPU_ARCH}")
if(NOT DEFINED QSC_DEPLOY_PATH)
message(FATAL_ERROR "-- [QtScrcpyCore] need QSC_DEPLOY_PATH")
endif()
message("-- [QtScrcpyCore] QSC_DEPLOY_PATH: ${QSC_DEPLOY_PATH}")
#
# Sources
#
# adb
set(QSC_ADB_SOURCES
src/adb/adbprocessimpl.h
src/adb/adbprocessimpl.cpp
src/adb/adbprocess.cpp
)
source_group(src/adb FILES ${QSC_ADB_SOURCES})
# common
set(QSC_COMMON_SOURCES
src/common/qscrcpyevent.h
)
source_group(src/common FILES ${QSC_COMMON_SOURCES})
# include
set(QSC_INCLUDE_SOURCES
include/QtScrcpyCore.h
include/QtScrcpyCoreDef.h
include/adbprocess.h
)
source_group(include FILES ${QSC_INCLUDE_SOURCES})
# device
set(QSC_DEVICE_SOURCES
src/device/device.h
src/device/device.cpp
src/device/compat.h
src/device/android/input.h
src/device/android/keycodes.h
src/device/controller/controller.h
src/device/controller/controller.cpp
src/device/controller/bufferutil.h
src/device/controller/bufferutil.cpp
src/device/controller/inputconvert/inputconvertbase.h
src/device/controller/inputconvert/inputconvertbase.cpp
src/device/controller/inputconvert/inputconvertnormal.h
src/device/controller/inputconvert/inputconvertnormal.cpp
src/device/controller/inputconvert/inputconvertgame.h
src/device/controller/inputconvert/inputconvertgame.cpp
src/device/controller/inputconvert/controlmsg.h
src/device/controller/inputconvert/controlmsg.cpp
src/device/controller/inputconvert/keymap/keymap.h
src/device/controller/inputconvert/keymap/keymap.cpp
src/device/controller/receiver/devicemsg.h
src/device/controller/receiver/devicemsg.cpp
src/device/controller/receiver/receiver.h
src/device/controller/receiver/receiver.cpp
src/device/decoder/avframeconvert.h
src/device/decoder/avframeconvert.cpp
src/device/decoder/decoder.h
src/device/decoder/decoder.cpp
src/device/decoder/fpscounter.h
src/device/decoder/fpscounter.cpp
src/device/decoder/videobuffer.h
src/device/decoder/videobuffer.cpp
src/device/filehandler/filehandler.h
src/device/filehandler/filehandler.cpp
src/device/recorder/recorder.h
src/device/recorder/recorder.cpp
src/device/server/server.h
src/device/server/server.cpp
src/device/server/tcpserver.h
src/device/server/tcpserver.cpp
src/device/server/videosocket.h
src/device/server/videosocket.cpp
src/device/stream/stream.h
src/device/stream/stream.cpp
)
source_group(src/device FILES ${QSC_DEVICE_SOURCES})
# devicemanage
set(QSC_DEVICEMANAGE_SOURCES
src/devicemanage/devicemanage.h
src/devicemanage/devicemanage.cpp
)
source_group(src/devicemanage FILES ${QSC_DEVICEMANAGE_SOURCES})
add_library(QtScrcpyCore STATIC
${QSC_ADB_SOURCES}
${QSC_COMMON_SOURCES}
${QSC_INCLUDE_SOURCES}
${QSC_DEVICE_SOURCES}
${QSC_DEVICEMANAGE_SOURCES}
)
target_link_libraries(QtScrcpyCore PRIVATE
Qt${QT_VERSION_MAJOR}::Widgets
Qt${QT_VERSION_MAJOR}::Network
)
set_target_properties(${QSC_PROJECT_NAME} PROPERTIES
ARCHIVE_OUTPUT_DIRECTORY "${QSC_DEPLOY_PATH}/$<0:>"
)
target_include_directories(${QSC_PROJECT_NAME} PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include)
target_include_directories(${QSC_PROJECT_NAME} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/src/adb)
target_include_directories(${QSC_PROJECT_NAME} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/src/common)
target_include_directories(${QSC_PROJECT_NAME} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/src/device)
target_include_directories(${QSC_PROJECT_NAME} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/src/device/filehandler)
target_include_directories(${QSC_PROJECT_NAME} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/src/device/android)
target_include_directories(${QSC_PROJECT_NAME} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/src/device/decoder)
target_include_directories(${QSC_PROJECT_NAME} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/src/device/controller)
target_include_directories(${QSC_PROJECT_NAME} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/src/device/controller/receiver)
target_include_directories(${QSC_PROJECT_NAME} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/src/device/controller/inputconvert)
target_include_directories(${QSC_PROJECT_NAME} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/src/device/controller/inputconvert/keymap)
target_include_directories(${QSC_PROJECT_NAME} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/src/device/server)
target_include_directories(${QSC_PROJECT_NAME} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/src/device/stream)
target_include_directories(${QSC_PROJECT_NAME} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/src/device/ui)
target_include_directories(${QSC_PROJECT_NAME} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/src/device/recorder)
target_include_directories(${QSC_PROJECT_NAME} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/src/devicemanage)
#
# platform deps
#
# windows
if(CMAKE_SYSTEM_NAME STREQUAL "Windows")
# ffmpeg
# include
target_include_directories(${QSC_PROJECT_NAME} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/src/third_party/ffmpeg/include)
# link
set(FFMPEG_LIB_PATH "${CMAKE_CURRENT_SOURCE_DIR}/src/third_party/ffmpeg/lib/${QSC_CPU_ARCH}")
target_link_directories(${QSC_PROJECT_NAME} PUBLIC ${FFMPEG_LIB_PATH})
target_link_libraries(${QSC_PROJECT_NAME} PRIVATE
avformat
avcodec
avutil
swscale
)
# copy
set(THIRD_PARTY_PATH "${CMAKE_CURRENT_SOURCE_DIR}/src/third_party")
set(FFMPEG_BIN_PATH "${THIRD_PARTY_PATH}/ffmpeg/bin/${QSC_CPU_ARCH}")
add_custom_command(TARGET ${QSC_PROJECT_NAME} POST_BUILD
COMMAND ${CMAKE_COMMAND} -E copy_if_different "${FFMPEG_BIN_PATH}/avcodec-58.dll" "${QSC_DEPLOY_PATH}"
COMMAND ${CMAKE_COMMAND} -E copy_if_different "${FFMPEG_BIN_PATH}/avformat-58.dll" "${QSC_DEPLOY_PATH}"
COMMAND ${CMAKE_COMMAND} -E copy_if_different "${FFMPEG_BIN_PATH}/avutil-56.dll" "${QSC_DEPLOY_PATH}"
COMMAND ${CMAKE_COMMAND} -E copy_if_different "${FFMPEG_BIN_PATH}/swscale-5.dll" "${QSC_DEPLOY_PATH}"
COMMAND ${CMAKE_COMMAND} -E copy_if_different "${FFMPEG_BIN_PATH}/swresample-3.dll" "${QSC_DEPLOY_PATH}"
COMMAND ${CMAKE_COMMAND} -E copy_if_different "${THIRD_PARTY_PATH}/adb/win/adb.exe" "${QSC_DEPLOY_PATH}"
COMMAND ${CMAKE_COMMAND} -E copy_if_different "${THIRD_PARTY_PATH}/adb/win/AdbWinApi.dll" "${QSC_DEPLOY_PATH}"
COMMAND ${CMAKE_COMMAND} -E copy_if_different "${THIRD_PARTY_PATH}/adb/win/AdbWinUsbApi.dll" "${QSC_DEPLOY_PATH}"
COMMAND ${CMAKE_COMMAND} -E copy_if_different "${THIRD_PARTY_PATH}/scrcpy-server" "${QSC_DEPLOY_PATH}"
)
endif()
# MacOS
if(CMAKE_SYSTEM_NAME STREQUAL "Darwin")
# ffmpeg
# include
target_include_directories(${QSC_PROJECT_NAME} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/src/third_party/ffmpeg/include)
# link
set(FFMPEG_LIB_PATH "${CMAKE_CURRENT_SOURCE_DIR}/src/third_party/ffmpeg/lib")
target_link_directories(${QSC_PROJECT_NAME} PUBLIC ${FFMPEG_LIB_PATH})
target_link_libraries(${QSC_PROJECT_NAME} PRIVATE
avformat.58
avcodec.58
avutil.56
swscale.5
)
# copy bundle file
add_custom_command(TARGET ${QSC_PROJECT_NAME} POST_BUILD
# copy dylibs, scrcpy-server and adb into Contents/MacOS
COMMAND ${CMAKE_COMMAND} -E copy_if_different "${CMAKE_CURRENT_SOURCE_DIR}/src/third_party/ffmpeg/lib/libavcodec.58.dylib" "${QSC_DEPLOY_PATH}/MacOS"
COMMAND ${CMAKE_COMMAND} -E copy_if_different "${CMAKE_CURRENT_SOURCE_DIR}/src/third_party/ffmpeg/lib/libavformat.58.dylib" "${QSC_DEPLOY_PATH}/MacOS"
COMMAND ${CMAKE_COMMAND} -E copy_if_different "${CMAKE_CURRENT_SOURCE_DIR}/src/third_party/ffmpeg/lib/libavutil.56.dylib" "${QSC_DEPLOY_PATH}/MacOS"
COMMAND ${CMAKE_COMMAND} -E copy_if_different "${CMAKE_CURRENT_SOURCE_DIR}/src/third_party/ffmpeg/lib/libswscale.5.dylib" "${QSC_DEPLOY_PATH}/MacOS"
COMMAND ${CMAKE_COMMAND} -E copy_if_different "${CMAKE_CURRENT_SOURCE_DIR}/src/third_party/ffmpeg/lib/libswresample.3.dylib" "${QSC_DEPLOY_PATH}/MacOS"
COMMAND ${CMAKE_COMMAND} -E copy_if_different "${CMAKE_CURRENT_SOURCE_DIR}/src/third_party/scrcpy-server" "${QSC_DEPLOY_PATH}/MacOS"
COMMAND ${CMAKE_COMMAND} -E copy_if_different "${CMAKE_CURRENT_SOURCE_DIR}/src/third_party/adb/mac/adb" "${QSC_DEPLOY_PATH}/MacOS"
)
endif()
# Linux
if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
# include
target_include_directories(${QSC_PROJECT_NAME} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/src/third_party/ffmpeg/include)
# link
set(FFMPEG_LIB_PATH "${CMAKE_CURRENT_SOURCE_DIR}/src/third_party/ffmpeg/lib")
target_link_directories(${QSC_PROJECT_NAME} PUBLIC ${FFMPEG_LIB_PATH})
target_link_libraries(${QSC_PROJECT_NAME} PRIVATE
# ffmpeg
avformat
avcodec
avutil
swscale
)
endif()

View file

@@ -1,140 +0,0 @@
#pragma once
#include <QMouseEvent>
#include "QtScrcpyCoreDef.h"
namespace qsc {
class DeviceObserver {
protected:
DeviceObserver() {
}
virtual ~DeviceObserver() {
}
public:
virtual void onFrame(int width, int height, uint8_t* dataY, uint8_t* dataU, uint8_t* dataV, int linesizeY, int linesizeU, int linesizeV) {
Q_UNUSED(width);
Q_UNUSED(height);
Q_UNUSED(dataY);
Q_UNUSED(dataU);
Q_UNUSED(dataV);
Q_UNUSED(linesizeY);
Q_UNUSED(linesizeU);
Q_UNUSED(linesizeV);
}
virtual void updateFPS(quint32 fps) { Q_UNUSED(fps); }
virtual void grabCursor(bool grab) {Q_UNUSED(grab);}
virtual void mouseEvent(const QMouseEvent *from, const QSize &frameSize, const QSize &showSize) {
Q_UNUSED(from);
Q_UNUSED(frameSize);
Q_UNUSED(showSize);
}
virtual void wheelEvent(const QWheelEvent *from, const QSize &frameSize, const QSize &showSize) {
Q_UNUSED(from);
Q_UNUSED(frameSize);
Q_UNUSED(showSize);
}
virtual void keyEvent(const QKeyEvent *from, const QSize &frameSize, const QSize &showSize) {
Q_UNUSED(from);
Q_UNUSED(frameSize);
Q_UNUSED(showSize);
}
virtual void postGoBack() {}
virtual void postGoHome() {}
virtual void postGoMenu() {}
virtual void postAppSwitch() {}
virtual void postPower() {}
virtual void postVolumeUp() {}
virtual void postVolumeDown() {}
virtual void postCopy() {}
virtual void postCut() {}
virtual void setScreenPowerMode(bool open) { Q_UNUSED(open); }
virtual void expandNotificationPanel() {}
virtual void collapsePanel() {}
virtual void postBackOrScreenOn(bool down) { Q_UNUSED(down); }
virtual void postTextInput(QString &text) { Q_UNUSED(text); }
virtual void requestDeviceClipboard() {}
virtual void setDeviceClipboard(bool pause = true) { Q_UNUSED(pause); }
virtual void clipboardPaste() {}
virtual void pushFileRequest(const QString &file, const QString &devicePath) {
Q_UNUSED(file);
Q_UNUSED(devicePath);
}
virtual void installApkRequest(const QString &apkFile) { Q_UNUSED(apkFile); }
virtual void screenshot() {}
virtual void showTouch(bool show) { Q_UNUSED(show); }
};
class IDevice : public QObject {
Q_OBJECT
public:
IDevice(QObject *parent = nullptr) : QObject(parent) {}
virtual ~IDevice(){}
signals:
void deviceConnected(bool success, const QString& serial, const QString& deviceName, const QSize& size);
void deviceDisconnected(QString serial);
public:
virtual void setUserData(void* data) = 0;
virtual void* getUserData() = 0;
virtual void registerDeviceObserver(DeviceObserver* observer) = 0;
virtual void deRegisterDeviceObserver(DeviceObserver* observer) = 0;
virtual bool connectDevice() = 0;
virtual void disconnectDevice() = 0;
virtual void mouseEvent(const QMouseEvent *from, const QSize &frameSize, const QSize &showSize) = 0;
virtual void wheelEvent(const QWheelEvent *from, const QSize &frameSize, const QSize &showSize) = 0;
virtual void keyEvent(const QKeyEvent *from, const QSize &frameSize, const QSize &showSize) = 0;
virtual void postGoBack() = 0;
virtual void postGoHome() = 0;
virtual void postGoMenu() = 0;
virtual void postAppSwitch() = 0;
virtual void postPower() = 0;
virtual void postVolumeUp() = 0;
virtual void postVolumeDown() = 0;
virtual void postCopy() = 0;
virtual void postCut() = 0;
virtual void setScreenPowerMode(bool open) = 0;
virtual void expandNotificationPanel() = 0;
virtual void collapsePanel() = 0;
virtual void postBackOrScreenOn(bool down) = 0;
virtual void postTextInput(QString &text) = 0;
virtual void requestDeviceClipboard() = 0;
virtual void setDeviceClipboard(bool pause = true) = 0;
virtual void clipboardPaste() = 0;
virtual void pushFileRequest(const QString &file, const QString &devicePath = "") = 0;
virtual void installApkRequest(const QString &apkFile) = 0;
virtual void screenshot() = 0;
virtual void showTouch(bool show) = 0;
virtual bool isReversePort(quint16 port) = 0;
virtual const QString &getSerial() = 0;
virtual void updateScript(QString script) = 0;
virtual bool isCurrentCustomKeymap() = 0;
};
class IDeviceManage : public QObject {
Q_OBJECT
public:
static IDeviceManage& getInstance();
virtual bool connectDevice(DeviceParams params) = 0;
virtual bool disconnectDevice(const QString &serial) = 0;
virtual void disconnectAllDevice() = 0;
virtual QPointer<IDevice> getDevice(const QString& serial) = 0;
signals:
void deviceConnected(bool success, const QString& serial, const QString& deviceName, const QSize& size);
void deviceDisconnected(QString serial);
};
}
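For context, a minimal hypothetical consumer of the interface removed above could look like the sketch below: it subclasses DeviceObserver, listens for IDeviceManage::deviceConnected, and starts a connection through the singleton. The FrameLogger class, the serial number and the server path are placeholders and are not part of the removed sources.

// Hypothetical consumer of the removed interface; FrameLogger, the serial
// and the server path are placeholders.
#include <cstdint>
#include <QCoreApplication>
#include <QDebug>
#include <QPointer>
#include <QSize>
#include "QtScrcpyCore.h"

class FrameLogger : public qsc::DeviceObserver {
public:
    // Log the size of every decoded frame instead of rendering it.
    void onFrame(int width, int height, uint8_t *, uint8_t *, uint8_t *,
                 int, int, int) override {
        qInfo() << "frame" << width << "x" << height;
    }
    void updateFPS(quint32 fps) override { qInfo() << "fps" << fps; }
};

int main(int argc, char *argv[]) {
    QCoreApplication app(argc, argv);
    FrameLogger logger;

    auto &dm = qsc::IDeviceManage::getInstance();
    QObject::connect(&dm, &qsc::IDeviceManage::deviceConnected, &app,
        [&](bool success, const QString &serial, const QString &deviceName, const QSize &size) {
            qInfo() << "connected:" << success << serial << deviceName << size;
            auto device = dm.getDevice(serial);
            if (success && device) {
                device->registerDeviceObserver(&logger);
            }
        });

    qsc::DeviceParams params;                   // defaults from QtScrcpyCoreDef.h
    params.serial = "emulator-5554";            // placeholder serial
    params.serverLocalPath = "./scrcpy-server"; // placeholder server path
    dm.connectDevice(params);

    return app.exec();
}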

View file

@@ -1,42 +0,0 @@
#pragma once
#include <QString>
namespace qsc {
struct DeviceParams {
// necessary
QString serial = ""; // 设备序列号
QString serverLocalPath = ""; // 本地安卓server路径
// optional
QString serverRemotePath = "/data/local/tmp/scrcpy-server.jar"; // 要推送到远端设备的server路径
quint16 localPort = 27183; // reverse时本地监听端口
quint16 maxSize = 720; // 视频分辨率
quint32 bitRate = 2000000; // 视频比特率
quint32 maxFps = 60; // 视频最大帧率
bool useReverse = true; // true:先使用adb reverse失败后自动使用adb forwardfalse:直接使用adb forward
int lockVideoOrientation = -1; // 是否锁定视频方向
bool stayAwake = false; // 是否保持唤醒
QString serverVersion = "1.21";// server版本
QString logLevel = "info"; // log级别 debug/info/warn/error
// 编码选项 ""表示默认
// 例如 CodecOptions="profile=1,level=2"
// 更多编码选项参考 https://d.android.com/reference/android/media/MediaFormat
QString codecOptions = "";
// 指定编码器名称(必须是H.264编码器)""表示默认
// 例如 CodecName="OMX.qcom.video.encoder.avc"
QString codecName = "";
QString recordPath = ""; // 视频保存路径
QString recordFileFormat = "mp4"; // 视频保存格式 mp4/mkv
bool recordFile = false; // 录制到文件
QString pushFilePath = "/sdcard/"; // 推送到安卓设备的文件保存路径(必须以/结尾)
bool closeScreen = false; // 启动时自动息屏
bool display = true; // 是否显示画面(或者仅仅后台录制)
bool renderExpiredFrames = false; // 是否渲染延迟视频帧
QString gameScript = ""; // 游戏映射脚本
};
}
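As an illustration of the fields above, the hypothetical helper below fills DeviceParams for a headless background-recording session. The function name and every value are illustrative only; only serial and serverLocalPath are strictly required.

#include <QString>
#include "QtScrcpyCoreDef.h"

// Hypothetical helper (not part of the removed sources): configure a
// background recording session. All values are illustrative.
static qsc::DeviceParams makeRecordingParams(const QString &serial)
{
    qsc::DeviceParams params;
    params.serial = serial;
    params.serverLocalPath = "./scrcpy-server"; // placeholder local server path
    params.maxSize = 1080;                      // cap the video resolution
    params.bitRate = 8000000;                   // 8 Mbps
    params.maxFps = 30;
    params.recordFile = true;                   // record to file
    params.recordPath = "/tmp/capture";         // placeholder output directory
    params.recordFileFormat = "mkv";            // mp4 or mkv
    params.display = false;                     // background recording only
    params.closeScreen = true;                  // turn the screen off on start
    return params;
}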

View file

@@ -1,54 +0,0 @@
#ifndef ADBPROCESS_H
#define ADBPROCESS_H
#include <QObject>
class AdbProcessImpl;
namespace qsc {
class AdbProcess : public QObject
{
Q_OBJECT
public:
enum ADB_EXEC_RESULT
{
AER_SUCCESS_START, // started successfully
AER_ERROR_START, // failed to start
AER_SUCCESS_EXEC, // executed successfully
AER_ERROR_EXEC, // execution failed
AER_ERROR_MISSING_BINARY, // adb binary not found
};
explicit AdbProcess(QObject *parent = nullptr);
virtual ~AdbProcess();
static void setAdbPath(const QString& adbPath);
void execute(const QString &serial, const QStringList &args);
void forward(const QString &serial, quint16 localPort, const QString &deviceSocketName);
void forwardRemove(const QString &serial, quint16 localPort);
void reverse(const QString &serial, const QString &deviceSocketName, quint16 localPort);
void reverseRemove(const QString &serial, const QString &deviceSocketName);
void push(const QString &serial, const QString &local, const QString &remote);
void install(const QString &serial, const QString &local);
void removePath(const QString &serial, const QString &path);
bool isRuning();
void setShowTouchesEnabled(const QString &serial, bool enabled);
void kill();
QStringList arguments();
QStringList getDevicesSerialFromStdOut();
QString getDeviceIPFromStdOut();
QString getDeviceIPByIpFromStdOut();
QString getStdOut();
QString getErrorOut();
signals:
void adbProcessResult(ADB_EXEC_RESULT processResult);
private:
AdbProcessImpl* m_adbImpl = nullptr;
};
}
#endif // ADBPROCESS_H
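A hypothetical usage sketch for the wrapper declared above: run "adb devices" through AdbProcess and print the parsed serials once the command finishes. The adb path is a placeholder, and the result arrives asynchronously via the adbProcessResult signal.

#include <QCoreApplication>
#include <QDebug>
#include <QStringList>
#include "adbprocess.h"

int main(int argc, char *argv[]) {
    QCoreApplication app(argc, argv);

    qsc::AdbProcess::setAdbPath("/usr/bin/adb"); // placeholder adb location
    qsc::AdbProcess adb;

    QObject::connect(&adb, &qsc::AdbProcess::adbProcessResult, &app,
        [&](qsc::AdbProcess::ADB_EXEC_RESULT result) {
            if (result == qsc::AdbProcess::AER_SUCCESS_EXEC) {
                // "adb devices" finished: parse the serial list from stdout.
                qInfo() << "devices:" << adb.getDevicesSerialFromStdOut();
                app.quit();
            } else if (result != qsc::AdbProcess::AER_SUCCESS_START) {
                qWarning() << "adb failed:" << adb.getErrorOut();
                app.quit();
            }
        });

    adb.execute(QString(), QStringList() << "devices"); // empty serial: no -s argument
    return app.exec();
}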

View file

@@ -1,116 +0,0 @@
#include <QCoreApplication>
#include <QDebug>
#include <QDir>
#include <QFileInfo>
#include <QProcess>
#include "adbprocess.h"
#include "adbprocessimpl.h"
QString g_adbPath;
namespace qsc {
AdbProcess::AdbProcess(QObject *parent)
: QObject(parent)
, m_adbImpl(new AdbProcessImpl())
{
connect(m_adbImpl, &AdbProcessImpl::adbProcessImplResult, this, &qsc::AdbProcess::adbProcessResult);
}
AdbProcess::~AdbProcess()
{
delete m_adbImpl;
}
void AdbProcess::setAdbPath(const QString &adbPath)
{
g_adbPath = adbPath;
}
void AdbProcess::execute(const QString &serial, const QStringList &args)
{
m_adbImpl->execute(serial, args);
}
bool AdbProcess::isRuning()
{
return m_adbImpl->isRuning();
}
void AdbProcess::setShowTouchesEnabled(const QString &serial, bool enabled)
{
m_adbImpl->setShowTouchesEnabled(serial, enabled);
}
void AdbProcess::kill()
{
m_adbImpl->kill();
}
QStringList AdbProcess::arguments()
{
return m_adbImpl->arguments();
}
QStringList AdbProcess::getDevicesSerialFromStdOut()
{
return m_adbImpl->getDevicesSerialFromStdOut();
}
QString AdbProcess::getDeviceIPFromStdOut()
{
return m_adbImpl->getDeviceIPFromStdOut();
}
QString AdbProcess::getDeviceIPByIpFromStdOut()
{
return m_adbImpl->getDeviceIPByIpFromStdOut();
}
QString AdbProcess::getStdOut()
{
return m_adbImpl->getStdOut();
}
QString AdbProcess::getErrorOut()
{
return m_adbImpl->getErrorOut();
}
void AdbProcess::forward(const QString &serial, quint16 localPort, const QString &deviceSocketName)
{
m_adbImpl->forward(serial, localPort, deviceSocketName);
}
void AdbProcess::forwardRemove(const QString &serial, quint16 localPort)
{
m_adbImpl->forwardRemove(serial, localPort);
}
void AdbProcess::reverse(const QString &serial, const QString &deviceSocketName, quint16 localPort)
{
m_adbImpl->reverse(serial, deviceSocketName, localPort);
}
void AdbProcess::reverseRemove(const QString &serial, const QString &deviceSocketName)
{
m_adbImpl->reverseRemove(serial, deviceSocketName);
}
void AdbProcess::push(const QString &serial, const QString &local, const QString &remote)
{
m_adbImpl->push(serial, local, remote);
}
void AdbProcess::install(const QString &serial, const QString &local)
{
m_adbImpl->install(serial, local);
}
void AdbProcess::removePath(const QString &serial, const QString &path)
{
m_adbImpl->removePath(serial, path);
}
}

View file

@@ -1,244 +0,0 @@
#include <QCoreApplication>
#include <QDebug>
#include <QDir>
#include <QFileInfo>
#include <QProcess>
#include "adbprocessimpl.h"
QString AdbProcessImpl::s_adbPath = "";
extern QString g_adbPath;
AdbProcessImpl::AdbProcessImpl(QObject *parent) : QProcess(parent)
{
initSignals();
}
AdbProcessImpl::~AdbProcessImpl()
{
if (isRuning()) {
close();
}
}
const QString &AdbProcessImpl::getAdbPath()
{
if (s_adbPath.isEmpty()) {
s_adbPath = QString::fromLocal8Bit(qgetenv("QTSCRCPY_ADB_PATH"));
QFileInfo fileInfo(s_adbPath);
if (s_adbPath.isEmpty() || !fileInfo.isFile()) {
s_adbPath = g_adbPath;
}
fileInfo = s_adbPath;
if (s_adbPath.isEmpty() || !fileInfo.isFile()) {
s_adbPath = QCoreApplication::applicationDirPath() + "/adb";
}
qInfo("adb path: %s", QDir(s_adbPath).absolutePath().toUtf8().data());
}
return s_adbPath;
}
void AdbProcessImpl::initSignals()
{
// aboutToQuit does not exit the event loop, so deleteLater is fine here
//connect(QCoreApplication::instance(), &QCoreApplication::aboutToQuit, this, &AdbProcessImpl::deleteLater);
connect(this, static_cast<void (QProcess::*)(int, QProcess::ExitStatus)>(&QProcess::finished), this, [this](int exitCode, QProcess::ExitStatus exitStatus) {
if (NormalExit == exitStatus && 0 == exitCode) {
emit adbProcessImplResult(qsc::AdbProcess::AER_SUCCESS_EXEC);
} else {
// e.g. "P7C0218510000537 unauthorized": the phone is showing the USB debugging authorization prompt; debugging must be allowed on the device
emit adbProcessImplResult(qsc::AdbProcess::AER_ERROR_EXEC);
}
qDebug() << "adb return " << exitCode << "exit status " << exitStatus;
});
connect(this, &QProcess::errorOccurred, this, [this](QProcess::ProcessError error) {
if (QProcess::FailedToStart == error) {
emit adbProcessImplResult(qsc::AdbProcess::AER_ERROR_MISSING_BINARY);
} else {
emit adbProcessImplResult(qsc::AdbProcess::AER_ERROR_START);
QString err = QString("qprocess start error:%1 %2").arg(program()).arg(arguments().join(" "));
qCritical() << err.toStdString().c_str();
}
});
connect(this, &QProcess::readyReadStandardError, this, [this]() {
QString tmp = QString::fromUtf8(readAllStandardError()).trimmed();
m_errorOutput += tmp;
qWarning() << QString("AdbProcessImpl::error:%1").arg(tmp).toStdString().data();
});
connect(this, &QProcess::readyReadStandardOutput, this, [this]() {
QString tmp = QString::fromUtf8(readAllStandardOutput()).trimmed();
m_standardOutput += tmp;
qInfo() << QString("AdbProcessImpl::out:%1").arg(tmp).toStdString().data();
});
connect(this, &QProcess::started, this, [this]() { emit adbProcessImplResult(qsc::AdbProcess::AER_SUCCESS_START); });
}
void AdbProcessImpl::execute(const QString &serial, const QStringList &args)
{
m_standardOutput = "";
m_errorOutput = "";
QStringList adbArgs;
if (!serial.isEmpty()) {
adbArgs << "-s" << serial;
}
adbArgs << args;
qDebug() << getAdbPath() << adbArgs.join(" ");
start(getAdbPath(), adbArgs);
}
bool AdbProcessImpl::isRuning()
{
if (QProcess::NotRunning == state()) {
return false;
} else {
return true;
}
}
void AdbProcessImpl::setShowTouchesEnabled(const QString &serial, bool enabled)
{
QStringList adbArgs;
adbArgs << "shell"
<< "settings"
<< "put"
<< "system"
<< "show_touches";
adbArgs << (enabled ? "1" : "0");
execute(serial, adbArgs);
}
QStringList AdbProcessImpl::getDevicesSerialFromStdOut()
{
// get device serials from the output of "adb devices"
QStringList serials;
#if QT_VERSION >= QT_VERSION_CHECK(5, 15, 0)
QStringList devicesInfoList = m_standardOutput.split(QRegExp("\r\n|\n"), Qt::SkipEmptyParts);
#else
QStringList devicesInfoList = m_standardOutput.split(QRegExp("\r\n|\n"), QString::SkipEmptyParts);
#endif
for (QString deviceInfo : devicesInfoList) {
#if QT_VERSION >= QT_VERSION_CHECK(5, 15, 0)
QStringList deviceInfos = deviceInfo.split(QRegExp("\t"), Qt::SkipEmptyParts);
#else
QStringList deviceInfos = deviceInfo.split(QRegExp("\t"), QString::SkipEmptyParts);
#endif
if (2 == deviceInfos.count() && 0 == deviceInfos[1].compare("device")) {
serials << deviceInfos[0];
}
}
return serials;
}
QString AdbProcessImpl::getDeviceIPFromStdOut()
{
QString ip = "";
#if 0
QString strIPExp = "inet [\\d.]*";
QRegExp ipRegExp(strIPExp, Qt::CaseInsensitive);
if (ipRegExp.indexIn(m_standardOutput) != -1) {
ip = ipRegExp.cap(0);
ip = ip.right(ip.size() - 5);
}
#else
QString strIPExp = "inet addr:[\\d.]*";
QRegExp ipRegExp(strIPExp, Qt::CaseInsensitive);
if (ipRegExp.indexIn(m_standardOutput) != -1) {
ip = ipRegExp.cap(0);
ip = ip.right(ip.size() - 10);
}
#endif
return ip;
}
QString AdbProcessImpl::getDeviceIPByIpFromStdOut()
{
QString ip = "";
QString strIPExp = "wlan0 inet [\\d.]*";
QRegExp ipRegExp(strIPExp, Qt::CaseInsensitive);
if (ipRegExp.indexIn(m_standardOutput) != -1) {
ip = ipRegExp.cap(0);
ip = ip.right(ip.size() - 14);
}
qDebug() << "get ip: " << ip;
return ip;
}
QString AdbProcessImpl::getStdOut()
{
return m_standardOutput;
}
QString AdbProcessImpl::getErrorOut()
{
return m_errorOutput;
}
void AdbProcessImpl::forward(const QString &serial, quint16 localPort, const QString &deviceSocketName)
{
QStringList adbArgs;
adbArgs << "forward";
adbArgs << QString("tcp:%1").arg(localPort);
adbArgs << QString("localabstract:%1").arg(deviceSocketName);
execute(serial, adbArgs);
}
void AdbProcessImpl::forwardRemove(const QString &serial, quint16 localPort)
{
QStringList adbArgs;
adbArgs << "forward";
adbArgs << "--remove";
adbArgs << QString("tcp:%1").arg(localPort);
execute(serial, adbArgs);
}
void AdbProcessImpl::reverse(const QString &serial, const QString &deviceSocketName, quint16 localPort)
{
QStringList adbArgs;
adbArgs << "reverse";
adbArgs << QString("localabstract:%1").arg(deviceSocketName);
adbArgs << QString("tcp:%1").arg(localPort);
execute(serial, adbArgs);
}
void AdbProcessImpl::reverseRemove(const QString &serial, const QString &deviceSocketName)
{
QStringList adbArgs;
adbArgs << "reverse";
adbArgs << "--remove";
adbArgs << QString("localabstract:%1").arg(deviceSocketName);
execute(serial, adbArgs);
}
void AdbProcessImpl::push(const QString &serial, const QString &local, const QString &remote)
{
QStringList adbArgs;
adbArgs << "push";
adbArgs << local;
adbArgs << remote;
execute(serial, adbArgs);
}
void AdbProcessImpl::install(const QString &serial, const QString &local)
{
QStringList adbArgs;
adbArgs << "install";
adbArgs << "-r";
adbArgs << local;
execute(serial, adbArgs);
}
void AdbProcessImpl::removePath(const QString &serial, const QString &path)
{
QStringList adbArgs;
adbArgs << "shell";
adbArgs << "rm";
adbArgs << path;
execute(serial, adbArgs);
}
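To make the command construction above concrete, the following hypothetical sketch sets up a reverse tunnel through the public qsc::AdbProcess wrapper; with these arguments the implementation effectively runs "adb -s <serial> reverse localabstract:scrcpy tcp:27183". The serial, socket name and port are placeholders.

#include <QCoreApplication>
#include <QDebug>
#include "adbprocess.h"

int main(int argc, char *argv[]) {
    QCoreApplication app(argc, argv);

    qsc::AdbProcess adb;
    QObject::connect(&adb, &qsc::AdbProcess::adbProcessResult, &app,
        [&](qsc::AdbProcess::ADB_EXEC_RESULT result) {
            if (result != qsc::AdbProcess::AER_SUCCESS_START) {
                // Either the command finished or it failed to start.
                qInfo() << "result:" << result << "args were:" << adb.arguments();
                app.quit();
            }
        });

    adb.reverse("emulator-5554", "scrcpy", 27183); // placeholder values
    return app.exec();
}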

View file

@@ -1,42 +0,0 @@
#pragma once
#include <QProcess>
#include "adbprocess.h"
class AdbProcessImpl : public QProcess
{
Q_OBJECT
public:
explicit AdbProcessImpl(QObject *parent = nullptr);
virtual ~AdbProcessImpl();
void execute(const QString &serial, const QStringList &args);
void forward(const QString &serial, quint16 localPort, const QString &deviceSocketName);
void forwardRemove(const QString &serial, quint16 localPort);
void reverse(const QString &serial, const QString &deviceSocketName, quint16 localPort);
void reverseRemove(const QString &serial, const QString &deviceSocketName);
void push(const QString &serial, const QString &local, const QString &remote);
void install(const QString &serial, const QString &local);
void removePath(const QString &serial, const QString &path);
bool isRuning();
void setShowTouchesEnabled(const QString &serial, bool enabled);
QStringList getDevicesSerialFromStdOut();
QString getDeviceIPFromStdOut();
QString getDeviceIPByIpFromStdOut();
QString getStdOut();
QString getErrorOut();
static const QString &getAdbPath();
signals:
void adbProcessImplResult(qsc::AdbProcess::ADB_EXEC_RESULT processResult);
private:
void initSignals();
private:
QString m_standardOutput = "";
QString m_errorOutput = "";
static QString s_adbPath;
};

View file

@@ -1,2 +0,0 @@
HEADERS += \
$$PWD/qscrcpyevent.h

View file

@@ -1,22 +0,0 @@
#ifndef QSCRCPYEVENT_H
#define QSCRCPYEVENT_H
#include <QEvent>
class QScrcpyEvent : public QEvent
{
public:
enum Type
{
VideoSocket = QEvent::User + 1,
Control,
};
QScrcpyEvent(Type type) : QEvent(QEvent::Type(type)) {}
};
// VideoSocketEvent
class VideoSocketEvent : public QScrcpyEvent
{
public:
VideoSocketEvent() : QScrcpyEvent(VideoSocket) {}
};
#endif // QSCRCPYEVENT_H
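A hypothetical sketch of how such a custom event could be delivered: post a VideoSocketEvent to a QObject and pick it up in customEvent(). The Receiver class is illustrative and not part of the removed sources.

#include <QCoreApplication>
#include <QDebug>
#include "qscrcpyevent.h"

class Receiver : public QObject {
protected:
    void customEvent(QEvent *event) override {
        // Custom events (type >= QEvent::User) arrive here.
        if (event->type() == QEvent::Type(QScrcpyEvent::VideoSocket)) {
            qInfo() << "VideoSocketEvent received";
        }
    }
};

int main(int argc, char *argv[]) {
    QCoreApplication app(argc, argv);
    Receiver receiver;
    // The event loop takes ownership of the heap-allocated event.
    QCoreApplication::postEvent(&receiver, new VideoSocketEvent());
    // Deliver pending events once, then exit (enough for this sketch).
    QCoreApplication::processEvents();
    return 0;
}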

View file

@@ -1,840 +0,0 @@
// copied from <https://android.googlesource.com/platform/frameworks/native/+/master/include/android/input.h>
// blob 08299899b6305a0fe74d7d2b8471b7cd0af49dc7
// (and modified)
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef _ANDROID_INPUT_H
#define _ANDROID_INPUT_H
/**
* Meta key / modifier state.
*/
enum AndroidMetastate
{
/** No meta keys are pressed. */
AMETA_NONE = 0,
/** This mask is used to check whether one of the ALT meta keys is pressed. */
AMETA_ALT_ON = 0x02,
/** This mask is used to check whether the left ALT meta key is pressed. */
AMETA_ALT_LEFT_ON = 0x10,
/** This mask is used to check whether the right ALT meta key is pressed. */
AMETA_ALT_RIGHT_ON = 0x20,
/** This mask is used to check whether one of the SHIFT meta keys is pressed. */
AMETA_SHIFT_ON = 0x01,
/** This mask is used to check whether the left SHIFT meta key is pressed. */
AMETA_SHIFT_LEFT_ON = 0x40,
/** This mask is used to check whether the right SHIFT meta key is pressed. */
AMETA_SHIFT_RIGHT_ON = 0x80,
/** This mask is used to check whether the SYM meta key is pressed. */
AMETA_SYM_ON = 0x04,
/** This mask is used to check whether the FUNCTION meta key is pressed. */
AMETA_FUNCTION_ON = 0x08,
/** This mask is used to check whether one of the CTRL meta keys is pressed. */
AMETA_CTRL_ON = 0x1000,
/** This mask is used to check whether the left CTRL meta key is pressed. */
AMETA_CTRL_LEFT_ON = 0x2000,
/** This mask is used to check whether the right CTRL meta key is pressed. */
AMETA_CTRL_RIGHT_ON = 0x4000,
/** This mask is used to check whether one of the META meta keys is pressed. */
AMETA_META_ON = 0x10000,
/** This mask is used to check whether the left META meta key is pressed. */
AMETA_META_LEFT_ON = 0x20000,
/** This mask is used to check whether the right META meta key is pressed. */
AMETA_META_RIGHT_ON = 0x40000,
/** This mask is used to check whether the CAPS LOCK meta key is on. */
AMETA_CAPS_LOCK_ON = 0x100000,
/** This mask is used to check whether the NUM LOCK meta key is on. */
AMETA_NUM_LOCK_ON = 0x200000,
/** This mask is used to check whether the SCROLL LOCK meta key is on. */
AMETA_SCROLL_LOCK_ON = 0x400000,
};
/**
* Input event types.
*/
enum AndroidInputEventType
{
/** Indicates that the input event is a key event. */
AINPUT_EVENT_TYPE_KEY = 1,
/** Indicates that the input event is a motion event. */
AINPUT_EVENT_TYPE_MOTION = 2
};
/**
* Key event actions.
*/
enum AndroidKeyeventAction
{
/** The key has been pressed down. */
AKEY_EVENT_ACTION_DOWN = 0,
/** The key has been released. */
AKEY_EVENT_ACTION_UP = 1,
/**
* Multiple duplicate key events have occurred in a row, or a
* complex string is being delivered. The repeat_count property
* of the key event contains the number of times the given key
* code should be executed.
*/
AKEY_EVENT_ACTION_MULTIPLE = 2
};
/**
* Key event flags.
*/
enum AndroidKeyeventFlags
{
/** This mask is set if the device woke because of this key event. */
AKEY_EVENT_FLAG_WOKE_HERE = 0x1,
/** This mask is set if the key event was generated by a software keyboard. */
AKEY_EVENT_FLAG_SOFT_KEYBOARD = 0x2,
/** This mask is set if we don't want the key event to cause us to leave touch mode. */
AKEY_EVENT_FLAG_KEEP_TOUCH_MODE = 0x4,
/**
* This mask is set if an event was known to come from a trusted
* part of the system. That is, the event is known to come from
* the user, and could not have been spoofed by a third party
* component.
*/
AKEY_EVENT_FLAG_FROM_SYSTEM = 0x8,
/**
* This mask is used for compatibility, to identify enter keys that are
* coming from an IME whose enter key has been auto-labelled "next" or
* "done". This allows TextView to dispatch these as normal enter keys
* for old applications, but still do the appropriate action when
* receiving them.
*/
AKEY_EVENT_FLAG_EDITOR_ACTION = 0x10,
/**
* When associated with up key events, this indicates that the key press
* has been canceled. Typically this is used with virtual touch screen
* keys, where the user can slide from the virtual key area on to the
* display: in that case, the application will receive a canceled up
* event and should not perform the action normally associated with the
* key. Note that for this to work, the application can not perform an
* action for a key until it receives an up or the long press timeout has
* expired.
*/
AKEY_EVENT_FLAG_CANCELED = 0x20,
/**
* This key event was generated by a virtual (on-screen) hard key area.
* Typically this is an area of the touchscreen, outside of the regular
* display, dedicated to "hardware" buttons.
*/
AKEY_EVENT_FLAG_VIRTUAL_HARD_KEY = 0x40,
/**
* This flag is set for the first key repeat that occurs after the
* long press timeout.
*/
AKEY_EVENT_FLAG_LONG_PRESS = 0x80,
/**
* Set when a key event has AKEY_EVENT_FLAG_CANCELED set because a long
* press action was executed while it was down.
*/
AKEY_EVENT_FLAG_CANCELED_LONG_PRESS = 0x100,
/**
* Set for AKEY_EVENT_ACTION_UP when this event's key code is still being
* tracked from its initial down. That is, somebody requested that tracking
* started on the key down and a long press has not caused
* the tracking to be canceled.
*/
AKEY_EVENT_FLAG_TRACKING = 0x200,
/**
* Set when a key event has been synthesized to implement default behavior
* for an event that the application did not handle.
* Fallback key events are generated by unhandled trackball motions
* (to emulate a directional keypad) and by certain unhandled key presses
* that are declared in the key map (such as special function numeric keypad
* keys when numlock is off).
*/
AKEY_EVENT_FLAG_FALLBACK = 0x400,
};
/**
* Bit shift for the action bits holding the pointer index as
* defined by AMOTION_EVENT_ACTION_POINTER_INDEX_MASK.
*/
#define AMOTION_EVENT_ACTION_POINTER_INDEX_SHIFT 8
/** Motion event actions */
enum AndroidMotioneventAction
{
/** Bit mask of the parts of the action code that are the action itself. */
AMOTION_EVENT_ACTION_MASK = 0xff,
/**
* Bits in the action code that represent a pointer index, used with
* AMOTION_EVENT_ACTION_POINTER_DOWN and AMOTION_EVENT_ACTION_POINTER_UP. Shifting
* down by AMOTION_EVENT_ACTION_POINTER_INDEX_SHIFT provides the actual pointer
* index where the data for the pointer going up or down can be found.
*/
AMOTION_EVENT_ACTION_POINTER_INDEX_MASK = 0xff00,
/** A pressed gesture has started, the motion contains the initial starting location. */
AMOTION_EVENT_ACTION_DOWN = 0,
/**
* A pressed gesture has finished, the motion contains the final release location
* as well as any intermediate points since the last down or move event.
*/
AMOTION_EVENT_ACTION_UP = 1,
/**
* A change has happened during a press gesture (between AMOTION_EVENT_ACTION_DOWN and
* AMOTION_EVENT_ACTION_UP). The motion contains the most recent point, as well as
* any intermediate points since the last down or move event.
*/
AMOTION_EVENT_ACTION_MOVE = 2,
/**
* The current gesture has been aborted.
* You will not receive any more points in it. You should treat this as
* an up event, but not perform any action that you normally would.
*/
AMOTION_EVENT_ACTION_CANCEL = 3,
/**
* A movement has happened outside of the normal bounds of the UI element.
* This does not provide a full gesture, but only the initial location of the movement/touch.
*/
AMOTION_EVENT_ACTION_OUTSIDE = 4,
/**
* A non-primary pointer has gone down.
* The bits in AMOTION_EVENT_ACTION_POINTER_INDEX_MASK indicate which pointer changed.
*/
AMOTION_EVENT_ACTION_POINTER_DOWN = 5,
/**
* A non-primary pointer has gone up.
* The bits in AMOTION_EVENT_ACTION_POINTER_INDEX_MASK indicate which pointer changed.
*/
AMOTION_EVENT_ACTION_POINTER_UP = 6,
/**
* A change happened but the pointer is not down (unlike AMOTION_EVENT_ACTION_MOVE).
* The motion contains the most recent point, as well as any intermediate points since
* the last hover move event.
*/
AMOTION_EVENT_ACTION_HOVER_MOVE = 7,
/**
* The motion event contains relative vertical and/or horizontal scroll offsets.
* Use getAxisValue to retrieve the information from AMOTION_EVENT_AXIS_VSCROLL
* and AMOTION_EVENT_AXIS_HSCROLL.
* The pointer may or may not be down when this event is dispatched.
* This action is always delivered to the window under the pointer, which
* may not be the window currently touched.
*/
AMOTION_EVENT_ACTION_SCROLL = 8,
/** The pointer is not down but has entered the boundaries of a window or view. */
AMOTION_EVENT_ACTION_HOVER_ENTER = 9,
/** The pointer is not down but has exited the boundaries of a window or view. */
AMOTION_EVENT_ACTION_HOVER_EXIT = 10,
/* One or more buttons have been pressed. */
AMOTION_EVENT_ACTION_BUTTON_PRESS = 11,
/* One or more buttons have been released. */
AMOTION_EVENT_ACTION_BUTTON_RELEASE = 12,
};
/**
* Motion event flags.
*/
enum AndroidMotioneventFlags
{
/**
* This flag indicates that the window that received this motion event is partly
* or wholly obscured by another visible window above it. This flag is set to true
* even if the event did not directly pass through the obscured area.
* A security sensitive application can check this flag to identify situations in which
* a malicious application may have covered up part of its content for the purpose
* of misleading the user or hijacking touches. An appropriate response might be
* to drop the suspect touches or to take additional precautions to confirm the user's
* actual intent.
*/
AMOTION_EVENT_FLAG_WINDOW_IS_OBSCURED = 0x1,
};
/**
* Motion event edge touch flags.
*/
enum AndroidMotioneventEdgeTouchFlags
{
/** No edges intersected. */
AMOTION_EVENT_EDGE_FLAG_NONE = 0,
/** Flag indicating the motion event intersected the top edge of the screen. */
AMOTION_EVENT_EDGE_FLAG_TOP = 0x01,
/** Flag indicating the motion event intersected the bottom edge of the screen. */
AMOTION_EVENT_EDGE_FLAG_BOTTOM = 0x02,
/** Flag indicating the motion event intersected the left edge of the screen. */
AMOTION_EVENT_EDGE_FLAG_LEFT = 0x04,
/** Flag indicating the motion event intersected the right edge of the screen. */
AMOTION_EVENT_EDGE_FLAG_RIGHT = 0x08
};
/**
* Constants that identify each individual axis of a motion event.
* @anchor AMOTION_EVENT_AXIS
*/
enum AndroidMotioneventAxis
{
/**
* Axis constant: X axis of a motion event.
*
* - For a touch screen, reports the absolute X screen position of the center of
* the touch contact area. The units are display pixels.
* - For a touch pad, reports the absolute X surface position of the center of the touch
* contact area. The units are device-dependent.
* - For a mouse, reports the absolute X screen position of the mouse pointer.
* The units are display pixels.
* - For a trackball, reports the relative horizontal displacement of the trackball.
* The value is normalized to a range from -1.0 (left) to 1.0 (right).
* - For a joystick, reports the absolute X position of the joystick.
* The value is normalized to a range from -1.0 (left) to 1.0 (right).
*/
AMOTION_EVENT_AXIS_X = 0,
/**
* Axis constant: Y axis of a motion event.
*
* - For a touch screen, reports the absolute Y screen position of the center of
* the touch contact area. The units are display pixels.
* - For a touch pad, reports the absolute Y surface position of the center of the touch
* contact area. The units are device-dependent.
* - For a mouse, reports the absolute Y screen position of the mouse pointer.
* The units are display pixels.
* - For a trackball, reports the relative vertical displacement of the trackball.
* The value is normalized to a range from -1.0 (up) to 1.0 (down).
* - For a joystick, reports the absolute Y position of the joystick.
* The value is normalized to a range from -1.0 (up or far) to 1.0 (down or near).
*/
AMOTION_EVENT_AXIS_Y = 1,
/**
* Axis constant: Pressure axis of a motion event.
*
* - For a touch screen or touch pad, reports the approximate pressure applied to the surface
* by a finger or other tool. The value is normalized to a range from
* 0 (no pressure at all) to 1 (normal pressure), although values higher than 1
* may be generated depending on the calibration of the input device.
* - For a trackball, the value is set to 1 if the trackball button is pressed
* or 0 otherwise.
* - For a mouse, the value is set to 1 if the primary mouse button is pressed
* or 0 otherwise.
*/
AMOTION_EVENT_AXIS_PRESSURE = 2,
/**
* Axis constant: Size axis of a motion event.
*
* - For a touch screen or touch pad, reports the approximate size of the contact area in
* relation to the maximum detectable size for the device. The value is normalized
* to a range from 0 (smallest detectable size) to 1 (largest detectable size),
* although it is not a linear scale. This value is of limited use.
* To obtain calibrated size information, see
* {@link AMOTION_EVENT_AXIS_TOUCH_MAJOR} or {@link AMOTION_EVENT_AXIS_TOOL_MAJOR}.
*/
AMOTION_EVENT_AXIS_SIZE = 3,
/**
* Axis constant: TouchMajor axis of a motion event.
*
* - For a touch screen, reports the length of the major axis of an ellipse that
* represents the touch area at the point of contact.
* The units are display pixels.
* - For a touch pad, reports the length of the major axis of an ellipse that
* represents the touch area at the point of contact.
* The units are device-dependent.
*/
AMOTION_EVENT_AXIS_TOUCH_MAJOR = 4,
/**
* Axis constant: TouchMinor axis of a motion event.
*
* - For a touch screen, reports the length of the minor axis of an ellipse that
* represents the touch area at the point of contact.
* The units are display pixels.
* - For a touch pad, reports the length of the minor axis of an ellipse that
* represents the touch area at the point of contact.
* The units are device-dependent.
*
* When the touch is circular, the major and minor axis lengths will be equal to one another.
*/
AMOTION_EVENT_AXIS_TOUCH_MINOR = 5,
/**
* Axis constant: ToolMajor axis of a motion event.
*
* - For a touch screen, reports the length of the major axis of an ellipse that
* represents the size of the approaching finger or tool used to make contact.
* - For a touch pad, reports the length of the major axis of an ellipse that
* represents the size of the approaching finger or tool used to make contact.
* The units are device-dependent.
*
* When the touch is circular, the major and minor axis lengths will be equal to one another.
*
* The tool size may be larger than the touch size since the tool may not be fully
* in contact with the touch sensor.
*/
AMOTION_EVENT_AXIS_TOOL_MAJOR = 6,
/**
* Axis constant: ToolMinor axis of a motion event.
*
* - For a touch screen, reports the length of the minor axis of an ellipse that
* represents the size of the approaching finger or tool used to make contact.
* - For a touch pad, reports the length of the minor axis of an ellipse that
* represents the size of the approaching finger or tool used to make contact.
* The units are device-dependent.
*
* When the touch is circular, the major and minor axis lengths will be equal to one another.
*
* The tool size may be larger than the touch size since the tool may not be fully
* in contact with the touch sensor.
*/
AMOTION_EVENT_AXIS_TOOL_MINOR = 7,
/**
* Axis constant: Orientation axis of a motion event.
*
* - For a touch screen or touch pad, reports the orientation of the finger
* or tool in radians relative to the vertical plane of the device.
* An angle of 0 radians indicates that the major axis of contact is oriented
* upwards, is perfectly circular or is of unknown orientation. A positive angle
* indicates that the major axis of contact is oriented to the right. A negative angle
* indicates that the major axis of contact is oriented to the left.
* The full range is from -PI/2 radians (finger pointing fully left) to PI/2 radians
* (finger pointing fully right).
* - For a stylus, the orientation indicates the direction in which the stylus
* is pointing in relation to the vertical axis of the current orientation of the screen.
* The range is from -PI radians to PI radians, where 0 is pointing up,
* -PI/2 radians is pointing left, -PI or PI radians is pointing down, and PI/2 radians
* is pointing right. See also {@link AMOTION_EVENT_AXIS_TILT}.
*/
AMOTION_EVENT_AXIS_ORIENTATION = 8,
/**
* Axis constant: Vertical Scroll axis of a motion event.
*
* - For a mouse, reports the relative movement of the vertical scroll wheel.
* The value is normalized to a range from -1.0 (down) to 1.0 (up).
*
* This axis should be used to scroll views vertically.
*/
AMOTION_EVENT_AXIS_VSCROLL = 9,
/**
* Axis constant: Horizontal Scroll axis of a motion event.
*
* - For a mouse, reports the relative movement of the horizontal scroll wheel.
* The value is normalized to a range from -1.0 (left) to 1.0 (right).
*
* This axis should be used to scroll views horizontally.
*/
AMOTION_EVENT_AXIS_HSCROLL = 10,
/**
* Axis constant: Z axis of a motion event.
*
* - For a joystick, reports the absolute Z position of the joystick.
* The value is normalized to a range from -1.0 (high) to 1.0 (low).
* <em>On game pads with two analog joysticks, this axis is often reinterpreted
* to report the absolute X position of the second joystick instead.</em>
*/
AMOTION_EVENT_AXIS_Z = 11,
/**
* Axis constant: X Rotation axis of a motion event.
*
* - For a joystick, reports the absolute rotation angle about the X axis.
* The value is normalized to a range from -1.0 (counter-clockwise) to 1.0 (clockwise).
*/
AMOTION_EVENT_AXIS_RX = 12,
/**
* Axis constant: Y Rotation axis of a motion event.
*
* - For a joystick, reports the absolute rotation angle about the Y axis.
* The value is normalized to a range from -1.0 (counter-clockwise) to 1.0 (clockwise).
*/
AMOTION_EVENT_AXIS_RY = 13,
/**
* Axis constant: Z Rotation axis of a motion event.
*
* - For a joystick, reports the absolute rotation angle about the Z axis.
* The value is normalized to a range from -1.0 (counter-clockwise) to 1.0 (clockwise).
* On game pads with two analog joysticks, this axis is often reinterpreted
* to report the absolute Y position of the second joystick instead.
*/
AMOTION_EVENT_AXIS_RZ = 14,
/**
* Axis constant: Hat X axis of a motion event.
*
* - For a joystick, reports the absolute X position of the directional hat control.
* The value is normalized to a range from -1.0 (left) to 1.0 (right).
*/
AMOTION_EVENT_AXIS_HAT_X = 15,
/**
* Axis constant: Hat Y axis of a motion event.
*
* - For a joystick, reports the absolute Y position of the directional hat control.
* The value is normalized to a range from -1.0 (up) to 1.0 (down).
*/
AMOTION_EVENT_AXIS_HAT_Y = 16,
/**
* Axis constant: Left Trigger axis of a motion event.
*
* - For a joystick, reports the absolute position of the left trigger control.
* The value is normalized to a range from 0.0 (released) to 1.0 (fully pressed).
*/
AMOTION_EVENT_AXIS_LTRIGGER = 17,
/**
* Axis constant: Right Trigger axis of a motion event.
*
* - For a joystick, reports the absolute position of the right trigger control.
* The value is normalized to a range from 0.0 (released) to 1.0 (fully pressed).
*/
AMOTION_EVENT_AXIS_RTRIGGER = 18,
/**
* Axis constant: Throttle axis of a motion event.
*
* - For a joystick, reports the absolute position of the throttle control.
* The value is normalized to a range from 0.0 (fully open) to 1.0 (fully closed).
*/
AMOTION_EVENT_AXIS_THROTTLE = 19,
/**
* Axis constant: Rudder axis of a motion event.
*
* - For a joystick, reports the absolute position of the rudder control.
* The value is normalized to a range from -1.0 (turn left) to 1.0 (turn right).
*/
AMOTION_EVENT_AXIS_RUDDER = 20,
/**
* Axis constant: Wheel axis of a motion event.
*
* - For a joystick, reports the absolute position of the steering wheel control.
* The value is normalized to a range from -1.0 (turn left) to 1.0 (turn right).
*/
AMOTION_EVENT_AXIS_WHEEL = 21,
/**
* Axis constant: Gas axis of a motion event.
*
* - For a joystick, reports the absolute position of the gas (accelerator) control.
* The value is normalized to a range from 0.0 (no acceleration)
* to 1.0 (maximum acceleration).
*/
AMOTION_EVENT_AXIS_GAS = 22,
/**
* Axis constant: Brake axis of a motion event.
*
* - For a joystick, reports the absolute position of the brake control.
* The value is normalized to a range from 0.0 (no braking) to 1.0 (maximum braking).
*/
AMOTION_EVENT_AXIS_BRAKE = 23,
/**
* Axis constant: Distance axis of a motion event.
*
* - For a stylus, reports the distance of the stylus from the screen.
* A value of 0.0 indicates direct contact and larger values indicate increasing
* distance from the surface.
*/
AMOTION_EVENT_AXIS_DISTANCE = 24,
/**
* Axis constant: Tilt axis of a motion event.
*
* - For a stylus, reports the tilt angle of the stylus in radians where
* 0 radians indicates that the stylus is being held perpendicular to the
* surface, and PI/2 radians indicates that the stylus is being held flat
* against the surface.
*/
AMOTION_EVENT_AXIS_TILT = 25,
/**
* Axis constant: Generic scroll axis of a motion event.
*
* - This is used for scroll axis motion events that can't be classified as strictly
* vertical or horizontal. The movement of a rotating scroller is an example of this.
*/
AMOTION_EVENT_AXIS_SCROLL = 26,
/**
* Axis constant: The movement of x position of a motion event.
*
* - For a mouse, reports a difference of x position between the previous position.
* This is useful when pointer is captured, in that case the mouse pointer doesn't
* change the location but this axis reports the difference which allows the app
* to see how the mouse is moved.
*/
AMOTION_EVENT_AXIS_RELATIVE_X = 27,
/**
* Axis constant: The movement of y position of a motion event.
*
* Same as {@link RELATIVE_X}, but for y position.
*/
AMOTION_EVENT_AXIS_RELATIVE_Y = 28,
/**
* Axis constant: Generic 1 axis of a motion event.
* The interpretation of a generic axis is device-specific.
*/
AMOTION_EVENT_AXIS_GENERIC_1 = 32,
/**
* Axis constant: Generic 2 axis of a motion event.
* The interpretation of a generic axis is device-specific.
*/
AMOTION_EVENT_AXIS_GENERIC_2 = 33,
/**
* Axis constant: Generic 3 axis of a motion event.
* The interpretation of a generic axis is device-specific.
*/
AMOTION_EVENT_AXIS_GENERIC_3 = 34,
/**
* Axis constant: Generic 4 axis of a motion event.
* The interpretation of a generic axis is device-specific.
*/
AMOTION_EVENT_AXIS_GENERIC_4 = 35,
/**
* Axis constant: Generic 5 axis of a motion event.
* The interpretation of a generic axis is device-specific.
*/
AMOTION_EVENT_AXIS_GENERIC_5 = 36,
/**
* Axis constant: Generic 6 axis of a motion event.
* The interpretation of a generic axis is device-specific.
*/
AMOTION_EVENT_AXIS_GENERIC_6 = 37,
/**
* Axis constant: Generic 7 axis of a motion event.
* The interpretation of a generic axis is device-specific.
*/
AMOTION_EVENT_AXIS_GENERIC_7 = 38,
/**
* Axis constant: Generic 8 axis of a motion event.
* The interpretation of a generic axis is device-specific.
*/
AMOTION_EVENT_AXIS_GENERIC_8 = 39,
/**
* Axis constant: Generic 9 axis of a motion event.
* The interpretation of a generic axis is device-specific.
*/
AMOTION_EVENT_AXIS_GENERIC_9 = 40,
/**
* Axis constant: Generic 10 axis of a motion event.
* The interpretation of a generic axis is device-specific.
*/
AMOTION_EVENT_AXIS_GENERIC_10 = 41,
/**
* Axis constant: Generic 11 axis of a motion event.
* The interpretation of a generic axis is device-specific.
*/
AMOTION_EVENT_AXIS_GENERIC_11 = 42,
/**
* Axis constant: Generic 12 axis of a motion event.
* The interpretation of a generic axis is device-specific.
*/
AMOTION_EVENT_AXIS_GENERIC_12 = 43,
/**
* Axis constant: Generic 13 axis of a motion event.
* The interpretation of a generic axis is device-specific.
*/
AMOTION_EVENT_AXIS_GENERIC_13 = 44,
/**
* Axis constant: Generic 14 axis of a motion event.
* The interpretation of a generic axis is device-specific.
*/
AMOTION_EVENT_AXIS_GENERIC_14 = 45,
/**
* Axis constant: Generic 15 axis of a motion event.
* The interpretation of a generic axis is device-specific.
*/
AMOTION_EVENT_AXIS_GENERIC_15 = 46,
/**
* Axis constant: Generic 16 axis of a motion event.
* The interpretation of a generic axis is device-specific.
*/
AMOTION_EVENT_AXIS_GENERIC_16 = 47,
// NOTE: If you add a new axis here you must also add it to several other files.
// Refer to frameworks/base/core/java/android/view/MotionEvent.java for the full list.
};
/**
* Constants that identify buttons that are associated with motion events.
* Refer to the documentation on the MotionEvent class for descriptions of each button.
*/
enum AndroidMotioneventButtons
{
/** primary */
AMOTION_EVENT_BUTTON_PRIMARY = 1 << 0,
/** secondary */
AMOTION_EVENT_BUTTON_SECONDARY = 1 << 1,
/** tertiary */
AMOTION_EVENT_BUTTON_TERTIARY = 1 << 2,
/** back */
AMOTION_EVENT_BUTTON_BACK = 1 << 3,
/** forward */
AMOTION_EVENT_BUTTON_FORWARD = 1 << 4,
AMOTION_EVENT_BUTTON_STYLUS_PRIMARY = 1 << 5,
AMOTION_EVENT_BUTTON_STYLUS_SECONDARY = 1 << 6,
};
/**
* Constants that identify tool types.
* Refer to the documentation on the MotionEvent class for descriptions of each tool type.
*/
enum AndroidMotioneventToolType
{
/** unknown */
AMOTION_EVENT_TOOL_TYPE_UNKNOWN = 0,
/** finger */
AMOTION_EVENT_TOOL_TYPE_FINGER = 1,
/** stylus */
AMOTION_EVENT_TOOL_TYPE_STYLUS = 2,
/** mouse */
AMOTION_EVENT_TOOL_TYPE_MOUSE = 3,
/** eraser */
AMOTION_EVENT_TOOL_TYPE_ERASER = 4,
};
/**
* Input source masks.
*
* Refer to the documentation on android.view.InputDevice for more details about input sources
* and their correct interpretation.
*/
enum AndroidInputSourceClass
{
/** mask */
AINPUT_SOURCE_CLASS_MASK = 0x000000ff,
/** none */
AINPUT_SOURCE_CLASS_NONE = 0x00000000,
/** button */
AINPUT_SOURCE_CLASS_BUTTON = 0x00000001,
/** pointer */
AINPUT_SOURCE_CLASS_POINTER = 0x00000002,
/** navigation */
AINPUT_SOURCE_CLASS_NAVIGATION = 0x00000004,
/** position */
AINPUT_SOURCE_CLASS_POSITION = 0x00000008,
/** joystick */
AINPUT_SOURCE_CLASS_JOYSTICK = 0x00000010,
};
/**
* Input sources.
*/
enum AndroidInputSource
{
/** unknown */
AINPUT_SOURCE_UNKNOWN = 0x00000000,
/** keyboard */
AINPUT_SOURCE_KEYBOARD = 0x00000100 | AINPUT_SOURCE_CLASS_BUTTON,
/** dpad */
AINPUT_SOURCE_DPAD = 0x00000200 | AINPUT_SOURCE_CLASS_BUTTON,
/** gamepad */
AINPUT_SOURCE_GAMEPAD = 0x00000400 | AINPUT_SOURCE_CLASS_BUTTON,
/** touchscreen */
AINPUT_SOURCE_TOUCHSCREEN = 0x00001000 | AINPUT_SOURCE_CLASS_POINTER,
/** mouse */
AINPUT_SOURCE_MOUSE = 0x00002000 | AINPUT_SOURCE_CLASS_POINTER,
/** stylus */
AINPUT_SOURCE_STYLUS = 0x00004000 | AINPUT_SOURCE_CLASS_POINTER,
/** bluetooth stylus */
AINPUT_SOURCE_BLUETOOTH_STYLUS = 0x00008000 | AINPUT_SOURCE_STYLUS,
/** trackball */
AINPUT_SOURCE_TRACKBALL = 0x00010000 | AINPUT_SOURCE_CLASS_NAVIGATION,
/** mouse relative */
AINPUT_SOURCE_MOUSE_RELATIVE = 0x00020000 | AINPUT_SOURCE_CLASS_NAVIGATION,
/** touchpad */
AINPUT_SOURCE_TOUCHPAD = 0x00100000 | AINPUT_SOURCE_CLASS_POSITION,
/** navigation */
AINPUT_SOURCE_TOUCH_NAVIGATION = 0x00200000 | AINPUT_SOURCE_CLASS_NONE,
/** joystick */
AINPUT_SOURCE_JOYSTICK = 0x01000000 | AINPUT_SOURCE_CLASS_JOYSTICK,
/** rotary encoder */
AINPUT_SOURCE_ROTARY_ENCODER = 0x00400000 | AINPUT_SOURCE_CLASS_NONE,
};
/**
* Keyboard types.
*
* Refer to the documentation on android.view.InputDevice for more details.
*/
enum AndroidKeyboardType
{
/** none */
AINPUT_KEYBOARD_TYPE_NONE = 0,
/** non alphabetic */
AINPUT_KEYBOARD_TYPE_NON_ALPHABETIC = 1,
/** alphabetic */
AINPUT_KEYBOARD_TYPE_ALPHABETIC = 2,
};
/**
* Constants used to retrieve information about the range of motion for a particular
* coordinate of a motion event.
*
* Refer to the documentation on android.view.InputDevice for more details about input sources
* and their correct interpretation.
*
* @deprecated These constants are deprecated. Use {@link AMOTION_EVENT_AXIS AMOTION_EVENT_AXIS_*} constants instead.
*/
enum AndroidMotionRange
{
/** x */
AINPUT_MOTION_RANGE_X = AMOTION_EVENT_AXIS_X,
/** y */
AINPUT_MOTION_RANGE_Y = AMOTION_EVENT_AXIS_Y,
/** pressure */
AINPUT_MOTION_RANGE_PRESSURE = AMOTION_EVENT_AXIS_PRESSURE,
/** size */
AINPUT_MOTION_RANGE_SIZE = AMOTION_EVENT_AXIS_SIZE,
/** touch major */
AINPUT_MOTION_RANGE_TOUCH_MAJOR = AMOTION_EVENT_AXIS_TOUCH_MAJOR,
/** touch minor */
AINPUT_MOTION_RANGE_TOUCH_MINOR = AMOTION_EVENT_AXIS_TOUCH_MINOR,
/** tool major */
AINPUT_MOTION_RANGE_TOOL_MAJOR = AMOTION_EVENT_AXIS_TOOL_MAJOR,
/** tool minor */
AINPUT_MOTION_RANGE_TOOL_MINOR = AMOTION_EVENT_AXIS_TOOL_MINOR,
/** orientation */
AINPUT_MOTION_RANGE_ORIENTATION = AMOTION_EVENT_AXIS_ORIENTATION,
};
#endif // _ANDROID_INPUT_H
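The pointer-index packing described in the AMOTION_EVENT_ACTION_POINTER_INDEX_MASK comments above can be illustrated with a small hypothetical helper. The include path and both function names are assumptions made for this sketch.

// Hypothetical sketch: a non-primary pointer-down action carries the pointer
// index in the bits selected by AMOTION_EVENT_ACTION_POINTER_INDEX_MASK.
#include <cstdint>
#include <cstdio>
#include "input.h" // assumed include path for the header above

// Pack a pointer index into an action code (e.g. for POINTER_DOWN/POINTER_UP).
static uint32_t packAction(AndroidMotioneventAction action, uint32_t pointerIndex) {
    return (action & AMOTION_EVENT_ACTION_MASK)
         | ((pointerIndex << AMOTION_EVENT_ACTION_POINTER_INDEX_SHIFT)
            & AMOTION_EVENT_ACTION_POINTER_INDEX_MASK);
}

// Recover the pointer index from a packed action code.
static uint32_t pointerIndexOf(uint32_t packedAction) {
    return (packedAction & AMOTION_EVENT_ACTION_POINTER_INDEX_MASK)
           >> AMOTION_EVENT_ACTION_POINTER_INDEX_SHIFT;
}

int main() {
    uint32_t packed = packAction(AMOTION_EVENT_ACTION_POINTER_DOWN, 1);
    std::printf("action=%u pointerIndex=%u\n",
                static_cast<unsigned>(packed & AMOTION_EVENT_ACTION_MASK),
                static_cast<unsigned>(pointerIndexOf(packed)));
    return 0;
}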

View file

@@ -1,746 +0,0 @@
// copied from <https://android.googlesource.com/platform/frameworks/native/+/master/include/android/keycodes.h>
// blob 2164d6163e1646c22825e364cad4f3c47638effd
// (and modified)
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef _ANDROID_KEYCODES_H
#define _ANDROID_KEYCODES_H
/**
* Key codes.
*/
enum AndroidKeycode
{
/** Unknown key code. */
AKEYCODE_UNKNOWN = 0,
/** Soft Left key.
* Usually situated below the display on phones and used as a multi-function
* feature key for selecting a software defined function shown on the bottom left
* of the display. */
AKEYCODE_SOFT_LEFT = 1,
/** Soft Right key.
* Usually situated below the display on phones and used as a multi-function
* feature key for selecting a software defined function shown on the bottom right
* of the display. */
AKEYCODE_SOFT_RIGHT = 2,
/** Home key.
* This key is handled by the framework and is never delivered to applications. */
AKEYCODE_HOME = 3,
/** Back key. */
AKEYCODE_BACK = 4,
/** Call key. */
AKEYCODE_CALL = 5,
/** End Call key. */
AKEYCODE_ENDCALL = 6,
/** '0' key. */
AKEYCODE_0 = 7,
/** '1' key. */
AKEYCODE_1 = 8,
/** '2' key. */
AKEYCODE_2 = 9,
/** '3' key. */
AKEYCODE_3 = 10,
/** '4' key. */
AKEYCODE_4 = 11,
/** '5' key. */
AKEYCODE_5 = 12,
/** '6' key. */
AKEYCODE_6 = 13,
/** '7' key. */
AKEYCODE_7 = 14,
/** '8' key. */
AKEYCODE_8 = 15,
/** '9' key. */
AKEYCODE_9 = 16,
/** '*' key. */
AKEYCODE_STAR = 17,
/** '#' key. */
AKEYCODE_POUND = 18,
/** Directional Pad Up key.
* May also be synthesized from trackball motions. */
AKEYCODE_DPAD_UP = 19,
/** Directional Pad Down key.
* May also be synthesized from trackball motions. */
AKEYCODE_DPAD_DOWN = 20,
/** Directional Pad Left key.
* May also be synthesized from trackball motions. */
AKEYCODE_DPAD_LEFT = 21,
/** Directional Pad Right key.
* May also be synthesized from trackball motions. */
AKEYCODE_DPAD_RIGHT = 22,
/** Directional Pad Center key.
* May also be synthesized from trackball motions. */
AKEYCODE_DPAD_CENTER = 23,
/** Volume Up key.
* Adjusts the speaker volume up. */
AKEYCODE_VOLUME_UP = 24,
/** Volume Down key.
* Adjusts the speaker volume down. */
AKEYCODE_VOLUME_DOWN = 25,
/** Power key. */
AKEYCODE_POWER = 26,
/** Camera key.
* Used to launch a camera application or take pictures. */
AKEYCODE_CAMERA = 27,
/** Clear key. */
AKEYCODE_CLEAR = 28,
/** 'A' key. */
AKEYCODE_A = 29,
/** 'B' key. */
AKEYCODE_B = 30,
/** 'C' key. */
AKEYCODE_C = 31,
/** 'D' key. */
AKEYCODE_D = 32,
/** 'E' key. */
AKEYCODE_E = 33,
/** 'F' key. */
AKEYCODE_F = 34,
/** 'G' key. */
AKEYCODE_G = 35,
/** 'H' key. */
AKEYCODE_H = 36,
/** 'I' key. */
AKEYCODE_I = 37,
/** 'J' key. */
AKEYCODE_J = 38,
/** 'K' key. */
AKEYCODE_K = 39,
/** 'L' key. */
AKEYCODE_L = 40,
/** 'M' key. */
AKEYCODE_M = 41,
/** 'N' key. */
AKEYCODE_N = 42,
/** 'O' key. */
AKEYCODE_O = 43,
/** 'P' key. */
AKEYCODE_P = 44,
/** 'Q' key. */
AKEYCODE_Q = 45,
/** 'R' key. */
AKEYCODE_R = 46,
/** 'S' key. */
AKEYCODE_S = 47,
/** 'T' key. */
AKEYCODE_T = 48,
/** 'U' key. */
AKEYCODE_U = 49,
/** 'V' key. */
AKEYCODE_V = 50,
/** 'W' key. */
AKEYCODE_W = 51,
/** 'X' key. */
AKEYCODE_X = 52,
/** 'Y' key. */
AKEYCODE_Y = 53,
/** 'Z' key. */
AKEYCODE_Z = 54,
/** ',' key. */
AKEYCODE_COMMA = 55,
/** '.' key. */
AKEYCODE_PERIOD = 56,
/** Left Alt modifier key. */
AKEYCODE_ALT_LEFT = 57,
/** Right Alt modifier key. */
AKEYCODE_ALT_RIGHT = 58,
/** Left Shift modifier key. */
AKEYCODE_SHIFT_LEFT = 59,
/** Right Shift modifier key. */
AKEYCODE_SHIFT_RIGHT = 60,
/** Tab key. */
AKEYCODE_TAB = 61,
/** Space key. */
AKEYCODE_SPACE = 62,
/** Symbol modifier key.
* Used to enter alternate symbols. */
AKEYCODE_SYM = 63,
/** Explorer special function key.
* Used to launch a browser application. */
AKEYCODE_EXPLORER = 64,
/** Envelope special function key.
* Used to launch a mail application. */
AKEYCODE_ENVELOPE = 65,
/** Enter key. */
AKEYCODE_ENTER = 66,
/** Backspace key.
* Deletes characters before the insertion point, unlike {@link AKEYCODE_FORWARD_DEL}. */
AKEYCODE_DEL = 67,
/** '`' (backtick) key. */
AKEYCODE_GRAVE = 68,
/** '-'. */
AKEYCODE_MINUS = 69,
/** '=' key. */
AKEYCODE_EQUALS = 70,
/** '[' key. */
AKEYCODE_LEFT_BRACKET = 71,
/** ']' key. */
AKEYCODE_RIGHT_BRACKET = 72,
/** '\' key. */
AKEYCODE_BACKSLASH = 73,
/** ';' key. */
AKEYCODE_SEMICOLON = 74,
/** ''' (apostrophe) key. */
AKEYCODE_APOSTROPHE = 75,
/** '/' key. */
AKEYCODE_SLASH = 76,
/** '@' key. */
AKEYCODE_AT = 77,
/** Number modifier key.
* Used to enter numeric symbols.
* This key is not {@link AKEYCODE_NUM_LOCK}; it is more like {@link AKEYCODE_ALT_LEFT}. */
AKEYCODE_NUM = 78,
/** Headset Hook key.
* Used to hang up calls and stop media. */
AKEYCODE_HEADSETHOOK = 79,
/** Camera Focus key.
* Used to focus the camera. */
AKEYCODE_FOCUS = 80,
/** '+' key. */
AKEYCODE_PLUS = 81,
/** Menu key. */
AKEYCODE_MENU = 82,
/** Notification key. */
AKEYCODE_NOTIFICATION = 83,
/** Search key. */
AKEYCODE_SEARCH = 84,
/** Play/Pause media key. */
AKEYCODE_MEDIA_PLAY_PAUSE = 85,
/** Stop media key. */
AKEYCODE_MEDIA_STOP = 86,
/** Play Next media key. */
AKEYCODE_MEDIA_NEXT = 87,
/** Play Previous media key. */
AKEYCODE_MEDIA_PREVIOUS = 88,
/** Rewind media key. */
AKEYCODE_MEDIA_REWIND = 89,
/** Fast Forward media key. */
AKEYCODE_MEDIA_FAST_FORWARD = 90,
/** Mute key.
* Mutes the microphone, unlike {@link AKEYCODE_VOLUME_MUTE}. */
AKEYCODE_MUTE = 91,
/** Page Up key. */
AKEYCODE_PAGE_UP = 92,
/** Page Down key. */
AKEYCODE_PAGE_DOWN = 93,
/** Picture Symbols modifier key.
* Used to switch symbol sets (Emoji, Kao-moji). */
AKEYCODE_PICTSYMBOLS = 94,
/** Switch Charset modifier key.
* Used to switch character sets (Kanji, Katakana). */
AKEYCODE_SWITCH_CHARSET = 95,
/** A Button key.
* On a game controller, the A button should be either the button labeled A
* or the first button on the bottom row of controller buttons. */
AKEYCODE_BUTTON_A = 96,
/** B Button key.
* On a game controller, the B button should be either the button labeled B
* or the second button on the bottom row of controller buttons. */
AKEYCODE_BUTTON_B = 97,
/** C Button key.
* On a game controller, the C button should be either the button labeled C
* or the third button on the bottom row of controller buttons. */
AKEYCODE_BUTTON_C = 98,
/** X Button key.
* On a game controller, the X button should be either the button labeled X
* or the first button on the upper row of controller buttons. */
AKEYCODE_BUTTON_X = 99,
/** Y Button key.
* On a game controller, the Y button should be either the button labeled Y
* or the second button on the upper row of controller buttons. */
AKEYCODE_BUTTON_Y = 100,
/** Z Button key.
* On a game controller, the Z button should be either the button labeled Z
* or the third button on the upper row of controller buttons. */
AKEYCODE_BUTTON_Z = 101,
/** L1 Button key.
* On a game controller, the L1 button should be either the button labeled L1 (or L)
* or the top left trigger button. */
AKEYCODE_BUTTON_L1 = 102,
/** R1 Button key.
* On a game controller, the R1 button should be either the button labeled R1 (or R)
* or the top right trigger button. */
AKEYCODE_BUTTON_R1 = 103,
/** L2 Button key.
* On a game controller, the L2 button should be either the button labeled L2
* or the bottom left trigger button. */
AKEYCODE_BUTTON_L2 = 104,
/** R2 Button key.
* On a game controller, the R2 button should be either the button labeled R2
* or the bottom right trigger button. */
AKEYCODE_BUTTON_R2 = 105,
/** Left Thumb Button key.
* On a game controller, the left thumb button indicates that the left (or only)
* joystick is pressed. */
AKEYCODE_BUTTON_THUMBL = 106,
/** Right Thumb Button key.
* On a game controller, the right thumb button indicates that the right
* joystick is pressed. */
AKEYCODE_BUTTON_THUMBR = 107,
/** Start Button key.
* On a game controller, the button labeled Start. */
AKEYCODE_BUTTON_START = 108,
/** Select Button key.
* On a game controller, the button labeled Select. */
AKEYCODE_BUTTON_SELECT = 109,
/** Mode Button key.
* On a game controller, the button labeled Mode. */
AKEYCODE_BUTTON_MODE = 110,
/** Escape key. */
AKEYCODE_ESCAPE = 111,
/** Forward Delete key.
* Deletes characters ahead of the insertion point, unlike {@link AKEYCODE_DEL}. */
AKEYCODE_FORWARD_DEL = 112,
/** Left Control modifier key. */
AKEYCODE_CTRL_LEFT = 113,
/** Right Control modifier key. */
AKEYCODE_CTRL_RIGHT = 114,
/** Caps Lock key. */
AKEYCODE_CAPS_LOCK = 115,
/** Scroll Lock key. */
AKEYCODE_SCROLL_LOCK = 116,
/** Left Meta modifier key. */
AKEYCODE_META_LEFT = 117,
/** Right Meta modifier key. */
AKEYCODE_META_RIGHT = 118,
/** Function modifier key. */
AKEYCODE_FUNCTION = 119,
/** System Request / Print Screen key. */
AKEYCODE_SYSRQ = 120,
/** Break / Pause key. */
AKEYCODE_BREAK = 121,
/** Home Movement key.
* Used for scrolling or moving the cursor around to the start of a line
* or to the top of a list. */
AKEYCODE_MOVE_HOME = 122,
/** End Movement key.
* Used for scrolling or moving the cursor around to the end of a line
* or to the bottom of a list. */
AKEYCODE_MOVE_END = 123,
/** Insert key.
* Toggles insert / overwrite edit mode. */
AKEYCODE_INSERT = 124,
/** Forward key.
* Navigates forward in the history stack. Complement of {@link AKEYCODE_BACK}. */
AKEYCODE_FORWARD = 125,
/** Play media key. */
AKEYCODE_MEDIA_PLAY = 126,
/** Pause media key. */
AKEYCODE_MEDIA_PAUSE = 127,
/** Close media key.
* May be used to close a CD tray, for example. */
AKEYCODE_MEDIA_CLOSE = 128,
/** Eject media key.
* May be used to eject a CD tray, for example. */
AKEYCODE_MEDIA_EJECT = 129,
/** Record media key. */
AKEYCODE_MEDIA_RECORD = 130,
/** F1 key. */
AKEYCODE_F1 = 131,
/** F2 key. */
AKEYCODE_F2 = 132,
/** F3 key. */
AKEYCODE_F3 = 133,
/** F4 key. */
AKEYCODE_F4 = 134,
/** F5 key. */
AKEYCODE_F5 = 135,
/** F6 key. */
AKEYCODE_F6 = 136,
/** F7 key. */
AKEYCODE_F7 = 137,
/** F8 key. */
AKEYCODE_F8 = 138,
/** F9 key. */
AKEYCODE_F9 = 139,
/** F10 key. */
AKEYCODE_F10 = 140,
/** F11 key. */
AKEYCODE_F11 = 141,
/** F12 key. */
AKEYCODE_F12 = 142,
/** Num Lock key.
* This is the Num Lock key; it is different from {@link AKEYCODE_NUM}.
* This key alters the behavior of other keys on the numeric keypad. */
AKEYCODE_NUM_LOCK = 143,
/** Numeric keypad '0' key. */
AKEYCODE_NUMPAD_0 = 144,
/** Numeric keypad '1' key. */
AKEYCODE_NUMPAD_1 = 145,
/** Numeric keypad '2' key. */
AKEYCODE_NUMPAD_2 = 146,
/** Numeric keypad '3' key. */
AKEYCODE_NUMPAD_3 = 147,
/** Numeric keypad '4' key. */
AKEYCODE_NUMPAD_4 = 148,
/** Numeric keypad '5' key. */
AKEYCODE_NUMPAD_5 = 149,
/** Numeric keypad '6' key. */
AKEYCODE_NUMPAD_6 = 150,
/** Numeric keypad '7' key. */
AKEYCODE_NUMPAD_7 = 151,
/** Numeric keypad '8' key. */
AKEYCODE_NUMPAD_8 = 152,
/** Numeric keypad '9' key. */
AKEYCODE_NUMPAD_9 = 153,
/** Numeric keypad '/' key (for division). */
AKEYCODE_NUMPAD_DIVIDE = 154,
/** Numeric keypad '*' key (for multiplication). */
AKEYCODE_NUMPAD_MULTIPLY = 155,
/** Numeric keypad '-' key (for subtraction). */
AKEYCODE_NUMPAD_SUBTRACT = 156,
/** Numeric keypad '+' key (for addition). */
AKEYCODE_NUMPAD_ADD = 157,
/** Numeric keypad '.' key (for decimals or digit grouping). */
AKEYCODE_NUMPAD_DOT = 158,
/** Numeric keypad ',' key (for decimals or digit grouping). */
AKEYCODE_NUMPAD_COMMA = 159,
/** Numeric keypad Enter key. */
AKEYCODE_NUMPAD_ENTER = 160,
/** Numeric keypad '=' key. */
AKEYCODE_NUMPAD_EQUALS = 161,
/** Numeric keypad '(' key. */
AKEYCODE_NUMPAD_LEFT_PAREN = 162,
/** Numeric keypad ')' key. */
AKEYCODE_NUMPAD_RIGHT_PAREN = 163,
/** Volume Mute key.
* Mutes the speaker, unlike {@link AKEYCODE_MUTE}.
* This key should normally be implemented as a toggle such that the first press
* mutes the speaker and the second press restores the original volume. */
AKEYCODE_VOLUME_MUTE = 164,
/** Info key.
* Common on TV remotes to show additional information related to what is
* currently being viewed. */
AKEYCODE_INFO = 165,
/** Channel up key.
* On TV remotes, increments the television channel. */
AKEYCODE_CHANNEL_UP = 166,
/** Channel down key.
* On TV remotes, decrements the television channel. */
AKEYCODE_CHANNEL_DOWN = 167,
/** Zoom in key. */
AKEYCODE_ZOOM_IN = 168,
/** Zoom out key. */
AKEYCODE_ZOOM_OUT = 169,
/** TV key.
* On TV remotes, switches to viewing live TV. */
AKEYCODE_TV = 170,
/** Window key.
* On TV remotes, toggles picture-in-picture mode or other windowing functions. */
AKEYCODE_WINDOW = 171,
/** Guide key.
* On TV remotes, shows a programming guide. */
AKEYCODE_GUIDE = 172,
/** DVR key.
* On some TV remotes, switches to a DVR mode for recorded shows. */
AKEYCODE_DVR = 173,
/** Bookmark key.
* On some TV remotes, bookmarks content or web pages. */
AKEYCODE_BOOKMARK = 174,
/** Toggle captions key.
* Switches the mode for closed-captioning text, for example during television shows. */
AKEYCODE_CAPTIONS = 175,
/** Settings key.
* Starts the system settings activity. */
AKEYCODE_SETTINGS = 176,
/** TV power key.
* On TV remotes, toggles the power on a television screen. */
AKEYCODE_TV_POWER = 177,
/** TV input key.
* On TV remotes, switches the input on a television screen. */
AKEYCODE_TV_INPUT = 178,
/** Set-top-box power key.
* On TV remotes, toggles the power on an external Set-top-box. */
AKEYCODE_STB_POWER = 179,
/** Set-top-box input key.
* On TV remotes, switches the input mode on an external Set-top-box. */
AKEYCODE_STB_INPUT = 180,
/** A/V Receiver power key.
* On TV remotes, toggles the power on an external A/V Receiver. */
AKEYCODE_AVR_POWER = 181,
/** A/V Receiver input key.
* On TV remotes, switches the input mode on an external A/V Receiver. */
AKEYCODE_AVR_INPUT = 182,
/** Red "programmable" key.
* On TV remotes, acts as a contextual/programmable key. */
AKEYCODE_PROG_RED = 183,
/** Green "programmable" key.
* On TV remotes, acts as a contextual/programmable key. */
AKEYCODE_PROG_GREEN = 184,
/** Yellow "programmable" key.
* On TV remotes, acts as a contextual/programmable key. */
AKEYCODE_PROG_YELLOW = 185,
/** Blue "programmable" key.
* On TV remotes, acts as a contextual/programmable key. */
AKEYCODE_PROG_BLUE = 186,
/** App switch key.
* Should bring up the application switcher dialog. */
AKEYCODE_APP_SWITCH = 187,
/** Generic Game Pad Button #1.*/
AKEYCODE_BUTTON_1 = 188,
/** Generic Game Pad Button #2.*/
AKEYCODE_BUTTON_2 = 189,
/** Generic Game Pad Button #3.*/
AKEYCODE_BUTTON_3 = 190,
/** Generic Game Pad Button #4.*/
AKEYCODE_BUTTON_4 = 191,
/** Generic Game Pad Button #5.*/
AKEYCODE_BUTTON_5 = 192,
/** Generic Game Pad Button #6.*/
AKEYCODE_BUTTON_6 = 193,
/** Generic Game Pad Button #7.*/
AKEYCODE_BUTTON_7 = 194,
/** Generic Game Pad Button #8.*/
AKEYCODE_BUTTON_8 = 195,
/** Generic Game Pad Button #9.*/
AKEYCODE_BUTTON_9 = 196,
/** Generic Game Pad Button #10.*/
AKEYCODE_BUTTON_10 = 197,
/** Generic Game Pad Button #11.*/
AKEYCODE_BUTTON_11 = 198,
/** Generic Game Pad Button #12.*/
AKEYCODE_BUTTON_12 = 199,
/** Generic Game Pad Button #13.*/
AKEYCODE_BUTTON_13 = 200,
/** Generic Game Pad Button #14.*/
AKEYCODE_BUTTON_14 = 201,
/** Generic Game Pad Button #15.*/
AKEYCODE_BUTTON_15 = 202,
/** Generic Game Pad Button #16.*/
AKEYCODE_BUTTON_16 = 203,
/** Language Switch key.
* Toggles the current input language such as switching between English and Japanese on
* a QWERTY keyboard. On some devices, the same function may be performed by
* pressing Shift+Spacebar. */
AKEYCODE_LANGUAGE_SWITCH = 204,
/** Manner Mode key.
* Toggles silent or vibrate mode on and off to make the device behave more politely
* in certain settings such as on a crowded train. On some devices, the key may only
* operate when long-pressed. */
AKEYCODE_MANNER_MODE = 205,
/** 3D Mode key.
* Toggles the display between 2D and 3D mode. */
AKEYCODE_3D_MODE = 206,
/** Contacts special function key.
* Used to launch an address book application. */
AKEYCODE_CONTACTS = 207,
/** Calendar special function key.
* Used to launch a calendar application. */
AKEYCODE_CALENDAR = 208,
/** Music special function key.
* Used to launch a music player application. */
AKEYCODE_MUSIC = 209,
/** Calculator special function key.
* Used to launch a calculator application. */
AKEYCODE_CALCULATOR = 210,
/** Japanese full-width / half-width key. */
AKEYCODE_ZENKAKU_HANKAKU = 211,
/** Japanese alphanumeric key. */
AKEYCODE_EISU = 212,
/** Japanese non-conversion key. */
AKEYCODE_MUHENKAN = 213,
/** Japanese conversion key. */
AKEYCODE_HENKAN = 214,
/** Japanese katakana / hiragana key. */
AKEYCODE_KATAKANA_HIRAGANA = 215,
/** Japanese Yen key. */
AKEYCODE_YEN = 216,
/** Japanese Ro key. */
AKEYCODE_RO = 217,
/** Japanese kana key. */
AKEYCODE_KANA = 218,
/** Assist key.
* Launches the global assist activity. Not delivered to applications. */
AKEYCODE_ASSIST = 219,
/** Brightness Down key.
* Adjusts the screen brightness down. */
AKEYCODE_BRIGHTNESS_DOWN = 220,
/** Brightness Up key.
* Adjusts the screen brightness up. */
AKEYCODE_BRIGHTNESS_UP = 221,
/** Audio Track key.
* Switches the audio tracks. */
AKEYCODE_MEDIA_AUDIO_TRACK = 222,
/** Sleep key.
* Puts the device to sleep. Behaves somewhat like {@link AKEYCODE_POWER} but it
* has no effect if the device is already asleep. */
AKEYCODE_SLEEP = 223,
/** Wakeup key.
* Wakes up the device. Behaves somewhat like {@link AKEYCODE_POWER} but it
* has no effect if the device is already awake. */
AKEYCODE_WAKEUP = 224,
/** Pairing key.
* Initiates peripheral pairing mode. Useful for pairing remote control
* devices or game controllers, especially if no other input mode is
* available. */
AKEYCODE_PAIRING = 225,
/** Media Top Menu key.
* Goes to the top of media menu. */
AKEYCODE_MEDIA_TOP_MENU = 226,
/** '11' key. */
AKEYCODE_11 = 227,
/** '12' key. */
AKEYCODE_12 = 228,
/** Last Channel key.
* Goes to the last viewed channel. */
AKEYCODE_LAST_CHANNEL = 229,
/** TV data service key.
* Displays data services like weather, sports. */
AKEYCODE_TV_DATA_SERVICE = 230,
/** Voice Assist key.
* Launches the global voice assist activity. Not delivered to applications. */
AKEYCODE_VOICE_ASSIST = 231,
/** Radio key.
* Toggles TV service / Radio service. */
AKEYCODE_TV_RADIO_SERVICE = 232,
/** Teletext key.
* Displays Teletext service. */
AKEYCODE_TV_TELETEXT = 233,
/** Number entry key.
* Initiates to enter multi-digit channel number when each digit key is assigned
* for selecting separate channel. Corresponds to Number Entry Mode (0x1D) of CEC
* User Control Code. */
AKEYCODE_TV_NUMBER_ENTRY = 234,
/** Analog Terrestrial key.
* Switches to analog terrestrial broadcast service. */
AKEYCODE_TV_TERRESTRIAL_ANALOG = 235,
/** Digital Terrestrial key.
* Switches to digital terrestrial broadcast service. */
AKEYCODE_TV_TERRESTRIAL_DIGITAL = 236,
/** Satellite key.
* Switches to digital satellite broadcast service. */
AKEYCODE_TV_SATELLITE = 237,
/** BS key.
* Switches to BS digital satellite broadcasting service available in Japan. */
AKEYCODE_TV_SATELLITE_BS = 238,
/** CS key.
* Switches to CS digital satellite broadcasting service available in Japan. */
AKEYCODE_TV_SATELLITE_CS = 239,
/** BS/CS key.
* Toggles between BS and CS digital satellite services. */
AKEYCODE_TV_SATELLITE_SERVICE = 240,
/** Toggle Network key.
* Toggles selecting broadcast services. */
AKEYCODE_TV_NETWORK = 241,
/** Antenna/Cable key.
* Toggles broadcast input source between antenna and cable. */
AKEYCODE_TV_ANTENNA_CABLE = 242,
/** HDMI #1 key.
* Switches to HDMI input #1. */
AKEYCODE_TV_INPUT_HDMI_1 = 243,
/** HDMI #2 key.
* Switches to HDMI input #2. */
AKEYCODE_TV_INPUT_HDMI_2 = 244,
/** HDMI #3 key.
* Switches to HDMI input #3. */
AKEYCODE_TV_INPUT_HDMI_3 = 245,
/** HDMI #4 key.
* Switches to HDMI input #4. */
AKEYCODE_TV_INPUT_HDMI_4 = 246,
/** Composite #1 key.
* Switches to composite video input #1. */
AKEYCODE_TV_INPUT_COMPOSITE_1 = 247,
/** Composite #2 key.
* Switches to composite video input #2. */
AKEYCODE_TV_INPUT_COMPOSITE_2 = 248,
/** Component #1 key.
* Switches to component video input #1. */
AKEYCODE_TV_INPUT_COMPONENT_1 = 249,
/** Component #2 key.
* Switches to component video input #2. */
AKEYCODE_TV_INPUT_COMPONENT_2 = 250,
/** VGA #1 key.
* Switches to VGA (analog RGB) input #1. */
AKEYCODE_TV_INPUT_VGA_1 = 251,
/** Audio description key.
* Toggles audio description off / on. */
AKEYCODE_TV_AUDIO_DESCRIPTION = 252,
/** Audio description mixing volume up key.
* Raises the audio description volume relative to the normal audio volume. */
AKEYCODE_TV_AUDIO_DESCRIPTION_MIX_UP = 253,
/** Audio description mixing volume down key.
* Lowers the audio description volume relative to the normal audio volume. */
AKEYCODE_TV_AUDIO_DESCRIPTION_MIX_DOWN = 254,
/** Zoom mode key.
* Changes Zoom mode (Normal, Full, Zoom, Wide-zoom, etc.) */
AKEYCODE_TV_ZOOM_MODE = 255,
/** Contents menu key.
* Goes to the title list. Corresponds to Contents Menu (0x0B) of CEC User Control
* Code */
AKEYCODE_TV_CONTENTS_MENU = 256,
/** Media context menu key.
* Goes to the context menu of media contents. Corresponds to Media Context-sensitive
* Menu (0x11) of CEC User Control Code. */
AKEYCODE_TV_MEDIA_CONTEXT_MENU = 257,
/** Timer programming key.
* Goes to the timer recording menu. Corresponds to Timer Programming (0x54) of
* CEC User Control Code. */
AKEYCODE_TV_TIMER_PROGRAMMING = 258,
/** Help key. */
AKEYCODE_HELP = 259,
AKEYCODE_NAVIGATE_PREVIOUS = 260,
AKEYCODE_NAVIGATE_NEXT = 261,
AKEYCODE_NAVIGATE_IN = 262,
AKEYCODE_NAVIGATE_OUT = 263,
/** Primary stem key for Wear
* Main power/reset button on watch. */
AKEYCODE_STEM_PRIMARY = 264,
/** Generic stem key 1 for Wear */
AKEYCODE_STEM_1 = 265,
/** Generic stem key 2 for Wear */
AKEYCODE_STEM_2 = 266,
/** Generic stem key 3 for Wear */
AKEYCODE_STEM_3 = 267,
/** Directional Pad Up-Left */
AKEYCODE_DPAD_UP_LEFT = 268,
/** Directional Pad Down-Left */
AKEYCODE_DPAD_DOWN_LEFT = 269,
/** Directional Pad Up-Right */
AKEYCODE_DPAD_UP_RIGHT = 270,
/** Directional Pad Down-Right */
AKEYCODE_DPAD_DOWN_RIGHT = 271,
/** Skip forward media key */
AKEYCODE_MEDIA_SKIP_FORWARD = 272,
/** Skip backward media key */
AKEYCODE_MEDIA_SKIP_BACKWARD = 273,
/** Step forward media key.
* Steps media forward, one frame at a time. */
AKEYCODE_MEDIA_STEP_FORWARD = 274,
/** Step backward media key.
* Steps media backward, one frame at a time. */
AKEYCODE_MEDIA_STEP_BACKWARD = 275,
/** Put device to sleep unless a wakelock is held. */
AKEYCODE_SOFT_SLEEP = 276,
/** Cut key. */
AKEYCODE_CUT = 277,
/** Copy key. */
AKEYCODE_COPY = 278,
/** Paste key. */
AKEYCODE_PASTE = 279,
/** fingerprint navigation key, up. */
AKEYCODE_SYSTEM_NAVIGATION_UP = 280,
/** fingerprint navigation key, down. */
AKEYCODE_SYSTEM_NAVIGATION_DOWN = 281,
/** fingerprint navigation key, left. */
AKEYCODE_SYSTEM_NAVIGATION_LEFT = 282,
/** fingerprint navigation key, right. */
AKEYCODE_SYSTEM_NAVIGATION_RIGHT = 283,
/** all apps */
AKEYCODE_ALL_APPS = 284
};
#endif // _ANDROID_KEYCODES_H

View file

@ -1,35 +0,0 @@
#ifndef COMPAT_H
#define COMPAT_H
#include "libavcodec/version.h"
#include "libavformat/version.h"
// In ffmpeg/doc/APIchanges:
// 2016-04-11 - 6f69f7a / 9200514 - lavf 57.33.100 / 57.5.0 - avformat.h
// Add AVStream.codecpar, deprecate AVStream.codec.
#if (LIBAVFORMAT_VERSION_MICRO >= 100 /* FFmpeg */ && LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(57, 33, 100)) \
|| (LIBAVFORMAT_VERSION_MICRO < 100 && /* Libav */ \
LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(57, 5, 0))
#define QTSCRCPY_LAVF_HAS_NEW_CODEC_PARAMS_API
#endif
// In ffmpeg/doc/APIchanges:
// 2018-02-06 - 0694d87024 - lavf 58.9.100 - avformat.h
// Deprecate use of av_register_input_format(), av_register_output_format(),
// av_register_all(), av_iformat_next(), av_oformat_next().
// Add av_demuxer_iterate(), and av_muxer_iterate().
#if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(58, 9, 100)
#define QTSCRCPY_LAVF_HAS_NEW_MUXER_ITERATOR_API
#else
#define QTSCRCPY_LAVF_REQUIRES_REGISTER_ALL
#endif
// In ffmpeg/doc/APIchanges:
// 2016-04-21 - 7fc329e - lavc 57.37.100 - avcodec.h
// Add a new audio/video encoding and decoding API with decoupled input
// and output -- avcodec_send_packet(), avcodec_receive_frame(),
// avcodec_send_frame() and avcodec_receive_packet().
#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(57, 37, 100)
#define QTSCRCPY_LAVF_HAS_NEW_ENCODING_DECODING_API
#endif
#endif // COMPAT_H
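A minimal usage sketch, added for illustration and not part of the removed file: callers of compat.h typically branch on these macros, and the FFmpeg calls shown (av_register_all(), AVStream::codecpar, AVStream::codec) are exactly the APIs the macros gate.
#include "compat.h"
extern "C" {
#include "libavformat/avformat.h"
}

static void initFormatLayer()
{
#ifdef QTSCRCPY_LAVF_REQUIRES_REGISTER_ALL
    // old lavf (< 58.9.100): (de)muxers must be registered explicitly
    av_register_all();
#endif
}

static AVCodecID videoCodecId(const AVStream *stream)
{
#ifdef QTSCRCPY_LAVF_HAS_NEW_CODEC_PARAMS_API
    return stream->codecpar->codec_id; // new API: AVStream::codecpar
#else
    return stream->codec->codec_id;    // old, deprecated API: AVStream::codec
#endif
}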

View file

@ -1,58 +0,0 @@
#include "bufferutil.h"
void BufferUtil::write32(QBuffer &buffer, quint32 value)
{
buffer.putChar(value >> 24);
buffer.putChar(value >> 16);
buffer.putChar(value >> 8);
buffer.putChar(value);
}
void BufferUtil::write64(QBuffer &buffer, quint64 value)
{
write32(buffer, value >> 32);
write32(buffer, (quint32)value);
}
void BufferUtil::write16(QBuffer &buffer, quint32 value)
{
buffer.putChar(value >> 8);
buffer.putChar(value);
}
quint16 BufferUtil::read16(QBuffer &buffer)
{
uchar c;
quint16 ret = 0;
buffer.getChar(reinterpret_cast<char *>(&c));
ret |= (c << 8);
buffer.getChar(reinterpret_cast<char *>(&c));
ret |= c;
return ret;
}
quint32 BufferUtil::read32(QBuffer &buffer)
{
uchar c;
quint32 ret = 0;
buffer.getChar(reinterpret_cast<char *>(&c));
ret |= (c << 24);
buffer.getChar(reinterpret_cast<char *>(&c));
ret |= (c << 16);
buffer.getChar(reinterpret_cast<char *>(&c));
ret |= (c << 8);
buffer.getChar(reinterpret_cast<char *>(&c));
ret |= c;
return ret;
}
quint64 BufferUtil::read64(QBuffer &buffer)
{
quint32 msb = read32(buffer);
quint32 lsb = read32(buffer);
return ((quint64)msb << 32) | lsb;
}

View file

@ -1,16 +0,0 @@
#ifndef BUFFERUTIL_H
#define BUFFERUTIL_H
#include <QBuffer>
class BufferUtil
{
public:
static void write16(QBuffer &buffer, quint32 value);
static void write32(QBuffer &buffer, quint32 value);
static void write64(QBuffer &buffer, quint64 value);
static quint16 read16(QBuffer &buffer);
static quint32 read32(QBuffer &buffer);
static quint64 read64(QBuffer &buffer);
};
#endif // BUFFERUTIL_H
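A short usage sketch, added for illustration: BufferUtil reads and writes integers in big-endian (network) byte order, which is the byte order the control-message serialization in this module relies on.
#include <QBuffer>
#include "bufferutil.h"

static bool roundTripExample()
{
    QByteArray bytes;
    QBuffer buffer(&bytes);
    buffer.open(QBuffer::ReadWrite);
    BufferUtil::write32(buffer, 0x12345678); // bytes now hold 0x12 0x34 0x56 0x78
    buffer.seek(0);
    quint32 value = BufferUtil::read32(buffer);
    buffer.close();
    return value == 0x12345678;              // true: the round trip preserves the value
}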

View file

@ -1,262 +0,0 @@
#include <QApplication>
#include <QClipboard>
#include "controller.h"
#include "controlmsg.h"
#include "inputconvertgame.h"
#include "receiver.h"
#include "videosocket.h"
Controller::Controller(std::function<qint64(const QByteArray&)> sendData, QString gameScript, QObject *parent)
: QObject(parent)
, m_sendData(sendData)
{
m_receiver = new Receiver(this);
Q_ASSERT(m_receiver);
updateScript(gameScript);
}
Controller::~Controller() {}
void Controller::postControlMsg(ControlMsg *controlMsg)
{
if (controlMsg) {
QCoreApplication::postEvent(this, controlMsg);
}
}
void Controller::recvDeviceMsg(DeviceMsg *deviceMsg)
{
if (!m_receiver) {
return;
}
m_receiver->recvDeviceMsg(deviceMsg);
}
void Controller::test(QRect rc)
{
ControlMsg *controlMsg = new ControlMsg(ControlMsg::CMT_INJECT_TOUCH);
controlMsg->setInjectTouchMsgData(static_cast<quint64>(POINTER_ID_MOUSE), AMOTION_EVENT_ACTION_DOWN, AMOTION_EVENT_BUTTON_PRIMARY, rc, 1.0f);
postControlMsg(controlMsg);
}
void Controller::updateScript(QString gameScript)
{
if (m_inputConvert) {
delete m_inputConvert;
}
if (!gameScript.isEmpty()) {
InputConvertGame *convertgame = new InputConvertGame(this);
convertgame->loadKeyMap(gameScript);
m_inputConvert = convertgame;
} else {
m_inputConvert = new InputConvertNormal(this);
}
Q_ASSERT(m_inputConvert);
connect(m_inputConvert, &InputConvertBase::grabCursor, this, &Controller::grabCursor);
}
bool Controller::isCurrentCustomKeymap()
{
if (!m_inputConvert) {
return false;
}
return m_inputConvert->isCurrentCustomKeymap();
}
void Controller::postBackOrScreenOn(bool down)
{
ControlMsg *controlMsg = new ControlMsg(ControlMsg::CMT_BACK_OR_SCREEN_ON);
if (!controlMsg) {
return;
}
controlMsg->setBackOrScreenOnData(down);
postControlMsg(controlMsg);
}
void Controller::postGoHome()
{
postKeyCodeClick(AKEYCODE_HOME);
}
void Controller::postGoMenu()
{
postKeyCodeClick(AKEYCODE_MENU);
}
void Controller::postGoBack()
{
postKeyCodeClick(AKEYCODE_BACK);
}
void Controller::postAppSwitch()
{
postKeyCodeClick(AKEYCODE_APP_SWITCH);
}
void Controller::postPower()
{
postKeyCodeClick(AKEYCODE_POWER);
}
void Controller::postVolumeUp()
{
postKeyCodeClick(AKEYCODE_VOLUME_UP);
}
void Controller::postVolumeDown()
{
postKeyCodeClick(AKEYCODE_VOLUME_DOWN);
}
void Controller::copy()
{
postKeyCodeClick(AKEYCODE_COPY);
}
void Controller::cut()
{
postKeyCodeClick(AKEYCODE_CUT);
}
void Controller::expandNotificationPanel()
{
ControlMsg *controlMsg = new ControlMsg(ControlMsg::CMT_EXPAND_NOTIFICATION_PANEL);
if (!controlMsg) {
return;
}
postControlMsg(controlMsg);
}
void Controller::collapsePanel()
{
ControlMsg *controlMsg = new ControlMsg(ControlMsg::CMT_COLLAPSE_PANELS);
if (!controlMsg) {
return;
}
postControlMsg(controlMsg);
}
void Controller::requestDeviceClipboard()
{
ControlMsg *controlMsg = new ControlMsg(ControlMsg::CMT_GET_CLIPBOARD);
if (!controlMsg) {
return;
}
postControlMsg(controlMsg);
}
void Controller::getDeviceClipboard(bool cut)
{
ControlMsg *controlMsg = new ControlMsg(ControlMsg::CMT_GET_CLIPBOARD);
if (!controlMsg) {
return;
}
ControlMsg::GetClipboardCopyKey copyKey = cut ? ControlMsg::GCCK_CUT : ControlMsg::GCCK_COPY;
controlMsg->setGetClipboardMsgData(copyKey);
postControlMsg(controlMsg);
}
void Controller::setDeviceClipboard(bool pause)
{
QClipboard *board = QApplication::clipboard();
QString text = board->text();
ControlMsg *controlMsg = new ControlMsg(ControlMsg::CMT_SET_CLIPBOARD);
if (!controlMsg) {
return;
}
controlMsg->setSetClipboardMsgData(text, pause);
postControlMsg(controlMsg);
}
void Controller::clipboardPaste()
{
QClipboard *board = QApplication::clipboard();
QString text = board->text();
postTextInput(text);
}
void Controller::postTextInput(QString &text)
{
ControlMsg *controlMsg = new ControlMsg(ControlMsg::CMT_INJECT_TEXT);
if (!controlMsg) {
return;
}
controlMsg->setInjectTextMsgData(text);
postControlMsg(controlMsg);
}
void Controller::setScreenPowerMode(ControlMsg::ScreenPowerMode mode)
{
ControlMsg *controlMsg = new ControlMsg(ControlMsg::CMT_SET_SCREEN_POWER_MODE);
if (!controlMsg) {
return;
}
controlMsg->setSetScreenPowerModeData(mode);
postControlMsg(controlMsg);
}
void Controller::mouseEvent(const QMouseEvent *from, const QSize &frameSize, const QSize &showSize)
{
if (m_inputConvert) {
m_inputConvert->mouseEvent(from, frameSize, showSize);
}
}
void Controller::wheelEvent(const QWheelEvent *from, const QSize &frameSize, const QSize &showSize)
{
if (m_inputConvert) {
m_inputConvert->wheelEvent(from, frameSize, showSize);
}
}
void Controller::keyEvent(const QKeyEvent *from, const QSize &frameSize, const QSize &showSize)
{
if (m_inputConvert) {
m_inputConvert->keyEvent(from, frameSize, showSize);
}
}
bool Controller::event(QEvent *event)
{
if (event && static_cast<ControlMsg::Type>(event->type()) == ControlMsg::Control) {
ControlMsg *controlMsg = dynamic_cast<ControlMsg *>(event);
if (controlMsg) {
sendControl(controlMsg->serializeData());
}
return true;
}
return QObject::event(event);
}
bool Controller::sendControl(const QByteArray &buffer)
{
if (buffer.isEmpty()) {
return false;
}
qint32 len = 0;
if (m_sendData) {
len = static_cast<qint32>(m_sendData(buffer));
}
return len == buffer.length();
}
void Controller::postKeyCodeClick(AndroidKeycode keycode)
{
ControlMsg *controlEventDown = new ControlMsg(ControlMsg::CMT_INJECT_KEYCODE);
if (!controlEventDown) {
return;
}
controlEventDown->setInjectKeycodeMsgData(AKEY_EVENT_ACTION_DOWN, keycode, 0, AMETA_NONE);
postControlMsg(controlEventDown);
ControlMsg *controlEventUp = new ControlMsg(ControlMsg::CMT_INJECT_KEYCODE);
if (!controlEventUp) {
return;
}
controlEventUp->setInjectKeycodeMsgData(AKEY_EVENT_ACTION_UP, keycode, 0, AMETA_NONE);
postControlMsg(controlEventUp);
}

View file

@ -1,71 +0,0 @@
#ifndef CONTROLLER_H
#define CONTROLLER_H
#include <QObject>
#include <QPointer>
#include "inputconvertbase.h"
class QTcpSocket;
class Receiver;
class InputConvertBase;
class DeviceMsg;
class Controller : public QObject
{
Q_OBJECT
public:
Controller(std::function<qint64(const QByteArray&)> sendData, QString gameScript = "", QObject *parent = Q_NULLPTR);
virtual ~Controller();
void postControlMsg(ControlMsg *controlMsg);
void recvDeviceMsg(DeviceMsg *deviceMsg);
void test(QRect rc);
void updateScript(QString gameScript = "");
bool isCurrentCustomKeymap();
void postGoBack();
void postGoHome();
void postGoMenu();
void postAppSwitch();
void postPower();
void postVolumeUp();
void postVolumeDown();
void copy();
void cut();
void expandNotificationPanel();
void collapsePanel();
void setScreenPowerMode(ControlMsg::ScreenPowerMode mode);
// for input convert
void mouseEvent(const QMouseEvent *from, const QSize &frameSize, const QSize &showSize);
void wheelEvent(const QWheelEvent *from, const QSize &frameSize, const QSize &showSize);
void keyEvent(const QKeyEvent *from, const QSize &frameSize, const QSize &showSize);
// turn the screen on if it was off, press BACK otherwise
// If the screen is off, it is turned on only on down
void postBackOrScreenOn(bool down);
void requestDeviceClipboard();
void getDeviceClipboard(bool cut = false);
void setDeviceClipboard(bool pause = true);
void clipboardPaste();
void postTextInput(QString &text);
signals:
void grabCursor(bool grab);
protected:
bool event(QEvent *event);
private:
bool sendControl(const QByteArray &buffer);
void postKeyCodeClick(AndroidKeycode keycode);
private:
QPointer<Receiver> m_receiver;
QPointer<InputConvertBase> m_inputConvert;
std::function<qint64(const QByteArray&)> m_sendData = Q_NULLPTR;
};
#endif // CONTROLLER_H
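A construction sketch, added for illustration; the function and socket names are assumptions, not code from this repository. The Controller owns no socket: it serializes control messages and hands the bytes to the std::function supplied at construction.
#include <QTcpSocket>
#include "controller.h"

static Controller *makeController(QTcpSocket *controlSocket, QObject *parent)
{
    auto sendData = [controlSocket](const QByteArray &buffer) -> qint64 {
        // Controller::sendControl() treats the message as sent only if this
        // return value equals buffer.length()
        return controlSocket->write(buffer);
    };
    return new Controller(sendData, "", parent); // empty script: normal input conversion
}
Calling postGoHome() on the returned controller then posts an AKEYCODE_HOME down/up pair through the Qt event loop and out over the socket.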

View file

@ -1,166 +0,0 @@
#include <cstring>
#include <QDebug>
#include "bufferutil.h"
#include "controlmsg.h"
ControlMsg::ControlMsg(ControlMsgType controlMsgType) : QScrcpyEvent(Control)
{
m_data.type = controlMsgType;
}
ControlMsg::~ControlMsg()
{
if (CMT_SET_CLIPBOARD == m_data.type && Q_NULLPTR != m_data.setClipboard.text) {
delete[] m_data.setClipboard.text;
m_data.setClipboard.text = Q_NULLPTR;
} else if (CMT_INJECT_TEXT == m_data.type && Q_NULLPTR != m_data.injectText.text) {
delete[] m_data.injectText.text;
m_data.injectText.text = Q_NULLPTR;
}
}
void ControlMsg::setInjectKeycodeMsgData(AndroidKeyeventAction action, AndroidKeycode keycode, quint32 repeat, AndroidMetastate metastate)
{
m_data.injectKeycode.action = action;
m_data.injectKeycode.keycode = keycode;
m_data.injectKeycode.repeat = repeat;
m_data.injectKeycode.metastate = metastate;
}
void ControlMsg::setInjectTextMsgData(QString &text)
{
// write length (4 bytes) + string (not NUL-terminated)
if (CONTROL_MSG_INJECT_TEXT_MAX_LENGTH < text.length()) {
// injecting a text takes time, so limit the text length
text = text.left(CONTROL_MSG_INJECT_TEXT_MAX_LENGTH);
}
QByteArray tmp = text.toUtf8();
m_data.injectText.text = new char[tmp.length() + 1];
memcpy(m_data.injectText.text, tmp.data(), tmp.length());
m_data.injectText.text[tmp.length()] = '\0';
}
void ControlMsg::setInjectTouchMsgData(quint64 id, AndroidMotioneventAction action, AndroidMotioneventButtons buttons, QRect position, float pressure)
{
m_data.injectTouch.id = id;
m_data.injectTouch.action = action;
m_data.injectTouch.buttons = buttons;
m_data.injectTouch.position = position;
m_data.injectTouch.pressure = pressure;
}
void ControlMsg::setInjectScrollMsgData(QRect position, qint32 hScroll, qint32 vScroll)
{
m_data.injectScroll.position = position;
m_data.injectScroll.hScroll = hScroll;
m_data.injectScroll.vScroll = vScroll;
}
void ControlMsg::setGetClipboardMsgData(ControlMsg::GetClipboardCopyKey copyKey)
{
m_data.getClipboard.copyKey = copyKey;
}
void ControlMsg::setSetClipboardMsgData(QString &text, bool paste)
{
if (text.isEmpty()) {
return;
}
if (CONTROL_MSG_CLIPBOARD_TEXT_MAX_LENGTH < text.length()) {
text = text.left(CONTROL_MSG_CLIPBOARD_TEXT_MAX_LENGTH);
}
QByteArray tmp = text.toUtf8();
m_data.setClipboard.text = new char[tmp.length() + 1];
memcpy(m_data.setClipboard.text, tmp.data(), tmp.length());
m_data.setClipboard.text[tmp.length()] = '\0';
m_data.setClipboard.paste = paste;
m_data.setClipboard.sequence = 0;
}
void ControlMsg::setSetScreenPowerModeData(ControlMsg::ScreenPowerMode mode)
{
m_data.setScreenPowerMode.mode = mode;
}
void ControlMsg::setBackOrScreenOnData(bool down)
{
m_data.backOrScreenOn.action = down ? AKEY_EVENT_ACTION_DOWN : AKEY_EVENT_ACTION_UP;
}
void ControlMsg::writePosition(QBuffer &buffer, const QRect &value)
{
BufferUtil::write32(buffer, value.left());
BufferUtil::write32(buffer, value.top());
BufferUtil::write16(buffer, value.width());
BufferUtil::write16(buffer, value.height());
}
quint16 ControlMsg::toFixedPoint16(float f)
{
Q_ASSERT(f >= 0.0f && f <= 1.0f);
quint32 u = f * 0x1p16f; // 2^16
if (u >= 0xffff) {
u = 0xffff;
}
return (quint16)u;
}
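// Worked example: pressure is encoded as unsigned 0.16 fixed point, so 0.0f maps to
// 0x0000, 0.5f to 0x8000, and 1.0f saturates to 0xffff because 1.0 * 2^16 = 0x10000
// does not fit in 16 bits.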
QByteArray ControlMsg::serializeData()
{
QByteArray byteArray;
QBuffer buffer(&byteArray);
buffer.open(QBuffer::WriteOnly);
buffer.putChar(m_data.type);
switch (m_data.type) {
case CMT_INJECT_KEYCODE:
buffer.putChar(m_data.injectKeycode.action);
BufferUtil::write32(buffer, m_data.injectKeycode.keycode);
BufferUtil::write32(buffer, m_data.injectKeycode.repeat);
BufferUtil::write32(buffer, m_data.injectKeycode.metastate);
break;
case CMT_INJECT_TEXT:
BufferUtil::write32(buffer, static_cast<quint32>(strlen(m_data.injectText.text)));
buffer.write(m_data.injectText.text, strlen(m_data.injectText.text));
break;
case CMT_INJECT_TOUCH: {
buffer.putChar(m_data.injectTouch.action);
BufferUtil::write64(buffer, m_data.injectTouch.id);
writePosition(buffer, m_data.injectTouch.position);
quint16 pressure = toFixedPoint16(m_data.injectTouch.pressure);
BufferUtil::write16(buffer, pressure);
BufferUtil::write32(buffer, m_data.injectTouch.buttons);
} break;
case CMT_INJECT_SCROLL:
writePosition(buffer, m_data.injectScroll.position);
BufferUtil::write32(buffer, m_data.injectScroll.hScroll);
BufferUtil::write32(buffer, m_data.injectScroll.vScroll);
break;
case CMT_BACK_OR_SCREEN_ON:
buffer.putChar(m_data.backOrScreenOn.action);
break;
case CMT_GET_CLIPBOARD:
buffer.putChar(m_data.getClipboard.copyKey);
break;
case CMT_SET_CLIPBOARD:
BufferUtil::write64(buffer, m_data.setClipboard.sequence);
buffer.putChar(!!m_data.setClipboard.paste);
BufferUtil::write32(buffer, static_cast<quint32>(strlen(m_data.setClipboard.text)));
buffer.write(m_data.setClipboard.text, strlen(m_data.setClipboard.text));
break;
case CMT_SET_SCREEN_POWER_MODE:
buffer.putChar(m_data.setScreenPowerMode.mode);
break;
case CMT_EXPAND_NOTIFICATION_PANEL:
case CMT_EXPAND_SETTINGS_PANEL:
case CMT_COLLAPSE_PANELS:
case CMT_ROTATE_DEVICE:
break;
default:
qDebug() << "Unknown event type:" << m_data.type;
break;
}
buffer.close();
return byteArray;
}
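// Worked example of the wire format above (illustrative): postGoBack() eventually
// serializes an inject-keycode message for AKEYCODE_BACK (4). Assuming the standard
// Android values AKEY_EVENT_ACTION_DOWN = 0 and AMETA_NONE = 0, the 14-byte payload
// for the key-down half is:
//   00            type      CMT_INJECT_KEYCODE
//   00            action    AKEY_EVENT_ACTION_DOWN
//   00 00 00 04   keycode   AKEYCODE_BACK
//   00 00 00 00   repeat
//   00 00 00 00   metastate AMETA_NONE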

View file

@ -1,136 +0,0 @@
#ifndef CONTROLMSG_H
#define CONTROLMSG_H
#include <QBuffer>
#include <QRect>
#include <QString>
#include "input.h"
#include "keycodes.h"
#include "qscrcpyevent.h"
#define CONTROL_MSG_MAX_SIZE (1 << 18) // 256k
#define CONTROL_MSG_INJECT_TEXT_MAX_LENGTH 300
// type: 1 byte; paste flag: 1 byte; length: 4 bytes
#define CONTROL_MSG_CLIPBOARD_TEXT_MAX_LENGTH \
(CONTROL_MSG_MAX_SIZE - 6)
#define POINTER_ID_MOUSE static_cast<quint64>(-1)
#define POINTER_ID_VIRTUAL_FINGER UINT64_C(-2)
// ControlMsg
class ControlMsg : public QScrcpyEvent
{
public:
enum ControlMsgType
{
CMT_NULL = -1,
CMT_INJECT_KEYCODE = 0,
CMT_INJECT_TEXT,
CMT_INJECT_TOUCH,
CMT_INJECT_SCROLL,
CMT_BACK_OR_SCREEN_ON,
CMT_EXPAND_NOTIFICATION_PANEL,
CMT_EXPAND_SETTINGS_PANEL,
CMT_COLLAPSE_PANELS,
CMT_GET_CLIPBOARD,
CMT_SET_CLIPBOARD,
CMT_SET_SCREEN_POWER_MODE,
CMT_ROTATE_DEVICE
};
enum ScreenPowerMode
{
// see <https://android.googlesource.com/platform/frameworks/base.git/+/pie-release-2/core/java/android/view/SurfaceControl.java#305>
SPM_OFF = 0,
SPM_NORMAL = 2,
};
enum GetClipboardCopyKey {
GCCK_NONE,
GCCK_COPY,
GCCK_CUT,
};
ControlMsg(ControlMsgType controlMsgType);
virtual ~ControlMsg();
void setInjectKeycodeMsgData(AndroidKeyeventAction action, AndroidKeycode keycode, quint32 repeat, AndroidMetastate metastate);
void setInjectTextMsgData(QString &text);
// id identifies one touch point; at most 10 touch points are supported, with ids in [0, 9]
// action must be AMOTION_EVENT_ACTION_DOWN, AMOTION_EVENT_ACTION_UP or AMOTION_EVENT_ACTION_MOVE
// position is the touch position associated with the action
void setInjectTouchMsgData(quint64 id, AndroidMotioneventAction action, AndroidMotioneventButtons buttons, QRect position, float pressure);
void setInjectScrollMsgData(QRect position, qint32 hScroll, qint32 vScroll);
void setGetClipboardMsgData(ControlMsg::GetClipboardCopyKey copyKey);
void setSetClipboardMsgData(QString &text, bool paste);
void setSetScreenPowerModeData(ControlMsg::ScreenPowerMode mode);
void setBackOrScreenOnData(bool down);
QByteArray serializeData();
private:
void writePosition(QBuffer &buffer, const QRect &value);
quint16 toFixedPoint16(float f);
private:
struct ControlMsgData
{
ControlMsgType type = CMT_NULL;
union
{
struct
{
AndroidKeyeventAction action;
AndroidKeycode keycode;
quint32 repeat;
AndroidMetastate metastate;
} injectKeycode;
struct
{
char *text = Q_NULLPTR;
} injectText;
struct
{
quint64 id;
AndroidMotioneventAction action;
AndroidMotioneventButtons buttons;
QRect position;
float pressure;
} injectTouch;
struct
{
QRect position;
qint32 hScroll;
qint32 vScroll;
} injectScroll;
struct
{
AndroidKeyeventAction action; // action for the BACK key
// screen may only be turned on on ACTION_DOWN
} backOrScreenOn;
struct
{
enum GetClipboardCopyKey copyKey;
} getClipboard;
struct
{
uint64_t sequence = 0;
char *text = Q_NULLPTR;
bool paste = true;
} setClipboard;
struct
{
ScreenPowerMode mode;
} setScreenPowerMode;
};
ControlMsgData() {}
~ControlMsgData() {}
};
ControlMsgData m_data;
};
#endif // CONTROLMSG_H

View file

@ -1,16 +0,0 @@
#include "inputconvertbase.h"
#include "controller.h"
InputConvertBase::InputConvertBase(Controller *controller) : QObject(controller), m_controller(controller)
{
Q_ASSERT(controller);
}
InputConvertBase::~InputConvertBase() {}
void InputConvertBase::sendControlMsg(ControlMsg *msg)
{
if (msg && m_controller) {
m_controller->postControlMsg(msg);
}
}

View file

@ -1,41 +0,0 @@
#ifndef INPUTCONVERTBASE_H
#define INPUTCONVERTBASE_H
#include <QKeyEvent>
#include <QMouseEvent>
#include <QPointer>
#include <QWheelEvent>
#include "controlmsg.h"
class Controller;
class InputConvertBase : public QObject
{
Q_OBJECT
public:
InputConvertBase(Controller *controller);
virtual ~InputConvertBase();
// the frame size may differ from the real device size, so we need the size
// to which the absolute position applies, in order to scale it accordingly
virtual void mouseEvent(const QMouseEvent *from, const QSize &frameSize, const QSize &showSize) = 0;
virtual void wheelEvent(const QWheelEvent *from, const QSize &frameSize, const QSize &showSize) = 0;
virtual void keyEvent(const QKeyEvent *from, const QSize &frameSize, const QSize &showSize) = 0;
virtual bool isCurrentCustomKeymap()
{
return false;
}
signals:
void grabCursor(bool grab);
protected:
void sendControlMsg(ControlMsg *msg);
QPointer<Controller> m_controller;
// Qt reports repeated events as a boolean, but Android expects the actual
// number of repetitions. This variable keeps track of the count.
unsigned m_repeat = 0;
};
#endif // INPUTCONVERTBASE_H
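A worked example of the scaling described by the comment in this header (numbers are illustrative): if the decoded frame is 1080x2340 but the window shows it at 540x1170, a click at window coordinates (270, 585) must be injected at frame coordinates (270 * 1080 / 540, 585 * 2340 / 1170) = (540, 1170); this is why every event handler receives both frameSize and showSize.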

View file

@ -1,665 +0,0 @@
#include <cmath>
#include <QDebug>
#include <QCursor>
#include <QGuiApplication>
#include <QTimer>
#include <QTime>
#include <QRandomGenerator>
#include "inputconvertgame.h"
#define CURSOR_POS_CHECK 50
InputConvertGame::InputConvertGame(Controller *controller) : InputConvertNormal(controller) {
m_ctrlSteerWheel.delayData.timer = new QTimer(this);
m_ctrlSteerWheel.delayData.timer->setSingleShot(true);
connect(m_ctrlSteerWheel.delayData.timer, &QTimer::timeout, this, &InputConvertGame::onSteerWheelTimer);
}
InputConvertGame::~InputConvertGame() {}
void InputConvertGame::mouseEvent(const QMouseEvent *from, const QSize &frameSize, const QSize &showSize)
{
// handle the key-map switch button
if (!m_keyMap.isSwitchOnKeyboard() && m_keyMap.getSwitchKey() == static_cast<int>(from->button())) {
if (from->type() != QEvent::MouseButtonPress) {
return;
}
if (!switchGameMap()) {
m_needBackMouseMove = false;
}
return;
}
if (!m_needBackMouseMove && m_gameMap) {
updateSize(frameSize, showSize);
// mouse move
if (m_keyMap.isValidMouseMoveMap()) {
if (processMouseMove(from)) {
return;
}
}
// mouse click
if (processMouseClick(from)) {
return;
}
}
InputConvertNormal::mouseEvent(from, frameSize, showSize);
}
void InputConvertGame::wheelEvent(const QWheelEvent *from, const QSize &frameSize, const QSize &showSize)
{
if (m_gameMap) {
updateSize(frameSize, showSize);
} else {
InputConvertNormal::wheelEvent(from, frameSize, showSize);
}
}
void InputConvertGame::keyEvent(const QKeyEvent *from, const QSize &frameSize, const QSize &showSize)
{
// handle the key-map switch key
if (m_keyMap.isSwitchOnKeyboard() && m_keyMap.getSwitchKey() == from->key()) {
if (QEvent::KeyPress != from->type()) {
return;
}
if (!switchGameMap()) {
m_needBackMouseMove = false;
}
return;
}
const KeyMap::KeyMapNode &node = m_keyMap.getKeyMapNodeKey(from->key());
// handle special keys: click keys that can also release the mouse cursor
if (m_needBackMouseMove && KeyMap::KMT_CLICK == node.type && node.data.click.switchMap) {
updateSize(frameSize, showSize);
// e.g. Qt::Key_Tab and Qt::Key_M in the PUBG Mobile key map
processKeyClick(node.data.click.keyNode.pos, false, node.data.click.switchMap, from);
return;
}
if (m_gameMap) {
updateSize(frameSize, showSize);
if (!from || from->isAutoRepeat()) {
return;
}
// small eyes
if (m_keyMap.isValidMouseMoveMap() && from->key() == m_keyMap.getMouseMoveMap().data.mouseMove.smallEyes.key) {
m_ctrlMouseMove.smallEyes = (QEvent::KeyPress == from->type());
if (QEvent::KeyPress == from->type()) {
m_processMouseMove = false;
int delay = 30;
QTimer::singleShot(delay, this, [this]() { mouseMoveStopTouch(); });
QTimer::singleShot(delay * 2, this, [this]() {
mouseMoveStartTouch(nullptr);
m_processMouseMove = true;
});
stopMouseMoveTimer();
} else {
mouseMoveStopTouch();
mouseMoveStartTouch(nullptr);
}
return;
}
switch (node.type) {
// handle the steering-wheel (direction) keys
case KeyMap::KMT_STEER_WHEEL:
processSteerWheel(node, from);
return;
// handle plain click keys
case KeyMap::KMT_CLICK:
processKeyClick(node.data.click.keyNode.pos, false, node.data.click.switchMap, from);
return;
case KeyMap::KMT_CLICK_TWICE:
processKeyClick(node.data.clickTwice.keyNode.pos, true, false, from);
return;
case KeyMap::KMT_CLICK_MULTI:
processKeyClickMulti(node.data.clickMulti.keyNode.delayClickNodes, node.data.clickMulti.keyNode.delayClickNodesCount, from);
return;
case KeyMap::KMT_DRAG:
processKeyDrag(node.data.drag.keyNode.pos, node.data.drag.keyNode.extendPos, from);
return;
default:
break;
}
} else {
InputConvertNormal::keyEvent(from, frameSize, showSize);
}
}
bool InputConvertGame::isCurrentCustomKeymap()
{
return m_gameMap;
}
void InputConvertGame::loadKeyMap(const QString &json)
{
m_keyMap.loadKeyMap(json);
}
void InputConvertGame::updateSize(const QSize &frameSize, const QSize &showSize)
{
if (showSize != m_showSize) {
if (m_gameMap && m_keyMap.isValidMouseMoveMap()) {
#ifdef QT_NO_DEBUG
// the show size changed: re-grab the cursor to match the new geometry
emit grabCursor(true);
#endif
}
}
m_frameSize = frameSize;
m_showSize = showSize;
}
void InputConvertGame::sendTouchDownEvent(int id, QPointF pos)
{
sendTouchEvent(id, pos, AMOTION_EVENT_ACTION_DOWN);
}
void InputConvertGame::sendTouchMoveEvent(int id, QPointF pos)
{
sendTouchEvent(id, pos, AMOTION_EVENT_ACTION_MOVE);
}
void InputConvertGame::sendTouchUpEvent(int id, QPointF pos)
{
sendTouchEvent(id, pos, AMOTION_EVENT_ACTION_UP);
}
void InputConvertGame::sendTouchEvent(int id, QPointF pos, AndroidMotioneventAction action)
{
if (0 > id || MULTI_TOUCH_MAX_NUM - 1 < id) {
Q_ASSERT(0);
return;
}
//qDebug() << "id:" << id << " pos:" << pos << " action" << action;
ControlMsg *controlMsg = new ControlMsg(ControlMsg::CMT_INJECT_TOUCH);
if (!controlMsg) {
return;
}
QPoint absolutePos = calcFrameAbsolutePos(pos).toPoint();
static QPoint lastAbsolutePos = absolutePos;
if (AMOTION_EVENT_ACTION_MOVE == action && lastAbsolutePos == absolutePos) {
delete controlMsg;
return;
}
lastAbsolutePos = absolutePos;
controlMsg->setInjectTouchMsgData(static_cast<quint64>(id), action,
static_cast<AndroidMotioneventButtons>(0),
QRect(absolutePos, m_frameSize),
AMOTION_EVENT_ACTION_DOWN == action? 1.0f : 0.0f);
sendControlMsg(controlMsg);
}
QPointF InputConvertGame::calcFrameAbsolutePos(QPointF relativePos)
{
QPointF absolutePos;
absolutePos.setX(m_frameSize.width() * relativePos.x());
absolutePos.setY(m_frameSize.height() * relativePos.y());
return absolutePos;
}
QPointF InputConvertGame::calcScreenAbsolutePos(QPointF relativePos)
{
QPointF absolutePos;
absolutePos.setX(m_showSize.width() * relativePos.x());
absolutePos.setY(m_showSize.height() * relativePos.y());
return absolutePos;
}
int InputConvertGame::attachTouchID(int key)
{
for (int i = 0; i < MULTI_TOUCH_MAX_NUM; i++) {
if (0 == m_multiTouchID[i]) {
m_multiTouchID[i] = key;
return i;
}
}
return -1;
}
void InputConvertGame::detachTouchID(int key)
{
for (int i = 0; i < MULTI_TOUCH_MAX_NUM; i++) {
if (key == m_multiTouchID[i]) {
m_multiTouchID[i] = 0;
return;
}
}
}
int InputConvertGame::getTouchID(int key)
{
for (int i = 0; i < MULTI_TOUCH_MAX_NUM; i++) {
if (key == m_multiTouchID[i]) {
return i;
}
}
return -1;
}
// -------- steer wheel event --------
void InputConvertGame::getDelayQueue(const QPointF& start, const QPointF& end,
const double& distanceStep, const double& posStepconst,
quint32 lowestTimer, quint32 highestTimer,
QQueue<QPointF>& queuePos, QQueue<quint32>& queueTimer) {
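// Builds a jittered straight-line path from start to end: the segment is cut into steps
// of distanceStep, every waypoint is offset by a random amount in
// [-posStepconst, +posStepconst) on both axes, and each waypoint is paired with a random
// delay in [lowestTimer, highestTimer) milliseconds, so the synthesized drag looks like
// a human swipe rather than an instant jump.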
double x1 = start.x();
double y1 = start.y();
double x2 = end.x();
double y2 = end.y();
double dx = x2 - x1;
double dy = y2 - y1;
double e = (fabs(dx) > fabs(dy)) ? fabs(dx) : fabs(dy);
e /= distanceStep;
dx /= e;
dy /= e;
QQueue<QPointF> queue;
QQueue<quint32> queue2;
for (int i = 1; i <= e; i++) {
QPointF pos(x1 + (QRandomGenerator::global()->bounded(posStepconst * 2) - posStepconst),
y1 + (QRandomGenerator::global()->bounded(posStepconst * 2) - posStepconst));
queue.enqueue(pos);
queue2.enqueue(QRandomGenerator::global()->bounded(lowestTimer, highestTimer));
x1 += dx;
y1 += dy;
}
queuePos = queue;
queueTimer = queue2;
}
void InputConvertGame::onSteerWheelTimer() {
if(m_ctrlSteerWheel.delayData.queuePos.empty()) {
return;
}
int id = getTouchID(m_ctrlSteerWheel.touchKey);
m_ctrlSteerWheel.delayData.currentPos = m_ctrlSteerWheel.delayData.queuePos.dequeue();
sendTouchMoveEvent(id, m_ctrlSteerWheel.delayData.currentPos);
if(m_ctrlSteerWheel.delayData.queuePos.empty() && m_ctrlSteerWheel.delayData.pressedNum == 0) {
sendTouchUpEvent(id, m_ctrlSteerWheel.delayData.currentPos);
detachTouchID(m_ctrlSteerWheel.touchKey);
return;
}
if(!m_ctrlSteerWheel.delayData.queuePos.empty()) {
m_ctrlSteerWheel.delayData.timer->start(m_ctrlSteerWheel.delayData.queueTimer.dequeue());
}
}
void InputConvertGame::processSteerWheel(const KeyMap::KeyMapNode &node, const QKeyEvent *from)
{
int key = from->key();
bool flag = from->type() == QEvent::KeyPress;
// identify keys
if (key == node.data.steerWheel.up.key) {
m_ctrlSteerWheel.pressedUp = flag;
} else if (key == node.data.steerWheel.right.key) {
m_ctrlSteerWheel.pressedRight = flag;
} else if (key == node.data.steerWheel.down.key) {
m_ctrlSteerWheel.pressedDown = flag;
} else { // left
m_ctrlSteerWheel.pressedLeft = flag;
}
// calc offset and pressed number
QPointF offset(0.0, 0.0);
int pressedNum = 0;
if (m_ctrlSteerWheel.pressedUp) {
++pressedNum;
offset.ry() -= node.data.steerWheel.up.extendOffset;
}
if (m_ctrlSteerWheel.pressedRight) {
++pressedNum;
offset.rx() += node.data.steerWheel.right.extendOffset;
}
if (m_ctrlSteerWheel.pressedDown) {
++pressedNum;
offset.ry() += node.data.steerWheel.down.extendOffset;
}
if (m_ctrlSteerWheel.pressedLeft) {
++pressedNum;
offset.rx() -= node.data.steerWheel.left.extendOffset;
}
m_ctrlSteerWheel.delayData.pressedNum = pressedNum;
// all steer-wheel keys are released: stop any pending delayed moves and lift the touch
if (pressedNum == 0) {
if (m_ctrlSteerWheel.delayData.timer->isActive()) {
m_ctrlSteerWheel.delayData.timer->stop();
m_ctrlSteerWheel.delayData.queueTimer.clear();
m_ctrlSteerWheel.delayData.queuePos.clear();
}
sendTouchUpEvent(getTouchID(m_ctrlSteerWheel.touchKey), m_ctrlSteerWheel.delayData.currentPos);
detachTouchID(m_ctrlSteerWheel.touchKey);
return;
}
// process steer wheel key event
m_ctrlSteerWheel.delayData.timer->stop();
m_ctrlSteerWheel.delayData.queueTimer.clear();
m_ctrlSteerWheel.delayData.queuePos.clear();
// first press, get key and touch down
if (pressedNum == 1 && flag) {
m_ctrlSteerWheel.touchKey = from->key();
int id = attachTouchID(m_ctrlSteerWheel.touchKey);
sendTouchDownEvent(id, node.data.steerWheel.centerPos);
getDelayQueue(node.data.steerWheel.centerPos, node.data.steerWheel.centerPos+offset,
0.01f, 0.002f, 2, 8,
m_ctrlSteerWheel.delayData.queuePos,
m_ctrlSteerWheel.delayData.queueTimer);
} else {
getDelayQueue(m_ctrlSteerWheel.delayData.currentPos, node.data.steerWheel.centerPos+offset,
0.01f, 0.002f, 2, 8,
m_ctrlSteerWheel.delayData.queuePos,
m_ctrlSteerWheel.delayData.queueTimer);
}
m_ctrlSteerWheel.delayData.timer->start();
return;
}
// -------- key event --------
void InputConvertGame::processKeyClick(const QPointF &clickPos, bool clickTwice, bool switchMap, const QKeyEvent *from)
{
if (switchMap && QEvent::KeyRelease == from->type()) {
m_needBackMouseMove = !m_needBackMouseMove;
hideMouseCursor(!m_needBackMouseMove);
}
if (QEvent::KeyPress == from->type()) {
int id = attachTouchID(from->key());
sendTouchDownEvent(id, clickPos);
if (clickTwice) {
sendTouchUpEvent(getTouchID(from->key()), clickPos);
detachTouchID(from->key());
}
} else if (QEvent::KeyRelease == from->type()) {
if (clickTwice) {
int id = attachTouchID(from->key());
sendTouchDownEvent(id, clickPos);
}
sendTouchUpEvent(getTouchID(from->key()), clickPos);
detachTouchID(from->key());
}
}
void InputConvertGame::processKeyClickMulti(const KeyMap::DelayClickNode *nodes, const int count, const QKeyEvent *from)
{
if (QEvent::KeyPress != from->type()) {
return;
}
int key = from->key();
int delay = 0;
QPointF clickPos;
for (int i = 0; i < count; i++) {
delay += nodes[i].delay;
clickPos = nodes[i].pos;
QTimer::singleShot(delay, this, [this, key, clickPos]() {
int id = attachTouchID(key);
sendTouchDownEvent(id, clickPos);
});
// do not send the touch-up too soon after the touch-down
delay += 20;
QTimer::singleShot(delay, this, [this, key, clickPos]() {
int id = getTouchID(key);
sendTouchUpEvent(id, clickPos);
detachTouchID(key);
});
}
}
void InputConvertGame::onDragTimer() {
if(m_dragDelayData.queuePos.empty()) {
return;
}
int id = getTouchID(m_dragDelayData.pressKey);
m_dragDelayData.currentPos = m_dragDelayData.queuePos.dequeue();
sendTouchMoveEvent(id, m_dragDelayData.currentPos);
if(m_dragDelayData.queuePos.empty()) {
delete m_dragDelayData.timer;
m_dragDelayData.timer = nullptr;
sendTouchUpEvent(id, m_dragDelayData.currentPos);
detachTouchID(m_dragDelayData.pressKey);
m_dragDelayData.currentPos = QPointF();
m_dragDelayData.pressKey = 0;
return;
}
if(!m_dragDelayData.queuePos.empty()) {
m_dragDelayData.timer->start(m_dragDelayData.queueTimer.dequeue());
}
}
void InputConvertGame::processKeyDrag(const QPointF &startPos, QPointF endPos, const QKeyEvent *from)
{
if (QEvent::KeyPress == from->type()) {
// stop last
if (m_dragDelayData.timer && m_dragDelayData.timer->isActive()) {
m_dragDelayData.timer->stop();
delete m_dragDelayData.timer;
m_dragDelayData.timer = nullptr;
m_dragDelayData.queuePos.clear();
m_dragDelayData.queueTimer.clear();
sendTouchUpEvent(getTouchID(m_dragDelayData.pressKey), m_dragDelayData.currentPos);
detachTouchID(m_dragDelayData.pressKey);
m_dragDelayData.currentPos = QPointF();
m_dragDelayData.pressKey = 0;
}
// start this
int id = attachTouchID(from->key());
sendTouchDownEvent(id, startPos);
m_dragDelayData.timer = new QTimer(this);
m_dragDelayData.timer->setSingleShot(true);
connect(m_dragDelayData.timer, &QTimer::timeout, this, &InputConvertGame::onDragTimer);
m_dragDelayData.pressKey = from->key();
m_dragDelayData.currentPos = startPos;
m_dragDelayData.queuePos.clear();
m_dragDelayData.queueTimer.clear();
getDelayQueue(startPos, endPos,
0.01f, 0.002f, 0, 2,
m_dragDelayData.queuePos,
m_dragDelayData.queueTimer);
m_dragDelayData.timer->start();
}
}
// -------- mouse event --------
bool InputConvertGame::processMouseClick(const QMouseEvent *from)
{
const KeyMap::KeyMapNode &node = m_keyMap.getKeyMapNodeMouse(from->button());
if (KeyMap::KMT_INVALID == node.type) {
return false;
}
if (QEvent::MouseButtonPress == from->type() || QEvent::MouseButtonDblClick == from->type()) {
int id = attachTouchID(from->button());
sendTouchDownEvent(id, node.data.click.keyNode.pos);
return true;
}
if (QEvent::MouseButtonRelease == from->type()) {
int id = getTouchID(from->button());
sendTouchUpEvent(id, node.data.click.keyNode.pos);
detachTouchID(from->button());
return true;
}
return false;
}
bool InputConvertGame::processMouseMove(const QMouseEvent *from)
{
if (QEvent::MouseMove != from->type()) {
return false;
}
if (checkCursorPos(from)) {
m_ctrlMouseMove.lastPos = QPointF(0.0, 0.0);
return true;
}
if (!m_ctrlMouseMove.lastPos.isNull() && m_processMouseMove) {
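// Convert the physical mouse delta into a touch drag: divide by the per-axis speedRatio
// from the key map, then normalize by the window size so lastConverPos stays in
// relative [0, 1] coordinates before it is sent as a touch-move event.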
QPointF distance_raw{from->localPos() - m_ctrlMouseMove.lastPos};
QPointF speedRatio {m_keyMap.getMouseMoveMap().data.mouseMove.speedRatio};
QPointF distance {distance_raw.x() / speedRatio.x(), distance_raw.y() / speedRatio.y()};
mouseMoveStartTouch(from);
startMouseMoveTimer();
m_ctrlMouseMove.lastConverPos.setX(m_ctrlMouseMove.lastConverPos.x() + distance.x() / m_showSize.width());
m_ctrlMouseMove.lastConverPos.setY(m_ctrlMouseMove.lastConverPos.y() + distance.y() / m_showSize.height());
if (m_ctrlMouseMove.lastConverPos.x() < 0.05 || m_ctrlMouseMove.lastConverPos.x() > 0.95 || m_ctrlMouseMove.lastConverPos.y() < 0.05
|| m_ctrlMouseMove.lastConverPos.y() > 0.95) {
if (m_ctrlMouseMove.smallEyes) {
m_processMouseMove = false;
int delay = 30;
QTimer::singleShot(delay, this, [this]() { mouseMoveStopTouch(); });
QTimer::singleShot(delay * 2, this, [this]() {
mouseMoveStartTouch(nullptr);
m_processMouseMove = true;
});
} else {
mouseMoveStopTouch();
mouseMoveStartTouch(from);
}
}
sendTouchMoveEvent(getTouchID(Qt::ExtraButton24), m_ctrlMouseMove.lastConverPos);
}
m_ctrlMouseMove.lastPos = from->localPos();
return true;
}
bool InputConvertGame::checkCursorPos(const QMouseEvent *from)
{
bool moveCursor = false;
QPoint pos = from->pos();
if (pos.x() < CURSOR_POS_CHECK) {
pos.setX(m_showSize.width() - CURSOR_POS_CHECK);
moveCursor = true;
} else if (pos.x() > m_showSize.width() - CURSOR_POS_CHECK) {
pos.setX(CURSOR_POS_CHECK);
moveCursor = true;
} else if (pos.y() < CURSOR_POS_CHECK) {
pos.setY(m_showSize.height() - CURSOR_POS_CHECK);
moveCursor = true;
} else if (pos.y() > m_showSize.height() - CURSOR_POS_CHECK) {
pos.setY(CURSOR_POS_CHECK);
moveCursor = true;
}
if (moveCursor) {
moveCursorTo(from, pos);
}
return moveCursor;
}
void InputConvertGame::moveCursorTo(const QMouseEvent *from, const QPoint &localPosPixel)
{
QPoint posOffset = from->pos() - localPosPixel;
QPoint globalPos = from->globalPos();
globalPos -= posOffset;
//qDebug()<<"move cursor to "<<globalPos<<" offset "<<posOffset;
QCursor::setPos(globalPos);
}
void InputConvertGame::mouseMoveStartTouch(const QMouseEvent *from)
{
Q_UNUSED(from)
if (!m_ctrlMouseMove.touching) {
QPointF mouseMoveStartPos
= m_ctrlMouseMove.smallEyes ? m_keyMap.getMouseMoveMap().data.mouseMove.smallEyes.pos : m_keyMap.getMouseMoveMap().data.mouseMove.startPos;
int id = attachTouchID(Qt::ExtraButton24);
sendTouchDownEvent(id, mouseMoveStartPos);
m_ctrlMouseMove.lastConverPos = mouseMoveStartPos;
m_ctrlMouseMove.touching = true;
}
}
void InputConvertGame::mouseMoveStopTouch()
{
if (m_ctrlMouseMove.touching) {
sendTouchUpEvent(getTouchID(Qt::ExtraButton24), m_ctrlMouseMove.lastConverPos);
detachTouchID(Qt::ExtraButton24);
m_ctrlMouseMove.touching = false;
}
}
void InputConvertGame::startMouseMoveTimer()
{
stopMouseMoveTimer();
m_ctrlMouseMove.timer = startTimer(500);
}
void InputConvertGame::stopMouseMoveTimer()
{
if (0 != m_ctrlMouseMove.timer) {
killTimer(m_ctrlMouseMove.timer);
m_ctrlMouseMove.timer = 0;
}
}
bool InputConvertGame::switchGameMap()
{
m_gameMap = !m_gameMap;
qInfo() << QString("current keymap mode: %1").arg(m_gameMap ? "custom" : "normal");
if (!m_keyMap.isValidMouseMoveMap()) {
return m_gameMap;
}
#ifdef QT_NO_DEBUG
// grab/release the cursor only when a mouse move map is configured (release builds only)
emit grabCursor(m_gameMap);
#endif
hideMouseCursor(m_gameMap);
if (!m_gameMap) {
stopMouseMoveTimer();
mouseMoveStopTouch();
}
return m_gameMap;
}
void InputConvertGame::hideMouseCursor(bool hide)
{
if (hide) {
#ifdef QT_NO_DEBUG
QGuiApplication::setOverrideCursor(QCursor(Qt::BlankCursor));
#else
QGuiApplication::setOverrideCursor(QCursor(Qt::CrossCursor));
#endif
} else {
QGuiApplication::restoreOverrideCursor();
}
}
void InputConvertGame::timerEvent(QTimerEvent *event)
{
if (m_ctrlMouseMove.timer == event->timerId()) {
stopMouseMoveTimer();
mouseMoveStopTouch();
}
}
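// Usage sketch (illustrative only): getDelayQueue() above fills queuePos/queueTimer with
// interpolated points and per-step delays, which onDragTimer() then drains one touch-move
// at a time. The helper below is a hypothetical simplified stand-in with evenly spaced
// steps and a constant delay, not the original implementation.
static void fillLinearDelayQueue(const QPointF &start, const QPointF &end, int steps, quint32 delayMs,
                                 QQueue<QPointF> &queuePos, QQueue<quint32> &queueTimer)
{
    // steps must be >= 1; each step gets one intermediate position and one delay
    for (int i = 1; i <= steps; ++i) {
        double t = static_cast<double>(i) / steps;
        queuePos.enqueue(start + (end - start) * t); // evenly spaced point on the segment
        queueTimer.enqueue(delayMs);                 // delay before the next touch move
    }
}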

View file

@ -1,126 +0,0 @@
#ifndef INPUTCONVERTGAME_H
#define INPUTCONVERTGAME_H
#include <QPointF>
#include <QQueue>
#include "inputconvertnormal.h"
#include "keymap.h"
#define MULTI_TOUCH_MAX_NUM 10
class InputConvertGame : public InputConvertNormal
{
Q_OBJECT
public:
InputConvertGame(Controller *controller);
virtual ~InputConvertGame();
virtual void mouseEvent(const QMouseEvent *from, const QSize &frameSize, const QSize &showSize);
virtual void wheelEvent(const QWheelEvent *from, const QSize &frameSize, const QSize &showSize);
virtual void keyEvent(const QKeyEvent *from, const QSize &frameSize, const QSize &showSize);
virtual bool isCurrentCustomKeymap();
void loadKeyMap(const QString &json);
protected:
void updateSize(const QSize &frameSize, const QSize &showSize);
void sendTouchDownEvent(int id, QPointF pos);
void sendTouchMoveEvent(int id, QPointF pos);
void sendTouchUpEvent(int id, QPointF pos);
void sendTouchEvent(int id, QPointF pos, AndroidMotioneventAction action);
QPointF calcFrameAbsolutePos(QPointF relativePos);
QPointF calcScreenAbsolutePos(QPointF relativePos);
// multi touch id
int attachTouchID(int key);
void detachTouchID(int key);
int getTouchID(int key);
// steer wheel
void processSteerWheel(const KeyMap::KeyMapNode &node, const QKeyEvent *from);
// click
void processKeyClick(const QPointF &clickPos, bool clickTwice, bool switchMap, const QKeyEvent *from);
// click mutil
void processKeyClickMulti(const KeyMap::DelayClickNode *nodes, const int count, const QKeyEvent *from);
// drag
void processKeyDrag(const QPointF &startPos, QPointF endPos, const QKeyEvent *from);
// mouse
bool processMouseClick(const QMouseEvent *from);
bool processMouseMove(const QMouseEvent *from);
void moveCursorTo(const QMouseEvent *from, const QPoint &localPosPixel);
void mouseMoveStartTouch(const QMouseEvent *from);
void mouseMoveStopTouch();
void startMouseMoveTimer();
void stopMouseMoveTimer();
bool switchGameMap();
bool checkCursorPos(const QMouseEvent *from);
void hideMouseCursor(bool hide);
void getDelayQueue(const QPointF& start, const QPointF& end,
const double& distanceStep, const double& posStepconst,
quint32 lowestTimer, quint32 highestTimer,
QQueue<QPointF>& queuePos, QQueue<quint32>& queueTimer);
protected:
void timerEvent(QTimerEvent *event);
private slots:
void onSteerWheelTimer();
void onDragTimer();
private:
QSize m_frameSize;
QSize m_showSize;
bool m_gameMap = false;
bool m_needBackMouseMove = false;
int m_multiTouchID[MULTI_TOUCH_MAX_NUM] = { 0 };
KeyMap m_keyMap;
bool m_processMouseMove = true;
// steer wheel
struct
{
// the first key pressed
int touchKey = Qt::Key_unknown;
bool pressedUp = false;
bool pressedDown = false;
bool pressedLeft = false;
bool pressedRight = false;
// for delay
struct {
QPointF currentPos;
QTimer* timer = nullptr;
QQueue<QPointF> queuePos;
QQueue<quint32> queueTimer;
int pressedNum = 0;
} delayData;
} m_ctrlSteerWheel;
// mouse move
struct
{
QPointF lastConverPos;
QPointF lastPos = { 0.0, 0.0 };
bool touching = false;
int timer = 0;
bool smallEyes = false;
} m_ctrlMouseMove;
// for drag delay
struct {
QPointF currentPos;
QTimer* timer = nullptr;
QQueue<QPointF> queuePos;
QQueue<quint32> queueTimer;
int pressKey = 0;
} m_dragDelayData;
};
#endif // INPUTCONVERTGAME_H
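// Usage sketch (illustrative only): one way a caller could wire the converter. The
// Controller instance and the JSON source are assumptions; only the constructor,
// loadKeyMap() and the event entry points come from the class above. Shown here for
// reading convenience; in a real header it would live inside the include guard.
inline InputConvertGame *createGameConverter(Controller *controller, const QString &keyMapJson)
{
    InputConvertGame *convert = new InputConvertGame(controller);
    convert->loadKeyMap(keyMapJson); // parse the user's keymap script
    // the render widget then forwards input, e.g.:
    // convert->keyEvent(keyEvent, frameSize, showSize);
    return convert;
}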

View file

@ -1,423 +0,0 @@
#include <cmath>
#include <QDebug>
#include "inputconvertnormal.h"
#include "controller.h"
InputConvertNormal::InputConvertNormal(Controller *controller) : InputConvertBase(controller) {}
InputConvertNormal::~InputConvertNormal() {}
void InputConvertNormal::mouseEvent(const QMouseEvent *from, const QSize &frameSize, const QSize &showSize)
{
if (!from) {
return;
}
// action
AndroidMotioneventAction action;
switch (from->type()) {
case QEvent::MouseButtonPress:
action = AMOTION_EVENT_ACTION_DOWN;
break;
case QEvent::MouseButtonRelease:
action = AMOTION_EVENT_ACTION_UP;
break;
case QEvent::MouseMove:
// only support left button drag
if (!(from->buttons() & Qt::LeftButton)) {
return;
}
action = AMOTION_EVENT_ACTION_MOVE;
break;
default:
return;
}
// pos
QPointF pos = from->localPos();
// convert pos
pos.setX(pos.x() * frameSize.width() / showSize.width());
pos.setY(pos.y() * frameSize.height() / showSize.height());
// set data
ControlMsg *controlMsg = new ControlMsg(ControlMsg::CMT_INJECT_TOUCH);
if (!controlMsg) {
return;
}
controlMsg->setInjectTouchMsgData(
static_cast<quint64>(POINTER_ID_MOUSE), action,
convertMouseButtons(from->buttons()),
QRect(pos.toPoint(), frameSize),
AMOTION_EVENT_ACTION_DOWN == action ? 1.0f : 0.0f);
sendControlMsg(controlMsg);
}
void InputConvertNormal::wheelEvent(const QWheelEvent *from, const QSize &frameSize, const QSize &showSize)
{
if (!from || from->angleDelta().isNull()) {
return;
}
// delta
qint32 hScroll = from->angleDelta().x() == 0 ? 0 : from->angleDelta().x() / abs(from->angleDelta().x()) * 2;
qint32 vScroll = from->angleDelta().y() == 0 ? 0 : from->angleDelta().y() / abs(from->angleDelta().y()) * 2;
// pos
#if QT_VERSION >= QT_VERSION_CHECK(5, 15, 0)
QPointF pos = from->position();
#else
QPointF pos = from->posF();
#endif
// convert pos
pos.setX(pos.x() * frameSize.width() / showSize.width());
pos.setY(pos.y() * frameSize.height() / showSize.height());
// set data
ControlMsg *controlMsg = new ControlMsg(ControlMsg::CMT_INJECT_SCROLL);
if (!controlMsg) {
return;
}
controlMsg->setInjectScrollMsgData(QRect(pos.toPoint(), frameSize), hScroll, vScroll);
sendControlMsg(controlMsg);
}
void InputConvertNormal::keyEvent(const QKeyEvent *from, const QSize &frameSize, const QSize &showSize)
{
Q_UNUSED(frameSize)
Q_UNUSED(showSize)
if (!from) {
return;
}
bool repeat = from->isAutoRepeat();
// action
AndroidKeyeventAction action;
switch (from->type()) {
case QEvent::KeyPress:
action = AKEY_EVENT_ACTION_DOWN;
break;
case QEvent::KeyRelease:
action = AKEY_EVENT_ACTION_UP;
break;
default:
return;
}
// key code
AndroidKeycode keyCode = convertKeyCode(from->key(), from->modifiers());
if (AKEYCODE_UNKNOWN == keyCode) {
return;
}
// set data
ControlMsg *controlMsg = new ControlMsg(ControlMsg::CMT_INJECT_KEYCODE);
if (!controlMsg) {
return;
}
if (repeat) {
m_repeat++;
} else {
m_repeat = 0;
}
controlMsg->setInjectKeycodeMsgData(action, keyCode, m_repeat, convertMetastate(from->modifiers()));
sendControlMsg(controlMsg);
}
AndroidMotioneventButtons InputConvertNormal::convertMouseButtons(Qt::MouseButtons buttonState)
{
quint32 buttons = 0;
if (buttonState & Qt::LeftButton) {
buttons |= AMOTION_EVENT_BUTTON_PRIMARY;
}
if (buttonState & Qt::RightButton) {
buttons |= AMOTION_EVENT_BUTTON_SECONDARY;
}
#if (QT_VERSION >= QT_VERSION_CHECK(5, 15, 0))
if (buttonState & Qt::MiddleButton) {
#else
if (buttonState & Qt::MidButton) {
#endif
buttons |= AMOTION_EVENT_BUTTON_TERTIARY;
}
if (buttonState & Qt::XButton1) {
buttons |= AMOTION_EVENT_BUTTON_BACK;
}
if (buttonState & Qt::XButton2) {
buttons |= AMOTION_EVENT_BUTTON_FORWARD;
}
return static_cast<AndroidMotioneventButtons>(buttons);
}
AndroidKeycode InputConvertNormal::convertKeyCode(int key, Qt::KeyboardModifiers modifiers)
{
AndroidKeycode keyCode = AKEYCODE_UNKNOWN;
// functional keys
switch (key) {
case Qt::Key_Return:
keyCode = AKEYCODE_ENTER;
break;
case Qt::Key_Enter:
keyCode = AKEYCODE_NUMPAD_ENTER;
break;
case Qt::Key_Escape:
keyCode = AKEYCODE_ESCAPE;
break;
case Qt::Key_Backspace:
keyCode = AKEYCODE_DEL;
break;
case Qt::Key_Delete:
keyCode = AKEYCODE_FORWARD_DEL;
break;
case Qt::Key_Tab:
keyCode = AKEYCODE_TAB;
break;
case Qt::Key_Home:
keyCode = AKEYCODE_MOVE_HOME;
break;
case Qt::Key_End:
keyCode = AKEYCODE_MOVE_END;
break;
case Qt::Key_PageUp:
keyCode = AKEYCODE_PAGE_UP;
break;
case Qt::Key_PageDown:
keyCode = AKEYCODE_PAGE_DOWN;
break;
case Qt::Key_Left:
keyCode = AKEYCODE_DPAD_LEFT;
break;
case Qt::Key_Right:
keyCode = AKEYCODE_DPAD_RIGHT;
break;
case Qt::Key_Up:
keyCode = AKEYCODE_DPAD_UP;
break;
case Qt::Key_Down:
keyCode = AKEYCODE_DPAD_DOWN;
break;
}
if (AKEYCODE_UNKNOWN != keyCode) {
return keyCode;
}
// if ALT or META is pressed, don't handle letters and space
if (modifiers & (Qt::AltModifier | Qt::MetaModifier)) {
return keyCode;
}
// character keys
switch (key) {
case Qt::Key_A:
keyCode = AKEYCODE_A;
break;
case Qt::Key_B:
keyCode = AKEYCODE_B;
break;
case Qt::Key_C:
keyCode = AKEYCODE_C;
break;
case Qt::Key_D:
keyCode = AKEYCODE_D;
break;
case Qt::Key_E:
keyCode = AKEYCODE_E;
break;
case Qt::Key_F:
keyCode = AKEYCODE_F;
break;
case Qt::Key_G:
keyCode = AKEYCODE_G;
break;
case Qt::Key_H:
keyCode = AKEYCODE_H;
break;
case Qt::Key_I:
keyCode = AKEYCODE_I;
break;
case Qt::Key_J:
keyCode = AKEYCODE_J;
break;
case Qt::Key_K:
keyCode = AKEYCODE_K;
break;
case Qt::Key_L:
keyCode = AKEYCODE_L;
break;
case Qt::Key_M:
keyCode = AKEYCODE_M;
break;
case Qt::Key_N:
keyCode = AKEYCODE_N;
break;
case Qt::Key_O:
keyCode = AKEYCODE_O;
break;
case Qt::Key_P:
keyCode = AKEYCODE_P;
break;
case Qt::Key_Q:
keyCode = AKEYCODE_Q;
break;
case Qt::Key_R:
keyCode = AKEYCODE_R;
break;
case Qt::Key_S:
keyCode = AKEYCODE_S;
break;
case Qt::Key_T:
keyCode = AKEYCODE_T;
break;
case Qt::Key_U:
keyCode = AKEYCODE_U;
break;
case Qt::Key_V:
keyCode = AKEYCODE_V;
break;
case Qt::Key_W:
keyCode = AKEYCODE_W;
break;
case Qt::Key_X:
keyCode = AKEYCODE_X;
break;
case Qt::Key_Y:
keyCode = AKEYCODE_Y;
break;
case Qt::Key_Z:
keyCode = AKEYCODE_Z;
break;
case Qt::Key_0:
keyCode = AKEYCODE_0;
break;
case Qt::Key_1:
case Qt::Key_Exclam: // !
keyCode = AKEYCODE_1;
break;
case Qt::Key_2:
keyCode = AKEYCODE_2;
break;
case Qt::Key_3:
keyCode = AKEYCODE_3;
break;
case Qt::Key_4:
case Qt::Key_Dollar: //$
keyCode = AKEYCODE_4;
break;
case Qt::Key_5:
case Qt::Key_Percent: // %
keyCode = AKEYCODE_5;
break;
case Qt::Key_6:
case Qt::Key_AsciiCircum: //^
keyCode = AKEYCODE_6;
break;
case Qt::Key_7:
case Qt::Key_Ampersand: //&
keyCode = AKEYCODE_7;
break;
case Qt::Key_8:
keyCode = AKEYCODE_8;
break;
case Qt::Key_9:
keyCode = AKEYCODE_9;
break;
case Qt::Key_Space:
keyCode = AKEYCODE_SPACE;
break;
case Qt::Key_Comma: //,
case Qt::Key_Less: //<
keyCode = AKEYCODE_COMMA;
break;
case Qt::Key_Period: //.
case Qt::Key_Greater: //>
keyCode = AKEYCODE_PERIOD;
break;
case Qt::Key_Minus: //-
case Qt::Key_Underscore: //_
keyCode = AKEYCODE_MINUS;
break;
case Qt::Key_Equal: //=
keyCode = AKEYCODE_EQUALS;
break;
case Qt::Key_BracketLeft: //[
case Qt::Key_BraceLeft: //{
keyCode = AKEYCODE_LEFT_BRACKET;
break;
case Qt::Key_BracketRight: //]
case Qt::Key_BraceRight: //}
keyCode = AKEYCODE_RIGHT_BRACKET;
break;
case Qt::Key_Backslash: // backslash
case Qt::Key_Bar: //|
keyCode = AKEYCODE_BACKSLASH;
break;
case Qt::Key_Semicolon: //;
case Qt::Key_Colon: //:
keyCode = AKEYCODE_SEMICOLON;
break;
case Qt::Key_Apostrophe: //'
case Qt::Key_QuoteDbl: //"
keyCode = AKEYCODE_APOSTROPHE;
break;
case Qt::Key_Slash: // /
case Qt::Key_Question: //?
keyCode = AKEYCODE_SLASH;
break;
case Qt::Key_At: //@
keyCode = AKEYCODE_AT;
break;
case Qt::Key_Plus: //+
keyCode = AKEYCODE_PLUS;
break;
case Qt::Key_QuoteLeft: //`
case Qt::Key_AsciiTilde: //~
keyCode = AKEYCODE_GRAVE;
break;
case Qt::Key_NumberSign: //#
keyCode = AKEYCODE_POUND;
break;
case Qt::Key_ParenLeft: //(
keyCode = AKEYCODE_NUMPAD_LEFT_PAREN;
break;
case Qt::Key_ParenRight: //)
keyCode = AKEYCODE_NUMPAD_RIGHT_PAREN;
break;
case Qt::Key_Asterisk: //*
keyCode = AKEYCODE_STAR;
break;
}
return keyCode;
}
AndroidMetastate InputConvertNormal::convertMetastate(Qt::KeyboardModifiers modifiers)
{
int metastate = AMETA_NONE;
if (modifiers & Qt::ShiftModifier) {
metastate |= AMETA_SHIFT_ON;
}
if (modifiers & Qt::ControlModifier) {
metastate |= AMETA_CTRL_ON;
}
if (modifiers & Qt::AltModifier) {
metastate |= AMETA_ALT_ON;
}
if (modifiers & Qt::MetaModifier) {
metastate |= AMETA_META_ON;
}
/*
if (mod & KMOD_NUM) {
metastate |= AMETA_NUM_LOCK_ON;
}
if (mod & KMOD_CAPS) {
metastate |= AMETA_CAPS_LOCK_ON;
}
if (mod & KMOD_MODE) { // Alt Gr
// no mapping?
}
*/
return static_cast<AndroidMetastate>(metastate);
}
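// Usage sketch (illustrative only): pushing a synthetic Ctrl+C press/release through the
// converter above. The surrounding Controller wiring is assumed to exist; QKeyEvent
// construction uses the standard Qt API.
static void sendCtrlC(InputConvertNormal &convert, const QSize &frameSize, const QSize &showSize)
{
    QKeyEvent press(QEvent::KeyPress, Qt::Key_C, Qt::ControlModifier);
    convert.keyEvent(&press, frameSize, showSize);   // injected as AKEYCODE_C with AMETA_CTRL_ON
    QKeyEvent release(QEvent::KeyRelease, Qt::Key_C, Qt::ControlModifier);
    convert.keyEvent(&release, frameSize, showSize); // key up carries the same metastate
}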

View file

@ -1,23 +0,0 @@
#ifndef INPUTCONVERT_H
#define INPUTCONVERT_H
#include "inputconvertbase.h"
class InputConvertNormal : public InputConvertBase
{
Q_OBJECT
public:
InputConvertNormal(Controller *controller);
virtual ~InputConvertNormal();
virtual void mouseEvent(const QMouseEvent *from, const QSize &frameSize, const QSize &showSize);
virtual void wheelEvent(const QWheelEvent *from, const QSize &frameSize, const QSize &showSize);
virtual void keyEvent(const QKeyEvent *from, const QSize &frameSize, const QSize &showSize);
private:
AndroidMotioneventButtons convertMouseButtons(Qt::MouseButtons buttonState);
AndroidKeycode convertKeyCode(int key, Qt::KeyboardModifiers modifiers);
AndroidMetastate convertMetastate(Qt::KeyboardModifiers modifiers);
};
#endif // INPUTCONVERT_H

View file

@ -1,519 +0,0 @@
#include <QCoreApplication>
#include <QDebug>
#include <QFile>
#include <QFileInfo>
#include <QJsonArray>
#include <QJsonDocument>
#include <QMetaEnum>
#include "keymap.h"
KeyMap::KeyMap(QObject *parent) : QObject(parent) {}
KeyMap::~KeyMap() {}
void KeyMap::loadKeyMap(const QString &json)
{
QString errorString;
QJsonParseError jsonError;
QJsonDocument jsonDoc;
QJsonObject rootObj;
QPair<ActionType, int> switchKey;
jsonDoc = QJsonDocument::fromJson(json.toUtf8(), &jsonError);
if (jsonError.error != QJsonParseError::NoError) {
errorString = QString("json error: %1").arg(jsonError.errorString());
goto parseError;
}
// switchKey
rootObj = jsonDoc.object();
if (!checkItemString(rootObj, "switchKey")) {
errorString = QString("json error: no find switchKey");
goto parseError;
}
switchKey = getItemKey(rootObj, "switchKey");
if (switchKey.first == AT_INVALID) {
errorString = QString("json error: switchKey invalid");
goto parseError;
}
m_switchKey.type = switchKey.first;
m_switchKey.key = switchKey.second;
// mouseMoveMap
if (checkItemObject(rootObj, "mouseMoveMap")) {
QJsonObject mouseMoveMap = getItemObject(rootObj, "mouseMoveMap");
KeyMapNode keyMapNode;
keyMapNode.type = KMT_MOUSE_MOVE;
bool have_speedRatio = false;
// General speedRatio (for backwards compatibility)
if (checkItemDouble(mouseMoveMap, "speedRatio")) {
float ratio = static_cast<float>(getItemDouble(mouseMoveMap, "speedRatio"));
keyMapNode.data.mouseMove.speedRatio.setX(ratio);
keyMapNode.data.mouseMove.speedRatio.setY(ratio / 2.25f); // derive Y from the single ratio; phone screens are often tall FHD+ panels, hence the 2.25 divisor
have_speedRatio = true;
}
// Individual X Ratio
if (checkItemDouble(mouseMoveMap, "speedRatioX")) {
keyMapNode.data.mouseMove.speedRatio.setX(static_cast<float>(getItemDouble(mouseMoveMap, "speedRatioX")));
have_speedRatio = true;
}
// Individual Y Ratio
if (checkItemDouble(mouseMoveMap, "speedRatioY")) {
keyMapNode.data.mouseMove.speedRatio.setY(static_cast<float>(getItemDouble(mouseMoveMap, "speedRatioY")));
have_speedRatio = true;
}
if (!have_speedRatio) {
errorString = QString("json error: speedRatio setting is missing in mouseMoveMap!");
goto parseError;
}
// Sanity check: neither ratio may be lower than 0.001
if ((keyMapNode.data.mouseMove.speedRatio.x() < 0.001f) || (keyMapNode.data.mouseMove.speedRatio.y() < 0.001f)) {
errorString = QString("json error: Minimum speedRatio is 0.001");
goto parseError;
}
if (!checkItemObject(mouseMoveMap, "startPos")) {
errorString = QString("json error: mouseMoveMap on find startPos");
goto parseError;
}
QJsonObject startPos = mouseMoveMap.value("startPos").toObject();
if (checkItemDouble(startPos, "x")) {
keyMapNode.data.mouseMove.startPos.setX(getItemDouble(startPos, "x"));
}
if (checkItemDouble(startPos, "y")) {
keyMapNode.data.mouseMove.startPos.setY(getItemDouble(startPos, "y"));
}
// small eyes
if (checkItemObject(mouseMoveMap, "smallEyes")) {
QJsonObject smallEyes = mouseMoveMap.value("smallEyes").toObject();
if (!smallEyes.contains("type") || !smallEyes.value("type").isString()) {
errorString = QString("json error: smallEyes no find node type");
goto parseError;
}
// type just support KMT_CLICK
KeyMap::KeyMapType type = getItemKeyMapType(smallEyes, "type");
if (KeyMap::KMT_CLICK != type) {
errorString = QString("json error: smallEyes just support KMT_CLICK");
goto parseError;
}
// safe check
if (!checkForClick(smallEyes)) {
errorString = QString("json error: smallEyes node format error");
goto parseError;
}
QPair<ActionType, int> key = getItemKey(smallEyes, "key");
if (key.first == AT_INVALID) {
errorString = QString("json error: keyMapNodes node invalid key: %1").arg(smallEyes.value("key").toString());
goto parseError;
}
keyMapNode.data.mouseMove.smallEyes.type = key.first;
keyMapNode.data.mouseMove.smallEyes.key = key.second;
keyMapNode.data.mouseMove.smallEyes.pos = getItemPos(smallEyes, "pos");
}
m_idxMouseMove = m_keyMapNodes.size();
m_keyMapNodes.push_back(keyMapNode);
}
// keyMapNodes
if (rootObj.contains("keyMapNodes") && rootObj.value("keyMapNodes").isArray()) {
QJsonArray keyMapNodes = rootObj.value("keyMapNodes").toArray();
QJsonObject node;
int size = keyMapNodes.size();
for (int i = 0; i < size; i++) {
if (!keyMapNodes.at(i).isObject()) {
errorString = QString("json error: keyMapNodes node must be json object");
goto parseError;
}
node = keyMapNodes.at(i).toObject();
if (!node.contains("type") || !node.value("type").isString()) {
errorString = QString("json error: keyMapNodes no find node type");
goto parseError;
}
KeyMap::KeyMapType type = getItemKeyMapType(node, "type");
switch (type) {
case KeyMap::KMT_CLICK: {
// safe check
if (!checkForClick(node)) {
qWarning() << "json error: keyMapNodes node format error";
break;
}
QPair<ActionType, int> key = getItemKey(node, "key");
if (key.first == AT_INVALID) {
qWarning() << "json error: keyMapNodes node invalid key: " << node.value("key").toString();
break;
}
KeyMapNode keyMapNode;
keyMapNode.type = type;
keyMapNode.data.click.keyNode.type = key.first;
keyMapNode.data.click.keyNode.key = key.second;
keyMapNode.data.click.keyNode.pos = getItemPos(node, "pos");
keyMapNode.data.click.switchMap = getItemBool(node, "switchMap");
m_keyMapNodes.push_back(keyMapNode);
} break;
case KeyMap::KMT_CLICK_TWICE: {
// safe check
if (!checkForClickTwice(node)) {
qWarning() << "json error: keyMapNodes node format error";
break;
}
QPair<ActionType, int> key = getItemKey(node, "key");
if (key.first == AT_INVALID) {
qWarning() << "json error: keyMapNodes node invalid key: " << node.value("key").toString();
break;
}
KeyMapNode keyMapNode;
keyMapNode.type = type;
keyMapNode.data.click.keyNode.type = key.first;
keyMapNode.data.click.keyNode.key = key.second;
keyMapNode.data.click.keyNode.pos = getItemPos(node, "pos");
keyMapNode.data.click.switchMap = getItemBool(node, "switchMap");
m_keyMapNodes.push_back(keyMapNode);
} break;
case KeyMap::KMT_CLICK_MULTI: {
// safe check
if (!checkForClickMulti(node)) {
qWarning() << "json error: keyMapNodes node format error";
break;
}
QPair<ActionType, int> key = getItemKey(node, "key");
if (key.first == AT_INVALID) {
qWarning() << "json error: keyMapNodes node invalid key: " << node.value("key").toString();
break;
}
KeyMapNode keyMapNode;
keyMapNode.type = type;
keyMapNode.data.clickMulti.keyNode.type = key.first;
keyMapNode.data.clickMulti.keyNode.key = key.second;
QJsonArray clickNodes = node.value("clickNodes").toArray();
QJsonObject clickNode;
keyMapNode.data.clickMulti.keyNode.delayClickNodesCount = 0;
for (int i = 0; i < clickNodes.size(); i++) {
if (i >= MAX_DELAY_CLICK_NODES) {
qInfo() << "clickNodes too much, up to " << MAX_DELAY_CLICK_NODES;
break;
}
clickNode = clickNodes.at(i).toObject();
DelayClickNode delayClickNode;
delayClickNode.delay = getItemDouble(clickNode, "delay");
delayClickNode.pos = getItemPos(clickNode, "pos");
keyMapNode.data.clickMulti.keyNode.delayClickNodes[i] = delayClickNode;
keyMapNode.data.clickMulti.keyNode.delayClickNodesCount++;
}
m_keyMapNodes.push_back(keyMapNode);
} break;
case KeyMap::KMT_STEER_WHEEL: {
// safe check
if (!checkForSteerWhell(node)) {
qWarning() << "json error: keyMapNodes node format error";
break;
}
QPair<ActionType, int> leftKey = getItemKey(node, "leftKey");
QPair<ActionType, int> rightKey = getItemKey(node, "rightKey");
QPair<ActionType, int> upKey = getItemKey(node, "upKey");
QPair<ActionType, int> downKey = getItemKey(node, "downKey");
if (leftKey.first == AT_INVALID || rightKey.first == AT_INVALID || upKey.first == AT_INVALID || downKey.first == AT_INVALID) {
if (leftKey.first == AT_INVALID) {
qWarning() << "json error: keyMapNodes node invalid key: " << node.value("leftKey").toString();
}
if (rightKey.first == AT_INVALID) {
qWarning() << "json error: keyMapNodes node invalid key: " << node.value("rightKey").toString();
}
if (upKey.first == AT_INVALID) {
qWarning() << "json error: keyMapNodes node invalid key: " << node.value("upKey").toString();
}
if (downKey.first == AT_INVALID) {
qWarning() << "json error: keyMapNodes node invalid key: " << node.value("downKey").toString();
}
break;
}
KeyMapNode keyMapNode;
keyMapNode.type = type;
keyMapNode.data.steerWheel.left = { leftKey.first, leftKey.second, QPointF(0, 0), QPointF(0, 0), getItemDouble(node, "leftOffset") };
keyMapNode.data.steerWheel.right = { rightKey.first, rightKey.second, QPointF(0, 0), QPointF(0, 0), getItemDouble(node, "rightOffset") };
keyMapNode.data.steerWheel.up = { upKey.first, upKey.second, QPointF(0, 0), QPointF(0, 0), getItemDouble(node, "upOffset") };
keyMapNode.data.steerWheel.down = { downKey.first, downKey.second, QPointF(0, 0), QPointF(0, 0), getItemDouble(node, "downOffset") };
keyMapNode.data.steerWheel.centerPos = getItemPos(node, "centerPos");
m_idxSteerWheel = m_keyMapNodes.size();
m_keyMapNodes.push_back(keyMapNode);
} break;
case KeyMap::KMT_DRAG: {
// safe check
if (!checkForDrag(node)) {
qWarning() << "json error: keyMapNodes node format error";
break;
}
QPair<ActionType, int> key = getItemKey(node, "key");
if (key.first == AT_INVALID) {
qWarning() << "json error: keyMapNodes node invalid key: " << node.value("key").toString();
break;
}
KeyMapNode keyMapNode;
keyMapNode.type = type;
keyMapNode.data.drag.keyNode.type = key.first;
keyMapNode.data.drag.keyNode.key = key.second;
keyMapNode.data.drag.keyNode.pos = getItemPos(node, "startPos");
keyMapNode.data.drag.keyNode.extendPos = getItemPos(node, "endPos");
m_keyMapNodes.push_back(keyMapNode);
break;
}
default:
qWarning() << "json error: keyMapNodes invalid node type:" << node.value("type").toString();
break;
}
}
}
// this must be called after m_keyMapNodes is stable
makeReverseMap();
qInfo() << "Script updated, current keymap mode:normal, Press ~ key to switch keymap mode";
parseError:
if (!errorString.isEmpty()) {
qWarning() << errorString;
}
return;
}
const KeyMap::KeyMapNode &KeyMap::getKeyMapNode(int key)
{
auto p = m_rmapKey.value(key, &m_invalidNode);
if (p == &m_invalidNode) {
return *m_rmapMouse.value(key, &m_invalidNode);
}
return *p;
}
const KeyMap::KeyMapNode &KeyMap::getKeyMapNodeKey(int key)
{
return *m_rmapKey.value(key, &m_invalidNode);
}
const KeyMap::KeyMapNode &KeyMap::getKeyMapNodeMouse(int key)
{
return *m_rmapMouse.value(key, &m_invalidNode);
}
bool KeyMap::isSwitchOnKeyboard()
{
return m_switchKey.type == AT_KEY;
}
int KeyMap::getSwitchKey()
{
return m_switchKey.key;
}
const KeyMap::KeyMapNode &KeyMap::getMouseMoveMap()
{
return m_keyMapNodes[m_idxMouseMove];
}
bool KeyMap::isValidMouseMoveMap()
{
return m_idxMouseMove != -1;
}
bool KeyMap::isValidSteerWheelMap()
{
return m_idxSteerWheel != -1;
}
void KeyMap::makeReverseMap()
{
m_rmapKey.clear();
m_rmapMouse.clear();
for (int i = 0; i < m_keyMapNodes.size(); ++i) {
auto &node = m_keyMapNodes[i];
switch (node.type) {
case KMT_CLICK: {
QMultiHash<int, KeyMapNode *> &m = node.data.click.keyNode.type == AT_KEY ? m_rmapKey : m_rmapMouse;
m.insert(node.data.click.keyNode.key, &node);
} break;
case KMT_CLICK_TWICE: {
QMultiHash<int, KeyMapNode *> &m = node.data.clickTwice.keyNode.type == AT_KEY ? m_rmapKey : m_rmapMouse;
m.insert(node.data.clickTwice.keyNode.key, &node);
} break;
case KMT_CLICK_MULTI: {
QMultiHash<int, KeyMapNode *> &m = node.data.clickMulti.keyNode.type == AT_KEY ? m_rmapKey : m_rmapMouse;
m.insert(node.data.clickMulti.keyNode.key, &node);
} break;
case KMT_STEER_WHEEL: {
QMultiHash<int, KeyMapNode *> &ml = node.data.steerWheel.left.type == AT_KEY ? m_rmapKey : m_rmapMouse;
ml.insert(node.data.steerWheel.left.key, &node);
QMultiHash<int, KeyMapNode *> &mr = node.data.steerWheel.right.type == AT_KEY ? m_rmapKey : m_rmapMouse;
mr.insert(node.data.steerWheel.right.key, &node);
QMultiHash<int, KeyMapNode *> &mu = node.data.steerWheel.up.type == AT_KEY ? m_rmapKey : m_rmapMouse;
mu.insert(node.data.steerWheel.up.key, &node);
QMultiHash<int, KeyMapNode *> &md = node.data.steerWheel.down.type == AT_KEY ? m_rmapKey : m_rmapMouse;
md.insert(node.data.steerWheel.down.key, &node);
} break;
case KMT_DRAG: {
QMultiHash<int, KeyMapNode *> &m = node.data.drag.keyNode.type == AT_KEY ? m_rmapKey : m_rmapMouse;
m.insert(node.data.drag.keyNode.key, &node);
} break;
default:
break;
}
}
}
QString KeyMap::getItemString(const QJsonObject &node, const QString &name)
{
return node.value(name).toString();
}
double KeyMap::getItemDouble(const QJsonObject &node, const QString &name)
{
return node.value(name).toDouble();
}
bool KeyMap::getItemBool(const QJsonObject &node, const QString &name)
{
return node.value(name).toBool(false);
}
QJsonObject KeyMap::getItemObject(const QJsonObject &node, const QString &name)
{
return node.value(name).toObject();
}
QPointF KeyMap::getItemPos(const QJsonObject &node, const QString &name)
{
QJsonObject pos = node.value(name).toObject();
return QPointF(pos.value("x").toDouble(), pos.value("y").toDouble());
}
QPair<KeyMap::ActionType, int> KeyMap::getItemKey(const QJsonObject &node, const QString &name)
{
QString value = getItemString(node, name);
int key = m_metaEnumKey.keyToValue(value.toStdString().c_str());
int btn = m_metaEnumMouseButtons.keyToValue(value.toStdString().c_str());
if (key == -1 && btn == -1) {
return { AT_INVALID, -1 };
} else if (key != -1) {
return { AT_KEY, key };
} else {
return { AT_MOUSE, btn };
}
}
KeyMap::KeyMapType KeyMap::getItemKeyMapType(const QJsonObject &node, const QString &name)
{
QString value = getItemString(node, name);
return static_cast<KeyMap::KeyMapType>(m_metaEnumKeyMapType.keyToValue(value.toStdString().c_str()));
}
bool KeyMap::checkItemString(const QJsonObject &node, const QString &name)
{
return node.contains(name) && node.value(name).isString();
}
bool KeyMap::checkItemDouble(const QJsonObject &node, const QString &name)
{
return node.contains(name) && node.value(name).isDouble();
}
bool KeyMap::checkItemBool(const QJsonObject &node, const QString &name)
{
return node.contains(name) && node.value(name).isBool();
}
bool KeyMap::checkItemObject(const QJsonObject &node, const QString &name)
{
return node.contains(name) && node.value(name).isObject();
}
bool KeyMap::checkItemPos(const QJsonObject &node, const QString &name)
{
if (node.contains(name) && node.value(name).isObject()) {
QJsonObject pos = node.value(name).toObject();
return pos.contains("x") && pos.value("x").isDouble() && pos.contains("y") && pos.value("y").isDouble();
}
return false;
}
bool KeyMap::checkForClick(const QJsonObject &node)
{
return checkForClickTwice(node) && checkItemBool(node, "switchMap");
}
bool KeyMap::checkForClickMulti(const QJsonObject &node)
{
bool ret = true;
if (!node.contains("clickNodes") || !node.value("clickNodes").isArray()) {
qWarning("json error: no find clickNodes");
return false;
}
QJsonArray clickNodes = node.value("clickNodes").toArray();
QJsonObject clickNode;
int size = clickNodes.size();
if (0 == size) {
qWarning("json error: clickNodes is empty");
return false;
}
for (int i = 0; i < size; i++) {
if (!clickNodes.at(i).isObject()) {
qWarning("json error: clickNodes node must be json object");
ret = false;
break;
}
clickNode = clickNodes.at(i).toObject();
if (!checkForDelayClickNode(clickNode)) {
ret = false;
break;
}
}
return ret;
}
bool KeyMap::checkForDelayClickNode(const QJsonObject &node)
{
return checkItemPos(node, "pos") && checkItemDouble(node, "delay");
}
bool KeyMap::checkForClickTwice(const QJsonObject &node)
{
return checkItemString(node, "key") && checkItemPos(node, "pos");
}
bool KeyMap::checkForSteerWhell(const QJsonObject &node)
{
return checkItemString(node, "leftKey") && checkItemString(node, "rightKey") && checkItemString(node, "upKey") && checkItemString(node, "downKey")
&& checkItemDouble(node, "leftOffset") && checkItemDouble(node, "rightOffset") && checkItemDouble(node, "upOffset")
&& checkItemDouble(node, "downOffset") && checkItemPos(node, "centerPos");
}
bool KeyMap::checkForDrag(const QJsonObject &node)
{
return checkItemString(node, "key") && checkItemPos(node, "startPos") && checkItemPos(node, "endPos");
}
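// Example keymap script (illustrative only): a minimal JSON document accepted by
// loadKeyMap() above. The field names follow the parser; the chosen keys and positions
// are placeholder values, not project defaults.
static const char *s_exampleKeyMap = R"json({
    "switchKey": "Key_QuoteLeft",
    "mouseMoveMap": {
        "speedRatioX": 3.25,
        "speedRatioY": 1.25,
        "startPos": { "x": 0.57, "y": 0.26 }
    },
    "keyMapNodes": [
        { "type": "KMT_CLICK", "key": "Key_Space", "pos": { "x": 0.7, "y": 0.95 }, "switchMap": false },
        { "type": "KMT_STEER_WHEEL", "centerPos": { "x": 0.16, "y": 0.75 },
          "leftKey": "Key_A", "rightKey": "Key_D", "upKey": "Key_W", "downKey": "Key_S",
          "leftOffset": 0.1, "rightOffset": 0.1, "upOffset": 0.27, "downOffset": 0.2 },
        { "type": "KMT_DRAG", "key": "Key_M", "startPos": { "x": 0.5, "y": 0.5 }, "endPos": { "x": 0.5, "y": 0.2 } }
    ]
})json";
// Loading it: KeyMap keyMap; keyMap.loadKeyMap(QString::fromUtf8(s_exampleKeyMap));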

View file

@ -1,172 +0,0 @@
#ifndef KEYMAP_H
#define KEYMAP_H
#include <QJsonObject>
#include <QMetaEnum>
#include <QMultiHash>
#include <QObject>
#include <QPair>
#include <QPointF>
#include <QRectF>
#include <QVector>
#define MAX_DELAY_CLICK_NODES 50
class KeyMap : public QObject
{
Q_OBJECT
public:
enum KeyMapType
{
KMT_INVALID = -1,
KMT_CLICK = 0,
KMT_CLICK_TWICE,
KMT_CLICK_MULTI,
KMT_STEER_WHEEL,
KMT_DRAG,
KMT_MOUSE_MOVE
};
Q_ENUM(KeyMapType)
enum ActionType
{
AT_INVALID = -1,
AT_KEY = 0,
AT_MOUSE = 1,
};
Q_ENUM(ActionType)
struct DelayClickNode
{
int delay = 0;
QPointF pos = QPointF(0, 0);
};
struct KeyNode
{
ActionType type = AT_INVALID;
int key = Qt::Key_unknown;
QPointF pos = QPointF(0, 0); // normal key
QPointF extendPos = QPointF(0, 0); // for drag
double extendOffset = 0.0; // for steerWheel
DelayClickNode delayClickNodes[MAX_DELAY_CLICK_NODES]; // for multi clicks
int delayClickNodesCount = 0;
KeyNode(
ActionType type = AT_INVALID,
int key = Qt::Key_unknown,
QPointF pos = QPointF(0, 0),
QPointF extendPos = QPointF(0, 0),
double extendOffset = 0.0)
: type(type), key(key), pos(pos), extendPos(extendPos), extendOffset(extendOffset)
{
}
};
struct KeyMapNode
{
KeyMapType type = KMT_INVALID;
union DATA
{
struct
{
KeyNode keyNode;
bool switchMap = false;
} click;
struct
{
KeyNode keyNode;
} clickTwice;
struct
{
KeyNode keyNode;
} clickMulti;
struct
{
QPointF centerPos = { 0.0, 0.0 };
KeyNode left, right, up, down;
} steerWheel;
struct
{
KeyNode keyNode;
} drag;
struct
{
QPointF startPos = { 0.0, 0.0 };
QPointF speedRatio = { 1.0, 1.0 };
KeyNode smallEyes;
} mouseMove;
DATA() {}
~DATA() {}
} data;
KeyMapNode() {}
~KeyMapNode() {}
};
KeyMap(QObject *parent = Q_NULLPTR);
virtual ~KeyMap();
void loadKeyMap(const QString &json);
const KeyMap::KeyMapNode &getKeyMapNode(int key);
const KeyMap::KeyMapNode &getKeyMapNodeKey(int key);
const KeyMap::KeyMapNode &getKeyMapNodeMouse(int key);
bool isSwitchOnKeyboard();
int getSwitchKey();
bool isValidMouseMoveMap();
bool isValidSteerWheelMap();
const KeyMap::KeyMapNode &getMouseMoveMap();
private:
// set up the reverse map from key/mouse event to keyMapNode
void makeReverseMap();
// safe check for base
bool checkItemString(const QJsonObject &node, const QString &name);
bool checkItemDouble(const QJsonObject &node, const QString &name);
bool checkItemBool(const QJsonObject &node, const QString &name);
bool checkItemObject(const QJsonObject &node, const QString &name);
bool checkItemPos(const QJsonObject &node, const QString &name);
// safe check for KeyMapNode
bool checkForClick(const QJsonObject &node);
bool checkForClickMulti(const QJsonObject &node);
bool checkForDelayClickNode(const QJsonObject &node);
bool checkForClickTwice(const QJsonObject &node);
bool checkForSteerWhell(const QJsonObject &node);
bool checkForDrag(const QJsonObject &node);
// get keymap from json object
QString getItemString(const QJsonObject &node, const QString &name);
double getItemDouble(const QJsonObject &node, const QString &name);
bool getItemBool(const QJsonObject &node, const QString &name);
QJsonObject getItemObject(const QJsonObject &node, const QString &name);
QPointF getItemPos(const QJsonObject &node, const QString &name);
QPair<ActionType, int> getItemKey(const QJsonObject &node, const QString &name);
KeyMapType getItemKeyMapType(const QJsonObject &node, const QString &name);
private:
static QString s_keyMapPath;
QVector<KeyMapNode> m_keyMapNodes;
KeyNode m_switchKey = { AT_KEY, Qt::Key_QuoteLeft };
// placeholder returned when a lookup finds no node
KeyMapNode m_invalidNode;
// steer wheel index
int m_idxSteerWheel = -1;
// mouse move index
int m_idxMouseMove = -1;
// mapping of key/mouse event name to index
QMetaEnum m_metaEnumKey = QMetaEnum::fromType<Qt::Key>();
QMetaEnum m_metaEnumMouseButtons = QMetaEnum::fromType<Qt::MouseButtons>();
QMetaEnum m_metaEnumKeyMapType = QMetaEnum::fromType<KeyMap::KeyMapType>();
// reverse map of key/mouse event
QMultiHash<int, KeyMapNode *> m_rmapKey;
QMultiHash<int, KeyMapNode *> m_rmapMouse;
};
#endif // KEYMAP_H

View file

@ -1,66 +0,0 @@
#include <QDebug>
#include "bufferutil.h"
#include "devicemsg.h"
DeviceMsg::DeviceMsg(QObject *parent) : QObject(parent) {}
DeviceMsg::~DeviceMsg()
{
if (DMT_GET_CLIPBOARD == m_data.type && Q_NULLPTR != m_data.clipboardMsg.text) {
delete[] m_data.clipboardMsg.text; // allocated with new char[] in deserialize()
m_data.clipboardMsg.text = Q_NULLPTR;
}
}
DeviceMsg::DeviceMsgType DeviceMsg::type()
{
return m_data.type;
}
void DeviceMsg::getClipboardMsgData(QString &text)
{
text = QString::fromUtf8(m_data.clipboardMsg.text);
}
qint32 DeviceMsg::deserialize(QByteArray &byteArray)
{
QBuffer buf(&byteArray);
buf.open(QBuffer::ReadOnly);
qint64 len = buf.size();
char c = 0;
qint32 ret = 0;
if (len < 5) {
// at least type + empty string length
return 0; // not available
}
buf.getChar(&c);
m_data.type = (DeviceMsgType)c;
switch (m_data.type) {
case DMT_GET_CLIPBOARD: {
m_data.clipboardMsg.text = Q_NULLPTR;
quint32 clipboardLen = BufferUtil::read32(buf); // 4-byte length field
if (clipboardLen > len - 5) {
ret = 0; // not available
break;
}
QByteArray text = buf.readAll();
m_data.clipboardMsg.text = new char[text.length() + 1];
memcpy(m_data.clipboardMsg.text, text.data(), text.length());
m_data.clipboardMsg.text[text.length()] = '\0';
ret = 5 + clipboardLen;
break;
}
default:
qWarning("Unsupported device msg type: %d", (int)m_data.type);
ret = -1; // error, we cannot recover
}
buf.close();
return ret;
}
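// Wire-format sketch (illustrative only): deserialize() above expects 1 byte of message
// type, a 4-byte length and the UTF-8 clipboard text. The big-endian length is an
// assumption based on BufferUtil::read32; buildClipboardMsg is a hypothetical helper for
// tests, not part of the protocol code.
#include <QtEndian>
static QByteArray buildClipboardMsg(const QByteArray &utf8Text)
{
    QByteArray msg;
    msg.append(static_cast<char>(DeviceMsg::DMT_GET_CLIPBOARD));   // 1-byte type
    quint32 len = qToBigEndian<quint32>(static_cast<quint32>(utf8Text.size()));
    msg.append(reinterpret_cast<const char *>(&len), sizeof(len)); // 4-byte length
    msg.append(utf8Text);                                          // payload
    return msg; // a DeviceMsg should then consume 5 + text length bytes from this buffer
}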

View file

@ -1,46 +0,0 @@
#ifndef DEVICEMSG_H
#define DEVICEMSG_H
#include <QBuffer>
#define DEVICE_MSG_MAX_SIZE (1 << 18) // 256k
// type: 1 byte; length: 4 bytes
#define DEVICE_MSG_TEXT_MAX_LENGTH (DEVICE_MSG_MAX_SIZE - 5)
class DeviceMsg : public QObject
{
Q_OBJECT
public:
enum DeviceMsgType
{
DMT_NULL = -1,
// values must match the server side
DMT_GET_CLIPBOARD = 0,
};
explicit DeviceMsg(QObject *parent = nullptr);
virtual ~DeviceMsg();
DeviceMsg::DeviceMsgType type();
void getClipboardMsgData(QString &text);
qint32 deserialize(QByteArray &byteArray);
private:
struct DeviceMsgData
{
DeviceMsgType type = DMT_NULL;
union
{
struct
{
char *text = Q_NULLPTR;
} clipboardMsg;
};
DeviceMsgData() {}
~DeviceMsgData() {}
};
DeviceMsgData m_data;
};
#endif // DEVICEMSG_H

View file

@ -1,30 +0,0 @@
#include <QApplication>
#include <QClipboard>
#include "devicemsg.h"
#include "receiver.h"
Receiver::Receiver(QObject *parent) : QObject(parent) {}
Receiver::~Receiver() {}
void Receiver::recvDeviceMsg(DeviceMsg *deviceMsg)
{
switch (deviceMsg->type()) {
case DeviceMsg::DMT_GET_CLIPBOARD: {
qInfo("Device clipboard copied");
QClipboard *board = QApplication::clipboard();
QString text;
deviceMsg->getClipboardMsgData(text);
if (board->text() == text) {
qDebug("Computer clipboard unchanged");
break;
}
board->setText(text);
break;
}
default:
break;
}
}

View file

@ -1,17 +0,0 @@
#ifndef RECEIVER_H
#define RECEIVER_H
#include <QPointer>
class DeviceMsg;
class Receiver : public QObject
{
Q_OBJECT
public:
explicit Receiver(QObject *parent = Q_NULLPTR);
virtual ~Receiver();
void recvDeviceMsg(DeviceMsg *deviceMsg);
};
#endif // RECEIVER_H

View file

@ -1,74 +0,0 @@
#include <QDebug>
#include "avframeconvert.h"
AVFrameConvert::AVFrameConvert() {}
AVFrameConvert::~AVFrameConvert() {}
void AVFrameConvert::setSrcFrameInfo(int srcWidth, int srcHeight, AVPixelFormat srcFormat)
{
m_srcWidth = srcWidth;
m_srcHeight = srcHeight;
m_srcFormat = srcFormat;
qDebug() << "Convert::src frame info " << srcWidth << "x" << srcHeight;
}
void AVFrameConvert::getSrcFrameInfo(int &srcWidth, int &srcHeight, AVPixelFormat &srcFormat)
{
srcWidth = m_srcWidth;
srcHeight = m_srcHeight;
srcFormat = m_srcFormat;
}
void AVFrameConvert::setDstFrameInfo(int dstWidth, int dstHeight, AVPixelFormat dstFormat)
{
m_dstWidth = dstWidth;
m_dstHeight = dstHeight;
m_dstFormat = dstFormat;
}
void AVFrameConvert::getDstFrameInfo(int &dstWidth, int &dstHeight, AVPixelFormat &dstFormat)
{
dstWidth = m_dstWidth;
dstHeight = m_dstHeight;
dstFormat = m_dstFormat;
}
bool AVFrameConvert::init()
{
if (m_convertCtx) {
return true;
}
m_convertCtx = sws_getContext(m_srcWidth, m_srcHeight, m_srcFormat, m_dstWidth, m_dstHeight, m_dstFormat, SWS_BICUBIC, Q_NULLPTR, Q_NULLPTR, Q_NULLPTR);
if (!m_convertCtx) {
return false;
}
return true;
}
bool AVFrameConvert::isInit()
{
return m_convertCtx ? true : false;
}
void AVFrameConvert::deInit()
{
if (m_convertCtx) {
sws_freeContext(m_convertCtx);
m_convertCtx = Q_NULLPTR;
}
}
bool AVFrameConvert::convert(const AVFrame *srcFrame, AVFrame *dstFrame)
{
if (!m_convertCtx || !srcFrame || !dstFrame) {
return false;
}
qint32 ret
= sws_scale(m_convertCtx, static_cast<const uint8_t *const *>(srcFrame->data), srcFrame->linesize, 0, m_srcHeight, dstFrame->data, dstFrame->linesize);
if (0 == ret) {
return false;
}
return true;
}
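// Usage sketch (illustrative only): converting one decoded YUV420P frame to RGB32,
// mirroring how VideoBuffer::peekRenderedFrame uses this class. The caller must provide
// an rgbBuffer of at least width * height * 4 bytes.
extern "C"
{
#include "libavutil/imgutils.h"
}
static bool yuvFrameToRgb32(const AVFrame *src, uint8_t *rgbBuffer)
{
    AVFrame *dst = av_frame_alloc();
    if (!dst) {
        return false;
    }
    // bind the caller's buffer to the destination frame
    av_image_fill_arrays(dst->data, dst->linesize, rgbBuffer, AV_PIX_FMT_RGB32, src->width, src->height, 4);
    AVFrameConvert convert;
    convert.setSrcFrameInfo(src->width, src->height, AV_PIX_FMT_YUV420P);
    convert.setDstFrameInfo(src->width, src->height, AV_PIX_FMT_RGB32);
    bool ok = convert.init() && convert.convert(src, dst);
    convert.deInit();
    av_frame_free(&dst);
    return ok;
}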

View file

@ -1,40 +0,0 @@
#ifndef AVFRAMECONVERT_H
#define AVFRAMECONVERT_H
#include <QtGlobal>
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavutil/frame.h"
#include "libswscale/swscale.h"
}
class AVFrameConvert
{
public:
AVFrameConvert();
virtual ~AVFrameConvert();
public:
void setSrcFrameInfo(int srcWidth, int srcHeight, AVPixelFormat srcFormat);
void getSrcFrameInfo(int &srcWidth, int &srcHeight, AVPixelFormat &srcFormat);
void setDstFrameInfo(int dstWidth, int dstHeight, AVPixelFormat dstFormat);
void getDstFrameInfo(int &dstWidth, int &dstHeight, AVPixelFormat &dstFormat);
bool init();
bool isInit();
void deInit();
bool convert(const AVFrame *srcFrame, AVFrame *dstFrame);
private:
int m_srcWidth = 0;
int m_srcHeight = 0;
AVPixelFormat m_srcFormat = AV_PIX_FMT_NONE;
int m_dstWidth = 0;
int m_dstHeight = 0;
AVPixelFormat m_dstFormat = AV_PIX_FMT_NONE;
struct SwsContext *m_convertCtx = Q_NULLPTR;
};
#endif // AVFRAMECONVERT_H

View file

@ -1,155 +0,0 @@
#include <QDebug>
#include "compat.h"
#include "decoder.h"
#include "videobuffer.h"
Decoder::Decoder(std::function<void(int, int, uint8_t*, uint8_t*, uint8_t*, int, int, int)> onFrame, QObject *parent)
: QObject(parent)
, m_vb(new VideoBuffer())
, m_onFrame(onFrame)
{
m_vb->init();
connect(this, &Decoder::newFrame, this, &Decoder::onNewFrame, Qt::QueuedConnection);
connect(m_vb, &VideoBuffer::updateFPS, this, &Decoder::updateFPS);
}
Decoder::~Decoder() {
m_vb->deInit();
delete m_vb;
}
bool Decoder::open()
{
// codec
AVCodec *codec = Q_NULLPTR;
codec = avcodec_find_decoder(AV_CODEC_ID_H264);
if (!codec) {
qCritical("H.264 decoder not found");
return false;
}
// codec context
m_codecCtx = avcodec_alloc_context3(codec);
if (!m_codecCtx) {
qCritical("Could not allocate decoder context");
return false;
}
if (avcodec_open2(m_codecCtx, codec, NULL) < 0) {
qCritical("Could not open H.264 codec");
return false;
}
m_isCodecCtxOpen = true;
return true;
}
void Decoder::close()
{
if (m_vb) {
m_vb->interrupt();
}
if (!m_codecCtx) {
return;
}
if (m_isCodecCtxOpen) {
avcodec_close(m_codecCtx);
}
avcodec_free_context(&m_codecCtx);
}
bool Decoder::push(const AVPacket *packet)
{
if (!m_codecCtx || !m_vb) {
return false;
}
AVFrame *decodingFrame = m_vb->decodingFrame();
#ifdef QTSCRCPY_LAVF_HAS_NEW_ENCODING_DECODING_API
int ret = -1;
if ((ret = avcodec_send_packet(m_codecCtx, packet)) < 0) {
char errorbuf[255] = { 0 };
av_strerror(ret, errorbuf, 254);
qCritical("Could not send video packet: %s", errorbuf);
return false;
}
if (decodingFrame) {
ret = avcodec_receive_frame(m_codecCtx, decodingFrame);
}
if (!ret) {
// a frame was received
pushFrame();
//emit getOneFrame(yuvDecoderFrame->data[0], yuvDecoderFrame->data[1], yuvDecoderFrame->data[2],
// yuvDecoderFrame->linesize[0], yuvDecoderFrame->linesize[1], yuvDecoderFrame->linesize[2]);
/*
// m_conver converts YUV to RGB on the CPU, which is too CPU intensive; the YUV is rendered with OpenGL instead
// QImage::copy is also very memory hungry, so that approach was dropped
if (!m_conver.isInit()) {
qDebug() << "decoder frame format" << decodingFrame->format;
m_conver.setSrcFrameInfo(codecCtx->width, codecCtx->height, AV_PIX_FMT_YUV420P);
m_conver.setDstFrameInfo(codecCtx->width, codecCtx->height, AV_PIX_FMT_RGB32);
m_conver.init();
}
if (!outBuffer) {
outBuffer=new quint8[avpicture_get_size(AV_PIX_FMT_RGB32, codecCtx->width, codecCtx->height)];
avpicture_fill((AVPicture *)rgbDecoderFrame, outBuffer, AV_PIX_FMT_RGB32, codecCtx->width, codecCtx->height);
}
m_conver.convert(decodingFrame, rgbDecoderFrame);
//QImage tmpImg((uchar *)outBuffer, codecCtx->width, codecCtx->height, QImage::Format_RGB32);
//QImage image = tmpImg.copy();
//emit getOneImage(image);
*/
} else if (ret != AVERROR(EAGAIN)) {
qCritical("Could not receive video frame: %d", ret);
return false;
}
#else
int gotPicture = 0;
int len = -1;
if (decodingFrame) {
len = avcodec_decode_video2(m_codecCtx, decodingFrame, &gotPicture, packet);
}
if (len < 0) {
qCritical("Could not decode video packet: %d", len);
return false;
}
if (gotPicture) {
pushFrame();
}
#endif
return true;
}
void Decoder::peekFrame(std::function<void (int, int, uint8_t *)> onFrame)
{
if (!m_vb) {
return;
}
m_vb->peekRenderedFrame(onFrame);
}
void Decoder::pushFrame()
{
if (!m_vb) {
return;
}
bool previousFrameSkipped = true;
m_vb->offerDecodedFrame(previousFrameSkipped);
if (previousFrameSkipped) {
// the previous newFrame will consume this frame
return;
}
emit newFrame();
}
void Decoder::onNewFrame() {
if (!m_onFrame) {
return;
}
m_vb->lock();
const AVFrame *frame = m_vb->consumeRenderedFrame();
m_onFrame(frame->width, frame->height, frame->data[0], frame->data[1], frame->data[2], frame->linesize[0], frame->linesize[1], frame->linesize[2]);
m_vb->unLock();
}
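// Usage sketch (illustrative only): the minimal decoder lifecycle, mirroring how Device
// wires this class. The onFrame callback receives the three YUV planes and their line
// sizes from onNewFrame(); what the renderer does with them is assumed.
static Decoder *createDecoder(QObject *parent)
{
    auto decoder = new Decoder([](int width, int height, uint8_t *dataY, uint8_t *dataU, uint8_t *dataV,
                                  int linesizeY, int linesizeU, int linesizeV) {
        Q_UNUSED(dataY) Q_UNUSED(dataU) Q_UNUSED(dataV)
        Q_UNUSED(linesizeY) Q_UNUSED(linesizeU) Q_UNUSED(linesizeV)
        qDebug() << "decoded frame" << width << "x" << height; // hand the planes to the renderer here
    }, parent);
    if (!decoder->open()) { // find and open the H.264 decoder
        delete decoder;
        return Q_NULLPTR;
    }
    return decoder; // feed demuxed AVPacket* data through decoder->push()
}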

View file

@ -1,42 +0,0 @@
#ifndef DECODER_H
#define DECODER_H
#include <QObject>
extern "C"
{
#include "libavcodec/avcodec.h"
}
class VideoBuffer;
class Decoder : public QObject
{
Q_OBJECT
public:
Decoder(std::function<void(int width, int height, uint8_t* dataY, uint8_t* dataU, uint8_t* dataV, int linesizeY, int linesizeU, int linesizeV)> onFrame, QObject *parent = Q_NULLPTR);
virtual ~Decoder();
bool open();
void close();
bool push(const AVPacket *packet);
void peekFrame(std::function<void(int width, int height, uint8_t* dataRGB32)> onFrame);
signals:
void updateFPS(quint32 fps);
private slots:
void onNewFrame();
signals:
void newFrame();
private:
void pushFrame();
private:
VideoBuffer *m_vb = Q_NULLPTR;
AVCodecContext *m_codecCtx = Q_NULLPTR;
bool m_isCodecCtxOpen = false;
std::function<void(int, int, uint8_t*, uint8_t*, uint8_t*, int, int, int)> m_onFrame = Q_NULLPTR;
};
#endif // DECODER_H

View file

@ -1,66 +0,0 @@
#include <QDebug>
#include <QTimerEvent>
#include "fpscounter.h"
FpsCounter::FpsCounter(QObject *parent) : QObject(parent) {}
FpsCounter::~FpsCounter() {}
void FpsCounter::start()
{
resetCounter();
startCounterTimer();
}
void FpsCounter::stop()
{
stopCounterTimer();
resetCounter();
}
bool FpsCounter::isStarted()
{
return m_counterTimer;
}
void FpsCounter::addRenderedFrame()
{
m_rendered++;
}
void FpsCounter::addSkippedFrame()
{
m_skipped++;
}
void FpsCounter::timerEvent(QTimerEvent *event)
{
if (event && m_counterTimer == event->timerId()) {
m_curRendered = m_rendered;
m_curSkipped = m_skipped;
resetCounter();
emit updateFPS(m_curRendered);
//qInfo("FPS:%d Discard:%d", m_curRendered, m_skipped);
}
}
void FpsCounter::startCounterTimer()
{
stopCounterTimer();
m_counterTimer = startTimer(1000);
}
void FpsCounter::stopCounterTimer()
{
if (m_counterTimer) {
killTimer(m_counterTimer);
m_counterTimer = 0;
}
}
void FpsCounter::resetCounter()
{
m_rendered = 0;
m_skipped = 0;
}
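// Usage sketch (illustrative only): the counter emits updateFPS() once per second;
// callers only report rendered and skipped frames, as VideoBuffer does.
static void trackFps(FpsCounter *counter, QObject *receiver)
{
    QObject::connect(counter, &FpsCounter::updateFPS, receiver, [](quint32 fps) {
        qInfo("current fps: %u", fps); // one report per 1s timer tick
    });
    counter->start();            // resets the counters and starts the 1s timer
    counter->addRenderedFrame(); // call once for every frame actually rendered
}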

View file

@ -1,38 +0,0 @@
#ifndef FPSCOUNTER_H
#define FPSCOUNTER_H
#include <QObject>
class FpsCounter : public QObject
{
Q_OBJECT
public:
FpsCounter(QObject *parent = Q_NULLPTR);
virtual ~FpsCounter();
void start();
void stop();
bool isStarted();
void addRenderedFrame();
void addSkippedFrame();
signals:
void updateFPS(quint32 fps);
protected:
virtual void timerEvent(QTimerEvent *event);
private:
void startCounterTimer();
void stopCounterTimer();
void resetCounter();
private:
qint32 m_counterTimer = 0;
quint32 m_curRendered = 0;
quint32 m_curSkipped = 0;
quint32 m_rendered = 0;
quint32 m_skipped = 0;
};
#endif // FPSCOUNTER_H

View file

@ -1,173 +0,0 @@
#include "videobuffer.h"
#include "avframeconvert.h"
extern "C"
{
#include "libavformat/avformat.h"
#include "libavutil/avutil.h"
#include "libavutil/imgutils.h"
}
VideoBuffer::VideoBuffer(QObject *parent) : QObject(parent) {
connect(&m_fpsCounter, &FpsCounter::updateFPS, this, &VideoBuffer::updateFPS);
}
VideoBuffer::~VideoBuffer() {}
bool VideoBuffer::init()
{
m_decodingFrame = av_frame_alloc();
if (!m_decodingFrame) {
goto error;
}
m_renderingframe = av_frame_alloc();
if (!m_renderingframe) {
goto error;
}
// there is initially no rendering frame, so consider it has already been
// consumed
m_renderingFrameConsumed = true;
m_fpsCounter.start();
return true;
error:
deInit();
return false;
}
void VideoBuffer::deInit()
{
if (m_decodingFrame) {
av_frame_free(&m_decodingFrame);
m_decodingFrame = Q_NULLPTR;
}
if (m_renderingframe) {
av_frame_free(&m_renderingframe);
m_renderingframe = Q_NULLPTR;
}
m_fpsCounter.stop();
}
void VideoBuffer::lock()
{
m_mutex.lock();
}
void VideoBuffer::unLock()
{
m_mutex.unlock();
}
void VideoBuffer::setRenderExpiredFrames(bool renderExpiredFrames)
{
m_renderExpiredFrames = renderExpiredFrames;
}
AVFrame *VideoBuffer::decodingFrame()
{
return m_decodingFrame;
}
void VideoBuffer::offerDecodedFrame(bool &previousFrameSkipped)
{
m_mutex.lock();
if (m_renderExpiredFrames) {
// if m_renderExpiredFrames is enabled, the decoder must wait for the current
// frame to be consumed
while (!m_renderingFrameConsumed && !m_interrupted) {
m_renderingFrameConsumedCond.wait(&m_mutex);
}
} else {
if (m_fpsCounter.isStarted() && !m_renderingFrameConsumed) {
m_fpsCounter.addSkippedFrame();
}
}
swap();
previousFrameSkipped = !m_renderingFrameConsumed;
m_renderingFrameConsumed = false;
m_mutex.unlock();
}
const AVFrame *VideoBuffer::consumeRenderedFrame()
{
Q_ASSERT(!m_renderingFrameConsumed);
m_renderingFrameConsumed = true;
if (m_fpsCounter.isStarted()) {
m_fpsCounter.addRenderedFrame();
}
if (m_renderExpiredFrames) {
// if m_renderExpiredFrames is enabled, notify the decoder that the current frame is
// consumed, so that it may push a new one
m_renderingFrameConsumedCond.wakeOne();
}
return m_renderingframe;
}
void VideoBuffer::peekRenderedFrame(std::function<void(int width, int height, uint8_t* dataRGB32)> onFrame)
{
if (!onFrame) {
return;
}
lock();
auto frame = m_renderingframe;
int width = frame->width;
int height = frame->height;
// create buffer
uint8_t* rgbBuffer = new uint8_t[width * height * 4];
AVFrame *rgbFrame = av_frame_alloc();
if (!rgbFrame) {
delete [] rgbBuffer;
unLock(); // do not leave the buffer locked on the error path
return;
}
// bind buffer to AVFrame
av_image_fill_arrays(rgbFrame->data, rgbFrame->linesize, rgbBuffer, AV_PIX_FMT_RGB32, width, height, 4);
// convert
AVFrameConvert convert;
convert.setSrcFrameInfo(width, height, AV_PIX_FMT_YUV420P);
convert.setDstFrameInfo(width, height, AV_PIX_FMT_RGB32);
bool ret = false;
ret = convert.init();
if (!ret) {
delete [] rgbBuffer;
av_free(rgbFrame);
unLock(); // do not leave the buffer locked on the error path
return;
}
ret = convert.convert(frame, rgbFrame);
if (!ret) {
delete [] rgbBuffer;
av_free(rgbFrame);
unLock(); // do not leave the buffer locked on the error path
return;
}
convert.deInit();
av_free(rgbFrame);
unLock();
onFrame(width, height, rgbBuffer);
delete [] rgbBuffer;
}
void VideoBuffer::interrupt()
{
if (m_renderExpiredFrames) {
m_mutex.lock();
m_interrupted = true;
m_mutex.unlock();
// wake up blocking wait
m_renderingFrameConsumedCond.wakeOne();
}
}
void VideoBuffer::swap()
{
AVFrame *tmp = m_decodingFrame;
m_decodingFrame = m_renderingframe;
m_renderingframe = tmp;
}
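// Usage sketch (illustrative only): the consumer side of the swap above, mirroring
// Decoder::onNewFrame. The decoder thread calls offerDecodedFrame(); the consumer must
// hold the lock while it reads the rendering frame.
static void consumeLatestFrame(VideoBuffer *vb)
{
    vb->lock();
    const AVFrame *frame = vb->consumeRenderedFrame(); // marks the frame as consumed
    // upload frame->data[0..2] / linesize[0..2] to textures while the lock is held
    Q_UNUSED(frame)
    vb->unLock();
}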

View file

@ -1,64 +0,0 @@
#ifndef VIDEO_BUFFER_H
#define VIDEO_BUFFER_H
#include <QMutex>
#include <QWaitCondition>
#include <QObject>
#include "fpscounter.h"
// forward declarations
typedef struct AVFrame AVFrame;
class VideoBuffer : public QObject
{
Q_OBJECT
public:
VideoBuffer(QObject *parent = Q_NULLPTR);
virtual ~VideoBuffer();
bool init();
void deInit();
void lock();
void unLock();
void setRenderExpiredFrames(bool renderExpiredFrames);
AVFrame *decodingFrame();
// set the decoder frame as ready for rendering
// this function locks m_mutex during its execution
// returns true if the previous frame had been consumed
void offerDecodedFrame(bool &previousFrameSkipped);
// mark the rendering frame as consumed and return it
// MUST be called with m_mutex locked!!!
// the caller is expected to render the returned frame to some texture before
// unlocking m_mutex
const AVFrame *consumeRenderedFrame();
void peekRenderedFrame(std::function<void(int width, int height, uint8_t* dataRGB32)> onFrame);
// wake up and avoid any blocking call
void interrupt();
signals:
void updateFPS(quint32 fps);
private:
void swap();
private:
AVFrame *m_decodingFrame = Q_NULLPTR;
AVFrame *m_renderingframe = Q_NULLPTR;
QMutex m_mutex;
bool m_renderingFrameConsumed = true;
FpsCounter m_fpsCounter;
bool m_renderExpiredFrames = false;
QWaitCondition m_renderingFrameConsumedCond;
// interrupted is not used if expired frames are not rendered
// since offering a frame will never block
bool m_interrupted = false;
};
#endif // VIDEO_BUFFER_H

View file

@ -1,632 +0,0 @@
#include <QDir>
#include <QMessageBox>
#include <QTimer>
#include "controller.h"
#include "devicemsg.h"
#include "decoder.h"
#include "device.h"
#include "filehandler.h"
#include "recorder.h"
#include "server.h"
#include "stream.h"
namespace qsc {
Device::Device(DeviceParams params, QObject *parent) : IDevice(parent), m_params(params)
{
if (!params.display && !m_params.recordFile) {
qCritical("not display must be recorded");
return;
}
if (params.display) {
m_decoder = new Decoder([this](int width, int height, uint8_t* dataY, uint8_t* dataU, uint8_t* dataV, int linesizeY, int linesizeU, int linesizeV) {
for (const auto& item : m_deviceObservers) {
item->onFrame(width, height, dataY, dataU, dataV, linesizeY, linesizeU, linesizeV);
}
}, this);
m_fileHandler = new FileHandler(this);
m_controller = new Controller([this](const QByteArray& buffer) -> qint64 {
if (!m_server || !m_server->getControlSocket()) {
return 0;
}
return m_server->getControlSocket()->write(buffer.data(), buffer.length());
}, params.gameScript, this);
}
m_stream = new Stream([this](quint8 *buf, qint32 bufSize) -> qint32 {
auto videoSocket = m_server->getVideoSocket();
if (!videoSocket) {
return 0;
}
return videoSocket->subThreadRecvData(buf, bufSize);
}, this);
m_server = new Server(this);
if (m_params.recordFile && !m_params.recordPath.trimmed().isEmpty()) {
QString absFilePath;
QString fileDir(m_params.recordPath);
if (!fileDir.isEmpty()) {
QDateTime dateTime = QDateTime::currentDateTime();
QString fileName = dateTime.toString("_yyyyMMdd_hhmmss_zzz");
fileName = m_params.serial + fileName + "." + m_params.recordFileFormat;
QDir dir(fileDir);
absFilePath = dir.absoluteFilePath(fileName);
}
m_recorder = new Recorder(absFilePath, this);
}
initSignals();
}
Device::~Device()
{
Device::disconnectDevice();
}
void Device::setUserData(void *data)
{
m_userData = data;
}
void *Device::getUserData()
{
return m_userData;
}
void Device::registerDeviceObserver(DeviceObserver *observer)
{
m_deviceObservers.insert(observer);
}
void Device::deRegisterDeviceObserver(DeviceObserver *observer)
{
m_deviceObservers.erase(observer);
}
const QString &Device::getSerial()
{
return m_params.serial;
}
void Device::updateScript(QString script)
{
if (m_controller) {
m_controller->updateScript(script);
}
}
void Device::screenshot()
{
if (!m_decoder) {
return;
}
// screenshot
m_decoder->peekFrame([this](int width, int height, uint8_t* dataRGB32) {
saveFrame(width, height, dataRGB32);
});
}
void Device::showTouch(bool show)
{
AdbProcess *adb = new qsc::AdbProcess();
if (!adb) {
return;
}
connect(adb, &qsc::AdbProcess::adbProcessResult, this, [this](qsc::AdbProcess::ADB_EXEC_RESULT processResult) {
if (AdbProcess::AER_SUCCESS_START != processResult) {
sender()->deleteLater();
}
});
adb->setShowTouchesEnabled(getSerial(), show);
qInfo() << getSerial() << " show touch " << (show ? "enable" : "disable");
}
bool Device::isReversePort(quint16 port)
{
if (m_server && m_server->isReverse() && port == m_server->getParams().localPort) {
return true;
}
return false;
}
void Device::initSignals()
{
if (m_controller) {
connect(m_controller, &Controller::grabCursor, this, [this](bool grab){
for (const auto& item : m_deviceObservers) {
item->grabCursor(grab);
}
});
}
if (m_fileHandler) {
connect(m_fileHandler, &FileHandler::fileHandlerResult, this, [this](FileHandler::FILE_HANDLER_RESULT processResult, bool isApk) {
QString tipsType = "";
if (isApk) {
tipsType = "install apk";
} else {
tipsType = "file transfer";
}
QString tips;
if (FileHandler::FAR_IS_RUNNING == processResult) {
tips = QString("wait current %1 to complete").arg(tipsType);
}
if (FileHandler::FAR_SUCCESS_EXEC == processResult) {
tips = QString("%1 complete, save in %2").arg(tipsType).arg(m_params.pushFilePath);
}
if (FileHandler::FAR_ERROR_EXEC == processResult) {
tips = QString("%1 failed").arg(tipsType);
}
qInfo() << tips;
});
}
if (m_server) {
connect(m_server, &Server::serverStarted, this, [this](bool success, const QString &deviceName, const QSize &size) {
emit deviceConnected(success, m_params.serial, deviceName, size);
if (success) {
double diff = m_startTimeCount.elapsed() / 1000.0;
qInfo() << QString("server start finish in %1s").arg(diff).toStdString().c_str();
// init recorder
if (m_recorder) {
m_recorder->setFrameSize(size);
if (!m_recorder->open()) {
qCritical("Could not open recorder");
}
if (!m_recorder->startRecorder()) {
qCritical("Could not start recorder");
}
}
// init decoder
if (m_decoder) {
m_decoder->open();
}
// start the stream decoding thread
m_stream->startDecode();
// recv device msg
connect(m_server->getControlSocket(), &QTcpSocket::readyRead, this, [this](){
if (!m_controller) {
return;
}
auto controlSocket = m_server->getControlSocket();
while (controlSocket->bytesAvailable()) {
QByteArray byteArray = controlSocket->peek(controlSocket->bytesAvailable());
DeviceMsg deviceMsg;
qint32 consume = deviceMsg.deserialize(byteArray);
if (0 >= consume) {
break;
}
controlSocket->read(consume);
m_controller->recvDeviceMsg(&deviceMsg);
}
});
// only turn off the device screen automatically when the UI is displayed (m_params.display)
if (m_params.closeScreen && m_params.display && m_controller) {
m_controller->setScreenPowerMode(ControlMsg::SPM_OFF);
}
} else {
m_server->stop();
}
});
connect(m_server, &Server::serverStoped, this, [this]() {
disconnectDevice();
qDebug() << "server process stop";
});
}
if (m_stream) {
connect(m_stream, &Stream::onStreamStop, this, [this]() {
disconnectDevice();
qDebug() << "stream thread stop";
});
connect(m_stream, &Stream::getFrame, this, [this](AVPacket *packet) {
if (m_decoder && !m_decoder->push(packet)) {
qCritical("Could not send packet to decoder");
}
if (m_recorder && !m_recorder->push(packet)) {
qCritical("Could not send packet to recorder");
}
}, Qt::DirectConnection);
connect(m_stream, &Stream::getConfigFrame, this, [this](AVPacket *packet) {
if (m_recorder && !m_recorder->push(packet)) {
qCritical("Could not send config packet to recorder");
}
}, Qt::DirectConnection);
}
if (m_decoder) {
connect(m_decoder, &Decoder::updateFPS, this, [this](quint32 fps) {
for (const auto& item : m_deviceObservers) {
item->updateFPS(fps);
}
});
}
}
bool Device::connectDevice()
{
if (!m_server) {
return false;
}
// fix: on macOS the finished signal cannot be received directly; deferring via a timer works
QTimer::singleShot(0, this, [this]() {
m_startTimeCount.start();
// max size supports 480p, 720p, 1080p or the device's native resolution
// wireless connections are supported as well, for example:
//m_server->start("192.168.0.174:5555", 27183, m_maxSize, m_bitRate, "");
// if only one device is connected, the serial can be empty
// note: crop input format: "width:height:x:y" or "" for no crop, for example: "100:200:0:0"
Server::ServerParams params;
params.serverLocalPath = m_params.serverLocalPath;
params.serverRemotePath = m_params.serverRemotePath;
params.serial = m_params.serial;
params.localPort = m_params.localPort;
params.maxSize = m_params.maxSize;
params.bitRate = m_params.bitRate;
params.maxFps = m_params.maxFps;
params.useReverse = m_params.useReverse;
params.lockVideoOrientation = m_params.lockVideoOrientation;
params.stayAwake = m_params.stayAwake;
params.serverVersion = m_params.serverVersion;
params.logLevel = m_params.logLevel;
params.codecOptions = m_params.codecOptions;
params.codecName = m_params.codecName;
params.crop = "";
params.control = true;
m_server->start(params);
});
return true;
}
void Device::disconnectDevice()
{
if (!m_server) {
return;
}
m_server->stop();
m_server = Q_NULLPTR;
if (m_stream) {
m_stream->stopDecode();
}
// the server must be stopped before the decoder, because the decoder blocks the main thread
if (m_decoder) {
m_decoder->close();
}
if (m_recorder) {
if (m_recorder->isRunning()) {
m_recorder->stopRecorder();
m_recorder->wait();
}
m_recorder->close();
}
emit deviceDisconnected(m_params.serial);
}
void Device::postGoBack()
{
if (!m_controller) {
return;
}
m_controller->postGoBack();
for (const auto& item : m_deviceObservers) {
item->postGoBack();
}
}
void Device::postGoHome()
{
if (!m_controller) {
return;
}
m_controller->postGoHome();
for (const auto& item : m_deviceObservers) {
item->postGoHome();
}
}
void Device::postGoMenu()
{
if (!m_controller) {
return;
}
m_controller->postGoMenu();
for (const auto& item : m_deviceObservers) {
item->postGoMenu();
}
}
void Device::postAppSwitch()
{
if (!m_controller) {
return;
}
m_controller->postAppSwitch();
for (const auto& item : m_deviceObservers) {
item->postAppSwitch();
}
}
void Device::postPower()
{
if (!m_controller) {
return;
}
m_controller->postPower();
for (const auto& item : m_deviceObservers) {
item->postPower();
}
}
void Device::postVolumeUp()
{
if (!m_controller) {
return;
}
m_controller->postVolumeUp();
for (const auto& item : m_deviceObservers) {
item->postVolumeUp();
}
}
void Device::postVolumeDown()
{
if (!m_controller) {
return;
}
m_controller->postVolumeDown();
for (const auto& item : m_deviceObservers) {
item->postVolumeDown();
}
}
void Device::postCopy()
{
if (!m_controller) {
return;
}
m_controller->copy();
for (const auto& item : m_deviceObservers) {
item->postCopy();
}
}
void Device::postCut()
{
if (!m_controller) {
return;
}
m_controller->cut();
for (const auto& item : m_deviceObservers) {
item->postCut();
}
}
void Device::setScreenPowerMode(bool open)
{
if (!m_controller) {
return;
}
ControlMsg::ScreenPowerMode mode{};
if (open) {
mode = ControlMsg::SPM_NORMAL;
} else {
mode = ControlMsg::SPM_OFF;
}
m_controller->setScreenPowerMode(mode);
for (const auto& item : m_deviceObservers) {
item->setScreenPowerMode(open);
}
}
void Device::expandNotificationPanel()
{
if (!m_controller) {
return;
}
m_controller->expandNotificationPanel();
for (const auto& item : m_deviceObservers) {
item->expandNotificationPanel();
}
}
void Device::collapsePanel()
{
if (!m_controller) {
return;
}
m_controller->collapsePanel();
for (const auto& item : m_deviceObservers) {
item->collapsePanel();
}
}
void Device::postBackOrScreenOn(bool down)
{
if (!m_controller) {
return;
}
m_controller->postBackOrScreenOn(down);
for (const auto& item : m_deviceObservers) {
item->postBackOrScreenOn(down);
}
}
void Device::postTextInput(QString &text)
{
if (!m_controller) {
return;
}
m_controller->postTextInput(text);
for (const auto& item : m_deviceObservers) {
item->postTextInput(text);
}
}
void Device::requestDeviceClipboard()
{
if (!m_controller) {
return;
}
m_controller->requestDeviceClipboard();
for (const auto& item : m_deviceObservers) {
item->requestDeviceClipboard();
}
}
void Device::setDeviceClipboard(bool pause)
{
if (!m_controller) {
return;
}
m_controller->setDeviceClipboard(pause);
for (const auto& item : m_deviceObservers) {
item->setDeviceClipboard(pause);
}
}
void Device::clipboardPaste()
{
if (!m_controller) {
return;
}
m_controller->clipboardPaste();
for (const auto& item : m_deviceObservers) {
item->clipboardPaste();
}
}
void Device::pushFileRequest(const QString &file, const QString &devicePath)
{
if (!m_fileHandler) {
return;
}
m_fileHandler->onPushFileRequest(getSerial(), file, devicePath);
for (const auto& item : m_deviceObservers) {
item->pushFileRequest(file, devicePath);
}
}
void Device::installApkRequest(const QString &apkFile)
{
if (!m_fileHandler) {
return;
}
m_fileHandler->onInstallApkRequest(getSerial(), apkFile);
for (const auto& item : m_deviceObservers) {
item->installApkRequest(apkFile);
}
}
void Device::mouseEvent(const QMouseEvent *from, const QSize &frameSize, const QSize &showSize)
{
if (!m_controller) {
return;
}
m_controller->mouseEvent(from, frameSize, showSize);
for (const auto& item : m_deviceObservers) {
item->mouseEvent(from, frameSize, showSize);
}
}
void Device::wheelEvent(const QWheelEvent *from, const QSize &frameSize, const QSize &showSize)
{
if (!m_controller) {
return;
}
m_controller->wheelEvent(from, frameSize, showSize);
for (const auto& item : m_deviceObservers) {
item->wheelEvent(from, frameSize, showSize);
}
}
void Device::keyEvent(const QKeyEvent *from, const QSize &frameSize, const QSize &showSize)
{
if (!m_controller) {
return;
}
m_controller->keyEvent(from, frameSize, showSize);
for (const auto& item : m_deviceObservers) {
item->keyEvent(from, frameSize, showSize);
}
}
bool Device::isCurrentCustomKeymap()
{
if (!m_controller) {
return false;
}
return m_controller->isCurrentCustomKeymap();
}
bool Device::saveFrame(int width, int height, uint8_t* dataRGB32)
{
if (!dataRGB32) {
return false;
}
QImage rgbImage(dataRGB32, width, height, QImage::Format_RGB32);
// save
QString absFilePath;
QString fileDir(m_params.recordPath);
if (fileDir.isEmpty()) {
qWarning() << "please select record save path!!!";
return false;
}
QDateTime dateTime = QDateTime::currentDateTime();
QString fileName = dateTime.toString("_yyyyMMdd_hhmmss_zzz");
fileName = m_params.serial + fileName + ".png";
QDir dir(fileDir);
absFilePath = dir.absoluteFilePath(fileName);
int ret = rgbImage.save(absFilePath, "PNG", 100);
if (!ret) {
return false;
}
qInfo() << "screenshot save to " << absFilePath;
return true;
}
}

View file

@ -1,99 +0,0 @@
#ifndef DEVICE_H
#define DEVICE_H
#include <set>
#include <QElapsedTimer>
#include <QPointer>
#include <QTime>
#include "../../include/QtScrcpyCore.h"
#include "controlmsg.h"
class QMouseEvent;
class QWheelEvent;
class QKeyEvent;
class Recorder;
class Server;
class VideoBuffer;
class Decoder;
class FileHandler;
class Stream;
class VideoForm;
class Controller;
struct AVFrame;
namespace qsc {
class Device : public IDevice
{
Q_OBJECT
public:
explicit Device(DeviceParams params, QObject *parent = nullptr);
virtual ~Device();
void setUserData(void* data) override;
void* getUserData() override;
void registerDeviceObserver(DeviceObserver* observer) override;
void deRegisterDeviceObserver(DeviceObserver* observer) override;
bool connectDevice() override;
void disconnectDevice() override;
// key map
void mouseEvent(const QMouseEvent *from, const QSize &frameSize, const QSize &showSize) override;
void wheelEvent(const QWheelEvent *from, const QSize &frameSize, const QSize &showSize) override;
void keyEvent(const QKeyEvent *from, const QSize &frameSize, const QSize &showSize) override;
void postGoBack() override;
void postGoHome() override;
void postGoMenu() override;
void postAppSwitch() override;
void postPower() override;
void postVolumeUp() override;
void postVolumeDown() override;
void postCopy() override;
void postCut() override;
void setScreenPowerMode(bool open) override;
void expandNotificationPanel() override;
void collapsePanel() override;
void postBackOrScreenOn(bool down) override;
void postTextInput(QString &text) override;
void requestDeviceClipboard() override;
void setDeviceClipboard(bool pause = true) override;
void clipboardPaste() override;
void pushFileRequest(const QString &file, const QString &devicePath = "") override;
void installApkRequest(const QString &apkFile) override;
void screenshot() override;
void showTouch(bool show) override;
bool isReversePort(quint16 port) override;
const QString &getSerial() override;
void updateScript(QString script) override;
bool isCurrentCustomKeymap() override;
private:
void initSignals();
bool saveFrame(int width, int height, uint8_t* dataRGB32);
private:
// server relevant
QPointer<Server> m_server;
QPointer<Decoder> m_decoder;
QPointer<Controller> m_controller;
QPointer<FileHandler> m_fileHandler;
QPointer<Stream> m_stream;
QPointer<Recorder> m_recorder;
QElapsedTimer m_startTimeCount;
DeviceParams m_params;
std::set<DeviceObserver*> m_deviceObservers;
void* m_userData = nullptr;
};
}
#endif // DEVICE_H

View file

@ -1,47 +0,0 @@
#include "filehandler.h"
FileHandler::FileHandler(QObject *parent) : QObject(parent)
{
}
FileHandler::~FileHandler() {}
void FileHandler::onPushFileRequest(const QString &serial, const QString &file, const QString &devicePath)
{
qsc::AdbProcess* adb = new qsc::AdbProcess;
bool isApk = false;
connect(adb, &qsc::AdbProcess::adbProcessResult, this, [this, adb, isApk](qsc::AdbProcess::ADB_EXEC_RESULT processResult) {
onAdbProcessResult(adb, isApk, processResult);
});
adb->push(serial, file, devicePath);
}
void FileHandler::onInstallApkRequest(const QString &serial, const QString &apkFile)
{
qsc::AdbProcess* adb = new qsc::AdbProcess;
bool isApk = true;
connect(adb, &qsc::AdbProcess::adbProcessResult, this, [this, adb, isApk](qsc::AdbProcess::ADB_EXEC_RESULT processResult) {
onAdbProcessResult(adb, isApk, processResult);
});
adb->install(serial, apkFile);
}
void FileHandler::onAdbProcessResult(qsc::AdbProcess *adb, bool isApk, qsc::AdbProcess::ADB_EXEC_RESULT processResult)
{
switch (processResult) {
case qsc::AdbProcess::AER_ERROR_START:
case qsc::AdbProcess::AER_ERROR_EXEC:
case qsc::AdbProcess::AER_ERROR_MISSING_BINARY:
emit fileHandlerResult(FAR_ERROR_EXEC, isApk);
adb->deleteLater();
break;
case qsc::AdbProcess::AER_SUCCESS_EXEC:
emit fileHandlerResult(FAR_SUCCESS_EXEC, isApk);
adb->deleteLater();
break;
default:
break;
}
}

View file

@ -1,34 +0,0 @@
#ifndef FILEHANDLER_H
#define FILEHANDLER_H
#include <QObject>
#include "adbprocess.h"
class FileHandler : public QObject
{
Q_OBJECT
public:
enum FILE_HANDLER_RESULT
{
FAR_IS_RUNNING, // operation in progress
FAR_SUCCESS_EXEC, // operation succeeded
FAR_ERROR_EXEC, // operation failed
};
FileHandler(QObject *parent = nullptr);
virtual ~FileHandler();
const QString &getDevicePath();
public slots:
void onPushFileRequest(const QString &serial, const QString &file, const QString &devicePath = "");
void onInstallApkRequest(const QString &serial, const QString &apkFile);
protected:
void onAdbProcessResult(qsc::AdbProcess* adb, bool isApk, qsc::AdbProcess::ADB_EXEC_RESULT processResult);
signals:
void fileHandlerResult(FILE_HANDLER_RESULT processResult, bool isApk = false);
};
#endif // FILEHANDLER_H

View file

@ -1,337 +0,0 @@
#include <QCoreApplication>
#include <QDebug>
#include <QFileInfo>
#include "compat.h"
#include "recorder.h"
static const AVRational SCRCPY_TIME_BASE = { 1, 1000000 }; // timestamps in us
Recorder::Recorder(const QString &fileName, QObject *parent) : QThread(parent), m_fileName(fileName), m_format(guessRecordFormat(fileName)) {}
Recorder::~Recorder() {}
AVPacket *Recorder::packetNew(const AVPacket *packet)
{
AVPacket *rec = new AVPacket;
if (!rec) {
return Q_NULLPTR;
}
// av_packet_ref() does not initialize all fields in old FFmpeg versions
av_init_packet(rec);
if (av_packet_ref(rec, packet)) {
delete rec;
return Q_NULLPTR;
}
return rec;
}
void Recorder::packetDelete(AVPacket *packet)
{
av_packet_unref(packet);
delete packet;
}
void Recorder::queueClear()
{
while (!m_queue.isEmpty()) {
packetDelete(m_queue.dequeue());
}
}
void Recorder::setFrameSize(const QSize &declaredFrameSize)
{
m_declaredFrameSize = declaredFrameSize;
}
void Recorder::setFormat(Recorder::RecorderFormat format)
{
m_format = format;
}
bool Recorder::open()
{
// codec
AVCodec* inputCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
if (!inputCodec) {
qCritical("H.264 decoder not found");
return false;
}
QString formatName = recorderGetFormatName(m_format);
Q_ASSERT(!formatName.isEmpty());
const AVOutputFormat *format = findMuxer(formatName.toUtf8());
if (!format) {
qCritical("Could not find muxer");
return false;
}
m_formatCtx = avformat_alloc_context();
if (!m_formatCtx) {
qCritical("Could not allocate output context");
return false;
}
// contrary to the deprecated API (av_oformat_next()), av_muxer_iterate()
// returns (on purpose) a pointer-to-const, but AVFormatContext.oformat
// still expects a pointer-to-non-const (it has not been updated accordingly)
// <https://github.com/FFmpeg/FFmpeg/commit/0694d8702421e7aff1340038559c438b61bb30dd>
m_formatCtx->oformat = (AVOutputFormat *)format;
QString comment = "Recorded by QtScrcpy " + QCoreApplication::applicationVersion();
av_dict_set(&m_formatCtx->metadata, "comment", comment.toUtf8(), 0);
AVStream *outStream = avformat_new_stream(m_formatCtx, inputCodec);
if (!outStream) {
avformat_free_context(m_formatCtx);
m_formatCtx = Q_NULLPTR;
return false;
}
#ifdef QTSCRCPY_LAVF_HAS_NEW_CODEC_PARAMS_API
outStream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
outStream->codecpar->codec_id = inputCodec->id;
outStream->codecpar->format = AV_PIX_FMT_YUV420P;
outStream->codecpar->width = m_declaredFrameSize.width();
outStream->codecpar->height = m_declaredFrameSize.height();
#else
outStream->codec->codec_type = AVMEDIA_TYPE_VIDEO;
outStream->codec->codec_id = inputCodec->id;
outStream->codec->pix_fmt = AV_PIX_FMT_YUV420P;
outStream->codec->width = m_declaredFrameSize.width();
outStream->codec->height = m_declaredFrameSize.height();
#endif
int ret = avio_open(&m_formatCtx->pb, m_fileName.toUtf8().toStdString().c_str(), AVIO_FLAG_WRITE);
if (ret < 0) {
char errorbuf[255] = { 0 };
av_strerror(ret, errorbuf, 254);
qCritical() << QString("Failed to open output file: %1 %2").arg(errorbuf).arg(m_fileName).toUtf8().toStdString().c_str();
// ostream will be cleaned up during context cleaning
avformat_free_context(m_formatCtx);
m_formatCtx = Q_NULLPTR;
return false;
}
return true;
}
void Recorder::close()
{
if (Q_NULLPTR != m_formatCtx) {
if (m_headerWritten) {
int ret = av_write_trailer(m_formatCtx);
if (ret < 0) {
qCritical() << QString("Failed to write trailer to %1").arg(m_fileName).toUtf8().toStdString().c_str();
m_failed = true;
} else {
qInfo() << QString("success record %1").arg(m_fileName).toStdString().c_str();
}
} else {
// the recorded file is empty
m_failed = true;
}
avio_close(m_formatCtx->pb);
avformat_free_context(m_formatCtx);
m_formatCtx = Q_NULLPTR;
}
}
bool Recorder::write(AVPacket *packet)
{
if (!m_headerWritten) {
if (packet->pts != AV_NOPTS_VALUE) {
qCritical("The first packet is not a config packet");
return false;
}
bool ok = recorderWriteHeader(packet);
if (!ok) {
return false;
}
m_headerWritten = true;
return true;
}
if (packet->pts == AV_NOPTS_VALUE) {
// ignore config packets
return true;
}
recorderRescalePacket(packet);
return av_write_frame(m_formatCtx, packet) >= 0;
}
const AVOutputFormat *Recorder::findMuxer(const char *name)
{
#ifdef QTSCRCPY_LAVF_HAS_NEW_MUXER_ITERATOR_API
void *opaque = Q_NULLPTR;
#endif
const AVOutputFormat *outFormat = Q_NULLPTR;
do {
#ifdef QTSCRCPY_LAVF_HAS_NEW_MUXER_ITERATOR_API
outFormat = av_muxer_iterate(&opaque);
#else
outFormat = av_oformat_next(outFormat);
#endif
// until null or with name "name"
} while (outFormat && strcmp(outFormat->name, name));
return outFormat;
}
bool Recorder::recorderWriteHeader(const AVPacket *packet)
{
AVStream *ostream = m_formatCtx->streams[0];
quint8 *extradata = (quint8 *)av_malloc(packet->size * sizeof(quint8));
if (!extradata) {
qCritical("Cannot allocate extradata");
return false;
}
// copy the first packet to the extra data
memcpy(extradata, packet->data, packet->size);
#ifdef QTSCRCPY_LAVF_HAS_NEW_CODEC_PARAMS_API
ostream->codecpar->extradata = extradata;
ostream->codecpar->extradata_size = packet->size;
#else
ostream->codec->extradata = extradata;
ostream->codec->extradata_size = packet->size;
#endif
int ret = avformat_write_header(m_formatCtx, NULL);
if (ret < 0) {
qCritical("Failed to write header recorder file");
return false;
}
return true;
}
void Recorder::recorderRescalePacket(AVPacket *packet)
{
AVStream *ostream = m_formatCtx->streams[0];
av_packet_rescale_ts(packet, SCRCPY_TIME_BASE, ostream->time_base);
}
QString Recorder::recorderGetFormatName(Recorder::RecorderFormat format)
{
switch (format) {
case RECORDER_FORMAT_MP4:
return "mp4";
case RECORDER_FORMAT_MKV:
return "matroska";
default:
return "";
}
}
Recorder::RecorderFormat Recorder::guessRecordFormat(const QString &fileName)
{
if (4 > fileName.length()) {
return Recorder::RECORDER_FORMAT_NULL;
}
QFileInfo fileInfo = QFileInfo(fileName);
QString ext = fileInfo.suffix();
if (0 == ext.compare("mp4")) {
return Recorder::RECORDER_FORMAT_MP4;
}
if (0 == ext.compare("mkv")) {
return Recorder::RECORDER_FORMAT_MKV;
}
return Recorder::RECORDER_FORMAT_NULL;
}
void Recorder::run()
{
for (;;) {
AVPacket *rec = Q_NULLPTR;
{
QMutexLocker locker(&m_mutex);
while (!m_stopped && m_queue.isEmpty()) {
m_recvDataCond.wait(&m_mutex);
}
// if stopped is set, continue to process the remaining events (to
// finish the recording) before actually stopping
if (m_stopped && m_queue.isEmpty()) {
AVPacket *last = m_previous;
if (last) {
// assign an arbitrary duration to the last packet
last->duration = 100000;
bool ok = write(last);
if (!ok) {
// failing to write the last frame is not very serious, no
// future frame may depend on it, so the resulting file
// will still be valid
qWarning("Could not record last packet");
}
packetDelete(last);
}
break;
}
rec = m_queue.dequeue();
}
// m_previous is only written from this thread, no need to lock
AVPacket *previous = m_previous;
m_previous = rec;
if (!previous) {
// we just received the first packet
continue;
}
// config packets have no PTS, we must ignore them
if (rec->pts != AV_NOPTS_VALUE && previous->pts != AV_NOPTS_VALUE) {
// we now know the duration of the previous packet
previous->duration = rec->pts - previous->pts;
}
bool ok = write(previous);
packetDelete(previous);
if (!ok) {
qCritical("Could not record packet");
QMutexLocker locker(&m_mutex);
m_failed = true;
// discard pending packets
queueClear();
break;
}
}
qDebug("Recorder thread ended");
}
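// Example of the duration bookkeeping in run() above (illustrative): for packets
// with pts 0, 16666, 33333 us, the packet with pts 0 is written only once the
// pts-16666 packet arrives, with duration 16666 - 0; the final packet gets the
// arbitrary 100000 us duration assigned when the queue drains after stopRecorder().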
bool Recorder::startRecorder()
{
start();
return true;
}
void Recorder::stopRecorder()
{
QMutexLocker locker(&m_mutex);
m_stopped = true;
m_recvDataCond.wakeOne();
}
bool Recorder::push(const AVPacket *packet)
{
QMutexLocker locker(&m_mutex);
Q_ASSERT(!m_stopped);
if (m_failed) {
// reject any new packet (this will stop the stream)
return false;
}
AVPacket *rec = packetNew(packet);
if (rec) {
m_queue.enqueue(rec);
m_recvDataCond.wakeOne();
}
return rec != Q_NULLPTR;
}
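// Illustrative sketch: a minimal Recorder lifecycle as driven by Device above.
// The file name is a placeholder; the container format is guessed from the suffix (mp4/mkv).
//
//   Recorder recorder("demo.mp4");
//   recorder.setFrameSize(QSize(1080, 1920));   // declared size from the serverStarted signal
//   if (recorder.open() && recorder.startRecorder()) {
//       // from the stream thread, for every received AVPacket *packet:
//       //     recorder.push(packet);           // config packets (no PTS) become extradata
//   }
//   recorder.stopRecorder();                    // ask the writer thread to drain the queue
//   recorder.wait();                            // join the QThread
//   recorder.close();                           // write the trailer and close the file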

View file

@ -1,71 +0,0 @@
#ifndef RECORDER_H
#define RECORDER_H
#include <QMutex>
#include <QQueue>
#include <QSize>
#include <QString>
#include <QThread>
#include <QWaitCondition>
extern "C"
{
#include "libavformat/avformat.h"
}
class Recorder : public QThread
{
Q_OBJECT
public:
enum RecorderFormat
{
RECORDER_FORMAT_NULL = 0,
RECORDER_FORMAT_MP4,
RECORDER_FORMAT_MKV,
};
Recorder(const QString &fileName, QObject *parent = Q_NULLPTR);
virtual ~Recorder();
void setFrameSize(const QSize &declaredFrameSize);
void setFormat(Recorder::RecorderFormat format);
bool open();
void close();
bool write(AVPacket *packet);
bool startRecorder();
void stopRecorder();
bool push(const AVPacket *packet);
private:
const AVOutputFormat *findMuxer(const char *name);
bool recorderWriteHeader(const AVPacket *packet);
void recorderRescalePacket(AVPacket *packet);
QString recorderGetFormatName(Recorder::RecorderFormat format);
RecorderFormat guessRecordFormat(const QString &fileName);
private:
AVPacket *packetNew(const AVPacket *packet);
void packetDelete(AVPacket *packet);
void queueClear();
protected:
void run();
private:
QString m_fileName = "";
AVFormatContext *m_formatCtx = Q_NULLPTR;
QSize m_declaredFrameSize;
bool m_headerWritten = false;
RecorderFormat m_format = RECORDER_FORMAT_NULL;
QMutex m_mutex;
QWaitCondition m_recvDataCond;
bool m_stopped = false; // set by stopRecorder() from the stream reader
bool m_failed = false; // set on packet write failure
QQueue<AVPacket *> m_queue;
// we can write a packet only once we received the next one so that we can
// set its duration (next_pts - current_pts)
// "previous" is only accessed from the recorder thread, so it does not
// need to be protected by the mutex
AVPacket *m_previous = Q_NULLPTR;
};
#endif // RECORDER_H

View file

@ -1,503 +0,0 @@
#include <QCoreApplication>
#include <QDebug>
#include <QFileInfo>
#include <QThread>
#include <QTimer>
#include <QTimerEvent>
#include "server.h"
#define DEVICE_NAME_FIELD_LENGTH 64
#define SOCKET_NAME "scrcpy"
#define MAX_CONNECT_COUNT 30
#define MAX_RESTART_COUNT 1
Server::Server(QObject *parent) : QObject(parent)
{
connect(&m_workProcess, &qsc::AdbProcess::adbProcessResult, this, &Server::onWorkProcessResult);
connect(&m_serverProcess, &qsc::AdbProcess::adbProcessResult, this, &Server::onWorkProcessResult);
connect(&m_serverSocket, &QTcpServer::newConnection, this, [this]() {
QTcpSocket *tmp = m_serverSocket.nextPendingConnection();
if (dynamic_cast<VideoSocket *>(tmp)) {
m_videoSocket = dynamic_cast<VideoSocket *>(tmp);
if (!m_videoSocket->isValid() || !readInfo(m_videoSocket, m_deviceName, m_deviceSize)) {
stop();
emit serverStarted(false);
}
} else {
m_controlSocket = tmp;
if (m_controlSocket && m_controlSocket->isValid()) {
// we don't need the server socket anymore
// just m_videoSocket is ok
m_serverSocket.close();
// we don't need the adb tunnel anymore
disableTunnelReverse();
m_tunnelEnabled = false;
emit serverStarted(true, m_deviceName, m_deviceSize);
} else {
stop();
emit serverStarted(false);
}
stopAcceptTimeoutTimer();
}
});
}
Server::~Server() {}
bool Server::pushServer()
{
if (m_workProcess.isRuning()) {
m_workProcess.kill();
}
m_workProcess.push(m_params.serial, m_params.serverLocalPath, m_params.serverRemotePath);
return true;
}
bool Server::enableTunnelReverse()
{
if (m_workProcess.isRuning()) {
m_workProcess.kill();
}
m_workProcess.reverse(m_params.serial, SOCKET_NAME, m_params.localPort);
return true;
}
bool Server::disableTunnelReverse()
{
qsc::AdbProcess *adb = new qsc::AdbProcess();
if (!adb) {
return false;
}
connect(adb, &qsc::AdbProcess::adbProcessResult, this, [this](qsc::AdbProcess::ADB_EXEC_RESULT processResult) {
if (qsc::AdbProcess::AER_SUCCESS_START != processResult) {
sender()->deleteLater();
}
});
adb->reverseRemove(m_params.serial, SOCKET_NAME);
return true;
}
bool Server::enableTunnelForward()
{
if (m_workProcess.isRuning()) {
m_workProcess.kill();
}
m_workProcess.forward(m_params.serial, m_params.localPort, SOCKET_NAME);
return true;
}
bool Server::disableTunnelForward()
{
qsc::AdbProcess *adb = new qsc::AdbProcess();
if (!adb) {
return false;
}
connect(adb, &qsc::AdbProcess::adbProcessResult, this, [this](qsc::AdbProcess::ADB_EXEC_RESULT processResult) {
if (qsc::AdbProcess::AER_SUCCESS_START != processResult) {
sender()->deleteLater();
}
});
adb->forwardRemove(m_params.serial, m_params.localPort);
return true;
}
bool Server::execute()
{
if (m_serverProcess.isRuning()) {
m_serverProcess.kill();
}
QStringList args;
args << "shell";
args << QString("CLASSPATH=%1").arg(m_params.serverRemotePath);
args << "app_process";
#ifdef SERVER_DEBUGGER
#define SERVER_DEBUGGER_PORT "5005"
args <<
#ifdef SERVER_DEBUGGER_METHOD_NEW
/* Android 9 and above */
"-XjdwpProvider:internal -XjdwpOptions:transport=dt_socket,suspend=y,server=y,address="
#else
/* Android 8 and below */
"-agentlib:jdwp=transport=dt_socket,suspend=y,server=y,address="
#endif
SERVER_DEBUGGER_PORT,
#endif
args << "/"; // unused;
args << "com.genymobile.scrcpy.Server";
args << m_params.serverVersion;
if (!m_params.logLevel.isEmpty()) {
args << QString("log_level=%1").arg(m_params.logLevel);
}
args << QString("max_size=%1").arg(QString::number(m_params.maxSize));
args << QString("bit_rate=%1").arg(QString::number(m_params.bitRate));
args << QString("max_fps=%1").arg(QString::number(m_params.maxFps));
args << QString("lock_video_orientation=%1").arg(QString::number(m_params.lockVideoOrientation));
args << QString("tunnel_forward=%1").arg((m_tunnelForward ? "true" : "false"));
if (!m_params.crop.isEmpty()) {
args << QString("crop=%1").arg(m_params.crop);
}
args << QString("control=%1").arg((m_params.control ? "true" : "false"));
args << "display_id=0"; // display id
args << "show_touches=false"; // show touch
args << QString("stay_awake=%1").arg((m_params.stayAwake ? "true" : "false")); // stay awake
// codec options
// https://github.com/Genymobile/scrcpy/commit/080a4ee3654a9b7e96c8ffe37474b5c21c02852a
// <https://d.android.com/reference/android/media/MediaFormat>
if (!m_params.codecOptions.isEmpty()) {
args << QString("codec_options=%1").arg(m_params.codecOptions);
}
if (!m_params.codecName.isEmpty()) {
args << QString("encoder_name=%1").arg(m_params.codecName);
}
#ifdef SERVER_DEBUGGER
qInfo("Server debugger waiting for a client on device port " SERVER_DEBUGGER_PORT "...");
// From the computer, run
// adb forward tcp:5005 tcp:5005
// Then, from Android Studio: Run > Debug > Edit configurations...
// On the left, click on '+', "Remote", with:
// Host: localhost
// Port: 5005
// Then click on "Debug"
#endif
// adb -s P7C0218510000537 shell CLASSPATH=/data/local/tmp/scrcpy-server app_process / com.genymobile.scrcpy.Server 0 8000000 false
// mark: crop input format: "width:height:x:y" or "" for no crop, for example: "100:200:0:0"
// this adb command runs blocking, so the m_serverProcess process will not exit on its own
m_serverProcess.execute(m_params.serial, args);
return true;
}
bool Server::start(Server::ServerParams params)
{
m_params = params;
m_serverStartStep = SSS_PUSH;
return startServerByStep();
}
bool Server::connectTo()
{
if (SSS_RUNNING != m_serverStartStep) {
qWarning("server not run");
return false;
}
if (!m_tunnelForward && !m_videoSocket) {
startAcceptTimeoutTimer();
return true;
}
startConnectTimeoutTimer();
return true;
}
bool Server::isReverse()
{
return !m_tunnelForward;
}
Server::ServerParams Server::getParams()
{
return m_params;
}
void Server::timerEvent(QTimerEvent *event)
{
if (event && m_acceptTimeoutTimer == event->timerId()) {
stopAcceptTimeoutTimer();
emit serverStarted(false);
} else if (event && m_connectTimeoutTimer == event->timerId()) {
onConnectTimer();
}
}
VideoSocket *Server::getVideoSocket()
{
return m_videoSocket;
}
QTcpSocket *Server::getControlSocket()
{
return m_controlSocket;
}
void Server::stop()
{
if (m_tunnelForward) {
stopConnectTimeoutTimer();
} else {
stopAcceptTimeoutTimer();
}
if (m_videoSocket) {
m_videoSocket->close();
m_videoSocket->deleteLater();
}
if (m_controlSocket) {
m_controlSocket->close();
m_controlSocket->deleteLater();
}
// ignore failure
m_serverProcess.kill();
if (m_tunnelEnabled) {
if (m_tunnelForward) {
disableTunnelForward();
} else {
disableTunnelReverse();
}
m_tunnelForward = false;
m_tunnelEnabled = false;
}
m_serverSocket.close();
}
bool Server::startServerByStep()
{
bool stepSuccess = false;
// push, enable the tunnel and start the server
if (SSS_NULL != m_serverStartStep) {
switch (m_serverStartStep) {
case SSS_PUSH:
stepSuccess = pushServer();
break;
case SSS_ENABLE_TUNNEL_REVERSE:
stepSuccess = enableTunnelReverse();
break;
case SSS_ENABLE_TUNNEL_FORWARD:
stepSuccess = enableTunnelForward();
break;
case SSS_EXECUTE_SERVER:
// server will connect to our server socket
stepSuccess = execute();
break;
default:
break;
}
}
if (!stepSuccess) {
emit serverStarted(false);
}
return stepSuccess;
}
bool Server::readInfo(VideoSocket *videoSocket, QString &deviceName, QSize &size)
{
unsigned char buf[DEVICE_NAME_FIELD_LENGTH + 4];
if (videoSocket->bytesAvailable() <= (DEVICE_NAME_FIELD_LENGTH + 4)) {
videoSocket->waitForReadyRead(300);
}
qint64 len = videoSocket->read((char *)buf, sizeof(buf));
if (len < DEVICE_NAME_FIELD_LENGTH + 4) {
qInfo("Could not retrieve device information");
return false;
}
buf[DEVICE_NAME_FIELD_LENGTH - 1] = '\0'; // in case the client sends garbage
// strcpy is safe here, since name contains at least DEVICE_NAME_FIELD_LENGTH bytes
// and strlen(buf) < DEVICE_NAME_FIELD_LENGTH
deviceName = (char *)buf;
size.setWidth((buf[DEVICE_NAME_FIELD_LENGTH] << 8) | buf[DEVICE_NAME_FIELD_LENGTH + 1]);
size.setHeight((buf[DEVICE_NAME_FIELD_LENGTH + 2] << 8) | buf[DEVICE_NAME_FIELD_LENGTH + 3]);
return true;
}
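// Layout of the device info block parsed above (a sketch for clarity; sizes follow
// DEVICE_NAME_FIELD_LENGTH and the reads in readInfo()):
//   bytes 0..63  : device name as a C string (a terminating NUL is forced by the code)
//   bytes 64..65 : frame width, 16-bit big-endian
//   bytes 66..67 : frame height, 16-bit big-endian
// In tunnel forward mode the device additionally sends one dummy byte before this
// block, which is consumed in onConnectTimer() before readInfo() is called.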
void Server::startAcceptTimeoutTimer()
{
stopAcceptTimeoutTimer();
m_acceptTimeoutTimer = startTimer(1000);
}
void Server::stopAcceptTimeoutTimer()
{
if (m_acceptTimeoutTimer) {
killTimer(m_acceptTimeoutTimer);
m_acceptTimeoutTimer = 0;
}
}
void Server::startConnectTimeoutTimer()
{
stopConnectTimeoutTimer();
m_connectTimeoutTimer = startTimer(100);
}
void Server::stopConnectTimeoutTimer()
{
if (m_connectTimeoutTimer) {
killTimer(m_connectTimeoutTimer);
m_connectTimeoutTimer = 0;
}
m_connectCount = 0;
}
void Server::onConnectTimer()
{
// the device-side server needs time to start
// if we connect too early the Android listening socket may not exist yet and readInfo would fail,
// so retry on a timer: once every 100 ms, at most MAX_CONNECT_COUNT times
QString deviceName;
QSize deviceSize;
bool success = false;
VideoSocket *videoSocket = new VideoSocket();
QTcpSocket *controlSocket = new QTcpSocket();
videoSocket->connectToHost(QHostAddress::LocalHost, m_params.localPort);
if (!videoSocket->waitForConnected(1000)) {
// connecting to the local adb server is fast, so do not retry on failure here
m_connectCount = MAX_CONNECT_COUNT;
qWarning("video socket failed to connect to the server");
goto result;
}
controlSocket->connectToHost(QHostAddress::LocalHost, m_params.localPort);
if (!controlSocket->waitForConnected(1000)) {
// connecting to the local adb server is fast, so do not retry on failure here
m_connectCount = MAX_CONNECT_COUNT;
qWarning("control socket failed to connect to the server");
goto result;
}
if (QTcpSocket::ConnectedState == videoSocket->state()) {
// the connect succeeds even if the device is offline, because we connect to the PC adb server;
// receiving data is the real sign of a successful connection
videoSocket->waitForReadyRead(1000);
// the device sends 1 dummy byte first in tunnel forward mode
QByteArray data = videoSocket->read(1);
if (!data.isEmpty() && readInfo(videoSocket, deviceName, deviceSize)) {
success = true;
goto result;
} else {
qWarning("video socket connect to server read device info failed, try again");
goto result;
}
} else {
qWarning("connect to server failed");
m_connectCount = MAX_CONNECT_COUNT;
goto result;
}
result:
if (success) {
stopConnectTimeoutTimer();
m_videoSocket = videoSocket;
m_controlSocket = controlSocket;
// we don't need the adb tunnel anymore
disableTunnelForward();
m_tunnelEnabled = false;
m_restartCount = 0;
emit serverStarted(success, deviceName, deviceSize);
return;
}
if (videoSocket) {
videoSocket->deleteLater();
}
if (controlSocket) {
controlSocket->deleteLater();
}
if (MAX_CONNECT_COUNT <= m_connectCount++) {
stopConnectTimeoutTimer();
stop();
if (MAX_RESTART_COUNT > m_restartCount++) {
qWarning("restart server auto");
start(m_params);
} else {
m_restartCount = 0;
emit serverStarted(false);
}
}
}
void Server::onWorkProcessResult(qsc::AdbProcess::ADB_EXEC_RESULT processResult)
{
if (sender() == &m_workProcess) {
if (SSS_NULL != m_serverStartStep) {
switch (m_serverStartStep) {
case SSS_PUSH:
if (qsc::AdbProcess::AER_SUCCESS_EXEC == processResult) {
if (m_params.useReverse) {
m_serverStartStep = SSS_ENABLE_TUNNEL_REVERSE;
} else {
m_tunnelForward = true;
m_serverStartStep = SSS_ENABLE_TUNNEL_FORWARD;
}
startServerByStep();
} else if (qsc::AdbProcess::AER_SUCCESS_START != processResult) {
qCritical("adb push failed");
m_serverStartStep = SSS_NULL;
emit serverStarted(false);
}
break;
case SSS_ENABLE_TUNNEL_REVERSE:
if (qsc::AdbProcess::AER_SUCCESS_EXEC == processResult) {
// At the application level, the device part is "the server" because it
// serves video stream and control. However, at the network level, the
// client listens and the server connects to the client. That way, the
// client can listen before starting the server app, so there is no need to
// try to connect until the server socket is listening on the device.
m_serverSocket.setMaxPendingConnections(2);
if (!m_serverSocket.listen(QHostAddress::LocalHost, m_params.localPort)) {
qCritical() << QString("Could not listen on port %1").arg(m_params.localPort).toStdString().c_str();
m_serverStartStep = SSS_NULL;
disableTunnelReverse();
emit serverStarted(false);
break;
}
m_serverStartStep = SSS_EXECUTE_SERVER;
startServerByStep();
} else if (qsc::AdbProcess::AER_SUCCESS_START != processResult) {
// some devices fail on adb reverse with "more than one device" (an adb bug), see:
// https://github.com/Genymobile/scrcpy/issues/5
qCritical("adb reverse failed");
m_tunnelForward = true;
m_serverStartStep = SSS_ENABLE_TUNNEL_FORWARD;
startServerByStep();
}
break;
case SSS_ENABLE_TUNNEL_FORWARD:
if (qsc::AdbProcess::AER_SUCCESS_EXEC == processResult) {
m_serverStartStep = SSS_EXECUTE_SERVER;
startServerByStep();
} else if (qsc::AdbProcess::AER_SUCCESS_START != processResult) {
qCritical("adb forward failed");
m_serverStartStep = SSS_NULL;
emit serverStarted(false);
}
break;
default:
break;
}
}
}
if (sender() == &m_serverProcess) {
if (SSS_EXECUTE_SERVER == m_serverStartStep) {
if (qsc::AdbProcess::AER_SUCCESS_START == processResult) {
m_serverStartStep = SSS_RUNNING;
m_tunnelEnabled = true;
connectTo();
} else if (qsc::AdbProcess::AER_ERROR_START == processResult) {
if (!m_tunnelForward) {
m_serverSocket.close();
disableTunnelReverse();
} else {
disableTunnelForward();
}
qCritical("adb shell start server failed");
m_serverStartStep = SSS_NULL;
emit serverStarted(false);
}
} else if (SSS_RUNNING == m_serverStartStep) {
m_serverStartStep = SSS_NULL;
emit serverStoped();
}
}
}

View file

@ -1,111 +0,0 @@
#ifndef SERVER_H
#define SERVER_H
#include <QObject>
#include <QPointer>
#include <QSize>
#include "adbprocess.h"
#include "tcpserver.h"
#include "videosocket.h"
class Server : public QObject
{
Q_OBJECT
enum SERVER_START_STEP
{
SSS_NULL,
SSS_PUSH,
SSS_ENABLE_TUNNEL_REVERSE,
SSS_ENABLE_TUNNEL_FORWARD,
SSS_EXECUTE_SERVER,
SSS_RUNNING,
};
public:
struct ServerParams
{
// necessary
QString serial = ""; // 设备序列号
QString serverLocalPath = ""; // 本地安卓server路径
// optional
QString serverRemotePath = "/data/local/tmp/scrcpy-server.jar"; // 要推送到远端设备的server路径
quint16 localPort = 27183; // reverse时本地监听端口
quint16 maxSize = 720; // 视频分辨率
quint32 bitRate = 8000000; // 视频比特率
quint32 maxFps = 60; // 视频最大帧率
bool useReverse = true; // true:先使用adb reverse失败后自动使用adb forwardfalse:直接使用adb forward
int lockVideoOrientation = -1; // 是否锁定视频方向
int stayAwake = false; // 是否保持唤醒
QString serverVersion = "1.21";// server版本
QString logLevel = "info"; // log级别 debug/info/warn/error
// 编码选项 ""表示默认
// 例如 CodecOptions="profile=1,level=2"
// 更多编码选项参考 https://d.android.com/reference/android/media/MediaFormat
QString codecOptions = "";
// 指定编码器名称(必须是H.264编码器)""表示默认
// 例如 CodecName="OMX.qcom.video.encoder.avc"
QString codecName = "";
QString crop = ""; // 视频裁剪
bool control = true; // 安卓端是否接收键鼠控制
};
explicit Server(QObject *parent = nullptr);
virtual ~Server();
bool start(Server::ServerParams params);
void stop();
bool isReverse();
Server::ServerParams getParams();
VideoSocket *getVideoSocket();
QTcpSocket *getControlSocket();
signals:
void serverStarted(bool success, const QString &deviceName = "", const QSize &size = QSize());
void serverStoped();
private slots:
void onWorkProcessResult(qsc::AdbProcess::ADB_EXEC_RESULT processResult);
protected:
void timerEvent(QTimerEvent *event);
private:
bool pushServer();
bool enableTunnelReverse();
bool disableTunnelReverse();
bool enableTunnelForward();
bool disableTunnelForward();
bool execute();
bool connectTo();
bool startServerByStep();
bool readInfo(VideoSocket *videoSocket, QString &deviceName, QSize &size);
void startAcceptTimeoutTimer();
void stopAcceptTimeoutTimer();
void startConnectTimeoutTimer();
void stopConnectTimeoutTimer();
void onConnectTimer();
private:
qsc::AdbProcess m_workProcess;
qsc::AdbProcess m_serverProcess;
TcpServer m_serverSocket; // only used if !tunnel_forward
QPointer<VideoSocket> m_videoSocket = Q_NULLPTR;
QPointer<QTcpSocket> m_controlSocket = Q_NULLPTR;
bool m_tunnelEnabled = false;
bool m_tunnelForward = false; // use "adb forward" instead of "adb reverse"
int m_acceptTimeoutTimer = 0;
int m_connectTimeoutTimer = 0;
quint32 m_connectCount = 0;
quint32 m_restartCount = 0;
QString m_deviceName = "";
QSize m_deviceSize = QSize();
ServerParams m_params;
SERVER_START_STEP m_serverStartStep = SSS_NULL;
};
#endif // SERVER_H

View file

@ -1,22 +0,0 @@
#include "tcpserver.h"
#include "videosocket.h"
TcpServer::TcpServer(QObject *parent) : QTcpServer(parent) {}
TcpServer::~TcpServer() {}
void TcpServer::incomingConnection(qintptr handle)
{
if (m_isVideoSocket) {
VideoSocket *socket = new VideoSocket();
socket->setSocketDescriptor(handle);
addPendingConnection(socket);
// next is control socket
m_isVideoSocket = false;
} else {
QTcpSocket *socket = new QTcpSocket();
socket->setSocketDescriptor(handle);
addPendingConnection(socket);
}
}

View file

@ -1,20 +0,0 @@
#ifndef TCPSERVER_H
#define TCPSERVER_H
#include <QTcpServer>
class TcpServer : public QTcpServer
{
Q_OBJECT
public:
explicit TcpServer(QObject *parent = nullptr);
virtual ~TcpServer();
protected:
virtual void incomingConnection(qintptr handle);
private:
bool m_isVideoSocket = true;
};
#endif // TCPSERVER_H

View file

@ -1,81 +0,0 @@
#include <QCoreApplication>
#include <QDebug>
#include <QThread>
#include "qscrcpyevent.h"
#include "videosocket.h"
VideoSocket::VideoSocket(QObject *parent) : QTcpSocket(parent)
{
connect(this, &VideoSocket::readyRead, this, &VideoSocket::onReadyRead);
connect(this, &VideoSocket::aboutToClose, this, &VideoSocket::quitNotify);
connect(this, &VideoSocket::disconnected, this, &VideoSocket::quitNotify);
}
VideoSocket::~VideoSocket()
{
quitNotify();
}
qint32 VideoSocket::subThreadRecvData(quint8 *buf, qint32 bufSize)
{
// this function must not be called from the main thread
Q_ASSERT(QCoreApplication::instance()->thread() != QThread::currentThread());
if (m_quit) {
return 0;
}
QMutexLocker locker(&m_mutex);
m_buffer = buf;
m_bufferSize = bufSize;
m_dataSize = 0;
// post event
VideoSocketEvent *getDataEvent = new VideoSocketEvent();
QCoreApplication::postEvent(this, getDataEvent);
// wait
while (!m_recvData) {
m_recvDataCond.wait(&m_mutex);
}
m_recvData = false;
return m_dataSize;
}
bool VideoSocket::event(QEvent *event)
{
if (static_cast<QScrcpyEvent::Type>(event->type()) == QScrcpyEvent::VideoSocket) {
onReadyRead();
return true;
}
return QTcpSocket::event(event);
}
void VideoSocket::onReadyRead()
{
QMutexLocker locker(&m_mutex);
if (m_buffer && m_bufferSize <= bytesAvailable()) {
// recv data
qint64 readSize = qMin(bytesAvailable(), (qint64)m_bufferSize);
m_dataSize = read((char *)m_buffer, readSize);
m_buffer = Q_NULLPTR;
m_bufferSize = 0;
m_recvData = true;
m_recvDataCond.wakeOne();
}
}
void VideoSocket::quitNotify()
{
m_quit = true;
QMutexLocker locker(&m_mutex);
if (m_buffer) {
m_buffer = Q_NULLPTR;
m_bufferSize = 0;
m_recvData = true;
m_dataSize = 0;
m_recvDataCond.wakeOne();
}
}
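// How the pieces above fit together: subThreadRecvData() is called from the Stream
// decode thread; it parks the caller's buffer, posts a VideoSocketEvent back to the
// socket's own thread, and blocks on m_recvDataCond. onReadyRead() runs in the
// socket's thread, copies the requested bytes into the parked buffer and wakes the
// waiting thread; quitNotify() wakes it with zero bytes so the decode thread can
// exit cleanly on close or disconnect.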

View file

@ -1,35 +0,0 @@
#ifndef VIDEOSOCKET_H
#define VIDEOSOCKET_H
#include <QEvent>
#include <QMutex>
#include <QTcpSocket>
#include <QWaitCondition>
class VideoSocket : public QTcpSocket
{
Q_OBJECT
public:
explicit VideoSocket(QObject *parent = nullptr);
virtual ~VideoSocket();
qint32 subThreadRecvData(quint8 *buf, qint32 bufSize);
protected:
bool event(QEvent *event);
protected slots:
void onReadyRead();
void quitNotify();
private:
QMutex m_mutex;
QWaitCondition m_recvDataCond;
bool m_recvData = false;
quint8 *m_buffer = Q_NULLPTR;
qint32 m_bufferSize = 0;
qint32 m_dataSize = 0;
bool m_quit = false;
};
#endif // VIDEOSOCKET_H

View file

@ -1,300 +0,0 @@
#include <QDebug>
#include <QTime>
#include "compat.h"
#include "stream.h"
#define BUFSIZE 0x10000
#define HEADER_SIZE 12
#define NO_PTS UINT64_MAX
typedef qint32 (*ReadPacketFunc)(void *, quint8 *, qint32);
Stream::Stream(std::function<qint32(quint8*, qint32)> recvData, QObject *parent)
: QThread(parent)
, m_recvData(recvData)
{}
Stream::~Stream() {}
static void avLogCallback(void *avcl, int level, const char *fmt, va_list vl)
{
Q_UNUSED(avcl)
Q_UNUSED(vl)
QString localFmt = QString::fromUtf8(fmt);
localFmt.prepend("[FFmpeg] ");
switch (level) {
case AV_LOG_PANIC:
case AV_LOG_FATAL:
qFatal("%s", localFmt.toUtf8().data());
break;
case AV_LOG_ERROR:
qCritical() << localFmt.toUtf8();
break;
case AV_LOG_WARNING:
qWarning() << localFmt.toUtf8();
break;
case AV_LOG_INFO:
qInfo() << localFmt.toUtf8();
break;
case AV_LOG_DEBUG:
// qDebug() << localFmt.toUtf8();
break;
}
// do not forward others, which are too verbose
return;
}
bool Stream::init()
{
#ifdef QTSCRCPY_LAVF_REQUIRES_REGISTER_ALL
av_register_all();
#endif
if (avformat_network_init()) {
return false;
}
av_log_set_callback(avLogCallback);
return true;
}
void Stream::deInit()
{
avformat_network_deinit(); // ignore failure
}
static quint32 bufferRead32be(quint8 *buf)
{
return static_cast<quint32>((buf[0] << 24) | (buf[1] << 16) | (buf[2] << 8) | buf[3]);
}
static quint64 bufferRead64be(quint8 *buf)
{
quint32 msb = bufferRead32be(buf);
quint32 lsb = bufferRead32be(&buf[4]);
return (static_cast<quint64>(msb) << 32) | lsb;
}
qint32 Stream::recvData(quint8 *buf, qint32 bufSize)
{
if (!buf || !m_recvData) {
return 0;
}
qint32 len = m_recvData(buf, bufSize);
return len;
}
bool Stream::startDecode()
{
if (!m_recvData) {
return false;
}
start();
return true;
}
void Stream::stopDecode()
{
wait();
}
void Stream::run()
{
AVCodec *codec = Q_NULLPTR;
m_codecCtx = Q_NULLPTR;
m_parser = Q_NULLPTR;
// codec
codec = avcodec_find_decoder(AV_CODEC_ID_H264);
if (!codec) {
qCritical("H.264 decoder not found");
goto runQuit;
}
// codeCtx
m_codecCtx = avcodec_alloc_context3(codec);
if (!m_codecCtx) {
qCritical("Could not allocate codec context");
goto runQuit;
}
m_parser = av_parser_init(AV_CODEC_ID_H264);
if (!m_parser) {
qCritical("Could not initialize parser");
goto runQuit;
}
// We must only pass complete frames to av_parser_parse2()!
// It's more complicated, but this allows us to reduce the latency by 1 frame!
m_parser->flags |= PARSER_FLAG_COMPLETE_FRAMES;
for (;;) {
AVPacket packet;
bool ok = recvPacket(&packet);
if (!ok) {
// end of stream
break;
}
ok = pushPacket(&packet);
av_packet_unref(&packet);
if (!ok) {
// cannot process packet (error already logged)
break;
}
}
qDebug("End of frames");
if (m_hasPending) {
av_packet_unref(&m_pending);
}
av_parser_close(m_parser);
runQuit:
if (m_codecCtx) {
avcodec_free_context(&m_codecCtx);
}
emit onStreamStop();
}
bool Stream::recvPacket(AVPacket *packet)
{
// The video stream contains raw packets, without time information. When we
// record, we retrieve the timestamps separately, from a "meta" header
// added by the server before each raw packet.
//
// The "meta" header length is 12 bytes:
// [. . . . . . . .|. . . .]. . . . . . . . . . . . . . . ...
// <-------------> <-----> <-----------------------------...
// PTS packet raw packet
// size
//
// It is followed by <packet_size> bytes containing the packet/frame.
quint8 header[HEADER_SIZE];
qint32 r = recvData(header, HEADER_SIZE);
if (r < HEADER_SIZE) {
return false;
}
quint64 pts = bufferRead64be(header);
quint32 len = bufferRead32be(&header[8]);
Q_ASSERT(pts == NO_PTS || (pts & 0x8000000000000000) == 0);
Q_ASSERT(len);
if (av_new_packet(packet, static_cast<int>(len))) {
qCritical("Could not allocate packet");
return false;
}
r = recvData(packet->data, static_cast<qint32>(len));
if (r < 0 || static_cast<quint32>(r) < len) {
av_packet_unref(packet);
return false;
}
packet->pts = pts != NO_PTS ? static_cast<int64_t>(pts) : static_cast<int64_t>(AV_NOPTS_VALUE);
return true;
}
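// Worked example of the 12-byte meta header described above (illustrative only),
// for a packet with pts = 1000000 us and a 4-byte payload:
//   00 00 00 00 00 0F 42 40  00 00 00 04  <4 raw H.264 bytes...>
//   \------- PTS, 64-bit BE ------/  \-- size --/
//   bufferRead64be(header)     -> 1000000
//   bufferRead32be(&header[8]) -> 4
// A PTS equal to NO_PTS (UINT64_MAX) marks a config packet, which is mapped to
// AV_NOPTS_VALUE above.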
bool Stream::pushPacket(AVPacket *packet)
{
bool isConfig = packet->pts == AV_NOPTS_VALUE;
// A config packet must not be decoded immediately (it contains no
// frame); instead, it must be concatenated with the future data packet.
if (m_hasPending || isConfig) {
qint32 offset;
if (m_hasPending) {
offset = m_pending.size;
if (av_grow_packet(&m_pending, packet->size)) {
qCritical("Could not grow packet");
return false;
}
} else {
offset = 0;
if (av_new_packet(&m_pending, packet->size)) {
qCritical("Could not create packet");
return false;
}
m_hasPending = true;
}
memcpy(m_pending.data + offset, packet->data, static_cast<unsigned int>(packet->size));
if (!isConfig) {
// prepare the concatenated packet to send to the decoder
m_pending.pts = packet->pts;
m_pending.dts = packet->dts;
m_pending.flags = packet->flags;
packet = &m_pending;
}
}
if (isConfig) {
// config packet
bool ok = processConfigPacket(packet);
if (!ok) {
return false;
}
} else {
// data packet
bool ok = parse(packet);
if (m_hasPending) {
// the pending packet must be discarded (consumed or error)
m_hasPending = false;
av_packet_unref(&m_pending);
}
if (!ok) {
return false;
}
}
return true;
}
bool Stream::processConfigPacket(AVPacket *packet)
{
emit getConfigFrame(packet);
return true;
}
bool Stream::parse(AVPacket *packet)
{
quint8 *inData = packet->data;
int inLen = packet->size;
quint8 *outData = Q_NULLPTR;
int outLen = 0;
int r = av_parser_parse2(m_parser, m_codecCtx, &outData, &outLen, inData, inLen, AV_NOPTS_VALUE, AV_NOPTS_VALUE, -1);
// PARSER_FLAG_COMPLETE_FRAMES is set
Q_ASSERT(r == inLen);
(void)r;
Q_ASSERT(outLen == inLen);
if (m_parser->key_frame == 1) {
packet->flags |= AV_PKT_FLAG_KEY;
}
bool ok = processFrame(packet);
if (!ok) {
qCritical("Could not process frame");
return false;
}
return true;
}
bool Stream::processFrame(AVPacket *packet)
{
packet->dts = packet->pts;
emit getFrame(packet);
return true;
}

View file

@ -1,52 +0,0 @@
#ifndef STREAM_H
#define STREAM_H
#include <QPointer>
#include <QThread>
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
}
class Stream : public QThread
{
Q_OBJECT
public:
Stream(std::function<qint32(quint8*, qint32)> recvData, QObject *parent = Q_NULLPTR);
virtual ~Stream();
public:
static bool init();
static void deInit();
bool startDecode();
void stopDecode();
signals:
void onStreamStop();
void getFrame(AVPacket* packet);
void getConfigFrame(AVPacket* packet);
protected:
void run();
bool recvPacket(AVPacket *packet);
bool pushPacket(AVPacket *packet);
bool processConfigPacket(AVPacket *packet);
bool parse(AVPacket *packet);
bool processFrame(AVPacket *packet);
qint32 recvData(quint8 *buf, qint32 bufSize);
private:
std::function<qint32(quint8*, qint32)> m_recvData = nullptr;
AVCodecContext *m_codecCtx = Q_NULLPTR;
AVCodecParserContext *m_parser = Q_NULLPTR;
// successive packets may need to be concatenated, until a non-config
// packet is available
bool m_hasPending = false;
AVPacket m_pending;
};
#endif // STREAM_H

View file

@ -1,141 +0,0 @@
#include <QDebug>
#include <QKeyEvent>
#include <QMouseEvent>
#include <QWheelEvent>
#include "devicemanage.h"
#include "server.h"
#include "device.h"
#include "stream.h"
namespace qsc {
#define DM_MAX_DEVICES_NUM 1000
IDeviceManage& IDeviceManage::getInstance() {
static DeviceManage dm;
return dm;
}
DeviceManage::DeviceManage() {
Stream::init();
}
DeviceManage::~DeviceManage() {
Stream::deInit();
}
QPointer<IDevice> DeviceManage::getDevice(const QString &serial)
{
if (!m_devices.contains(serial)) {
return QPointer<IDevice>();
}
return m_devices[serial];
}
bool DeviceManage::connectDevice(qsc::DeviceParams params)
{
if (params.serial.trimmed().isEmpty()) {
return false;
}
if (m_devices.contains(params.serial)) {
return false;
}
if (DM_MAX_DEVICES_NUM < m_devices.size()) {
qInfo("over the maximum number of connections");
return false;
}
/*
// no need to allocate separate ports, 27183 works for every device; the server releases the listening socket once the connection is established
quint16 port = 0;
if (params.useReverse) {
port = getFreePort();
if (0 == port) {
qInfo("no port available, automatically switch to forward");
params.useReverse = false;
} else {
params.localPort = port;
qInfo("free port %d", port);
}
}
*/
IDevice *device = new Device(params);
connect(device, &Device::deviceConnected, this, &DeviceManage::onDeviceConnected);
connect(device, &Device::deviceDisconnected, this, &DeviceManage::onDeviceDisconnected);
if (!device->connectDevice()) {
delete device;
return false;
}
m_devices[params.serial] = device;
return true;
}
bool DeviceManage::disconnectDevice(const QString &serial)
{
bool ret = false;
if (!serial.isEmpty() && m_devices.contains(serial)) {
auto it = m_devices.find(serial);
if (it->data()) {
delete it->data();
ret = true;
}
}
return ret;
}
void DeviceManage::disconnectAllDevice()
{
QMapIterator<QString, QPointer<IDevice>> i(m_devices);
while (i.hasNext()) {
i.next();
if (i.value()) {
delete i.value();
}
}
}
void DeviceManage::onDeviceConnected(bool success, const QString &serial, const QString &deviceName, const QSize &size)
{
emit deviceConnected(success, serial, deviceName, size);
if (!success) {
removeDevice(serial);
}
}
void DeviceManage::onDeviceDisconnected(QString serial)
{
emit deviceDisconnected(serial);
removeDevice(serial);
}
quint16 DeviceManage::getFreePort()
{
quint16 port = m_localPortStart;
while (port < m_localPortStart + DM_MAX_DEVICES_NUM) {
bool used = false;
QMapIterator<QString, QPointer<IDevice>> i(m_devices);
while (i.hasNext()) {
i.next();
auto device = i.value();
if (device && device->isReversePort(port)) {
used = true;
break;
}
}
if (!used) {
return port;
}
port++;
}
return 0;
}
void DeviceManage::removeDevice(const QString &serial)
{
if (!serial.isEmpty() && m_devices.contains(serial)) {
m_devices[serial]->deleteLater();
m_devices.remove(serial);
}
}
}
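// Illustrative usage sketch (a minimal example; the serial and server path below are
// placeholders, and the DeviceParams fields are assumed from how Device reads them above):
//
//   qsc::DeviceParams params;
//   params.serial = "192.168.0.174:5555";                   // any serial listed by "adb devices"
//   params.serverLocalPath = "/path/to/scrcpy-server.jar";  // local server binary to push
//   auto &dm = qsc::IDeviceManage::getInstance();
//   // deviceConnected(success, serial, deviceName, size) is emitted asynchronously
//   dm.connectDevice(params);
//   ...
//   dm.disconnectDevice(params.serial);                     // or dm.disconnectAllDevice();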

View file

@ -1,39 +0,0 @@
#ifndef DEVICEMANAGE_H
#define DEVICEMANAGE_H
#include <QMap>
#include <QPointer>
#include "../../include/QtScrcpyCore.h"
namespace qsc {
class DeviceManage : public IDeviceManage
{
Q_OBJECT
public:
explicit DeviceManage();
virtual ~DeviceManage();
virtual QPointer<IDevice> getDevice(const QString& serial) override;
bool connectDevice(qsc::DeviceParams params) override;
bool disconnectDevice(const QString &serial) override;
void disconnectAllDevice() override;
protected slots:
void onDeviceConnected(bool success, const QString& serial, const QString& deviceName, const QSize& size);
void onDeviceDisconnected(QString serial);
private:
quint16 getFreePort();
void removeDevice(const QString& serial);
private:
QMap<QString, QPointer<IDevice>> m_devices;
quint16 m_localPortStart = 27183;
QString m_script;
};
}
#endif // DEVICEMANAGE_H

View file

@ -1,36 +0,0 @@
/*
* AC-3 parser prototypes
* Copyright (c) 2003 Fabrice Bellard
* Copyright (c) 2003 Michael Niedermayer
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_AC3_PARSER_H
#define AVCODEC_AC3_PARSER_H
#include <stddef.h>
#include <stdint.h>
/**
* Extract the bitstream ID and the frame size from AC-3 data.
*/
int av_ac3_parse_header(const uint8_t *buf, size_t size,
uint8_t *bitstream_id, uint16_t *frame_size);
#endif /* AVCODEC_AC3_PARSER_H */

View file

@ -1,37 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_ADTS_PARSER_H
#define AVCODEC_ADTS_PARSER_H
#include <stddef.h>
#include <stdint.h>
#define AV_AAC_ADTS_HEADER_SIZE 7
/**
* Extract the number of samples and frames from AAC data.
* @param[in] buf pointer to AAC data buffer
* @param[out] samples Pointer to where number of samples is written
* @param[out] frames Pointer to where number of frames is written
* @return Returns 0 on success, error code on failure.
*/
int av_adts_header_parse(const uint8_t *buf, uint32_t *samples,
uint8_t *frames);
#endif /* AVCODEC_ADTS_PARSER_H */

File diff suppressed because it is too large

View file

@ -1,84 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_AVDCT_H
#define AVCODEC_AVDCT_H
#include "libavutil/opt.h"
/**
* AVDCT context.
* @note function pointers can be NULL if the specific features have been
* disabled at build time.
*/
typedef struct AVDCT {
const AVClass *av_class;
void (*idct)(int16_t *block /* align 16 */);
/**
* IDCT input permutation.
* Several optimized IDCTs need a permutated input (relative to the
* normal order of the reference IDCT).
* This permutation must be performed before the idct_put/add.
* Note, normally this can be merged with the zigzag/alternate scan<br>
* An example to avoid confusion:
* - (->decode coeffs -> zigzag reorder -> dequant -> reference IDCT -> ...)
* - (x -> reference DCT -> reference IDCT -> x)
* - (x -> reference DCT -> simple_mmx_perm = idct_permutation
* -> simple_idct_mmx -> x)
* - (-> decode coeffs -> zigzag reorder -> simple_mmx_perm -> dequant
* -> simple_idct_mmx -> ...)
*/
uint8_t idct_permutation[64];
void (*fdct)(int16_t *block /* align 16 */);
/**
* DCT algorithm.
* must use AVOptions to set this field.
*/
int dct_algo;
/**
* IDCT algorithm.
* must use AVOptions to set this field.
*/
int idct_algo;
void (*get_pixels)(int16_t *block /* align 16 */,
const uint8_t *pixels /* align 8 */,
ptrdiff_t line_size);
int bits_per_sample;
} AVDCT;
/**
* Allocates an AVDCT context.
* This needs to be initialized with avcodec_dct_init() after optionally
* configuring it with AVOptions.
*
* To free it use av_free()
*/
AVDCT *avcodec_dct_alloc(void);
int avcodec_dct_init(AVDCT *);
const AVClass *avcodec_dct_get_class(void);
#endif /* AVCODEC_AVDCT_H */
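The allocate/init/free lifecycle described above, as a minimal sketch; AVOption configuration is omitted because the option names are not part of this header, and dct_one_block() is a hypothetical helper:
#include <stdint.h>
#include <libavcodec/avdct.h>
#include <libavutil/mem.h>
/* Run the forward DCT on one 16-byte-aligned block of 64 coefficients. */
static int dct_one_block(int16_t *block)
{
    AVDCT *dct = avcodec_dct_alloc();
    if (!dct)
        return -1;
    if (avcodec_dct_init(dct) < 0) {      /* AVOptions could be set before this */
        av_free(dct);
        return -1;
    }
    if (dct->fdct)                        /* may be NULL if disabled at build time */
        dct->fdct(block);
    av_free(dct);
    return 0;
}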

View file

@@ -1,118 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_AVFFT_H
#define AVCODEC_AVFFT_H
/**
* @file
* @ingroup lavc_fft
* FFT functions
*/
/**
* @defgroup lavc_fft FFT functions
* @ingroup lavc_misc
*
* @{
*/
typedef float FFTSample;
typedef struct FFTComplex {
FFTSample re, im;
} FFTComplex;
typedef struct FFTContext FFTContext;
/**
* Set up a complex FFT.
* @param nbits log2 of the length of the input array
* @param inverse if 0 perform the forward transform, if 1 perform the inverse
*/
FFTContext *av_fft_init(int nbits, int inverse);
/**
* Do the permutation needed BEFORE calling ff_fft_calc().
*/
void av_fft_permute(FFTContext *s, FFTComplex *z);
/**
* Do a complex FFT with the parameters defined in av_fft_init(). The
* input data must be permuted before. No 1.0/sqrt(n) normalization is done.
*/
void av_fft_calc(FFTContext *s, FFTComplex *z);
void av_fft_end(FFTContext *s);
FFTContext *av_mdct_init(int nbits, int inverse, double scale);
void av_imdct_calc(FFTContext *s, FFTSample *output, const FFTSample *input);
void av_imdct_half(FFTContext *s, FFTSample *output, const FFTSample *input);
void av_mdct_calc(FFTContext *s, FFTSample *output, const FFTSample *input);
void av_mdct_end(FFTContext *s);
/* Real Discrete Fourier Transform */
enum RDFTransformType {
DFT_R2C,
IDFT_C2R,
IDFT_R2C,
DFT_C2R,
};
typedef struct RDFTContext RDFTContext;
/**
* Set up a real FFT.
* @param nbits log2 of the length of the input array
* @param trans the type of transform
*/
RDFTContext *av_rdft_init(int nbits, enum RDFTransformType trans);
void av_rdft_calc(RDFTContext *s, FFTSample *data);
void av_rdft_end(RDFTContext *s);
/* Discrete Cosine Transform */
typedef struct DCTContext DCTContext;
enum DCTTransformType {
DCT_II = 0,
DCT_III,
DCT_I,
DST_I,
};
/**
* Set up DCT.
*
* @param nbits size of the input array:
* (1 << nbits) for DCT-II, DCT-III and DST-I
* (1 << nbits) + 1 for DCT-I
* @param type the type of transform
*
* @note the first element of the input of DST-I is ignored
*/
DCTContext *av_dct_init(int nbits, enum DCTTransformType type);
void av_dct_calc(DCTContext *s, FFTSample *data);
void av_dct_end (DCTContext *s);
/**
* @}
*/
#endif /* AVCODEC_AVFFT_H */
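Putting the three FFT calls together, a minimal sketch of an in-place forward transform; z must point to 1 << nbits FFTComplex values, and forward_fft() is only an illustrative name:
#include <libavcodec/avfft.h>
/* In-place forward complex FFT of length 1 << nbits. */
static int forward_fft(FFTComplex *z, int nbits)
{
    FFTContext *ctx = av_fft_init(nbits, 0);   /* 0 = forward transform */
    if (!ctx)
        return -1;
    av_fft_permute(ctx, z);                    /* required before av_fft_calc() */
    av_fft_calc(ctx, z);                       /* no 1.0/sqrt(n) normalization */
    av_fft_end(ctx);
    return 0;
}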

View file

@@ -1,112 +0,0 @@
/*
* Direct3D11 HW acceleration
*
* copyright (c) 2009 Laurent Aimar
* copyright (c) 2015 Steve Lhomme
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_D3D11VA_H
#define AVCODEC_D3D11VA_H
/**
* @file
* @ingroup lavc_codec_hwaccel_d3d11va
* Public libavcodec D3D11VA header.
*/
#if !defined(_WIN32_WINNT) || _WIN32_WINNT < 0x0602
#undef _WIN32_WINNT
#define _WIN32_WINNT 0x0602
#endif
#include <stdint.h>
#include <d3d11.h>
/**
* @defgroup lavc_codec_hwaccel_d3d11va Direct3D11
* @ingroup lavc_codec_hwaccel
*
* @{
*/
#define FF_DXVA2_WORKAROUND_SCALING_LIST_ZIGZAG 1 ///< Work around for Direct3D11 and old UVD/UVD+ ATI video cards
#define FF_DXVA2_WORKAROUND_INTEL_CLEARVIDEO 2 ///< Work around for Direct3D11 and old Intel GPUs with ClearVideo interface
/**
* This structure is used to provide the necessary configurations and data
* to the Direct3D11 FFmpeg HWAccel implementation.
*
* The application must make it available as AVCodecContext.hwaccel_context.
*
* Use av_d3d11va_alloc_context() exclusively to allocate an AVD3D11VAContext.
*/
typedef struct AVD3D11VAContext {
/**
* D3D11 decoder object
*/
ID3D11VideoDecoder *decoder;
/**
* D3D11 VideoContext
*/
ID3D11VideoContext *video_context;
/**
* D3D11 configuration used to create the decoder
*/
D3D11_VIDEO_DECODER_CONFIG *cfg;
/**
* The number of surfaces in the surface array
*/
unsigned surface_count;
/**
* The array of Direct3D surfaces used to create the decoder
*/
ID3D11VideoDecoderOutputView **surface;
/**
* A bit field configuring the workarounds needed for using the decoder
*/
uint64_t workaround;
/**
* Private to the FFmpeg AVHWAccel implementation
*/
unsigned report_id;
/**
* Mutex to access video_context
*/
HANDLE context_mutex;
} AVD3D11VAContext;
/**
* Allocate an AVD3D11VAContext.
*
* @return Newly-allocated AVD3D11VAContext or NULL on failure.
*/
AVD3D11VAContext *av_d3d11va_alloc_context(void);
/**
* @}
*/
#endif /* AVCODEC_D3D11VA_H */
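A sketch of how the context is meant to be filled in, assuming the decoder, output views and configuration have already been created with ordinary D3D11 calls (omitted here); attach_d3d11va() is an illustrative helper, not part of the API:
#include <libavcodec/avcodec.h>
#include <libavcodec/d3d11va.h>
/* Hand an already-created D3D11 decoder to libavcodec via hwaccel_context. */
static int attach_d3d11va(AVCodecContext *avctx,
                          ID3D11VideoDecoder *decoder,
                          ID3D11VideoContext *video_ctx,
                          D3D11_VIDEO_DECODER_CONFIG *cfg,
                          ID3D11VideoDecoderOutputView **views,
                          unsigned n_views)
{
    AVD3D11VAContext *ctx = av_d3d11va_alloc_context();
    if (!ctx)
        return AVERROR(ENOMEM);
    ctx->decoder       = decoder;
    ctx->video_context = video_ctx;
    ctx->cfg           = cfg;
    ctx->surface       = views;
    ctx->surface_count = n_views;
    avctx->hwaccel_context = ctx;         /* libavcodec picks it up from here */
    return 0;
}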

View file

@@ -1,131 +0,0 @@
/*
* Copyright (C) 2007 Marco Gerards <marco@gnu.org>
* Copyright (C) 2009 David Conrad
* Copyright (C) 2011 Jordi Ortiz
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_DIRAC_H
#define AVCODEC_DIRAC_H
/**
* @file
* Interface to Dirac Decoder/Encoder
* @author Marco Gerards <marco@gnu.org>
* @author David Conrad
* @author Jordi Ortiz
*/
#include "avcodec.h"
/**
* The spec limits the number of wavelet decompositions to 4 for both
* level 1 (VC-2) and 128 (long-gop default).
* 5 decompositions is the maximum before >16-bit buffers are needed.
* Schroedinger allows this for DD 9,7 and 13,7 wavelets only, limiting
* the others to 4 decompositions (or 3 for the fidelity filter).
*
* We use this instead of MAX_DECOMPOSITIONS to save some memory.
*/
#define MAX_DWT_LEVELS 5
/**
* Parse code values:
*
* Dirac Specification ->
* 9.6.1 Table 9.1
*
* VC-2 Specification ->
* 10.4.1 Table 10.1
*/
enum DiracParseCodes {
DIRAC_PCODE_SEQ_HEADER = 0x00,
DIRAC_PCODE_END_SEQ = 0x10,
DIRAC_PCODE_AUX = 0x20,
DIRAC_PCODE_PAD = 0x30,
DIRAC_PCODE_PICTURE_CODED = 0x08,
DIRAC_PCODE_PICTURE_RAW = 0x48,
DIRAC_PCODE_PICTURE_LOW_DEL = 0xC8,
DIRAC_PCODE_PICTURE_HQ = 0xE8,
DIRAC_PCODE_INTER_NOREF_CO1 = 0x0A,
DIRAC_PCODE_INTER_NOREF_CO2 = 0x09,
DIRAC_PCODE_INTER_REF_CO1 = 0x0D,
DIRAC_PCODE_INTER_REF_CO2 = 0x0E,
DIRAC_PCODE_INTRA_REF_CO = 0x0C,
DIRAC_PCODE_INTRA_REF_RAW = 0x4C,
DIRAC_PCODE_INTRA_REF_PICT = 0xCC,
DIRAC_PCODE_MAGIC = 0x42424344,
};
typedef struct DiracVersionInfo {
int major;
int minor;
} DiracVersionInfo;
typedef struct AVDiracSeqHeader {
unsigned width;
unsigned height;
uint8_t chroma_format; ///< 0: 444 1: 422 2: 420
uint8_t interlaced;
uint8_t top_field_first;
uint8_t frame_rate_index; ///< index into dirac_frame_rate[]
uint8_t aspect_ratio_index; ///< index into dirac_aspect_ratio[]
uint16_t clean_width;
uint16_t clean_height;
uint16_t clean_left_offset;
uint16_t clean_right_offset;
uint8_t pixel_range_index; ///< index into dirac_pixel_range_presets[]
uint8_t color_spec_index; ///< index into dirac_color_spec_presets[]
int profile;
int level;
AVRational framerate;
AVRational sample_aspect_ratio;
enum AVPixelFormat pix_fmt;
enum AVColorRange color_range;
enum AVColorPrimaries color_primaries;
enum AVColorTransferCharacteristic color_trc;
enum AVColorSpace colorspace;
DiracVersionInfo version;
int bit_depth;
} AVDiracSeqHeader;
/**
* Parse a Dirac sequence header.
*
* @param dsh this function will allocate and fill an AVDiracSeqHeader struct
* and write it into this pointer. The caller must free it with
* av_free().
* @param buf the data buffer
* @param buf_size the size of the data buffer in bytes
* @param log_ctx if non-NULL, this function will log errors here
* @return 0 on success, a negative AVERROR code on failure
*/
int av_dirac_parse_sequence_header(AVDiracSeqHeader **dsh,
const uint8_t *buf, size_t buf_size,
void *log_ctx);
#endif /* AVCODEC_DIRAC_H */
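A minimal sketch of the parse call, assuming buf points at a complete Dirac/VC-2 sequence header; dump_dirac_header() is illustrative only:
#include <stdio.h>
#include <libavcodec/dirac.h>
#include <libavutil/mem.h>
/* Parse a sequence header and print the coded dimensions and frame rate. */
static int dump_dirac_header(const uint8_t *buf, size_t buf_size)
{
    AVDiracSeqHeader *dsh = NULL;
    int ret = av_dirac_parse_sequence_header(&dsh, buf, buf_size, NULL);
    if (ret < 0)
        return ret;
    printf("%ux%u @ %d/%d fps\n", dsh->width, dsh->height,
           dsh->framerate.num, dsh->framerate.den);
    av_free(dsh);                         /* allocated by the parser */
    return 0;
}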

View file

@@ -1,83 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_DV_PROFILE_H
#define AVCODEC_DV_PROFILE_H
#include <stdint.h>
#include "libavutil/pixfmt.h"
#include "libavutil/rational.h"
#include "avcodec.h"
/* minimum number of bytes to read from a DV stream in order to
* determine the profile */
#define DV_PROFILE_BYTES (6 * 80) /* 6 DIF blocks */
/*
* AVDVProfile is used to express the differences between various
* DV flavors. For now it's primarily used for differentiating
* 525/60 and 625/50, but the plans are to use it for various
* DV specs as well (e.g. SMPTE314M vs. IEC 61834).
*/
typedef struct AVDVProfile {
int dsf; /* value of the dsf in the DV header */
int video_stype; /* stype for VAUX source pack */
int frame_size; /* total size of one frame in bytes */
int difseg_size; /* number of DIF segments per DIF channel */
int n_difchan; /* number of DIF channels per frame */
AVRational time_base; /* 1/framerate */
int ltc_divisor; /* FPS from the LTC standpoint */
int height; /* picture height in pixels */
int width; /* picture width in pixels */
AVRational sar[2]; /* sample aspect ratios for 4:3 and 16:9 */
enum AVPixelFormat pix_fmt; /* picture pixel format */
int bpm; /* blocks per macroblock */
const uint8_t *block_sizes; /* AC block sizes, in bits */
int audio_stride; /* size of audio_shuffle table */
int audio_min_samples[3]; /* min amount of audio samples */
/* for 48kHz, 44.1kHz and 32kHz */
int audio_samples_dist[5]; /* how many samples are supposed to be */
/* in each frame in a 5 frames window */
const uint8_t (*audio_shuffle)[9]; /* PCM shuffling table */
} AVDVProfile;
/**
* Get a DV profile for the provided compressed frame.
*
* @param sys the profile used for the previous frame, may be NULL
* @param frame the compressed data buffer
* @param buf_size size of the buffer in bytes
* @return the DV profile for the supplied data or NULL on failure
*/
const AVDVProfile *av_dv_frame_profile(const AVDVProfile *sys,
const uint8_t *frame, unsigned buf_size);
/**
* Get a DV profile for the provided stream parameters.
*/
const AVDVProfile *av_dv_codec_profile(int width, int height, enum AVPixelFormat pix_fmt);
/**
* Get a DV profile for the provided stream parameters.
* The frame rate is used as a best-effort parameter.
*/
const AVDVProfile *av_dv_codec_profile2(int width, int height, enum AVPixelFormat pix_fmt, AVRational frame_rate);
#endif /* AVCODEC_DV_PROFILE_H */
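A short sketch of the frame-based lookup, assuming frame holds at least DV_PROFILE_BYTES bytes of the stream; dump_dv_profile() is a made-up helper name:
#include <stdio.h>
#include <libavcodec/dv_profile.h>
/* Identify the DV flavor of a compressed frame and print its basic layout. */
static void dump_dv_profile(const uint8_t *frame, unsigned buf_size)
{
    const AVDVProfile *p = av_dv_frame_profile(NULL, frame, buf_size);
    if (!p) {
        fprintf(stderr, "unrecognized DV stream\n");
        return;
    }
    printf("%dx%d, time base %d/%d, %d-byte frames\n",
           p->width, p->height, p->time_base.num, p->time_base.den, p->frame_size);
}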

View file

@@ -1,93 +0,0 @@
/*
* DXVA2 HW acceleration
*
* copyright (c) 2009 Laurent Aimar
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_DXVA2_H
#define AVCODEC_DXVA2_H
/**
* @file
* @ingroup lavc_codec_hwaccel_dxva2
* Public libavcodec DXVA2 header.
*/
#if !defined(_WIN32_WINNT) || _WIN32_WINNT < 0x0602
#undef _WIN32_WINNT
#define _WIN32_WINNT 0x0602
#endif
#include <stdint.h>
#include <d3d9.h>
#include <dxva2api.h>
/**
* @defgroup lavc_codec_hwaccel_dxva2 DXVA2
* @ingroup lavc_codec_hwaccel
*
* @{
*/
#define FF_DXVA2_WORKAROUND_SCALING_LIST_ZIGZAG 1 ///< Work around for DXVA2 and old UVD/UVD+ ATI video cards
#define FF_DXVA2_WORKAROUND_INTEL_CLEARVIDEO 2 ///< Work around for DXVA2 and old Intel GPUs with ClearVideo interface
/**
* This structure is used to provide the necessary configurations and data
* to the DXVA2 FFmpeg HWAccel implementation.
*
* The application must make it available as AVCodecContext.hwaccel_context.
*/
struct dxva_context {
/**
* DXVA2 decoder object
*/
IDirectXVideoDecoder *decoder;
/**
* DXVA2 configuration used to create the decoder
*/
const DXVA2_ConfigPictureDecode *cfg;
/**
* The number of surfaces in the surface array
*/
unsigned surface_count;
/**
* The array of Direct3D surfaces used to create the decoder
*/
LPDIRECT3DSURFACE9 *surface;
/**
* A bit field configuring the workarounds needed for using the decoder
*/
uint64_t workaround;
/**
* Private to the FFmpeg AVHWAccel implementation
*/
unsigned report_id;
};
/**
* @}
*/
#endif /* AVCODEC_DXVA2_H */

View file

@@ -1,46 +0,0 @@
/*
* JNI public API functions
*
* Copyright (c) 2015-2016 Matthieu Bouron <matthieu.bouron stupeflix.com>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_JNI_H
#define AVCODEC_JNI_H
/*
* Manually set a Java virtual machine which will be used to retrieve the JNI
* environment. Once a Java VM is set it cannot be changed afterwards, meaning
* you can call av_jni_set_java_vm multiple times with the same Java VM pointer;
* however, it will error out if you try to set a different Java VM.
*
* @param vm Java virtual machine
* @param log_ctx context used for logging, can be NULL
* @return 0 on success, < 0 otherwise
*/
int av_jni_set_java_vm(void *vm, void *log_ctx);
/*
* Get the Java virtual machine which has been set with av_jni_set_java_vm.
*
* @param vm Java virtual machine
* @return a pointer to the Java virtual machine
*/
void *av_jni_get_java_vm(void *log_ctx);
#endif /* AVCODEC_JNI_H */
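On Android this is usually done once from JNI_OnLoad(); a minimal sketch under that assumption:
#include <jni.h>
#include <libavcodec/jni.h>
/* Register the process-wide JavaVM with FFmpeg exactly once. */
JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void *reserved)
{
    (void)reserved;
    if (av_jni_set_java_vm(vm, NULL) < 0)
        return -1;                        /* a different VM was already set */
    return JNI_VERSION_1_6;
}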

View file

@@ -1,101 +0,0 @@
/*
* Android MediaCodec public API
*
* Copyright (c) 2016 Matthieu Bouron <matthieu.bouron stupeflix.com>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_MEDIACODEC_H
#define AVCODEC_MEDIACODEC_H
#include "libavcodec/avcodec.h"
/**
* This structure holds a reference to a android/view/Surface object that will
* be used as output by the decoder.
*
*/
typedef struct AVMediaCodecContext {
/**
* android/view/Surface object reference.
*/
void *surface;
} AVMediaCodecContext;
/**
* Allocate and initialize a MediaCodec context.
*
* When decoding with MediaCodec is finished, the caller must free the
* MediaCodec context with av_mediacodec_default_free.
*
* @return a pointer to a newly allocated AVMediaCodecContext on success, NULL otherwise
*/
AVMediaCodecContext *av_mediacodec_alloc_context(void);
/**
* Convenience function that sets up the MediaCodec context.
*
* @param avctx codec context
* @param ctx MediaCodec context to initialize
* @param surface reference to an android/view/Surface
* @return 0 on success, < 0 otherwise
*/
int av_mediacodec_default_init(AVCodecContext *avctx, AVMediaCodecContext *ctx, void *surface);
/**
* This function must be called to free the MediaCodec context initialized with
* av_mediacodec_default_init().
*
* @param avctx codec context
*/
void av_mediacodec_default_free(AVCodecContext *avctx);
/**
* Opaque structure representing a MediaCodec buffer to render.
*/
typedef struct MediaCodecBuffer AVMediaCodecBuffer;
/**
* Release a MediaCodec buffer and render it to the surface that is associated
* with the decoder. This function should only be called once on a given
* buffer; once released, the underlying buffer returns to the codec, so
* subsequent calls to this function will have no effect.
*
* @param buffer the buffer to render
* @param render 1 to release and render the buffer to the surface or 0 to
* discard the buffer
* @return 0 on success, < 0 otherwise
*/
int av_mediacodec_release_buffer(AVMediaCodecBuffer *buffer, int render);
/**
* Release a MediaCodec buffer and render it at the given time to the surface
* that is associated with the decoder. The timestamp must be within one second
* of the current java/lang/System#nanoTime() (which is implemented using
* CLOCK_MONOTONIC on Android). See the Android MediaCodec documentation
* of android/media/MediaCodec#releaseOutputBuffer(int,long) for more details.
*
* @param buffer the buffer to render
* @param time timestamp in nanoseconds of when to render the buffer
* @return 0 on success, < 0 otherwise
*/
int av_mediacodec_render_buffer_at_time(AVMediaCodecBuffer *buffer, int64_t time);
#endif /* AVCODEC_MEDIACODEC_H */
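A minimal sketch of attaching a Surface to a decoder; use_surface() is an illustrative helper, surface is the android/view/Surface jobject handed down from Java, and cleanup of the success path is left to av_mediacodec_default_free():
#include <libavcodec/avcodec.h>
#include <libavcodec/mediacodec.h>
#include <libavutil/mem.h>
/* Let the MediaCodec decoder render directly into an android/view/Surface. */
static int use_surface(AVCodecContext *avctx, void *surface)
{
    AVMediaCodecContext *mctx = av_mediacodec_alloc_context();
    if (!mctx)
        return AVERROR(ENOMEM);
    int ret = av_mediacodec_default_init(avctx, mctx, surface);
    if (ret < 0)
        av_free(mctx);                    /* never attached, so free it directly */
    /* on success, tear down later with av_mediacodec_default_free(avctx) */
    return ret;
}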

View file

@@ -1,107 +0,0 @@
/*
* Intel MediaSDK QSV public API
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_QSV_H
#define AVCODEC_QSV_H
#include <mfx/mfxvideo.h>
#include "libavutil/buffer.h"
/**
* This struct is used for communicating QSV parameters between libavcodec and
* the caller. It is managed by the caller and must be assigned to
* AVCodecContext.hwaccel_context.
* - decoding: hwaccel_context must be set on return from the get_format()
* callback
* - encoding: hwaccel_context must be set before avcodec_open2()
*/
typedef struct AVQSVContext {
/**
* If non-NULL, the session to use for encoding or decoding.
* Otherwise, libavcodec will try to create an internal session.
*/
mfxSession session;
/**
* The IO pattern to use.
*/
int iopattern;
/**
* Extra buffers to pass to encoder or decoder initialization.
*/
mfxExtBuffer **ext_buffers;
int nb_ext_buffers;
/**
* Encoding only. If this field is set to non-zero by the caller, libavcodec
* will create an mfxExtOpaqueSurfaceAlloc extended buffer and pass it to
* the encoder initialization. This only makes sense if iopattern is also
* set to MFX_IOPATTERN_IN_OPAQUE_MEMORY.
*
* The number of allocated opaque surfaces will be the sum of the number
* required by the encoder and the user-provided value nb_opaque_surfaces.
* The array of the opaque surfaces will be exported to the caller through
* the opaque_surfaces field.
*/
int opaque_alloc;
/**
* Encoding only, and only if opaque_alloc is set to non-zero. Before
* calling avcodec_open2(), the caller should set this field to the number
* of extra opaque surfaces to allocate beyond what is required by the
* encoder.
*
* On return from avcodec_open2(), this field will be set by libavcodec to
* the total number of allocated opaque surfaces.
*/
int nb_opaque_surfaces;
/**
* Encoding only, and only if opaque_alloc is set to non-zero. On return
* from avcodec_open2(), this field will be used by libavcodec to export the
* array of the allocated opaque surfaces to the caller, so they can be
* passed to other parts of the pipeline.
*
* The buffer reference exported here is owned and managed by libavcodec,
* the callers should make their own reference with av_buffer_ref() and free
* it with av_buffer_unref() when it is no longer needed.
*
* The buffer data is an nb_opaque_surfaces-sized array of mfxFrameSurface1.
*/
AVBufferRef *opaque_surfaces;
/**
* Encoding only, and only if opaque_alloc is set to non-zero. On return
* from avcodec_open2(), this field will be set to the surface type used in
* the opaque allocation request.
*/
int opaque_alloc_type;
} AVQSVContext;
/**
* Allocate a new context.
*
* It must be freed by the caller with av_free().
*/
AVQSVContext *av_qsv_alloc_context(void);
#endif /* AVCODEC_QSV_H */
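For decoding, the minimum is to allocate the context, choose an IO pattern and hang it off hwaccel_context from the get_format() callback; a sketch, with MFX_IOPATTERN_OUT_SYSTEM_MEMORY taken from the MediaSDK headers and attach_qsv() as an illustrative name:
#include <libavcodec/avcodec.h>
#include <libavcodec/qsv.h>
/* Minimal QSV decode setup: let libavcodec create the MFX session itself. */
static int attach_qsv(AVCodecContext *avctx)
{
    AVQSVContext *qsv = av_qsv_alloc_context();
    if (!qsv)
        return AVERROR(ENOMEM);
    qsv->iopattern = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;  /* plain system memory */
    avctx->hwaccel_context = qsv;         /* set from the get_format() callback */
    return 0;
}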

View file

@@ -1,86 +0,0 @@
/*
* Video Acceleration API (shared data between FFmpeg and the video player)
* HW decode acceleration for MPEG-2, MPEG-4, H.264 and VC-1
*
* Copyright (C) 2008-2009 Splitted-Desktop Systems
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_VAAPI_H
#define AVCODEC_VAAPI_H
/**
* @file
* @ingroup lavc_codec_hwaccel_vaapi
* Public libavcodec VA API header.
*/
#include <stdint.h>
#include "libavutil/attributes.h"
#include "version.h"
#if FF_API_STRUCT_VAAPI_CONTEXT
/**
* @defgroup lavc_codec_hwaccel_vaapi VA API Decoding
* @ingroup lavc_codec_hwaccel
* @{
*/
/**
* This structure is used to share data between the FFmpeg library and
* the client video application.
* This shall be zero-allocated and available as
* AVCodecContext.hwaccel_context. All user members can be set once
* during initialization or through each AVCodecContext.get_buffer()
* function call. In any case, they must be valid prior to calling
* decoding functions.
*
* Deprecated: use AVCodecContext.hw_frames_ctx instead.
*/
struct attribute_deprecated vaapi_context {
/**
* Window system dependent data
*
* - encoding: unused
* - decoding: Set by user
*/
void *display;
/**
* Configuration ID
*
* - encoding: unused
* - decoding: Set by user
*/
uint32_t config_id;
/**
* Context ID (video decode pipeline)
*
* - encoding: unused
* - decoding: Set by user
*/
uint32_t context_id;
};
/* @} */
#endif /* FF_API_STRUCT_VAAPI_CONTEXT */
#endif /* AVCODEC_VAAPI_H */

View file

@@ -1,176 +0,0 @@
/*
* The Video Decode and Presentation API for UNIX (VDPAU) is used for
* hardware-accelerated decoding of MPEG-1/2, H.264 and VC-1.
*
* Copyright (C) 2008 NVIDIA
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_VDPAU_H
#define AVCODEC_VDPAU_H
/**
* @file
* @ingroup lavc_codec_hwaccel_vdpau
* Public libavcodec VDPAU header.
*/
/**
* @defgroup lavc_codec_hwaccel_vdpau VDPAU Decoder and Renderer
* @ingroup lavc_codec_hwaccel
*
* VDPAU hardware acceleration has two modules
* - VDPAU decoding
* - VDPAU presentation
*
* The VDPAU decoding module parses all headers using FFmpeg
* parsing mechanisms and uses VDPAU for the actual decoding.
*
* As per the current implementation, the actual decoding
* and rendering (API calls) are done as part of the VDPAU
* presentation (vo_vdpau.c) module.
*
* @{
*/
#include <vdpau/vdpau.h>
#include "libavutil/avconfig.h"
#include "libavutil/attributes.h"
#include "avcodec.h"
#include "version.h"
struct AVCodecContext;
struct AVFrame;
typedef int (*AVVDPAU_Render2)(struct AVCodecContext *, struct AVFrame *,
const VdpPictureInfo *, uint32_t,
const VdpBitstreamBuffer *);
/**
* This structure is used to share data between the libavcodec library and
* the client video application.
* The user shall allocate the structure via av_vdpau_alloc_context()
* and make it available as
* AVCodecContext.hwaccel_context. Members can be set by the user once
* during initialization or through each AVCodecContext.get_buffer()
* function call. In any case, they must be valid prior to calling
* decoding functions.
*
* The size of this structure is not a part of the public ABI and must not
* be used outside of libavcodec. Use av_vdpau_alloc_context() to allocate an
* AVVDPAUContext.
*/
typedef struct AVVDPAUContext {
/**
* VDPAU decoder handle
*
* Set by user.
*/
VdpDecoder decoder;
/**
* VDPAU decoder render callback
*
* Set by the user.
*/
VdpDecoderRender *render;
AVVDPAU_Render2 render2;
} AVVDPAUContext;
/**
* @brief allocation function for AVVDPAUContext
*
* Allows extending the struct without breaking API/ABI
*/
AVVDPAUContext *av_alloc_vdpaucontext(void);
AVVDPAU_Render2 av_vdpau_hwaccel_get_render2(const AVVDPAUContext *);
void av_vdpau_hwaccel_set_render2(AVVDPAUContext *, AVVDPAU_Render2);
/**
* Associate a VDPAU device with a codec context for hardware acceleration.
* This function is meant to be called from the get_format() codec callback,
* or earlier. It can also be called after avcodec_flush_buffers() to change
* the underlying VDPAU device mid-stream (e.g. to recover from non-transparent
* display preemption).
*
* @note get_format() must return AV_PIX_FMT_VDPAU if this function completes
* successfully.
*
* @param avctx decoding context whose get_format() callback is invoked
* @param device VDPAU device handle to use for hardware acceleration
* @param get_proc_address VDPAU device driver
* @param flags zero or more OR'd AV_HWACCEL_FLAG_* flags
*
* @return 0 on success, an AVERROR code on failure.
*/
int av_vdpau_bind_context(AVCodecContext *avctx, VdpDevice device,
VdpGetProcAddress *get_proc_address, unsigned flags);
/**
* Gets the parameters to create an adequate VDPAU video surface for the codec
* context using VDPAU hardware decoding acceleration.
*
* @note Behavior is undefined if the context was not successfully bound to a
* VDPAU device using av_vdpau_bind_context().
*
* @param avctx the codec context being used for decoding the stream
* @param type storage space for the VDPAU video surface chroma type
* (or NULL to ignore)
* @param width storage space for the VDPAU video surface pixel width
* (or NULL to ignore)
* @param height storage space for the VDPAU video surface pixel height
* (or NULL to ignore)
*
* @return 0 on success, a negative AVERROR code on failure.
*/
int av_vdpau_get_surface_parameters(AVCodecContext *avctx, VdpChromaType *type,
uint32_t *width, uint32_t *height);
/**
* Allocate an AVVDPAUContext.
*
* @return Newly-allocated AVVDPAUContext or NULL on failure.
*/
AVVDPAUContext *av_vdpau_alloc_context(void);
#if FF_API_VDPAU_PROFILE
/**
* Get a decoder profile that should be used for initializing a VDPAU decoder.
* Should be called from the AVCodecContext.get_format() callback.
*
* @deprecated Use av_vdpau_bind_context() instead.
*
* @param avctx the codec context being used for decoding the stream
* @param profile a pointer into which the result will be written on success.
* The contents of profile are undefined if this function returns
* an error.
*
* @return 0 on success (non-negative), a negative AVERROR on failure.
*/
attribute_deprecated
int av_vdpau_get_profile(AVCodecContext *avctx, VdpDecoderProfile *profile);
#endif
/* @}*/
#endif /* AVCODEC_VDPAU_H */
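A sketch of the binding step described above, assuming device and get_proc_address were obtained from vdp_device_create_x11() or an equivalent; bind_vdpau() is illustrative only:
#include <stdint.h>
#include <libavcodec/vdpau.h>
/* Bind a VDPAU device from get_format() and query the surface parameters. */
static int bind_vdpau(AVCodecContext *avctx, VdpDevice device,
                      VdpGetProcAddress *get_proc_address)
{
    int ret = av_vdpau_bind_context(avctx, device, get_proc_address, 0);
    if (ret < 0)
        return ret;
    VdpChromaType chroma;
    uint32_t width, height;
    /* Only defined after a successful bind; tells the caller what surfaces to create. */
    return av_vdpau_get_surface_parameters(avctx, &chroma, &width, &height);
}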

View file

@@ -1,140 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_VERSION_H
#define AVCODEC_VERSION_H
/**
* @file
* @ingroup libavc
* Libavcodec version macros.
*/
#include "libavutil/version.h"
#define LIBAVCODEC_VERSION_MAJOR 58
#define LIBAVCODEC_VERSION_MINOR 54
#define LIBAVCODEC_VERSION_MICRO 100
#define LIBAVCODEC_VERSION_INT AV_VERSION_INT(LIBAVCODEC_VERSION_MAJOR, \
LIBAVCODEC_VERSION_MINOR, \
LIBAVCODEC_VERSION_MICRO)
#define LIBAVCODEC_VERSION AV_VERSION(LIBAVCODEC_VERSION_MAJOR, \
LIBAVCODEC_VERSION_MINOR, \
LIBAVCODEC_VERSION_MICRO)
#define LIBAVCODEC_BUILD LIBAVCODEC_VERSION_INT
#define LIBAVCODEC_IDENT "Lavc" AV_STRINGIFY(LIBAVCODEC_VERSION)
/**
* FF_API_* defines may be placed below to indicate public API that will be
* dropped at a future version bump. The defines themselves are not part of
* the public API and may change, break or disappear at any time.
*
* @note, when bumping the major version it is recommended to manually
* disable each FF_API_* in its own commit instead of disabling them all
* at once through the bump. This improves the git bisect-ability of the change.
*/
#ifndef FF_API_LOWRES
#define FF_API_LOWRES (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_DEBUG_MV
#define FF_API_DEBUG_MV (LIBAVCODEC_VERSION_MAJOR < 58)
#endif
#ifndef FF_API_AVCTX_TIMEBASE
#define FF_API_AVCTX_TIMEBASE (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_CODED_FRAME
#define FF_API_CODED_FRAME (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_SIDEDATA_ONLY_PKT
#define FF_API_SIDEDATA_ONLY_PKT (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_VDPAU_PROFILE
#define FF_API_VDPAU_PROFILE (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_CONVERGENCE_DURATION
#define FF_API_CONVERGENCE_DURATION (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_AVPICTURE
#define FF_API_AVPICTURE (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_AVPACKET_OLD_API
#define FF_API_AVPACKET_OLD_API (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_RTP_CALLBACK
#define FF_API_RTP_CALLBACK (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_VBV_DELAY
#define FF_API_VBV_DELAY (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_CODER_TYPE
#define FF_API_CODER_TYPE (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_STAT_BITS
#define FF_API_STAT_BITS (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_PRIVATE_OPT
#define FF_API_PRIVATE_OPT (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_ASS_TIMING
#define FF_API_ASS_TIMING (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_OLD_BSF
#define FF_API_OLD_BSF (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_COPY_CONTEXT
#define FF_API_COPY_CONTEXT (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_GET_CONTEXT_DEFAULTS
#define FF_API_GET_CONTEXT_DEFAULTS (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_NVENC_OLD_NAME
#define FF_API_NVENC_OLD_NAME (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_STRUCT_VAAPI_CONTEXT
#define FF_API_STRUCT_VAAPI_CONTEXT (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_MERGE_SD_API
#define FF_API_MERGE_SD_API (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_TAG_STRING
#define FF_API_TAG_STRING (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_GETCHROMA
#define FF_API_GETCHROMA (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_CODEC_GET_SET
#define FF_API_CODEC_GET_SET (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_USER_VISIBLE_AVHWACCEL
#define FF_API_USER_VISIBLE_AVHWACCEL (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_LOCKMGR
#define FF_API_LOCKMGR (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_NEXT
#define FF_API_NEXT (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_UNSANITIZED_BITRATES
#define FF_API_UNSANITIZED_BITRATES (LIBAVCODEC_VERSION_MAJOR < 59)
#endif
#endif /* AVCODEC_VERSION_H */
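These macros are normally consumed in compile-time checks; a minimal sketch against the version recorded in this copy of the header:
#include <libavcodec/version.h>
#if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(58, 54, 100)
#error "these sources expect libavcodec 58.54.100 or newer"
#endif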

View file

@@ -1,127 +0,0 @@
/*
* Videotoolbox hardware acceleration
*
* copyright (c) 2012 Sebastien Zwickert
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_VIDEOTOOLBOX_H
#define AVCODEC_VIDEOTOOLBOX_H
/**
* @file
* @ingroup lavc_codec_hwaccel_videotoolbox
* Public libavcodec Videotoolbox header.
*/
#include <stdint.h>
#define Picture QuickdrawPicture
#include <VideoToolbox/VideoToolbox.h>
#undef Picture
#include "libavcodec/avcodec.h"
/**
* This struct holds all the information that needs to be passed
* between the caller and libavcodec for initializing Videotoolbox decoding.
* Its size is not a part of the public ABI, it must be allocated with
* av_videotoolbox_alloc_context() and freed with av_free().
*/
typedef struct AVVideotoolboxContext {
/**
* Videotoolbox decompression session object.
* Created and freed by the caller.
*/
VTDecompressionSessionRef session;
/**
* The output callback that must be passed to the session.
* Set by av_videotoolbox_default_init()
*/
VTDecompressionOutputCallback output_callback;
/**
* CVPixelBuffer Format Type that Videotoolbox will use for decoded frames.
* Set by the caller. If this is set to 0, then no specific format is
* requested from the decoder, and its native format is output.
*/
OSType cv_pix_fmt_type;
/**
* CoreMedia Format Description that Videotoolbox will use to create the decompression session.
* Set by the caller.
*/
CMVideoFormatDescriptionRef cm_fmt_desc;
/**
* CoreMedia codec type that Videotoolbox will use to create the decompression session.
* Set by the caller.
*/
int cm_codec_type;
} AVVideotoolboxContext;
/**
* Allocate and initialize a Videotoolbox context.
*
* This function should be called from the get_format() callback when the caller
* selects the AV_PIX_FMT_VIDEOTOOLBOX format. The caller must then create
* the decoder object (using the output callback provided by libavcodec) that
* will be used for Videotoolbox-accelerated decoding.
*
* When decoding with Videotoolbox is finished, the caller must destroy the decoder
* object and free the Videotoolbox context using av_free().
*
* @return the newly allocated context or NULL on failure
*/
AVVideotoolboxContext *av_videotoolbox_alloc_context(void);
/**
* This is a convenience function that creates and sets up the Videotoolbox context using
* an internal implementation.
*
* @param avctx the corresponding codec context
*
* @return >= 0 on success, a negative AVERROR code on failure
*/
int av_videotoolbox_default_init(AVCodecContext *avctx);
/**
* This is a convenience function that creates and sets up the Videotoolbox context using
* an internal implementation.
*
* @param avctx the corresponding codec context
* @param vtctx the Videotoolbox context to use
*
* @return >= 0 on success, a negative AVERROR code on failure
*/
int av_videotoolbox_default_init2(AVCodecContext *avctx, AVVideotoolboxContext *vtctx);
/**
* This function must be called to free the Videotoolbox context initialized with
* av_videotoolbox_default_init().
*
* @param avctx the corresponding codec context
*/
void av_videotoolbox_default_free(AVCodecContext *avctx);
/**
* @}
*/
#endif /* AVCODEC_VIDEOTOOLBOX_H */
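A minimal get_format() sketch that opts into VideoToolbox when the decoder offers it; pick_vt() is an illustrative callback name:
#include <libavcodec/avcodec.h>
#include <libavcodec/videotoolbox.h>
/* get_format() callback: enable VideoToolbox if it is among the offered formats. */
static enum AVPixelFormat pick_vt(AVCodecContext *avctx,
                                  const enum AVPixelFormat *fmts)
{
    for (const enum AVPixelFormat *p = fmts; *p != AV_PIX_FMT_NONE; p++) {
        if (*p == AV_PIX_FMT_VIDEOTOOLBOX &&
            av_videotoolbox_default_init(avctx) >= 0)
            return *p;
    }
    return fmts[0];                       /* fall back to the first offered format */
}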

View file

@@ -1,74 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* A public API for Vorbis parsing
*
* Determines the duration for each packet.
*/
#ifndef AVCODEC_VORBIS_PARSER_H
#define AVCODEC_VORBIS_PARSER_H
#include <stdint.h>
typedef struct AVVorbisParseContext AVVorbisParseContext;
/**
* Allocate and initialize the Vorbis parser using headers in the extradata.
*/
AVVorbisParseContext *av_vorbis_parse_init(const uint8_t *extradata,
int extradata_size);
/**
* Free the parser and everything associated with it.
*/
void av_vorbis_parse_free(AVVorbisParseContext **s);
#define VORBIS_FLAG_HEADER 0x00000001
#define VORBIS_FLAG_COMMENT 0x00000002
#define VORBIS_FLAG_SETUP 0x00000004
/**
* Get the duration for a Vorbis packet.
*
* If @p flags is @c NULL,
* special frames are considered invalid.
*
* @param s Vorbis parser context
* @param buf buffer containing a Vorbis frame
* @param buf_size size of the buffer
* @param flags flags for special frames
*/
int av_vorbis_parse_frame_flags(AVVorbisParseContext *s, const uint8_t *buf,
int buf_size, int *flags);
/**
* Get the duration for a Vorbis packet.
*
* @param s Vorbis parser context
* @param buf buffer containing a Vorbis frame
* @param buf_size size of the buffer
*/
int av_vorbis_parse_frame(AVVorbisParseContext *s, const uint8_t *buf,
int buf_size);
void av_vorbis_parse_reset(AVVorbisParseContext *s);
#endif /* AVCODEC_VORBIS_PARSER_H */
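A minimal sketch of the init/parse/free cycle, assuming extradata carries the Vorbis headers as stored by a demuxer; print_duration() is illustrative only:
#include <stdio.h>
#include <stdint.h>
#include <libavcodec/vorbis_parser.h>
/* Print the duration, in samples, of one Vorbis packet. */
static void print_duration(const uint8_t *extradata, int extradata_size,
                           const uint8_t *pkt, int pkt_size)
{
    AVVorbisParseContext *vp = av_vorbis_parse_init(extradata, extradata_size);
    if (!vp)
        return;
    int duration = av_vorbis_parse_frame(vp, pkt, pkt_size);
    if (duration >= 0)
        printf("packet duration: %d samples\n", duration);
    av_vorbis_parse_free(&vp);
}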

View file

@@ -1,170 +0,0 @@
/*
* Copyright (C) 2003 Ivan Kalvachev
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_XVMC_H
#define AVCODEC_XVMC_H
/**
* @file
* @ingroup lavc_codec_hwaccel_xvmc
* Public libavcodec XvMC header.
*/
#include <X11/extensions/XvMC.h>
#include "libavutil/attributes.h"
#include "version.h"
#include "avcodec.h"
/**
* @defgroup lavc_codec_hwaccel_xvmc XvMC
* @ingroup lavc_codec_hwaccel
*
* @{
*/
#define AV_XVMC_ID 0x1DC711C0 /**< special value to ensure that regular pixel routines haven't corrupted the struct
the number is 1337 speak for the letters IDCT MCo (motion compensation) */
struct attribute_deprecated xvmc_pix_fmt {
/** The field contains the special constant value AV_XVMC_ID.
It is used as a test that the application correctly uses the API,
and that there is no corruption caused by pixel routines.
- application - set during initialization
- libavcodec - unchanged
*/
int xvmc_id;
/** Pointer to the block array allocated by XvMCCreateBlocks().
The array has to be freed by XvMCDestroyBlocks().
Each group of 64 values represents one data block of differential
pixel information (in MoCo mode) or coefficients for IDCT.
- application - set the pointer during initialization
- libavcodec - fills coefficients/pixel data into the array
*/
short* data_blocks;
/** Pointer to the macroblock description array allocated by
XvMCCreateMacroBlocks() and freed by XvMCDestroyMacroBlocks().
- application - set the pointer during initialization
- libavcodec - fills description data into the array
*/
XvMCMacroBlock* mv_blocks;
/** Number of macroblock descriptions that can be stored in the mv_blocks
array.
- application - set during initialization
- libavcodec - unchanged
*/
int allocated_mv_blocks;
/** Number of blocks that can be stored at once in the data_blocks array.
- application - set during initialization
- libavcodec - unchanged
*/
int allocated_data_blocks;
/** Indicate that the hardware would interpret data_blocks as IDCT
coefficients and perform IDCT on them.
- application - set during initialization
- libavcodec - unchanged
*/
int idct;
/** In MoCo mode it indicates that intra macroblocks are assumed to be in
unsigned format; same as the XVMC_INTRA_UNSIGNED flag.
- application - set during initialization
- libavcodec - unchanged
*/
int unsigned_intra;
/** Pointer to the surface allocated by XvMCCreateSurface().
It has to be freed by XvMCDestroySurface() on application exit.
It identifies the frame and its state on the video hardware.
- application - set during initialization
- libavcodec - unchanged
*/
XvMCSurface* p_surface;
/** Set by the decoder before calling ff_draw_horiz_band(),
needed by the XvMCRenderSurface function. */
//@{
/** Pointer to the surface used as past reference
- application - unchanged
- libavcodec - set
*/
XvMCSurface* p_past_surface;
/** Pointer to the surface used as future reference
- application - unchanged
- libavcodec - set
*/
XvMCSurface* p_future_surface;
/** top/bottom field or frame
- application - unchanged
- libavcodec - set
*/
unsigned int picture_structure;
/** XVMC_SECOND_FIELD - 1st or 2nd field in the sequence
- application - unchanged
- libavcodec - set
*/
unsigned int flags;
//}@
/** Number of macroblock descriptions in the mv_blocks array
that have already been passed to the hardware.
- application - zeroes it on get_buffer().
A successful ff_draw_horiz_band() may increment it
with filled_mv_blocks_num or zero both.
- libavcodec - unchanged
*/
int start_mv_blocks_num;
/** Number of new macroblock descriptions in the mv_blocks array (after
start_mv_blocks_num) that are filled by libavcodec and have to be
passed to the hardware.
- application - zeroes it on get_buffer() or after successful
ff_draw_horiz_band().
- libavcodec - increment with one of each stored MB
*/
int filled_mv_blocks_num;
/** Number of the next free data block; one data block consists of
64 short values in the data_blocks array.
All blocks before this one have already been claimed by placing their
position into the corresponding block description structure field,
that are part of the mv_blocks array.
- application - zeroes it on get_buffer().
A successful ff_draw_horiz_band() may zero it together
with start_mv_blocks_num.
- libavcodec - each decoded macroblock increases it by the number
of coded blocks it contains.
*/
int next_free_data_block_num;
};
/**
* @}
*/
#endif /* AVCODEC_XVMC_H */

File diff suppressed because it is too large

View file

@@ -1,861 +0,0 @@
/*
* copyright (c) 2001 Fabrice Bellard
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVFORMAT_AVIO_H
#define AVFORMAT_AVIO_H
/**
* @file
* @ingroup lavf_io
* Buffered I/O operations
*/
#include <stdint.h>
#include "libavutil/common.h"
#include "libavutil/dict.h"
#include "libavutil/log.h"
#include "libavformat/version.h"
/**
* Seeking works like for a local file.
*/
#define AVIO_SEEKABLE_NORMAL (1 << 0)
/**
* Seeking by timestamp with avio_seek_time() is possible.
*/
#define AVIO_SEEKABLE_TIME (1 << 1)
/**
* Callback for checking whether to abort blocking functions.
* AVERROR_EXIT is returned in this case by the interrupted
* function. During blocking operations, callback is called with
* opaque as parameter. If the callback returns 1, the
* blocking operation will be aborted.
*
* No members can be added to this struct without a major bump, if
* new elements have been added after this struct in AVFormatContext
* or AVIOContext.
*/
typedef struct AVIOInterruptCB {
int (*callback)(void*);
void *opaque;
} AVIOInterruptCB;
/**
* Directory entry types.
*/
enum AVIODirEntryType {
AVIO_ENTRY_UNKNOWN,
AVIO_ENTRY_BLOCK_DEVICE,
AVIO_ENTRY_CHARACTER_DEVICE,
AVIO_ENTRY_DIRECTORY,
AVIO_ENTRY_NAMED_PIPE,
AVIO_ENTRY_SYMBOLIC_LINK,
AVIO_ENTRY_SOCKET,
AVIO_ENTRY_FILE,
AVIO_ENTRY_SERVER,
AVIO_ENTRY_SHARE,
AVIO_ENTRY_WORKGROUP,
};
/**
* Describes single entry of the directory.
*
* Only the name and type fields are guaranteed to be set.
* The rest of the fields are protocol- and/or platform-dependent and might be unknown.
*/
typedef struct AVIODirEntry {
char *name; /**< Filename */
int type; /**< Type of the entry */
int utf8; /**< Set to 1 when name is encoded with UTF-8, 0 otherwise.
Name can be encoded with UTF-8 even though 0 is set. */
int64_t size; /**< File size in bytes, -1 if unknown. */
int64_t modification_timestamp; /**< Time of last modification in microseconds since unix
epoch, -1 if unknown. */
int64_t access_timestamp; /**< Time of last access in microseconds since unix epoch,
-1 if unknown. */
int64_t status_change_timestamp; /**< Time of last status change in microseconds since unix
epoch, -1 if unknown. */
int64_t user_id; /**< User ID of owner, -1 if unknown. */
int64_t group_id; /**< Group ID of owner, -1 if unknown. */
int64_t filemode; /**< Unix file mode, -1 if unknown. */
} AVIODirEntry;
typedef struct AVIODirContext {
struct URLContext *url_context;
} AVIODirContext;
/**
* Different data types that can be returned via the AVIO
* write_data_type callback.
*/
enum AVIODataMarkerType {
/**
* Header data; this needs to be present for the stream to be decodeable.
*/
AVIO_DATA_MARKER_HEADER,
/**
* A point in the output bytestream where a decoder can start decoding
* (i.e. a keyframe). A demuxer/decoder given the data flagged with
* AVIO_DATA_MARKER_HEADER, followed by any AVIO_DATA_MARKER_SYNC_POINT,
* should give decodeable results.
*/
AVIO_DATA_MARKER_SYNC_POINT,
/**
* A point in the output bytestream where a demuxer can start parsing
* (for non self synchronizing bytestream formats). That is, any
* non-keyframe packet start point.
*/
AVIO_DATA_MARKER_BOUNDARY_POINT,
/**
* This is any unlabelled data. It can be a muxer not marking
* any positions at all, an actual boundary/sync point
* that the muxer chooses not to mark, or a later part of a packet/fragment
* that is cut into multiple write callbacks due to limited IO buffer size.
*/
AVIO_DATA_MARKER_UNKNOWN,
/**
* Trailer data, which doesn't contain actual content, but is only used
* to finalize the output file.
*/
AVIO_DATA_MARKER_TRAILER,
/**
* A point in the output bytestream where the underlying AVIOContext might
* flush the buffer depending on latency or buffering requirements. Typically
* means the end of a packet.
*/
AVIO_DATA_MARKER_FLUSH_POINT,
};
/**
* Bytestream IO Context.
* New fields can be added to the end with minor version bumps.
* Removal, reordering and changes to existing fields require a major
* version bump.
* sizeof(AVIOContext) must not be used outside libav*.
*
* @note None of the function pointers in AVIOContext should be called
* directly, they should only be set by the client application
* when implementing custom I/O. Normally these are set to the
* function pointers specified in avio_alloc_context()
*/
typedef struct AVIOContext {
/**
* A class for private options.
*
* If this AVIOContext is created by avio_open2(), av_class is set and
* passes the options down to protocols.
*
* If this AVIOContext is manually allocated, then av_class may be set by
* the caller.
*
* warning -- this field can be NULL, be sure to not pass this AVIOContext
* to any av_opt_* functions in that case.
*/
const AVClass *av_class;
/*
* The following shows the relationship between buffer, buf_ptr,
* buf_ptr_max, buf_end, buf_size, and pos, when reading and when writing
* (since AVIOContext is used for both):
*
**********************************************************************************
* READING
**********************************************************************************
*
* | buffer_size |
* |---------------------------------------|
* | |
*
* buffer buf_ptr buf_end
* +---------------+-----------------------+
* |/ / / / / / / /|/ / / / / / /| |
* read buffer: |/ / consumed / | to be read /| |
* |/ / / / / / / /|/ / / / / / /| |
* +---------------+-----------------------+
*
* pos
* +-------------------------------------------+-----------------+
* input file: | | |
* +-------------------------------------------+-----------------+
*
*
**********************************************************************************
* WRITING
**********************************************************************************
*
* | buffer_size |
* |--------------------------------------|
* | |
*
* buf_ptr_max
* buffer (buf_ptr) buf_end
* +-----------------------+--------------+
* |/ / / / / / / / / / / /| |
* write buffer: | / / to be flushed / / | |
* |/ / / / / / / / / / / /| |
* +-----------------------+--------------+
* buf_ptr can be in this
* due to a backward seek
*
* pos
* +-------------+----------------------------------------------+
* output file: | | |
* +-------------+----------------------------------------------+
*
*/
unsigned char *buffer; /**< Start of the buffer. */
int buffer_size; /**< Maximum buffer size */
unsigned char *buf_ptr; /**< Current position in the buffer */
unsigned char *buf_end; /**< End of the data, may be less than
buffer+buffer_size if the read function returned
less data than requested, e.g. for streams where
no more data has been received yet. */
void *opaque; /**< A private pointer, passed to the read/write/seek/...
functions. */
int (*read_packet)(void *opaque, uint8_t *buf, int buf_size);
int (*write_packet)(void *opaque, uint8_t *buf, int buf_size);
int64_t (*seek)(void *opaque, int64_t offset, int whence);
int64_t pos; /**< position in the file of the current buffer */
int eof_reached; /**< true if reading could not continue due to an error or EOF */
int write_flag; /**< true if open for writing */
int max_packet_size;
unsigned long checksum;
unsigned char *checksum_ptr;
unsigned long (*update_checksum)(unsigned long checksum, const uint8_t *buf, unsigned int size);
int error; /**< contains the error code or 0 if no error happened */
/**
* Pause or resume playback for network streaming protocols - e.g. MMS.
*/
int (*read_pause)(void *opaque, int pause);
/**
* Seek to a given timestamp in stream with the specified stream_index.
* Needed for some network streaming protocols which don't support seeking
* to byte position.
*/
int64_t (*read_seek)(void *opaque, int stream_index,
int64_t timestamp, int flags);
/**
* A combination of AVIO_SEEKABLE_ flags or 0 when the stream is not seekable.
*/
int seekable;
/**
* max filesize, used to limit allocations
* This field is internal to libavformat and access from outside is not allowed.
*/
int64_t maxsize;
/**
* avio_read and avio_write should if possible be satisfied directly
* instead of going through a buffer, and avio_seek will always
* call the underlying seek function directly.
*/
int direct;
/**
* Bytes read statistic
* This field is internal to libavformat and access from outside is not allowed.
*/
int64_t bytes_read;
/**
* seek statistic
* This field is internal to libavformat and access from outside is not allowed.
*/
int seek_count;
/**
* writeout statistic
* This field is internal to libavformat and access from outside is not allowed.
*/
int writeout_count;
/**
* Original buffer size
* used internally after probing to ensure seekback and to reset the buffer size
* This field is internal to libavformat and access from outside is not allowed.
*/
int orig_buffer_size;
/**
* Threshold to favor readahead over seek.
* This is currently internal only; do not use it from outside.
*/
int short_seek_threshold;
/**
* ',' separated list of allowed protocols.
*/
const char *protocol_whitelist;
/**
* ',' separated list of disallowed protocols.
*/
const char *protocol_blacklist;
/**
* A callback that is used instead of write_packet.
*/
int (*write_data_type)(void *opaque, uint8_t *buf, int buf_size,
enum AVIODataMarkerType type, int64_t time);
/**
* If set, don't call write_data_type separately for AVIO_DATA_MARKER_BOUNDARY_POINT,
* but ignore them and treat them as AVIO_DATA_MARKER_UNKNOWN (to avoid needlessly
* small chunks of data returned from the callback).
*/
int ignore_boundary_point;
/**
* Internal, not meant to be used from outside of AVIOContext.
*/
enum AVIODataMarkerType current_type;
int64_t last_time;
/**
* A callback that is used instead of short_seek_threshold.
* This is currently internal only; do not use it from outside.
*/
int (*short_seek_get)(void *opaque);
int64_t written;
/**
* Maximum reached position before a backward seek in the write buffer,
* used to keep track of already written data for a later flush.
*/
unsigned char *buf_ptr_max;
/**
* Try to buffer at least this amount of data before flushing it
*/
int min_packet_size;
} AVIOContext;
/**
* Return the name of the protocol that will handle the passed URL.
*
* NULL is returned if no protocol could be found for the given URL.
*
* @return Name of the protocol or NULL.
*/
const char *avio_find_protocol_name(const char *url);
/**
* Return AVIO_FLAG_* access flags corresponding to the access permissions
* of the resource in url, or a negative value corresponding to an
* AVERROR code in case of failure. The returned access flags are
* masked by the value in flags.
*
* @note This function is intrinsically unsafe, in the sense that the
* checked resource may change its existence or permission status from
* one call to another. Thus you should not trust the returned value,
* unless you are sure that no other processes are accessing the
* checked resource.
*/
int avio_check(const char *url, int flags);
/**
* Move or rename a resource.
*
* @note url_src and url_dst should share the same protocol and authority.
*
* @param url_src url to resource to be moved
* @param url_dst new url to resource if the operation succeeded
* @return >=0 on success or negative on error.
*/
int avpriv_io_move(const char *url_src, const char *url_dst);
/**
* Delete a resource.
*
* @param url resource to be deleted.
* @return >=0 on success or negative on error.
*/
int avpriv_io_delete(const char *url);
/**
* Open directory for reading.
*
* @param s directory read context. Pointer to a NULL pointer must be passed.
* @param url directory to be listed.
* @param options A dictionary filled with protocol-private options. On return
* this parameter will be destroyed and replaced with a dictionary
* containing options that were not found. May be NULL.
* @return >=0 on success or negative on error.
*/
int avio_open_dir(AVIODirContext **s, const char *url, AVDictionary **options);
/**
* Get next directory entry.
*
* Returned entry must be freed with avio_free_directory_entry(). In particular
* it may outlive AVIODirContext.
*
* @param s directory read context.
* @param[out] next next entry or NULL when no more entries.
* @return >=0 on success or negative on error. End of list is not considered an
* error.
*/
int avio_read_dir(AVIODirContext *s, AVIODirEntry **next);
/**
* Close directory.
*
* @note Entries created using avio_read_dir() are not deleted and must be
* freed with avio_free_directory_entry().
*
* @param s directory read context.
* @return >=0 on success or negative on error.
*/
int avio_close_dir(AVIODirContext **s);
/**
* Free entry allocated by avio_read_dir().
*
* @param entry entry to be freed.
*/
void avio_free_directory_entry(AVIODirEntry **entry);
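/**
 * A minimal sketch of how the directory listing API above fits together,
 * assuming a plain local URL; the function name list_dir and the reduced
 * error handling below are illustrative only.
 * @code
 * #include <stdio.h>
 * #include "libavformat/avio.h"
 *
 * static int list_dir(const char *url)
 * {
 *     AVIODirContext *ctx = NULL;
 *     AVIODirEntry *entry = NULL;
 *     int ret;
 *
 *     if ((ret = avio_open_dir(&ctx, url, NULL)) < 0)
 *         return ret;
 *     for (;;) {
 *         if ((ret = avio_read_dir(ctx, &entry)) < 0)
 *             break;                       // real error
 *         if (!entry)
 *             break;                       // end of list, ret >= 0
 *         printf("%s\n", entry->name);
 *         avio_free_directory_entry(&entry);
 *     }
 *     avio_close_dir(&ctx);
 *     return ret;
 * }
 * @endcode
 */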
/**
* Allocate and initialize an AVIOContext for buffered I/O. It must be later
* freed with avio_context_free().
*
* @param buffer Memory block for input/output operations via AVIOContext.
* The buffer must be allocated with av_malloc() and friends.
* It may be freed and replaced with a new buffer by libavformat.
* AVIOContext.buffer holds the buffer currently in use,
* which must be later freed with av_free().
* @param buffer_size The buffer size is very important for performance.
* For protocols with fixed blocksize it should be set to this blocksize.
* For others a typical size is a cache page, e.g. 4 KiB.
* @param write_flag Set to 1 if the buffer should be writable, 0 otherwise.
* @param opaque An opaque pointer to user-specific data.
* @param read_packet A function for refilling the buffer, may be NULL.
* For stream protocols, must never return 0 but rather
* a proper AVERROR code.
* @param write_packet A function for writing the buffer contents, may be NULL.
* The function may not change the input buffer's contents.
* @param seek A function for seeking to specified byte position, may be NULL.
*
* @return Allocated AVIOContext or NULL on failure.
*/
AVIOContext *avio_alloc_context(
unsigned char *buffer,
int buffer_size,
int write_flag,
void *opaque,
int (*read_packet)(void *opaque, uint8_t *buf, int buf_size),
int (*write_packet)(void *opaque, uint8_t *buf, int buf_size),
int64_t (*seek)(void *opaque, int64_t offset, int whence));
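/**
 * A minimal sketch of a custom, read-only AVIOContext that serves data from a
 * caller-owned memory buffer. MemCtx, mem_read and open_mem_reader are
 * illustrative names, not part of the API, and error handling is reduced to
 * the essentials. When done, free s->buffer with av_free() (libavformat may
 * have replaced the original allocation) and then the context with
 * avio_context_free().
 * @code
 * #include <string.h>
 * #include "libavformat/avio.h"
 * #include "libavutil/mem.h"
 *
 * typedef struct MemCtx {
 *     const uint8_t *data;
 *     size_t size, pos;
 * } MemCtx;
 *
 * static int mem_read(void *opaque, uint8_t *buf, int buf_size)
 * {
 *     MemCtx *m = opaque;
 *     size_t left = m->size - m->pos;
 *     if (left == 0)
 *         return AVERROR_EOF;              // never return 0 for streams
 *     if ((size_t)buf_size > left)
 *         buf_size = (int)left;
 *     memcpy(buf, m->data + m->pos, buf_size);
 *     m->pos += buf_size;
 *     return buf_size;
 * }
 *
 * static AVIOContext *open_mem_reader(MemCtx *m)
 * {
 *     uint8_t *iobuf = av_malloc(4096);    // ownership passes to the context
 *     if (!iobuf)
 *         return NULL;
 *     return avio_alloc_context(iobuf, 4096, 0, m, mem_read, NULL, NULL);
 * }
 * @endcode
 */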
/**
* Free the supplied IO context and everything associated with it.
*
* @param s Double pointer to the IO context. This function will write NULL
* into s.
*/
void avio_context_free(AVIOContext **s);
void avio_w8(AVIOContext *s, int b);
void avio_write(AVIOContext *s, const unsigned char *buf, int size);
void avio_wl64(AVIOContext *s, uint64_t val);
void avio_wb64(AVIOContext *s, uint64_t val);
void avio_wl32(AVIOContext *s, unsigned int val);
void avio_wb32(AVIOContext *s, unsigned int val);
void avio_wl24(AVIOContext *s, unsigned int val);
void avio_wb24(AVIOContext *s, unsigned int val);
void avio_wl16(AVIOContext *s, unsigned int val);
void avio_wb16(AVIOContext *s, unsigned int val);
/**
* Write a NULL-terminated string.
* @return number of bytes written.
*/
int avio_put_str(AVIOContext *s, const char *str);
/**
* Convert a UTF-8 string to UTF-16LE and write it.
* @param s the AVIOContext
* @param str NULL-terminated UTF-8 string
*
* @return number of bytes written.
*/
int avio_put_str16le(AVIOContext *s, const char *str);
/**
* Convert a UTF-8 string to UTF-16BE and write it.
* @param s the AVIOContext
* @param str NULL-terminated UTF-8 string
*
* @return number of bytes written.
*/
int avio_put_str16be(AVIOContext *s, const char *str);
/**
* Mark the written bytestream as a specific type.
*
* Zero-length ranges are omitted from the output.
*
* @param time the stream time the current bytestream pos corresponds to
* (in AV_TIME_BASE units), or AV_NOPTS_VALUE if unknown or not
* applicable
* @param type the kind of data written starting at the current pos
*/
void avio_write_marker(AVIOContext *s, int64_t time, enum AVIODataMarkerType type);
/**
* ORing this as the "whence" parameter to a seek function causes it to
* return the filesize without seeking anywhere. Supporting this is optional.
* If it is not supported then the seek function will return <0.
*/
#define AVSEEK_SIZE 0x10000
/**
* Passing this flag as the "whence" parameter to a seek function causes it to
* seek by any means (like reopening and linear reading) or other normally unreasonable
* means that can be extremely slow.
* This may be ignored by the seek code.
*/
#define AVSEEK_FORCE 0x20000
/**
* fseek() equivalent for AVIOContext.
* @return new position or AVERROR.
*/
int64_t avio_seek(AVIOContext *s, int64_t offset, int whence);
/**
* Skip the given number of bytes forward.
* @return new position or AVERROR.
*/
int64_t avio_skip(AVIOContext *s, int64_t offset);
/**
* ftell() equivalent for AVIOContext.
* @return position or AVERROR.
*/
static av_always_inline int64_t avio_tell(AVIOContext *s)
{
return avio_seek(s, 0, SEEK_CUR);
}
/**
* Get the filesize.
* @return filesize or AVERROR
*/
int64_t avio_size(AVIOContext *s);
/**
* Similar to feof() but also returns nonzero on read errors.
* @return non-zero if and only if at end of file or a read error happened while reading.
*/
int avio_feof(AVIOContext *s);
/** @warning Writes up to 4 KiB per call */
int avio_printf(AVIOContext *s, const char *fmt, ...) av_printf_format(2, 3);
/**
* Force flushing of buffered data.
*
* For write streams, force the buffered data to be immediately written to the output,
* without waiting to fill the internal buffer.
*
* For read streams, discard all currently buffered data, and advance the
* reported file position to that of the underlying stream. This does not
* read new data, and does not perform any seeks.
*/
void avio_flush(AVIOContext *s);
/**
* Read size bytes from AVIOContext into buf.
* @return number of bytes read or AVERROR
*/
int avio_read(AVIOContext *s, unsigned char *buf, int size);
/**
* Read size bytes from AVIOContext into buf. Unlike avio_read(), this is allowed
* to read fewer bytes than requested. The missing bytes can be read in the next
* call. This always tries to read at least 1 byte.
* Useful to reduce latency in certain cases.
* @return number of bytes read or AVERROR
*/
int avio_read_partial(AVIOContext *s, unsigned char *buf, int size);
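/**
 * A small sketch showing the difference from avio_read(): the partial variant
 * may return fewer bytes than requested, so callers loop. dump_stream is an
 * illustrative name.
 * @code
 * #include <stdio.h>
 * #include "libavformat/avio.h"
 *
 * static int dump_stream(AVIOContext *in)
 * {
 *     unsigned char buf[4096];
 *     for (;;) {
 *         int n = avio_read_partial(in, buf, sizeof(buf));
 *         if (n == AVERROR_EOF || n == 0)
 *             return 0;                    // end of stream
 *         if (n < 0)
 *             return n;                    // other AVERROR
 *         fwrite(buf, 1, n, stdout);
 *     }
 * }
 * @endcode
 */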
/**
* @name Functions for reading from AVIOContext
* @{
*
* @note these functions return 0 on EOF, so you cannot use them if EOF
* handling is necessary
*/
int avio_r8 (AVIOContext *s);
unsigned int avio_rl16(AVIOContext *s);
unsigned int avio_rl24(AVIOContext *s);
unsigned int avio_rl32(AVIOContext *s);
uint64_t avio_rl64(AVIOContext *s);
unsigned int avio_rb16(AVIOContext *s);
unsigned int avio_rb24(AVIOContext *s);
unsigned int avio_rb32(AVIOContext *s);
uint64_t avio_rb64(AVIOContext *s);
/**
* @}
*/
/**
* Read a string from pb into buf. The reading will terminate when either
* a NULL character was encountered, maxlen bytes have been read, or nothing
* more can be read from pb. The result is guaranteed to be NULL-terminated; it
* will be truncated if buf is too small.
* Note that the string is not interpreted or validated in any way, it
* might get truncated in the middle of a sequence for multi-byte encodings.
*
* @return number of bytes read (is always <= maxlen).
* If reading ends on EOF or error, the return value will be one more than
* bytes actually read.
*/
int avio_get_str(AVIOContext *pb, int maxlen, char *buf, int buflen);
/**
* Read a UTF-16 string from pb and convert it to UTF-8.
* The reading will terminate when either a null or invalid character was
* encountered or maxlen bytes have been read.
* @return number of bytes read (is always <= maxlen)
*/
int avio_get_str16le(AVIOContext *pb, int maxlen, char *buf, int buflen);
int avio_get_str16be(AVIOContext *pb, int maxlen, char *buf, int buflen);
/**
* @name URL open modes
* The flags argument to avio_open must be one of the following
* constants, optionally ORed with other flags.
* @{
*/
#define AVIO_FLAG_READ 1 /**< read-only */
#define AVIO_FLAG_WRITE 2 /**< write-only */
#define AVIO_FLAG_READ_WRITE (AVIO_FLAG_READ|AVIO_FLAG_WRITE) /**< read-write pseudo flag */
/**
* @}
*/
/**
* Use non-blocking mode.
* If this flag is set, operations on the context will return
* AVERROR(EAGAIN) if they can not be performed immediately.
* If this flag is not set, operations on the context will never return
* AVERROR(EAGAIN).
* Note that this flag does not affect the opening/connecting of the
* context. Connecting a protocol will always block if necessary (e.g. on
* network protocols) but never hang (e.g. on busy devices).
* Warning: non-blocking protocol support is work in progress; this flag may be
* silently ignored.
*/
#define AVIO_FLAG_NONBLOCK 8
/**
* Use direct mode.
* avio_read and avio_write should if possible be satisfied directly
* instead of going through a buffer, and avio_seek will always
* call the underlying seek function directly.
*/
#define AVIO_FLAG_DIRECT 0x8000
/**
* Create and initialize an AVIOContext for accessing the
* resource indicated by url.
* @note When the resource indicated by url has been opened in
* read+write mode, the AVIOContext can be used only for writing.
*
* @param s Used to return the pointer to the created AVIOContext.
* In case of failure the pointed to value is set to NULL.
* @param url resource to access
* @param flags flags which control how the resource indicated by url
* is to be opened
* @return >= 0 in case of success, a negative value corresponding to an
* AVERROR code in case of failure
*/
int avio_open(AVIOContext **s, const char *url, int flags);
/**
* Create and initialize an AVIOContext for accessing the
* resource indicated by url.
* @note When the resource indicated by url has been opened in
* read+write mode, the AVIOContext can be used only for writing.
*
* @param s Used to return the pointer to the created AVIOContext.
* In case of failure the pointed to value is set to NULL.
* @param url resource to access
* @param flags flags which control how the resource indicated by url
* is to be opened
* @param int_cb an interrupt callback to be used at the protocols level
* @param options A dictionary filled with protocol-private options. On return
* this parameter will be destroyed and replaced with a dict containing options
* that were not found. May be NULL.
* @return >= 0 in case of success, a negative value corresponding to an
* AVERROR code in case of failure
*/
int avio_open2(AVIOContext **s, const char *url, int flags,
const AVIOInterruptCB *int_cb, AVDictionary **options);
/**
* Close the resource accessed by the AVIOContext s and free it.
* This function can only be used if s was opened by avio_open().
*
* The internal buffer is automatically flushed before closing the
* resource.
*
* @return 0 on success, an AVERROR < 0 on error.
* @see avio_closep
*/
int avio_close(AVIOContext *s);
/**
* Close the resource accessed by the AVIOContext *s, free it
* and set the pointer pointing to it to NULL.
* This function can only be used if s was opened by avio_open().
*
* The internal buffer is automatically flushed before closing the
* resource.
*
* @return 0 on success, an AVERROR < 0 on error.
* @see avio_close
*/
int avio_closep(AVIOContext **s);
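/**
 * A minimal open/read/close sketch using the functions above; "input.bin" is
 * a placeholder URL, and real code should also check avio_feof() and the
 * error field.
 * @code
 * #include <stdio.h>
 * #include "libavformat/avio.h"
 *
 * int main(void)
 * {
 *     AVIOContext *s = NULL;
 *     if (avio_open(&s, "input.bin", AVIO_FLAG_READ) < 0)
 *         return 1;
 *     unsigned int magic = avio_rb32(s);   // first 4 bytes, big-endian
 *     printf("magic=0x%08x pos=%lld size=%lld\n", magic,
 *            (long long)avio_tell(s), (long long)avio_size(s));
 *     avio_closep(&s);                     // close and set s to NULL
 *     return 0;
 * }
 * @endcode
 */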
/**
* Open a write only memory stream.
*
* @param s new IO context
* @return zero if no error.
*/
int avio_open_dyn_buf(AVIOContext **s);
/**
* Return the written size and a pointer to the buffer.
* The AVIOContext stream is left intact.
* The buffer must NOT be freed.
* No padding is added to the buffer.
*
* @param s IO context
* @param pbuffer pointer to a byte buffer
* @return the length of the byte buffer
*/
int avio_get_dyn_buf(AVIOContext *s, uint8_t **pbuffer);
/**
* Return the written size and a pointer to the buffer. The buffer
* must be freed with av_free().
* Padding of AV_INPUT_BUFFER_PADDING_SIZE is added to the buffer.
*
* @param s IO context
* @param pbuffer pointer to a byte buffer
* @return the length of the byte buffer
*/
int avio_close_dyn_buf(AVIOContext *s, uint8_t **pbuffer);
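/**
 * A small sketch that builds a blob in memory with a dynamic buffer; the
 * written values are arbitrary examples, and the caller owns *out afterwards
 * (free it with av_free()).
 * @code
 * #include "libavformat/avio.h"
 * #include "libavutil/mem.h"
 *
 * static int build_blob(uint8_t **out, int *out_size)
 * {
 *     AVIOContext *dyn = NULL;
 *     int ret = avio_open_dyn_buf(&dyn);
 *     if (ret < 0)
 *         return ret;
 *     avio_wb32(dyn, 0xDEADBEEF);          // big-endian 32-bit value
 *     avio_put_str(dyn, "hello");          // NUL-terminated string
 *     *out_size = avio_close_dyn_buf(dyn, out);
 *     return 0;
 * }
 * @endcode
 */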
/**
* Iterate through names of available protocols.
*
* @param opaque A private pointer representing current protocol.
* It must be a pointer to NULL on first iteration and will
* be updated by successive calls to avio_enum_protocols.
* @param output If set to 1, iterate over output protocols,
* otherwise over input protocols.
*
* @return A static string containing the name of current protocol or NULL
*/
const char *avio_enum_protocols(void **opaque, int output);
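/**
 * A small sketch that prints every input and output protocol compiled into
 * the library.
 * @code
 * #include <stdio.h>
 * #include "libavformat/avio.h"
 *
 * static void print_protocols(void)
 * {
 *     void *opaque = NULL;
 *     const char *name;
 *     while ((name = avio_enum_protocols(&opaque, 0)))
 *         printf("in:  %s\n", name);       // input protocols
 *     opaque = NULL;
 *     while ((name = avio_enum_protocols(&opaque, 1)))
 *         printf("out: %s\n", name);       // output protocols
 * }
 * @endcode
 */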
/**
* Pause and resume playing - only meaningful if using a network streaming
* protocol (e.g. MMS).
*
* @param h IO context from which to call the read_pause function pointer
* @param pause 1 for pause, 0 for resume
*/
int avio_pause(AVIOContext *h, int pause);
/**
* Seek to a given timestamp relative to some component stream.
* Only meaningful if using a network streaming protocol (e.g. MMS.).
*
* @param h IO context from which to call the seek function pointers
* @param stream_index The stream index that the timestamp is relative to.
* If stream_index is (-1) the timestamp should be in AV_TIME_BASE
* units from the beginning of the presentation.
* If a stream_index >= 0 is used and the protocol does not support
* seeking based on component streams, the call will fail.
* @param timestamp timestamp in AVStream.time_base units
* or if there is no stream specified then in AV_TIME_BASE units.
* @param flags Optional combination of AVSEEK_FLAG_BACKWARD, AVSEEK_FLAG_BYTE
* and AVSEEK_FLAG_ANY. The protocol may silently ignore
* AVSEEK_FLAG_BACKWARD and AVSEEK_FLAG_ANY, but AVSEEK_FLAG_BYTE will
* fail if used and not supported.
* @return >= 0 on success
* @see AVInputFormat::read_seek
*/
int64_t avio_seek_time(AVIOContext *h, int stream_index,
int64_t timestamp, int flags);
/* Avoid a warning. The header can not be included because it breaks c++. */
struct AVBPrint;
/**
* Read contents of h into print buffer, up to max_size bytes, or up to EOF.
*
* @return 0 for success (max_size bytes read or EOF reached), negative error
* code otherwise
*/
int avio_read_to_bprint(AVIOContext *h, struct AVBPrint *pb, size_t max_size);
/**
* Accept and allocate a client context on a server context.
* @param s the server context
* @param c the client context, must be unallocated
* @return >= 0 on success or a negative value corresponding
* to an AVERROR on failure
*/
int avio_accept(AVIOContext *s, AVIOContext **c);
/**
* Perform one step of the protocol handshake to accept a new client.
* This function must be called on a client returned by avio_accept() before
* using it as a read/write context.
* It is separate from avio_accept() because it may block.
* A step of the handshake is defined by places where the application may
* decide to change the proceedings.
* For example, on a protocol with a request header and a reply header, each
* one can constitute a step because the application may use the parameters
* from the request to change parameters in the reply; or each individual
* chunk of the request can constitute a step.
* If the handshake is already finished, avio_handshake() does nothing and
* returns 0 immediately.
*
* @param c the client context to perform the handshake on
* @return 0 on a complete and successful handshake
* > 0 if the handshake progressed, but is not complete
* < 0 for an AVERROR code
*/
int avio_handshake(AVIOContext *c);
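/**
 * A sketch of the accept/handshake sequence described above, assuming server
 * is an AVIOContext that was opened in listening mode; accept_one_client is
 * an illustrative name.
 * @code
 * static int accept_one_client(AVIOContext *server, AVIOContext **client)
 * {
 *     int ret = avio_accept(server, client);
 *     if (ret < 0)
 *         return ret;
 *     // > 0 means the handshake progressed but is not complete; this is the
 *     // point where an application could inspect or adjust parameters.
 *     while ((ret = avio_handshake(*client)) > 0)
 *         ;
 *     if (ret < 0)
 *         avio_closep(client);
 *     return ret;
 * }
 * @endcode
 */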
#endif /* AVFORMAT_AVIO_H */

View file

@ -1,114 +0,0 @@
/*
* Version macros.
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVFORMAT_VERSION_H
#define AVFORMAT_VERSION_H
/**
* @file
* @ingroup libavf
* Libavformat version macros
*/
#include "libavutil/version.h"
// Major bumping may affect Ticket5467, 5421, 5451 (compatibility with Chromium)
// Also please add any ticket numbers that you believe might be affected here
#define LIBAVFORMAT_VERSION_MAJOR 58
#define LIBAVFORMAT_VERSION_MINOR 29
#define LIBAVFORMAT_VERSION_MICRO 100
#define LIBAVFORMAT_VERSION_INT AV_VERSION_INT(LIBAVFORMAT_VERSION_MAJOR, \
LIBAVFORMAT_VERSION_MINOR, \
LIBAVFORMAT_VERSION_MICRO)
#define LIBAVFORMAT_VERSION AV_VERSION(LIBAVFORMAT_VERSION_MAJOR, \
LIBAVFORMAT_VERSION_MINOR, \
LIBAVFORMAT_VERSION_MICRO)
#define LIBAVFORMAT_BUILD LIBAVFORMAT_VERSION_INT
#define LIBAVFORMAT_IDENT "Lavf" AV_STRINGIFY(LIBAVFORMAT_VERSION)
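/**
 * The version macros above can be combined with AV_VERSION_INT() from
 * libavutil/version.h for compile-time feature gating; HAVE_NEWER_LAVF is an
 * illustrative name in this sketch.
 * @code
 * #if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(58, 29, 100)
 * #  define HAVE_NEWER_LAVF 1
 * #else
 * #  define HAVE_NEWER_LAVF 0
 * #endif
 * @endcode
 */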
/**
* FF_API_* defines may be placed below to indicate public API that will be
* dropped at a future version bump. The defines themselves are not part of
* the public API and may change, break or disappear at any time.
*
* @note, when bumping the major version it is recommended to manually
* disable each FF_API_* in its own commit instead of disabling them all
* at once through the bump. This improves the git bisect-ability of the change.
*
*/
#ifndef FF_API_COMPUTE_PKT_FIELDS2
#define FF_API_COMPUTE_PKT_FIELDS2 (LIBAVFORMAT_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_OLD_OPEN_CALLBACKS
#define FF_API_OLD_OPEN_CALLBACKS (LIBAVFORMAT_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_LAVF_AVCTX
#define FF_API_LAVF_AVCTX (LIBAVFORMAT_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_HTTP_USER_AGENT
#define FF_API_HTTP_USER_AGENT (LIBAVFORMAT_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_HLS_WRAP
#define FF_API_HLS_WRAP (LIBAVFORMAT_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_HLS_USE_LOCALTIME
#define FF_API_HLS_USE_LOCALTIME (LIBAVFORMAT_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_LAVF_KEEPSIDE_FLAG
#define FF_API_LAVF_KEEPSIDE_FLAG (LIBAVFORMAT_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_OLD_ROTATE_API
#define FF_API_OLD_ROTATE_API (LIBAVFORMAT_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_FORMAT_GET_SET
#define FF_API_FORMAT_GET_SET (LIBAVFORMAT_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_OLD_AVIO_EOF_0
#define FF_API_OLD_AVIO_EOF_0 (LIBAVFORMAT_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_LAVF_FFSERVER
#define FF_API_LAVF_FFSERVER (LIBAVFORMAT_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_FORMAT_FILENAME
#define FF_API_FORMAT_FILENAME (LIBAVFORMAT_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_OLD_RTSP_OPTIONS
#define FF_API_OLD_RTSP_OPTIONS (LIBAVFORMAT_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_NEXT
#define FF_API_NEXT (LIBAVFORMAT_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_DASH_MIN_SEG_DURATION
#define FF_API_DASH_MIN_SEG_DURATION (LIBAVFORMAT_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_LAVF_MP4A_LATM
#define FF_API_LAVF_MP4A_LATM (LIBAVFORMAT_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_AVIOFORMAT
#define FF_API_AVIOFORMAT (LIBAVFORMAT_VERSION_MAJOR < 59)
#endif
#ifndef FF_API_R_FRAME_RATE
#define FF_API_R_FRAME_RATE 1
#endif
#endif /* AVFORMAT_VERSION_H */

View file

@ -1,60 +0,0 @@
/*
* copyright (c) 2006 Mans Rullgard
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* @ingroup lavu_adler32
* Public header for Adler-32 hash function implementation.
*/
#ifndef AVUTIL_ADLER32_H
#define AVUTIL_ADLER32_H
#include <stdint.h>
#include "attributes.h"
/**
* @defgroup lavu_adler32 Adler-32
* @ingroup lavu_hash
* Adler-32 hash function implementation.
*
* @{
*/
/**
* Calculate the Adler32 checksum of a buffer.
*
* Passing the return value to a subsequent av_adler32_update() call
* allows the checksum of multiple buffers to be calculated as though
* they were concatenated.
*
* @param adler initial checksum value
* @param buf pointer to input buffer
* @param len size of input buffer
* @return updated checksum
*/
unsigned long av_adler32_update(unsigned long adler, const uint8_t *buf,
unsigned int len) av_pure;
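/**
 * A small sketch that checksums two chunks as if they were one buffer; the
 * conventional Adler-32 seed is 1, and adler_of_two is an illustrative name.
 * @code
 * static unsigned long adler_of_two(const uint8_t *a, unsigned a_len,
 *                                   const uint8_t *b, unsigned b_len)
 * {
 *     unsigned long sum = av_adler32_update(1, a, a_len);
 *     return av_adler32_update(sum, b, b_len);
 * }
 * @endcode
 */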
/**
* @}
*/
#endif /* AVUTIL_ADLER32_H */

View file

@ -1,65 +0,0 @@
/*
* copyright (c) 2007 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_AES_H
#define AVUTIL_AES_H
#include <stdint.h>
#include "attributes.h"
#include "version.h"
/**
* @defgroup lavu_aes AES
* @ingroup lavu_crypto
* @{
*/
extern const int av_aes_size;
struct AVAES;
/**
* Allocate an AVAES context.
*/
struct AVAES *av_aes_alloc(void);
/**
* Initialize an AVAES context.
* @param key_bits 128, 192 or 256
* @param decrypt 0 for encryption, 1 for decryption
*/
int av_aes_init(struct AVAES *a, const uint8_t *key, int key_bits, int decrypt);
/**
* Encrypt or decrypt a buffer using a previously initialized context.
* @param count number of 16 byte blocks
* @param dst destination array, can be equal to src
* @param src source array, can be equal to dst
* @param iv initialization vector for CBC mode, if NULL then ECB will be used
* @param decrypt 0 for encryption, 1 for decryption
*/
void av_aes_crypt(struct AVAES *a, uint8_t *dst, const uint8_t *src, int count, uint8_t *iv, int decrypt);
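/**
 * A minimal sketch that encrypts one 16-byte block with AES-128 in ECB mode
 * (iv == NULL); the key and data are caller-provided placeholders, and
 * encrypt_block is an illustrative name.
 * @code
 * #include "libavutil/aes.h"
 * #include "libavutil/mem.h"
 *
 * static int encrypt_block(uint8_t dst[16], const uint8_t src[16],
 *                          const uint8_t key[16])
 * {
 *     struct AVAES *aes = av_aes_alloc();
 *     if (!aes)
 *         return -1;
 *     av_aes_init(aes, key, 128, 0);       // 0 = encrypt
 *     av_aes_crypt(aes, dst, src, 1, NULL, 0);
 *     av_free(aes);
 *     return 0;
 * }
 * @endcode
 */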
/**
* @}
*/
#endif /* AVUTIL_AES_H */

View file

@ -1,88 +0,0 @@
/*
* AES-CTR cipher
* Copyright (c) 2015 Eran Kornblau <erankor at gmail dot com>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_AES_CTR_H
#define AVUTIL_AES_CTR_H
#include <stdint.h>
#include "attributes.h"
#include "version.h"
#define AES_CTR_KEY_SIZE (16)
#define AES_CTR_IV_SIZE (8)
struct AVAESCTR;
/**
* Allocate an AVAESCTR context.
*/
struct AVAESCTR *av_aes_ctr_alloc(void);
/**
* Initialize an AVAESCTR context.
* @param key encryption key, must have a length of AES_CTR_KEY_SIZE
*/
int av_aes_ctr_init(struct AVAESCTR *a, const uint8_t *key);
/**
* Release an AVAESCTR context.
*/
void av_aes_ctr_free(struct AVAESCTR *a);
/**
* Process a buffer using a previously initialized context.
* @param dst destination array, can be equal to src
* @param src source array, can be equal to dst
* @param size the size of src and dst
*/
void av_aes_ctr_crypt(struct AVAESCTR *a, uint8_t *dst, const uint8_t *src, int size);
/**
* Get the current iv
*/
const uint8_t* av_aes_ctr_get_iv(struct AVAESCTR *a);
/**
* Generate a random iv
*/
void av_aes_ctr_set_random_iv(struct AVAESCTR *a);
/**
* Forcefully change the 8-byte iv
*/
void av_aes_ctr_set_iv(struct AVAESCTR *a, const uint8_t* iv);
/**
* Forcefully change the "full" 16-byte iv, including the counter
*/
void av_aes_ctr_set_full_iv(struct AVAESCTR *a, const uint8_t* iv);
/**
* Increment the top 64 bit of the iv (performed after each frame)
*/
void av_aes_ctr_increment_iv(struct AVAESCTR *a);
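/**
 * A minimal sketch of in-place CTR encryption; decryption is the same call
 * with the same key and iv. The key is a caller-provided placeholder and
 * ctr_encrypt is an illustrative name.
 * @code
 * #include "libavutil/aes_ctr.h"
 *
 * static int ctr_encrypt(uint8_t *buf, int size,
 *                        const uint8_t key[AES_CTR_KEY_SIZE])
 * {
 *     struct AVAESCTR *ctr = av_aes_ctr_alloc();
 *     if (!ctr)
 *         return -1;
 *     if (av_aes_ctr_init(ctr, key) < 0) {
 *         av_aes_ctr_free(ctr);
 *         return -1;
 *     }
 *     av_aes_ctr_set_random_iv(ctr);       // or av_aes_ctr_set_iv(ctr, iv)
 *     av_aes_ctr_crypt(ctr, buf, buf, size);
 *     av_aes_ctr_free(ctr);
 *     return 0;
 * }
 * @endcode
 */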
/**
* @}
*/
#endif /* AVUTIL_AES_CTR_H */

View file

@ -1,167 +0,0 @@
/*
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* Macro definitions for various function/variable attributes
*/
#ifndef AVUTIL_ATTRIBUTES_H
#define AVUTIL_ATTRIBUTES_H
#ifdef __GNUC__
# define AV_GCC_VERSION_AT_LEAST(x,y) (__GNUC__ > (x) || __GNUC__ == (x) && __GNUC_MINOR__ >= (y))
# define AV_GCC_VERSION_AT_MOST(x,y) (__GNUC__ < (x) || __GNUC__ == (x) && __GNUC_MINOR__ <= (y))
#else
# define AV_GCC_VERSION_AT_LEAST(x,y) 0
# define AV_GCC_VERSION_AT_MOST(x,y) 0
#endif
#ifndef av_always_inline
#if AV_GCC_VERSION_AT_LEAST(3,1)
# define av_always_inline __attribute__((always_inline)) inline
#elif defined(_MSC_VER)
# define av_always_inline __forceinline
#else
# define av_always_inline inline
#endif
#endif
#ifndef av_extern_inline
#if defined(__ICL) && __ICL >= 1210 || defined(__GNUC_STDC_INLINE__)
# define av_extern_inline extern inline
#else
# define av_extern_inline inline
#endif
#endif
#if AV_GCC_VERSION_AT_LEAST(3,4)
# define av_warn_unused_result __attribute__((warn_unused_result))
#else
# define av_warn_unused_result
#endif
#if AV_GCC_VERSION_AT_LEAST(3,1)
# define av_noinline __attribute__((noinline))
#elif defined(_MSC_VER)
# define av_noinline __declspec(noinline)
#else
# define av_noinline
#endif
#if AV_GCC_VERSION_AT_LEAST(3,1) || defined(__clang__)
# define av_pure __attribute__((pure))
#else
# define av_pure
#endif
#if AV_GCC_VERSION_AT_LEAST(2,6) || defined(__clang__)
# define av_const __attribute__((const))
#else
# define av_const
#endif
#if AV_GCC_VERSION_AT_LEAST(4,3) || defined(__clang__)
# define av_cold __attribute__((cold))
#else
# define av_cold
#endif
#if AV_GCC_VERSION_AT_LEAST(4,1) && !defined(__llvm__)
# define av_flatten __attribute__((flatten))
#else
# define av_flatten
#endif
#if AV_GCC_VERSION_AT_LEAST(3,1)
# define attribute_deprecated __attribute__((deprecated))
#elif defined(_MSC_VER)
# define attribute_deprecated __declspec(deprecated)
#else
# define attribute_deprecated
#endif
/**
* Disable warnings about deprecated features
* This is useful for sections of code kept for backward compatibility and
* scheduled for removal.
*/
#ifndef AV_NOWARN_DEPRECATED
#if AV_GCC_VERSION_AT_LEAST(4,6)
# define AV_NOWARN_DEPRECATED(code) \
_Pragma("GCC diagnostic push") \
_Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"") \
code \
_Pragma("GCC diagnostic pop")
#elif defined(_MSC_VER)
# define AV_NOWARN_DEPRECATED(code) \
__pragma(warning(push)) \
__pragma(warning(disable : 4996)) \
code; \
__pragma(warning(pop))
#else
# define AV_NOWARN_DEPRECATED(code) code
#endif
#endif
#if defined(__GNUC__) || defined(__clang__)
# define av_unused __attribute__((unused))
#else
# define av_unused
#endif
/**
* Mark a variable as used and prevent the compiler from optimizing it
* away. This is useful for variables accessed only from inline
* assembler without the compiler being aware.
*/
#if AV_GCC_VERSION_AT_LEAST(3,1) || defined(__clang__)
# define av_used __attribute__((used))
#else
# define av_used
#endif
#if AV_GCC_VERSION_AT_LEAST(3,3) || defined(__clang__)
# define av_alias __attribute__((may_alias))
#else
# define av_alias
#endif
#if (defined(__GNUC__) || defined(__clang__)) && !defined(__INTEL_COMPILER)
# define av_uninit(x) x=x
#else
# define av_uninit(x) x
#endif
#if defined(__GNUC__) || defined(__clang__)
# define av_builtin_constant_p __builtin_constant_p
# define av_printf_format(fmtpos, attrpos) __attribute__((__format__(__printf__, fmtpos, attrpos)))
#else
# define av_builtin_constant_p(x) 0
# define av_printf_format(fmtpos, attrpos)
#endif
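/**
 * A small sketch of how av_printf_format() is typically applied: declare a
 * printf-like helper with the attribute so GCC/Clang can type-check the
 * format arguments at every call site; my_log is an illustrative name.
 * @code
 * #include <stdarg.h>
 * #include <stdio.h>
 *
 * static void my_log(int level, const char *fmt, ...) av_printf_format(2, 3);
 *
 * static void my_log(int level, const char *fmt, ...)
 * {
 *     va_list ap;
 *     va_start(ap, fmt);
 *     fprintf(stderr, "[%d] ", level);
 *     vfprintf(stderr, fmt, ap);
 *     va_end(ap);
 * }
 * @endcode
 */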
#if AV_GCC_VERSION_AT_LEAST(2,5) || defined(__clang__)
# define av_noreturn __attribute__((noreturn))
#else
# define av_noreturn
#endif
#endif /* AVUTIL_ATTRIBUTES_H */

View file

@ -1,187 +0,0 @@
/*
* Audio FIFO
* Copyright (c) 2012 Justin Ruggles <justin.ruggles@gmail.com>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* Audio FIFO Buffer
*/
#ifndef AVUTIL_AUDIO_FIFO_H
#define AVUTIL_AUDIO_FIFO_H
#include "avutil.h"
#include "fifo.h"
#include "samplefmt.h"
/**
* @addtogroup lavu_audio
* @{
*
* @defgroup lavu_audiofifo Audio FIFO Buffer
* @{
*/
/**
* Context for an Audio FIFO Buffer.
*
* - Operates at the sample level rather than the byte level.
* - Supports multiple channels with either planar or packed sample format.
* - Automatic reallocation when writing to a full buffer.
*/
typedef struct AVAudioFifo AVAudioFifo;
/**
* Free an AVAudioFifo.
*
* @param af AVAudioFifo to free
*/
void av_audio_fifo_free(AVAudioFifo *af);
/**
* Allocate an AVAudioFifo.
*
* @param sample_fmt sample format
* @param channels number of channels
* @param nb_samples initial allocation size, in samples
* @return newly allocated AVAudioFifo, or NULL on error
*/
AVAudioFifo *av_audio_fifo_alloc(enum AVSampleFormat sample_fmt, int channels,
int nb_samples);
/**
* Reallocate an AVAudioFifo.
*
* @param af AVAudioFifo to reallocate
* @param nb_samples new allocation size, in samples
* @return 0 if OK, or negative AVERROR code on failure
*/
av_warn_unused_result
int av_audio_fifo_realloc(AVAudioFifo *af, int nb_samples);
/**
* Write data to an AVAudioFifo.
*
* The AVAudioFifo will be reallocated automatically if the available space
* is less than nb_samples.
*
* @see enum AVSampleFormat
* The documentation for AVSampleFormat describes the data layout.
*
* @param af AVAudioFifo to write to
* @param data audio data plane pointers
* @param nb_samples number of samples to write
* @return number of samples actually written, or negative AVERROR
* code on failure. If successful, the number of samples
* actually written will always be nb_samples.
*/
int av_audio_fifo_write(AVAudioFifo *af, void **data, int nb_samples);
/**
* Peek data from an AVAudioFifo.
*
* @see enum AVSampleFormat
* The documentation for AVSampleFormat describes the data layout.
*
* @param af AVAudioFifo to read from
* @param data audio data plane pointers
* @param nb_samples number of samples to peek
* @return number of samples actually peeked, or negative AVERROR code
* on failure. The number of samples actually peeked will not
* be greater than nb_samples, and will only be less than
* nb_samples if av_audio_fifo_size is less than nb_samples.
*/
int av_audio_fifo_peek(AVAudioFifo *af, void **data, int nb_samples);
/**
* Peek data from an AVAudioFifo.
*
* @see enum AVSampleFormat
* The documentation for AVSampleFormat describes the data layout.
*
* @param af AVAudioFifo to read from
* @param data audio data plane pointers
* @param nb_samples number of samples to peek
* @param offset offset from current read position
* @return number of samples actually peeked, or negative AVERROR code
* on failure. The number of samples actually peeked will not
* be greater than nb_samples, and will only be less than
* nb_samples if av_audio_fifo_size is less than nb_samples.
*/
int av_audio_fifo_peek_at(AVAudioFifo *af, void **data, int nb_samples, int offset);
/**
* Read data from an AVAudioFifo.
*
* @see enum AVSampleFormat
* The documentation for AVSampleFormat describes the data layout.
*
* @param af AVAudioFifo to read from
* @param data audio data plane pointers
* @param nb_samples number of samples to read
* @return number of samples actually read, or negative AVERROR code
* on failure. The number of samples actually read will not
* be greater than nb_samples, and will only be less than
* nb_samples if av_audio_fifo_size is less than nb_samples.
*/
int av_audio_fifo_read(AVAudioFifo *af, void **data, int nb_samples);
/**
* Drain data from an AVAudioFifo.
*
* Removes the data without reading it.
*
* @param af AVAudioFifo to drain
* @param nb_samples number of samples to drain
* @return 0 if OK, or negative AVERROR code on failure
*/
int av_audio_fifo_drain(AVAudioFifo *af, int nb_samples);
/**
* Reset the AVAudioFifo buffer.
*
* This empties all data in the buffer.
*
* @param af AVAudioFifo to reset
*/
void av_audio_fifo_reset(AVAudioFifo *af);
/**
* Get the current number of samples in the AVAudioFifo available for reading.
*
* @param af the AVAudioFifo to query
* @return number of samples available for reading
*/
int av_audio_fifo_size(AVAudioFifo *af);
/**
* Get the current number of samples in the AVAudioFifo available for writing.
*
* @param af the AVAudioFifo to query
* @return number of samples available for writing
*/
int av_audio_fifo_space(AVAudioFifo *af);
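/**
 * A minimal sketch that pushes packed 16-bit stereo samples through an
 * AVAudioFifo; for packed formats there is a single data plane, so the data
 * arrays have one entry. fifo_roundtrip is an illustrative name.
 * @code
 * #include "libavutil/audio_fifo.h"
 *
 * static int fifo_roundtrip(const int16_t *in, int nb_in,
 *                           int16_t *out, int nb_out)
 * {
 *     AVAudioFifo *af = av_audio_fifo_alloc(AV_SAMPLE_FMT_S16, 2, 1024);
 *     void *wdata[1] = { (void *)in };
 *     void *rdata[1] = { out };
 *     int ret;
 *
 *     if (!af)
 *         return AVERROR(ENOMEM);
 *     ret = av_audio_fifo_write(af, wdata, nb_in);     // grows if needed
 *     if (ret >= 0)
 *         ret = av_audio_fifo_read(af, rdata, nb_out); // may return < nb_out
 *     av_audio_fifo_free(af);
 *     return ret;
 * }
 * @endcode
 */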
/**
* @}
* @}
*/
#endif /* AVUTIL_AUDIO_FIFO_H */

View file

@ -1,75 +0,0 @@
/*
* copyright (c) 2010 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* simple assert() macros that are a bit more flexible than ISO C assert().
* @author Michael Niedermayer <michaelni@gmx.at>
*/
#ifndef AVUTIL_AVASSERT_H
#define AVUTIL_AVASSERT_H
#include <stdlib.h>
#include "avutil.h"
#include "log.h"
/**
* assert() equivalent, that is always enabled.
*/
#define av_assert0(cond) do { \
if (!(cond)) { \
av_log(NULL, AV_LOG_PANIC, "Assertion %s failed at %s:%d\n", \
AV_STRINGIFY(cond), __FILE__, __LINE__); \
abort(); \
} \
} while (0)
/**
* assert() equivalent that does not lie in speed-critical code.
* These asserts() thus can be enabled without fearing speed loss.
*/
#if defined(ASSERT_LEVEL) && ASSERT_LEVEL > 0
#define av_assert1(cond) av_assert0(cond)
#else
#define av_assert1(cond) ((void)0)
#endif
/**
* assert() equivalent that does lie in speed-critical code.
*/
#if defined(ASSERT_LEVEL) && ASSERT_LEVEL > 1
#define av_assert2(cond) av_assert0(cond)
#define av_assert2_fpu() av_assert0_fpu()
#else
#define av_assert2(cond) ((void)0)
#define av_assert2_fpu() ((void)0)
#endif
/**
* Assert that floating point operations can be executed.
*
* This will av_assert0() that the cpu is not in MMX state on X86
*/
void av_assert0_fpu(void);
#endif /* AVUTIL_AVASSERT_H */

View file

@ -1,6 +0,0 @@
/* Generated by ffmpeg configure */
#ifndef AVUTIL_AVCONFIG_H
#define AVUTIL_AVCONFIG_H
#define AV_HAVE_BIGENDIAN 0
#define AV_HAVE_FAST_UNALIGNED 1
#endif /* AVUTIL_AVCONFIG_H */

View file

@ -1,413 +0,0 @@
/*
* Copyright (c) 2007 Mans Rullgard
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_AVSTRING_H
#define AVUTIL_AVSTRING_H
#include <stddef.h>
#include <stdint.h>
#include "attributes.h"
/**
* @addtogroup lavu_string
* @{
*/
/**
* Return non-zero if pfx is a prefix of str. If it is, *ptr is set to
* the address of the first character in str after the prefix.
*
* @param str input string
* @param pfx prefix to test
* @param ptr updated if the prefix is matched inside str
* @return non-zero if the prefix matches, zero otherwise
*/
int av_strstart(const char *str, const char *pfx, const char **ptr);
/**
* Return non-zero if pfx is a prefix of str independent of case. If
* it is, *ptr is set to the address of the first character in str
* after the prefix.
*
* @param str input string
* @param pfx prefix to test
* @param ptr updated if the prefix is matched inside str
* @return non-zero if the prefix matches, zero otherwise
*/
int av_stristart(const char *str, const char *pfx, const char **ptr);
/**
* Locate the first case-independent occurrence in the string haystack
* of the string needle. A zero-length string needle is considered to
* match at the start of haystack.
*
* This function is a case-insensitive version of the standard strstr().
*
* @param haystack string to search in
* @param needle string to search for
* @return pointer to the located match within haystack
* or a null pointer if no match
*/
char *av_stristr(const char *haystack, const char *needle);
/**
* Locate the first occurrence of the string needle in the string haystack
* where not more than hay_length characters are searched. A zero-length
* string needle is considered to match at the start of haystack.
*
* This function is a length-limited version of the standard strstr().
*
* @param haystack string to search in
* @param needle string to search for
* @param hay_length length of string to search in
* @return pointer to the located match within haystack
* or a null pointer if no match
*/
char *av_strnstr(const char *haystack, const char *needle, size_t hay_length);
/**
* Copy the string src to dst, but no more than size - 1 bytes, and
* null-terminate dst.
*
* This function is the same as BSD strlcpy().
*
* @param dst destination buffer
* @param src source string
* @param size size of destination buffer
* @return the length of src
*
* @warning since the return value is the length of src, src absolutely
* _must_ be a properly 0-terminated string, otherwise this will read beyond
* the end of the buffer and possibly crash.
*/
size_t av_strlcpy(char *dst, const char *src, size_t size);
/**
* Append the string src to the string dst, but to a total length of
* no more than size - 1 bytes, and null-terminate dst.
*
* This function is similar to BSD strlcat(), but differs when
* size <= strlen(dst).
*
* @param dst destination buffer
* @param src source string
* @param size size of destination buffer
* @return the total length of src and dst
*
* @warning since the return value uses the lengths of src and dst, these
* absolutely _must_ be properly 0-terminated strings, otherwise this
* will read beyond the end of the buffer and possibly crash.
*/
size_t av_strlcat(char *dst, const char *src, size_t size);
/**
* Append output to a string, according to a format. Never write out of
* the destination buffer, and always put a terminating 0 within
* the buffer.
* @param dst destination buffer (string to which the output is
* appended)
* @param size total size of the destination buffer
* @param fmt printf-compatible format string, specifying how the
* following parameters are used
* @return the length of the string that would have been generated
* if enough space had been available
*/
size_t av_strlcatf(char *dst, size_t size, const char *fmt, ...) av_printf_format(3, 4);
/**
* Get the count of continuous non-zero chars starting from the beginning.
*
* @param len maximum number of characters to check in the string, that
* is the maximum value which is returned by the function
*/
static inline size_t av_strnlen(const char *s, size_t len)
{
size_t i;
for (i = 0; i < len && s[i]; i++)
;
return i;
}
/**
* Print arguments following specified format into a large enough auto
* allocated buffer. It is similar to GNU asprintf().
* @param fmt printf-compatible format string, specifying how the
* following parameters are used.
* @return the allocated string
* @note You have to free the string yourself with av_free().
*/
char *av_asprintf(const char *fmt, ...) av_printf_format(1, 2);
/**
* Convert a number to an av_malloced string.
*/
char *av_d2str(double d);
/**
* Unescape the given string until a non-escaped terminating char,
* and return the token corresponding to the unescaped string.
*
* The normal \ and ' escaping is supported. Leading and trailing
* whitespaces are removed, unless they are escaped with '\' or are
* enclosed between ''.
*
* @param buf the buffer to parse, buf will be updated to point to the
* terminating char
* @param term a 0-terminated list of terminating chars
* @return the malloced unescaped string, which must be av_freed by
* the user, NULL in case of allocation failure
*/
char *av_get_token(const char **buf, const char *term);
/**
* Split the string into several tokens which can be accessed by
* successive calls to av_strtok().
*
* A token is defined as a sequence of characters not belonging to the
* set specified in delim.
*
* On the first call to av_strtok(), s should point to the string to
* parse, and the value of saveptr is ignored. In subsequent calls, s
* should be NULL, and saveptr should be unchanged since the previous
* call.
*
* This function is similar to strtok_r() defined in POSIX.1.
*
* @param s the string to parse, may be NULL
* @param delim 0-terminated list of token delimiters, must be non-NULL
* @param saveptr user-provided pointer which points to stored
* information necessary for av_strtok() to continue scanning the same
* string. saveptr is updated to point to the next character after the
* first delimiter found, or to NULL if the string was terminated
* @return the found token, or NULL when no token is found
*/
char *av_strtok(char *s, const char *delim, char **saveptr);
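/**
 * A small sketch of the protocol described above: the buffer is modified in
 * place, s is passed only on the first call, and saveptr carries the state;
 * print_tokens and the input string are illustrative.
 * @code
 * #include <stdio.h>
 *
 * static void print_tokens(void)
 * {
 *     char buf[] = "mp4, mkv  webm";       // placeholder input
 *     char *save = NULL;
 *     for (char *tok = av_strtok(buf, ", ", &save); tok;
 *          tok = av_strtok(NULL, ", ", &save))
 *         printf("token: %s\n", tok);
 * }
 * @endcode
 */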
/**
* Locale-independent conversion of ASCII isdigit.
*/
static inline av_const int av_isdigit(int c)
{
return c >= '0' && c <= '9';
}
/**
* Locale-independent conversion of ASCII isgraph.
*/
static inline av_const int av_isgraph(int c)
{
return c > 32 && c < 127;
}
/**
* Locale-independent conversion of ASCII isspace.
*/
static inline av_const int av_isspace(int c)
{
return c == ' ' || c == '\f' || c == '\n' || c == '\r' || c == '\t' ||
c == '\v';
}
/**
* Locale-independent conversion of ASCII characters to uppercase.
*/
static inline av_const int av_toupper(int c)
{
if (c >= 'a' && c <= 'z')
c ^= 0x20;
return c;
}
/**
* Locale-independent conversion of ASCII characters to lowercase.
*/
static inline av_const int av_tolower(int c)
{
if (c >= 'A' && c <= 'Z')
c ^= 0x20;
return c;
}
/**
* Locale-independent conversion of ASCII isxdigit.
*/
static inline av_const int av_isxdigit(int c)
{
c = av_tolower(c);
return av_isdigit(c) || (c >= 'a' && c <= 'f');
}
/**
* Locale-independent case-insensitive compare.
* @note This means only ASCII-range characters are case-insensitive
*/
int av_strcasecmp(const char *a, const char *b);
/**
* Locale-independent case-insensitive compare.
* @note This means only ASCII-range characters are case-insensitive
*/
int av_strncasecmp(const char *a, const char *b, size_t n);
/**
* Locale-independent string replacement.
* @note This means only ASCII-range characters are replaced
*/
char *av_strireplace(const char *str, const char *from, const char *to);
/**
* Thread safe basename.
* @param path the path, on DOS both \ and / are considered separators.
* @return pointer to the basename substring.
*/
const char *av_basename(const char *path);
/**
* Thread safe dirname.
* @param path the path, on DOS both \ and / are considered separators.
* @return the path with the separator replaced by the string terminator or ".".
* @note the function may change the input string.
*/
const char *av_dirname(char *path);
/**
* Match instances of a name in a comma-separated list of names.
* List entries are checked from the start to the end of the names list,
* the first match ends further processing. If an entry prefixed with '-'
* matches, then 0 is returned. The "ALL" list entry is considered to
* match all names.
*
* @param name Name to look for.
* @param names List of names.
* @return 1 on match, 0 otherwise.
*/
int av_match_name(const char *name, const char *names);
/**
* Append path component to the existing path.
* Path separator '/' is placed between them when needed.
* The resulting string has to be freed with av_free().
* @param path base path
* @param component component to be appended
* @return new path or NULL on error.
*/
char *av_append_path_component(const char *path, const char *component);
enum AVEscapeMode {
AV_ESCAPE_MODE_AUTO, ///< Use auto-selected escaping mode.
AV_ESCAPE_MODE_BACKSLASH, ///< Use backslash escaping.
AV_ESCAPE_MODE_QUOTE, ///< Use single-quote escaping.
};
/**
* Consider spaces special and escape them even in the middle of the
* string.
*
* This is equivalent to adding the whitespace characters to the special
* characters lists, except it is guaranteed to use the exact same list
* of whitespace characters as the rest of libavutil.
*/
#define AV_ESCAPE_FLAG_WHITESPACE (1 << 0)
/**
* Escape only specified special characters.
* Without this flag, escape also any characters that may be considered
* special by av_get_token(), such as the single quote.
*/
#define AV_ESCAPE_FLAG_STRICT (1 << 1)
/**
* Escape string in src, and put the escaped string in an allocated
* string in *dst, which must be freed with av_free().
*
* @param dst pointer where an allocated string is put
* @param src string to escape, must be non-NULL
* @param special_chars string containing the special characters which
* need to be escaped, can be NULL
* @param mode escape mode to employ, see AV_ESCAPE_MODE_* macros.
* Any unknown value for mode will be considered equivalent to
* AV_ESCAPE_MODE_BACKSLASH, but this behaviour can change without
* notice.
* @param flags flags which control how to escape, see AV_ESCAPE_FLAG_ macros
* @return the length of the allocated string, or a negative error code in case of error
* @see av_bprint_escape()
*/
av_warn_unused_result
int av_escape(char **dst, const char *src, const char *special_chars,
enum AVEscapeMode mode, int flags);
#define AV_UTF8_FLAG_ACCEPT_INVALID_BIG_CODES 1 ///< accept codepoints over 0x10FFFF
#define AV_UTF8_FLAG_ACCEPT_NON_CHARACTERS 2 ///< accept non-characters - 0xFFFE and 0xFFFF
#define AV_UTF8_FLAG_ACCEPT_SURROGATES 4 ///< accept UTF-16 surrogates codes
#define AV_UTF8_FLAG_EXCLUDE_XML_INVALID_CONTROL_CODES 8 ///< exclude control codes not accepted by XML
#define AV_UTF8_FLAG_ACCEPT_ALL \
AV_UTF8_FLAG_ACCEPT_INVALID_BIG_CODES|AV_UTF8_FLAG_ACCEPT_NON_CHARACTERS|AV_UTF8_FLAG_ACCEPT_SURROGATES
/**
* Read and decode a single UTF-8 code point (character) from the
* buffer in *buf, and update *buf to point to the next byte to
* decode.
*
* In case of an invalid byte sequence, the pointer will be updated to
* the next byte after the invalid sequence and the function will
* return an error code.
*
* Depending on the specified flags, the function will also fail in
* case the decoded code point does not belong to a valid range.
*
* @note For speed-relevant code a carefully implemented use of
* GET_UTF8() may be preferred.
*
* @param codep pointer used to return the parsed code in case of success.
* The value in *codep is set even in case the range check fails.
* @param bufp pointer to the address the first byte of the sequence
* to decode, updated by the function to point to the
* byte next after the decoded sequence
* @param buf_end pointer to the end of the buffer, points to the next
* byte past the last in the buffer. This is used to
* avoid buffer overreads (in case of an unfinished
* UTF-8 sequence towards the end of the buffer).
* @param flags a collection of AV_UTF8_FLAG_* flags
* @return >= 0 in case a sequence was successfully read, a negative
* value in case of invalid sequence
*/
av_warn_unused_result
int av_utf8_decode(int32_t *codep, const uint8_t **bufp, const uint8_t *buf_end,
unsigned int flags);
/**
* Check if a name is in a list.
* @returns 0 if not found, or the 1-based index where it has been found in the
* list.
*/
int av_match_list(const char *name, const char *list, char separator);
/**
* Locale-independent sscanf implementation.
* See the libc sscanf manual for more information.
*/
int av_sscanf(const char *string, const char *format, ...);
/**
* @}
*/
#endif /* AVUTIL_AVSTRING_H */

View file

@ -1,365 +0,0 @@
/*
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_AVUTIL_H
#define AVUTIL_AVUTIL_H
/**
* @file
* @ingroup lavu
* Convenience header that includes @ref lavu "libavutil"'s core.
*/
/**
* @mainpage
*
* @section ffmpeg_intro Introduction
*
* This document describes the usage of the different libraries
* provided by FFmpeg.
*
* @li @ref libavc "libavcodec" encoding/decoding library
* @li @ref lavfi "libavfilter" graph-based frame editing library
* @li @ref libavf "libavformat" I/O and muxing/demuxing library
* @li @ref lavd "libavdevice" special devices muxing/demuxing library
* @li @ref lavu "libavutil" common utility library
* @li @ref lswr "libswresample" audio resampling, format conversion and mixing
* @li @ref lpp "libpostproc" post processing library
* @li @ref libsws "libswscale" color conversion and scaling library
*
* @section ffmpeg_versioning Versioning and compatibility
*
* Each of the FFmpeg libraries contains a version.h header, which defines a
* major, minor and micro version number with the
* <em>LIBRARYNAME_VERSION_{MAJOR,MINOR,MICRO}</em> macros. The major version
* number is incremented with backward incompatible changes - e.g. removing
* parts of the public API, reordering public struct members, etc. The minor
* version number is incremented for backward compatible API changes or major
* new features - e.g. adding a new public function or a new decoder. The micro
* version number is incremented for smaller changes that a calling program
* might still want to check for - e.g. changing behavior in a previously
* unspecified situation.
*
* FFmpeg guarantees backward API and ABI compatibility for each library as long
* as its major version number is unchanged. This means that no public symbols
* will be removed or renamed. Types and names of the public struct members and
* values of public macros and enums will remain the same (unless they were
* explicitly declared as not part of the public API). Documented behavior will
* not change.
*
* In other words, any correct program that works with a given FFmpeg snapshot
* should work just as well without any changes with any later snapshot with the
* same major versions. This applies to both rebuilding the program against new
* FFmpeg versions or to replacing the dynamic FFmpeg libraries that a program
* links against.
*
* However, new public symbols may be added and new members may be appended to
* public structs whose size is not part of public ABI (most public structs in
* FFmpeg). New macros and enum values may be added. Behavior in undocumented
* situations may change slightly (and be documented). All those are accompanied
* by an entry in doc/APIchanges and incrementing either the minor or micro
* version number.
*/
/**
* @defgroup lavu libavutil
* Common code shared across all FFmpeg libraries.
*
* @note
* libavutil is designed to be modular. In most cases, in order to use the
* functions provided by one component of libavutil you must explicitly include
* the specific header containing that feature. If you are only using
* media-related components, you could simply include libavutil/avutil.h, which
* brings in most of the "core" components.
*
* @{
*
* @defgroup lavu_crypto Crypto and Hashing
*
* @{
* @}
*
* @defgroup lavu_math Mathematics
* @{
*
* @}
*
* @defgroup lavu_string String Manipulation
*
* @{
*
* @}
*
* @defgroup lavu_mem Memory Management
*
* @{
*
* @}
*
* @defgroup lavu_data Data Structures
* @{
*
* @}
*
* @defgroup lavu_video Video related
*
* @{
*
* @}
*
* @defgroup lavu_audio Audio related
*
* @{
*
* @}
*
* @defgroup lavu_error Error Codes
*
* @{
*
* @}
*
* @defgroup lavu_log Logging Facility
*
* @{
*
* @}
*
* @defgroup lavu_misc Other
*
* @{
*
* @defgroup preproc_misc Preprocessor String Macros
*
* @{
*
* @}
*
* @defgroup version_utils Library Version Macros
*
* @{
*
* @}
*/
/**
* @addtogroup lavu_ver
* @{
*/
/**
* Return the LIBAVUTIL_VERSION_INT constant.
*/
unsigned avutil_version(void);
/**
* Return an informative version string. This usually is the actual release
* version number or a git commit description. This string has no fixed format
* and can change any time. It should never be parsed by code.
*/
const char *av_version_info(void);
/**
* Return the libavutil build-time configuration.
*/
const char *avutil_configuration(void);
/**
* Return the libavutil license.
*/
const char *avutil_license(void);
/**
* @}
*/
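/*
 * Usage sketch: a minimal runtime check that the loaded libavutil has the
 * same major version the program was built with, per the compatibility
 * rules described above. The helper name is illustrative; callers would
 * typically pass LIBAVUTIL_VERSION_INT from version.h.
 */
static inline int avutil_major_matches_example(unsigned built_with_version)
{
    /* The major number is stored in the top 16 bits of the packed version. */
    return (avutil_version() >> 16) == (built_with_version >> 16);
}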
/**
* @addtogroup lavu_media Media Type
* @brief Media Type
*/
enum AVMediaType {
AVMEDIA_TYPE_UNKNOWN = -1, ///< Usually treated as AVMEDIA_TYPE_DATA
AVMEDIA_TYPE_VIDEO,
AVMEDIA_TYPE_AUDIO,
AVMEDIA_TYPE_DATA, ///< Opaque data information usually continuous
AVMEDIA_TYPE_SUBTITLE,
AVMEDIA_TYPE_ATTACHMENT, ///< Opaque data information usually sparse
AVMEDIA_TYPE_NB
};
/**
* Return a string describing the media_type enum, NULL if media_type
* is unknown.
*/
const char *av_get_media_type_string(enum AVMediaType media_type);
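/*
 * Usage sketch: mapping a media type to a printable name, falling back to
 * "unknown" since av_get_media_type_string() may return NULL. The helper
 * name is illustrative.
 */
static inline const char *media_type_name_example(enum AVMediaType type)
{
    const char *name = av_get_media_type_string(type);
    return name ? name : "unknown";
}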
/**
* @defgroup lavu_const Constants
* @{
*
* @defgroup lavu_enc Encoding specific
*
 * @note these definitions should be moved to avcodec
* @{
*/
#define FF_LAMBDA_SHIFT 7
#define FF_LAMBDA_SCALE (1<<FF_LAMBDA_SHIFT)
#define FF_QP2LAMBDA 118 ///< factor to convert from H.263 QP to lambda
#define FF_LAMBDA_MAX (256*128-1)
#define FF_QUALITY_SCALE FF_LAMBDA_SCALE //FIXME maybe remove
/**
* @}
* @defgroup lavu_time Timestamp specific
*
* FFmpeg internal timebase and timestamp definitions
*
* @{
*/
/**
* @brief Undefined timestamp value
*
 * Usually reported by demuxers that work on containers that do not provide
* either pts or dts.
*/
#define AV_NOPTS_VALUE ((int64_t)UINT64_C(0x8000000000000000))
/**
* Internal time base represented as integer
*/
#define AV_TIME_BASE 1000000
/**
* Internal time base represented as fractional value
*/
#define AV_TIME_BASE_Q (AVRational){1, AV_TIME_BASE}
/**
* @}
* @}
* @defgroup lavu_picture Image related
*
* AVPicture types, pixel formats and basic image planes manipulation.
*
* @{
*/
enum AVPictureType {
AV_PICTURE_TYPE_NONE = 0, ///< Undefined
AV_PICTURE_TYPE_I, ///< Intra
AV_PICTURE_TYPE_P, ///< Predicted
AV_PICTURE_TYPE_B, ///< Bi-dir predicted
AV_PICTURE_TYPE_S, ///< S(GMC)-VOP MPEG-4
AV_PICTURE_TYPE_SI, ///< Switching Intra
AV_PICTURE_TYPE_SP, ///< Switching Predicted
AV_PICTURE_TYPE_BI, ///< BI type
};
/**
* Return a single letter to describe the given picture type
* pict_type.
*
 * @param[in] pict_type the picture type
 * @return a single character representing the picture type,
 *         '?' if pict_type is unknown
*/
char av_get_picture_type_char(enum AVPictureType pict_type);
/**
* @}
*/
#include "common.h"
#include "error.h"
#include "rational.h"
#include "version.h"
#include "macros.h"
#include "mathematics.h"
#include "log.h"
#include "pixfmt.h"
/**
 * Return the default pointer x in case p is NULL.
*/
static inline void *av_x_if_null(const void *p, const void *x)
{
return (void *)(intptr_t)(p ? p : x);
}
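/*
 * Usage sketch: substituting a default value for a possibly-NULL pointer.
 * The helper name and default string are illustrative; the cast mirrors
 * av_x_if_null()'s const-dropping behaviour.
 */
static inline const char *title_or_default_example(const char *title)
{
    return (const char *)av_x_if_null(title, "untitled");
}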
/**
* Compute the length of an integer list.
*
* @param elsize size in bytes of each list element (only 1, 2, 4 or 8)
* @param term list terminator (usually 0 or -1)
* @param list pointer to the list
* @return length of the list, in elements, not counting the terminator
*/
unsigned av_int_list_length_for_size(unsigned elsize,
const void *list, uint64_t term) av_pure;
/**
* Compute the length of an integer list.
*
* @param term list terminator (usually 0 or -1)
* @param list pointer to the list
* @return length of the list, in elements, not counting the terminator
*/
#define av_int_list_length(list, term) \
av_int_list_length_for_size(sizeof(*(list)), list, term)
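/*
 * Usage sketch: counting the entries of a terminator-ended list with
 * av_int_list_length(). The list below holds ints and ends with -1, so the
 * result is 3. The helper name is illustrative.
 */
static inline unsigned int_list_length_example(void)
{
    static const int list[] = { 10, 20, 30, -1 };
    return av_int_list_length(list, -1);
}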
/**
* Open a file using a UTF-8 filename.
* The API of this function matches POSIX fopen(), errors are returned through
* errno.
*/
FILE *av_fopen_utf8(const char *path, const char *mode);
/**
* Return the fractional representation of the internal time base.
*/
AVRational av_get_time_base_q(void);
#define AV_FOURCC_MAX_STRING_SIZE 32
#define av_fourcc2str(fourcc) av_fourcc_make_string((char[AV_FOURCC_MAX_STRING_SIZE]){0}, fourcc)
/**
* Fill the provided buffer with a string containing a FourCC (four-character
* code) representation.
*
* @param buf a buffer with size in bytes of at least AV_FOURCC_MAX_STRING_SIZE
* @param fourcc the fourcc to represent
* @return the buffer in input
*/
char *av_fourcc_make_string(char *buf, uint32_t fourcc);
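/*
 * Usage sketch: turning a FourCC tag into a printable string in a
 * caller-provided buffer. MKTAG() comes from common.h, included above; the
 * helper name is illustrative.
 */
static inline void fourcc_string_example(char out[AV_FOURCC_MAX_STRING_SIZE])
{
    /* av_fourcc_make_string() writes into the buffer and returns it. */
    av_fourcc_make_string(out, MKTAG('a', 'v', 'c', '1')); /* yields "avc1" */
}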
/**
* @}
* @}
*/
#endif /* AVUTIL_AVUTIL_H */

View file

@ -1,72 +0,0 @@
/*
* Copyright (c) 2006 Ryan Martell. (rdm4@martellventures.com)
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_BASE64_H
#define AVUTIL_BASE64_H
#include <stdint.h>
/**
* @defgroup lavu_base64 Base64
* @ingroup lavu_crypto
* @{
*/
/**
* Decode a base64-encoded string.
*
* @param out buffer for decoded data
* @param in null-terminated input string
* @param out_size size in bytes of the out buffer, must be at
* least 3/4 of the length of in, that is AV_BASE64_DECODE_SIZE(strlen(in))
* @return number of bytes written, or a negative value in case of
* invalid input
*/
int av_base64_decode(uint8_t *out, const char *in, int out_size);
/**
* Calculate the output size in bytes needed to decode a base64 string
* with length x to a data buffer.
*/
#define AV_BASE64_DECODE_SIZE(x) ((x) * 3LL / 4)
/**
* Encode data to base64 and null-terminate.
*
* @param out buffer for encoded data
* @param out_size size in bytes of the out buffer (including the
* null terminator), must be at least AV_BASE64_SIZE(in_size)
* @param in input buffer containing the data to encode
* @param in_size size in bytes of the in buffer
* @return out or NULL in case of error
*/
char *av_base64_encode(char *out, int out_size, const uint8_t *in, int in_size);
/**
* Calculate the output size needed to base64-encode x bytes to a
* null-terminated string.
*/
#define AV_BASE64_SIZE(x) (((x)+2) / 3 * 4 + 1)
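/*
 * Usage sketch: base64-encoding a small binary payload. AV_BASE64_SIZE()
 * accounts for the terminating NUL; av_base64_encode() returns NULL on
 * failure. The helper name is illustrative.
 */
static inline int base64_encode_example(char *dst, int dst_size)
{
    static const uint8_t payload[4] = { 0xde, 0xad, 0xbe, 0xef };
    if (dst_size < (int)AV_BASE64_SIZE(sizeof(payload)))
        return -1;
    return av_base64_encode(dst, dst_size, payload, sizeof(payload)) ? 0 : -1;
}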
/**
* @}
*/
#endif /* AVUTIL_BASE64_H */

View file

@ -1,82 +0,0 @@
/*
* Blowfish algorithm
* Copyright (c) 2012 Samuel Pitoiset
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_BLOWFISH_H
#define AVUTIL_BLOWFISH_H
#include <stdint.h>
/**
* @defgroup lavu_blowfish Blowfish
* @ingroup lavu_crypto
* @{
*/
#define AV_BF_ROUNDS 16
typedef struct AVBlowfish {
uint32_t p[AV_BF_ROUNDS + 2];
uint32_t s[4][256];
} AVBlowfish;
/**
* Allocate an AVBlowfish context.
*/
AVBlowfish *av_blowfish_alloc(void);
/**
* Initialize an AVBlowfish context.
*
* @param ctx an AVBlowfish context
* @param key a key
* @param key_len length of the key
*/
void av_blowfish_init(struct AVBlowfish *ctx, const uint8_t *key, int key_len);
/**
* Encrypt or decrypt a buffer using a previously initialized context.
*
* @param ctx an AVBlowfish context
 * @param xl left four-byte half of the input to be encrypted
 * @param xr right four-byte half of the input to be encrypted
* @param decrypt 0 for encryption, 1 for decryption
*/
void av_blowfish_crypt_ecb(struct AVBlowfish *ctx, uint32_t *xl, uint32_t *xr,
int decrypt);
/**
* Encrypt or decrypt a buffer using a previously initialized context.
*
* @param ctx an AVBlowfish context
* @param dst destination array, can be equal to src
* @param src source array, can be equal to dst
* @param count number of 8 byte blocks
* @param iv initialization vector for CBC mode, if NULL ECB will be used
* @param decrypt 0 for encryption, 1 for decryption
*/
void av_blowfish_crypt(struct AVBlowfish *ctx, uint8_t *dst, const uint8_t *src,
int count, uint8_t *iv, int decrypt);
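/*
 * Usage sketch: encrypting a single 8-byte block, given as two 32-bit
 * halves, in ECB mode. Releasing the context with av_free() assumes mem.h
 * is available; the helper name and key are illustrative.
 */
static inline void blowfish_ecb_example(uint32_t *xl, uint32_t *xr)
{
    static const uint8_t key[] = "illustrative key";
    AVBlowfish *ctx = av_blowfish_alloc();
    if (!ctx)
        return;
    av_blowfish_init(ctx, key, sizeof(key) - 1);
    av_blowfish_crypt_ecb(ctx, xl, xr, 0); /* 0 = encrypt */
    av_free(ctx);
}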
/**
* @}
*/
#endif /* AVUTIL_BLOWFISH_H */

View file

@ -1,219 +0,0 @@
/*
* Copyright (c) 2012 Nicolas George
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_BPRINT_H
#define AVUTIL_BPRINT_H
#include <stdarg.h>
#include "attributes.h"
#include "avstring.h"
/**
 * Define a structure with extra padding to a fixed size.
 * This helps ensure binary compatibility with future versions.
*/
#define FF_PAD_STRUCTURE(name, size, ...) \
struct ff_pad_helper_##name { __VA_ARGS__ }; \
typedef struct name { \
__VA_ARGS__ \
char reserved_padding[size - sizeof(struct ff_pad_helper_##name)]; \
} name;
/**
* Buffer to print data progressively
*
* The string buffer grows as necessary and is always 0-terminated.
* The content of the string is never accessed, and thus is
* encoding-agnostic and can even hold binary data.
*
* Small buffers are kept in the structure itself, and thus require no
* memory allocation at all (unless the contents of the buffer is needed
* after the structure goes out of scope). This is almost as lightweight as
* declaring a local "char buf[512]".
*
* The length of the string can go beyond the allocated size: the buffer is
* then truncated, but the functions still keep account of the actual total
* length.
*
 * In other words, buf->len can be greater than buf->size and records the
 * total length of what would have been written to the buffer if there had
 * been enough memory.
*
* Append operations do not need to be tested for failure: if a memory
 * allocation fails, data stops being appended to the buffer, but the length
* is still updated. This situation can be tested with
* av_bprint_is_complete().
*
* The size_max field determines several possible behaviours:
*
* size_max = -1 (= UINT_MAX) or any large value will let the buffer be
* reallocated as necessary, with an amortized linear cost.
*
* size_max = 0 prevents writing anything to the buffer: only the total
* length is computed. The write operations can then possibly be repeated in
* a buffer with exactly the necessary size
* (using size_init = size_max = len + 1).
*
* size_max = 1 is automatically replaced by the exact size available in the
* structure itself, thus ensuring no dynamic memory allocation. The
* internal buffer is large enough to hold a reasonable paragraph of text,
* such as the current paragraph.
*/
FF_PAD_STRUCTURE(AVBPrint, 1024,
char *str; /**< string so far */
unsigned len; /**< length so far */
unsigned size; /**< allocated memory */
unsigned size_max; /**< maximum allocated memory */
char reserved_internal_buffer[1];
)
/**
* Convenience macros for special values for av_bprint_init() size_max
* parameter.
*/
#define AV_BPRINT_SIZE_UNLIMITED ((unsigned)-1)
#define AV_BPRINT_SIZE_AUTOMATIC 1
#define AV_BPRINT_SIZE_COUNT_ONLY 0
/**
* Init a print buffer.
*
* @param buf buffer to init
* @param size_init initial size (including the final 0)
* @param size_max maximum size;
* 0 means do not write anything, just count the length;
* 1 is replaced by the maximum value for automatic storage;
* any large value means that the internal buffer will be
* reallocated as needed up to that limit; -1 is converted to
* UINT_MAX, the largest limit possible.
* Check also AV_BPRINT_SIZE_* macros.
*/
void av_bprint_init(AVBPrint *buf, unsigned size_init, unsigned size_max);
/**
* Init a print buffer using a pre-existing buffer.
*
* The buffer will not be reallocated.
*
* @param buf buffer structure to init
* @param buffer byte buffer to use for the string data
* @param size size of buffer
*/
void av_bprint_init_for_buffer(AVBPrint *buf, char *buffer, unsigned size);
/**
* Append a formatted string to a print buffer.
*/
void av_bprintf(AVBPrint *buf, const char *fmt, ...) av_printf_format(2, 3);
/**
* Append a formatted string to a print buffer.
*/
void av_vbprintf(AVBPrint *buf, const char *fmt, va_list vl_arg);
/**
* Append char c n times to a print buffer.
*/
void av_bprint_chars(AVBPrint *buf, char c, unsigned n);
/**
* Append data to a print buffer.
*
 * @param buf bprint buffer to use
 * @param data pointer to data
 * @param size size of data
*/
void av_bprint_append_data(AVBPrint *buf, const char *data, unsigned size);
struct tm;
/**
* Append a formatted date and time to a print buffer.
*
 * @param buf bprint buffer to use
 * @param fmt date and time format string, see strftime()
 * @param tm broken-down time structure to translate
*
* @note due to poor design of the standard strftime function, it may
* produce poor results if the format string expands to a very long text and
* the bprint buffer is near the limit stated by the size_max option.
*/
void av_bprint_strftime(AVBPrint *buf, const char *fmt, const struct tm *tm);
/**
* Allocate bytes in the buffer for external use.
*
* @param[in] buf buffer structure
* @param[in] size required size
* @param[out] mem pointer to the memory area
* @param[out] actual_size size of the memory area after allocation;
* can be larger or smaller than size
*/
void av_bprint_get_buffer(AVBPrint *buf, unsigned size,
unsigned char **mem, unsigned *actual_size);
/**
* Reset the string to "" but keep internal allocated data.
*/
void av_bprint_clear(AVBPrint *buf);
/**
* Test if the print buffer is complete (not truncated).
*
* It may have been truncated due to a memory allocation failure
* or the size_max limit (compare size and size_max if necessary).
*/
static inline int av_bprint_is_complete(const AVBPrint *buf)
{
return buf->len < buf->size;
}
/**
* Finalize a print buffer.
*
* The print buffer can no longer be used afterwards,
* but the len and size fields are still valid.
*
 * @param[out] ret_str if not NULL, used to return a permanent copy of the
* buffer contents, or NULL if memory allocation fails;
* if NULL, the buffer is discarded and freed
* @return 0 for success or error code (probably AVERROR(ENOMEM))
*/
int av_bprint_finalize(AVBPrint *buf, char **ret_str);
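/*
 * Usage sketch: building a string incrementally. Appends are not checked
 * individually; av_bprint_is_complete() reports truncation once at the end,
 * as described above. The helper name is illustrative; on success the
 * string returned in *out must later be freed by the caller.
 */
static inline int bprint_example(char **out)
{
    AVBPrint bp;
    av_bprint_init(&bp, 0, AV_BPRINT_SIZE_UNLIMITED);
    av_bprintf(&bp, "%dx%d, ", 1920, 1080);
    av_bprint_chars(&bp, '*', 3);
    if (!av_bprint_is_complete(&bp)) {
        av_bprint_finalize(&bp, NULL); /* discard truncated contents */
        return -1;
    }
    return av_bprint_finalize(&bp, out);
}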
/**
* Escape the content in src and append it to dstbuf.
*
* @param dstbuf already inited destination bprint buffer
* @param src string containing the text to escape
* @param special_chars string containing the special characters which
* need to be escaped, can be NULL
* @param mode escape mode to employ, see AV_ESCAPE_MODE_* macros.
* Any unknown value for mode will be considered equivalent to
* AV_ESCAPE_MODE_BACKSLASH, but this behaviour can change without
* notice.
* @param flags flags which control how to escape, see AV_ESCAPE_FLAG_* macros
*/
void av_bprint_escape(AVBPrint *dstbuf, const char *src, const char *special_chars,
enum AVEscapeMode mode, int flags);
#endif /* AVUTIL_BPRINT_H */

View file

@ -1,109 +0,0 @@
/*
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* byte swapping routines
*/
#ifndef AVUTIL_BSWAP_H
#define AVUTIL_BSWAP_H
#include <stdint.h>
#include "libavutil/avconfig.h"
#include "attributes.h"
#ifdef HAVE_AV_CONFIG_H
#include "config.h"
#if ARCH_AARCH64
# include "aarch64/bswap.h"
#elif ARCH_ARM
# include "arm/bswap.h"
#elif ARCH_AVR32
# include "avr32/bswap.h"
#elif ARCH_SH4
# include "sh4/bswap.h"
#elif ARCH_X86
# include "x86/bswap.h"
#endif
#endif /* HAVE_AV_CONFIG_H */
#define AV_BSWAP16C(x) (((x) << 8 & 0xff00) | ((x) >> 8 & 0x00ff))
#define AV_BSWAP32C(x) (AV_BSWAP16C(x) << 16 | AV_BSWAP16C((x) >> 16))
#define AV_BSWAP64C(x) (AV_BSWAP32C(x) << 32 | AV_BSWAP32C((x) >> 32))
#define AV_BSWAPC(s, x) AV_BSWAP##s##C(x)
#ifndef av_bswap16
static av_always_inline av_const uint16_t av_bswap16(uint16_t x)
{
x= (x>>8) | (x<<8);
return x;
}
#endif
#ifndef av_bswap32
static av_always_inline av_const uint32_t av_bswap32(uint32_t x)
{
return AV_BSWAP32C(x);
}
#endif
#ifndef av_bswap64
static inline uint64_t av_const av_bswap64(uint64_t x)
{
return (uint64_t)av_bswap32(x) << 32 | av_bswap32(x >> 32);
}
#endif
// be2ne ... big-endian to native-endian
// le2ne ... little-endian to native-endian
#if AV_HAVE_BIGENDIAN
#define av_be2ne16(x) (x)
#define av_be2ne32(x) (x)
#define av_be2ne64(x) (x)
#define av_le2ne16(x) av_bswap16(x)
#define av_le2ne32(x) av_bswap32(x)
#define av_le2ne64(x) av_bswap64(x)
#define AV_BE2NEC(s, x) (x)
#define AV_LE2NEC(s, x) AV_BSWAPC(s, x)
#else
#define av_be2ne16(x) av_bswap16(x)
#define av_be2ne32(x) av_bswap32(x)
#define av_be2ne64(x) av_bswap64(x)
#define av_le2ne16(x) (x)
#define av_le2ne32(x) (x)
#define av_le2ne64(x) (x)
#define AV_BE2NEC(s, x) AV_BSWAPC(s, x)
#define AV_LE2NEC(s, x) (x)
#endif
#define AV_BE2NE16C(x) AV_BE2NEC(16, x)
#define AV_BE2NE32C(x) AV_BE2NEC(32, x)
#define AV_BE2NE64C(x) AV_BE2NEC(64, x)
#define AV_LE2NE16C(x) AV_LE2NEC(16, x)
#define AV_LE2NE32C(x) AV_LE2NEC(32, x)
#define AV_LE2NE64C(x) AV_LE2NEC(64, x)
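/*
 * Usage sketch: converting a 32-bit little-endian on-disk field to native
 * byte order. av_le2ne32() swaps on big-endian hosts and is a no-op on
 * little-endian ones. The helper name is illustrative.
 */
static av_always_inline av_const uint32_t read_le32_example(uint32_t raw_le)
{
    return av_le2ne32(raw_le);
}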
#endif /* AVUTIL_BSWAP_H */

View file

@ -1,291 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* @ingroup lavu_buffer
* refcounted data buffer API
*/
#ifndef AVUTIL_BUFFER_H
#define AVUTIL_BUFFER_H
#include <stdint.h>
/**
* @defgroup lavu_buffer AVBuffer
* @ingroup lavu_data
*
* @{
* AVBuffer is an API for reference-counted data buffers.
*
* There are two core objects in this API -- AVBuffer and AVBufferRef. AVBuffer
* represents the data buffer itself; it is opaque and not meant to be accessed
* by the caller directly, but only through AVBufferRef. However, the caller may
* e.g. compare two AVBuffer pointers to check whether two different references
* are describing the same data buffer. AVBufferRef represents a single
* reference to an AVBuffer and it is the object that may be manipulated by the
* caller directly.
*
* There are two functions provided for creating a new AVBuffer with a single
* reference -- av_buffer_alloc() to just allocate a new buffer, and
* av_buffer_create() to wrap an existing array in an AVBuffer. From an existing
* reference, additional references may be created with av_buffer_ref().
* Use av_buffer_unref() to free a reference (this will automatically free the
* data once all the references are freed).
*
* The convention throughout this API and the rest of FFmpeg is such that the
* buffer is considered writable if there exists only one reference to it (and
* it has not been marked as read-only). The av_buffer_is_writable() function is
* provided to check whether this is true and av_buffer_make_writable() will
* automatically create a new writable buffer when necessary.
* Of course nothing prevents the calling code from violating this convention,
* however that is safe only when all the existing references are under its
* control.
*
* @note Referencing and unreferencing the buffers is thread-safe and thus
* may be done from multiple threads simultaneously without any need for
* additional locking.
*
* @note Two different references to the same buffer can point to different
* parts of the buffer (i.e. their AVBufferRef.data will not be equal).
*/
/**
* A reference counted buffer type. It is opaque and is meant to be used through
* references (AVBufferRef).
*/
typedef struct AVBuffer AVBuffer;
/**
* A reference to a data buffer.
*
* The size of this struct is not a part of the public ABI and it is not meant
* to be allocated directly.
*/
typedef struct AVBufferRef {
AVBuffer *buffer;
/**
* The data buffer. It is considered writable if and only if
* this is the only reference to the buffer, in which case
* av_buffer_is_writable() returns 1.
*/
uint8_t *data;
/**
* Size of data in bytes.
*/
int size;
} AVBufferRef;
/**
* Allocate an AVBuffer of the given size using av_malloc().
*
* @return an AVBufferRef of given size or NULL when out of memory
*/
AVBufferRef *av_buffer_alloc(int size);
/**
* Same as av_buffer_alloc(), except the returned buffer will be initialized
* to zero.
*/
AVBufferRef *av_buffer_allocz(int size);
/**
* Always treat the buffer as read-only, even when it has only one
* reference.
*/
#define AV_BUFFER_FLAG_READONLY (1 << 0)
/**
* Create an AVBuffer from an existing array.
*
* If this function is successful, data is owned by the AVBuffer. The caller may
* only access data through the returned AVBufferRef and references derived from
* it.
* If this function fails, data is left untouched.
* @param data data array
* @param size size of data in bytes
* @param free a callback for freeing this buffer's data
 * @param opaque parameter to be passed to free or retrieved later with
 *               av_buffer_get_opaque()
* @param flags a combination of AV_BUFFER_FLAG_*
*
* @return an AVBufferRef referring to data on success, NULL on failure.
*/
AVBufferRef *av_buffer_create(uint8_t *data, int size,
void (*free)(void *opaque, uint8_t *data),
void *opaque, int flags);
/**
* Default free callback, which calls av_free() on the buffer data.
* This function is meant to be passed to av_buffer_create(), not called
* directly.
*/
void av_buffer_default_free(void *opaque, uint8_t *data);
/**
* Create a new reference to an AVBuffer.
*
* @return a new AVBufferRef referring to the same AVBuffer as buf or NULL on
* failure.
*/
AVBufferRef *av_buffer_ref(AVBufferRef *buf);
/**
* Free a given reference and automatically free the buffer if there are no more
* references to it.
*
* @param buf the reference to be freed. The pointer is set to NULL on return.
*/
void av_buffer_unref(AVBufferRef **buf);
/**
* @return 1 if the caller may write to the data referred to by buf (which is
* true if and only if buf is the only reference to the underlying AVBuffer).
* Return 0 otherwise.
* A positive answer is valid until av_buffer_ref() is called on buf.
*/
int av_buffer_is_writable(const AVBufferRef *buf);
/**
* @return the opaque parameter set by av_buffer_create.
*/
void *av_buffer_get_opaque(const AVBufferRef *buf);
int av_buffer_get_ref_count(const AVBufferRef *buf);
/**
* Create a writable reference from a given buffer reference, avoiding data copy
* if possible.
*
* @param buf buffer reference to make writable. On success, buf is either left
* untouched, or it is unreferenced and a new writable AVBufferRef is
* written in its place. On failure, buf is left untouched.
* @return 0 on success, a negative AVERROR on failure.
*/
int av_buffer_make_writable(AVBufferRef **buf);
/**
* Reallocate a given buffer.
*
* @param buf a buffer reference to reallocate. On success, buf will be
* unreferenced and a new reference with the required size will be
* written in its place. On failure buf will be left untouched. *buf
* may be NULL, then a new buffer is allocated.
* @param size required new buffer size.
* @return 0 on success, a negative AVERROR on failure.
*
* @note the buffer is actually reallocated with av_realloc() only if it was
* initially allocated through av_buffer_realloc(NULL) and there is only one
* reference to it (i.e. the one passed to this function). In all other cases
* a new buffer is allocated and the data is copied.
*/
int av_buffer_realloc(AVBufferRef **buf, int size);
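/*
 * Usage sketch: the basic reference-counting pattern. Holding a second
 * reference makes the buffer non-writable until av_buffer_make_writable()
 * hands the caller a private copy. The helper name is illustrative.
 */
static inline int buffer_refcount_example(void)
{
    AVBufferRef *a = av_buffer_allocz(4096);
    AVBufferRef *b;
    if (!a)
        return -1;
    b = av_buffer_ref(a);                 /* two references now exist   */
    if (b && av_buffer_make_writable(&a) >= 0)
        a->data[0] = 42;                  /* safe: a is a private copy  */
    av_buffer_unref(&b);
    av_buffer_unref(&a);                  /* last unref frees the data  */
    return 0;
}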
/**
* @}
*/
/**
* @defgroup lavu_bufferpool AVBufferPool
* @ingroup lavu_data
*
* @{
* AVBufferPool is an API for a lock-free thread-safe pool of AVBuffers.
*
* Frequently allocating and freeing large buffers may be slow. AVBufferPool is
* meant to solve this in cases when the caller needs a set of buffers of the
* same size (the most obvious use case being buffers for raw video or audio
* frames).
*
* At the beginning, the user must call av_buffer_pool_init() to create the
* buffer pool. Then whenever a buffer is needed, call av_buffer_pool_get() to
* get a reference to a new buffer, similar to av_buffer_alloc(). This new
* reference works in all aspects the same way as the one created by
* av_buffer_alloc(). However, when the last reference to this buffer is
* unreferenced, it is returned to the pool instead of being freed and will be
* reused for subsequent av_buffer_pool_get() calls.
*
* When the caller is done with the pool and no longer needs to allocate any new
* buffers, av_buffer_pool_uninit() must be called to mark the pool as freeable.
* Once all the buffers are released, it will automatically be freed.
*
* Allocating and releasing buffers with this API is thread-safe as long as
* either the default alloc callback is used, or the user-supplied one is
* thread-safe.
*/
/**
* The buffer pool. This structure is opaque and not meant to be accessed
* directly. It is allocated with av_buffer_pool_init() and freed with
* av_buffer_pool_uninit().
*/
typedef struct AVBufferPool AVBufferPool;
/**
* Allocate and initialize a buffer pool.
*
* @param size size of each buffer in this pool
* @param alloc a function that will be used to allocate new buffers when the
* pool is empty. May be NULL, then the default allocator will be used
* (av_buffer_alloc()).
* @return newly created buffer pool on success, NULL on error.
*/
AVBufferPool *av_buffer_pool_init(int size, AVBufferRef* (*alloc)(int size));
/**
* Allocate and initialize a buffer pool with a more complex allocator.
*
* @param size size of each buffer in this pool
* @param opaque arbitrary user data used by the allocator
* @param alloc a function that will be used to allocate new buffers when the
* pool is empty.
* @param pool_free a function that will be called immediately before the pool
* is freed. I.e. after av_buffer_pool_uninit() is called
* by the caller and all the frames are returned to the pool
* and freed. It is intended to uninitialize the user opaque
* data.
* @return newly created buffer pool on success, NULL on error.
*/
AVBufferPool *av_buffer_pool_init2(int size, void *opaque,
AVBufferRef* (*alloc)(void *opaque, int size),
void (*pool_free)(void *opaque));
/**
* Mark the pool as being available for freeing. It will actually be freed only
* once all the allocated buffers associated with the pool are released. Thus it
* is safe to call this function while some of the allocated buffers are still
* in use.
*
* @param pool pointer to the pool to be freed. It will be set to NULL.
*/
void av_buffer_pool_uninit(AVBufferPool **pool);
/**
* Allocate a new AVBuffer, reusing an old buffer from the pool when available.
* This function may be called simultaneously from multiple threads.
*
* @return a reference to the new buffer on success, NULL on error.
*/
AVBufferRef *av_buffer_pool_get(AVBufferPool *pool);
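/*
 * Usage sketch: a pool of fixed-size buffers for repeatedly produced
 * frames. Buffers obtained from the pool are ordinary AVBufferRefs and go
 * back to the pool through av_buffer_unref(). The helper name and sizes
 * are illustrative.
 */
static inline void buffer_pool_example(void)
{
    AVBufferPool *pool = av_buffer_pool_init(1920 * 1080, NULL);
    int i;
    if (!pool)
        return;
    for (i = 0; i < 4; i++) {
        AVBufferRef *frame = av_buffer_pool_get(pool);
        if (!frame)
            break;
        /* ... fill frame->data ... */
        av_buffer_unref(&frame);  /* returns the buffer to the pool */
    }
    av_buffer_pool_uninit(&pool); /* actually freed once all buffers are back */
}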
/**
* @}
*/
#endif /* AVUTIL_BUFFER_H */

View file

@ -1,70 +0,0 @@
/*
* An implementation of the CAMELLIA algorithm as mentioned in RFC3713
* Copyright (c) 2014 Supraja Meedinti
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_CAMELLIA_H
#define AVUTIL_CAMELLIA_H
#include <stdint.h>
/**
* @file
* @brief Public header for libavutil CAMELLIA algorithm
* @defgroup lavu_camellia CAMELLIA
* @ingroup lavu_crypto
* @{
*/
extern const int av_camellia_size;
struct AVCAMELLIA;
/**
* Allocate an AVCAMELLIA context
* To free the struct: av_free(ptr)
*/
struct AVCAMELLIA *av_camellia_alloc(void);
/**
* Initialize an AVCAMELLIA context.
*
* @param ctx an AVCAMELLIA context
 * @param key a key of 16, 24 or 32 bytes used for encryption/decryption
 * @param key_bits number of key bits: possible values are 128, 192, 256
*/
int av_camellia_init(struct AVCAMELLIA *ctx, const uint8_t *key, int key_bits);
/**
* Encrypt or decrypt a buffer using a previously initialized context
*
* @param ctx an AVCAMELLIA context
* @param dst destination array, can be equal to src
* @param src source array, can be equal to dst
* @param count number of 16 byte blocks
 * @param iv initialization vector for CBC mode, NULL for ECB mode
* @param decrypt 0 for encryption, 1 for decryption
*/
void av_camellia_crypt(struct AVCAMELLIA *ctx, uint8_t *dst, const uint8_t *src, int count, uint8_t* iv, int decrypt);
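/*
 * Usage sketch: encrypting one 16-byte block in ECB mode with a 128-bit
 * key. The context is released with av_free(), as the allocation note
 * above suggests. The helper name is illustrative.
 */
static inline int camellia_ecb_example(uint8_t dst[16], const uint8_t src[16],
                                       const uint8_t key[16])
{
    struct AVCAMELLIA *ctx = av_camellia_alloc();
    if (!ctx)
        return -1;
    if (av_camellia_init(ctx, key, 128) < 0) {
        av_free(ctx);
        return -1;
    }
    av_camellia_crypt(ctx, dst, src, 1, NULL, 0); /* 1 block, ECB, encrypt */
    av_free(ctx);
    return 0;
}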
/**
* @}
*/
#endif /* AVUTIL_CAMELLIA_H */

View file

@ -1,80 +0,0 @@
/*
* An implementation of the CAST128 algorithm as mentioned in RFC2144
* Copyright (c) 2014 Supraja Meedinti
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_CAST5_H
#define AVUTIL_CAST5_H
#include <stdint.h>
/**
* @file
* @brief Public header for libavutil CAST5 algorithm
* @defgroup lavu_cast5 CAST5
* @ingroup lavu_crypto
* @{
*/
extern const int av_cast5_size;
struct AVCAST5;
/**
* Allocate an AVCAST5 context
* To free the struct: av_free(ptr)
*/
struct AVCAST5 *av_cast5_alloc(void);
/**
* Initialize an AVCAST5 context.
*
* @param ctx an AVCAST5 context
 * @param key a key of 5, 6, ..., 16 bytes used for encryption/decryption
 * @param key_bits number of key bits: possible values are 40, 48, ..., 128
* @return 0 on success, less than 0 on failure
*/
int av_cast5_init(struct AVCAST5 *ctx, const uint8_t *key, int key_bits);
/**
* Encrypt or decrypt a buffer using a previously initialized context, ECB mode only
*
* @param ctx an AVCAST5 context
* @param dst destination array, can be equal to src
* @param src source array, can be equal to dst
* @param count number of 8 byte blocks
* @param decrypt 0 for encryption, 1 for decryption
*/
void av_cast5_crypt(struct AVCAST5 *ctx, uint8_t *dst, const uint8_t *src, int count, int decrypt);
/**
* Encrypt or decrypt a buffer using a previously initialized context
*
* @param ctx an AVCAST5 context
* @param dst destination array, can be equal to src
* @param src source array, can be equal to dst
* @param count number of 8 byte blocks
* @param iv initialization vector for CBC mode, NULL for ECB mode
* @param decrypt 0 for encryption, 1 for decryption
*/
void av_cast5_crypt2(struct AVCAST5 *ctx, uint8_t *dst, const uint8_t *src, int count, uint8_t *iv, int decrypt);
/**
* @}
*/
#endif /* AVUTIL_CAST5_H */

View file

@ -1,232 +0,0 @@
/*
* Copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
* Copyright (c) 2008 Peter Ross
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_CHANNEL_LAYOUT_H
#define AVUTIL_CHANNEL_LAYOUT_H
#include <stdint.h>
/**
* @file
* audio channel layout utility functions
*/
/**
* @addtogroup lavu_audio
* @{
*/
/**
* @defgroup channel_masks Audio channel masks
*
 * A channel layout is a 64-bit integer with a bit set for every channel.
 * The number of bits set must be equal to the number of channels.
 * The value 0 means that the channel layout is not known.
 * @note this data structure is not powerful enough to handle channel
 * combinations that have the same channel multiple times, such as
 * dual-mono.
*
* @{
*/
#define AV_CH_FRONT_LEFT 0x00000001
#define AV_CH_FRONT_RIGHT 0x00000002
#define AV_CH_FRONT_CENTER 0x00000004
#define AV_CH_LOW_FREQUENCY 0x00000008
#define AV_CH_BACK_LEFT 0x00000010
#define AV_CH_BACK_RIGHT 0x00000020
#define AV_CH_FRONT_LEFT_OF_CENTER 0x00000040
#define AV_CH_FRONT_RIGHT_OF_CENTER 0x00000080
#define AV_CH_BACK_CENTER 0x00000100
#define AV_CH_SIDE_LEFT 0x00000200
#define AV_CH_SIDE_RIGHT 0x00000400
#define AV_CH_TOP_CENTER 0x00000800
#define AV_CH_TOP_FRONT_LEFT 0x00001000
#define AV_CH_TOP_FRONT_CENTER 0x00002000
#define AV_CH_TOP_FRONT_RIGHT 0x00004000
#define AV_CH_TOP_BACK_LEFT 0x00008000
#define AV_CH_TOP_BACK_CENTER 0x00010000
#define AV_CH_TOP_BACK_RIGHT 0x00020000
#define AV_CH_STEREO_LEFT 0x20000000 ///< Stereo downmix.
#define AV_CH_STEREO_RIGHT 0x40000000 ///< See AV_CH_STEREO_LEFT.
#define AV_CH_WIDE_LEFT 0x0000000080000000ULL
#define AV_CH_WIDE_RIGHT 0x0000000100000000ULL
#define AV_CH_SURROUND_DIRECT_LEFT 0x0000000200000000ULL
#define AV_CH_SURROUND_DIRECT_RIGHT 0x0000000400000000ULL
#define AV_CH_LOW_FREQUENCY_2 0x0000000800000000ULL
/** Channel mask value used for AVCodecContext.request_channel_layout
to indicate that the user requests the channel order of the decoder output
to be the native codec channel order. */
#define AV_CH_LAYOUT_NATIVE 0x8000000000000000ULL
/**
* @}
* @defgroup channel_mask_c Audio channel layouts
* @{
* */
#define AV_CH_LAYOUT_MONO (AV_CH_FRONT_CENTER)
#define AV_CH_LAYOUT_STEREO (AV_CH_FRONT_LEFT|AV_CH_FRONT_RIGHT)
#define AV_CH_LAYOUT_2POINT1 (AV_CH_LAYOUT_STEREO|AV_CH_LOW_FREQUENCY)
#define AV_CH_LAYOUT_2_1 (AV_CH_LAYOUT_STEREO|AV_CH_BACK_CENTER)
#define AV_CH_LAYOUT_SURROUND (AV_CH_LAYOUT_STEREO|AV_CH_FRONT_CENTER)
#define AV_CH_LAYOUT_3POINT1 (AV_CH_LAYOUT_SURROUND|AV_CH_LOW_FREQUENCY)
#define AV_CH_LAYOUT_4POINT0 (AV_CH_LAYOUT_SURROUND|AV_CH_BACK_CENTER)
#define AV_CH_LAYOUT_4POINT1 (AV_CH_LAYOUT_4POINT0|AV_CH_LOW_FREQUENCY)
#define AV_CH_LAYOUT_2_2 (AV_CH_LAYOUT_STEREO|AV_CH_SIDE_LEFT|AV_CH_SIDE_RIGHT)
#define AV_CH_LAYOUT_QUAD (AV_CH_LAYOUT_STEREO|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT)
#define AV_CH_LAYOUT_5POINT0 (AV_CH_LAYOUT_SURROUND|AV_CH_SIDE_LEFT|AV_CH_SIDE_RIGHT)
#define AV_CH_LAYOUT_5POINT1 (AV_CH_LAYOUT_5POINT0|AV_CH_LOW_FREQUENCY)
#define AV_CH_LAYOUT_5POINT0_BACK (AV_CH_LAYOUT_SURROUND|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT)
#define AV_CH_LAYOUT_5POINT1_BACK (AV_CH_LAYOUT_5POINT0_BACK|AV_CH_LOW_FREQUENCY)
#define AV_CH_LAYOUT_6POINT0 (AV_CH_LAYOUT_5POINT0|AV_CH_BACK_CENTER)
#define AV_CH_LAYOUT_6POINT0_FRONT (AV_CH_LAYOUT_2_2|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER)
#define AV_CH_LAYOUT_HEXAGONAL (AV_CH_LAYOUT_5POINT0_BACK|AV_CH_BACK_CENTER)
#define AV_CH_LAYOUT_6POINT1 (AV_CH_LAYOUT_5POINT1|AV_CH_BACK_CENTER)
#define AV_CH_LAYOUT_6POINT1_BACK (AV_CH_LAYOUT_5POINT1_BACK|AV_CH_BACK_CENTER)
#define AV_CH_LAYOUT_6POINT1_FRONT (AV_CH_LAYOUT_6POINT0_FRONT|AV_CH_LOW_FREQUENCY)
#define AV_CH_LAYOUT_7POINT0 (AV_CH_LAYOUT_5POINT0|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT)
#define AV_CH_LAYOUT_7POINT0_FRONT (AV_CH_LAYOUT_5POINT0|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER)
#define AV_CH_LAYOUT_7POINT1 (AV_CH_LAYOUT_5POINT1|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT)
#define AV_CH_LAYOUT_7POINT1_WIDE (AV_CH_LAYOUT_5POINT1|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER)
#define AV_CH_LAYOUT_7POINT1_WIDE_BACK (AV_CH_LAYOUT_5POINT1_BACK|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER)
#define AV_CH_LAYOUT_OCTAGONAL (AV_CH_LAYOUT_5POINT0|AV_CH_BACK_LEFT|AV_CH_BACK_CENTER|AV_CH_BACK_RIGHT)
#define AV_CH_LAYOUT_HEXADECAGONAL (AV_CH_LAYOUT_OCTAGONAL|AV_CH_WIDE_LEFT|AV_CH_WIDE_RIGHT|AV_CH_TOP_BACK_LEFT|AV_CH_TOP_BACK_RIGHT|AV_CH_TOP_BACK_CENTER|AV_CH_TOP_FRONT_CENTER|AV_CH_TOP_FRONT_LEFT|AV_CH_TOP_FRONT_RIGHT)
#define AV_CH_LAYOUT_STEREO_DOWNMIX (AV_CH_STEREO_LEFT|AV_CH_STEREO_RIGHT)
enum AVMatrixEncoding {
AV_MATRIX_ENCODING_NONE,
AV_MATRIX_ENCODING_DOLBY,
AV_MATRIX_ENCODING_DPLII,
AV_MATRIX_ENCODING_DPLIIX,
AV_MATRIX_ENCODING_DPLIIZ,
AV_MATRIX_ENCODING_DOLBYEX,
AV_MATRIX_ENCODING_DOLBYHEADPHONE,
AV_MATRIX_ENCODING_NB
};
/**
* Return a channel layout id that matches name, or 0 if no match is found.
*
* name can be one or several of the following notations,
* separated by '+' or '|':
 * - the name of a usual channel layout (mono, stereo, 4.0, quad, 5.0,
* 5.0(side), 5.1, 5.1(side), 7.1, 7.1(wide), downmix);
* - the name of a single channel (FL, FR, FC, LFE, BL, BR, FLC, FRC, BC,
* SL, SR, TC, TFL, TFC, TFR, TBL, TBC, TBR, DL, DR);
* - a number of channels, in decimal, followed by 'c', yielding
* the default channel layout for that number of channels (@see
* av_get_default_channel_layout);
* - a channel layout mask, in hexadecimal starting with "0x" (see the
* AV_CH_* macros).
*
* Example: "stereo+FC" = "2c+FC" = "2c+1c" = "0x7"
*/
uint64_t av_get_channel_layout(const char *name);
/**
* Return a channel layout and the number of channels based on the specified name.
*
* This function is similar to (@see av_get_channel_layout), but can also parse
* unknown channel layout specifications.
*
* @param[in] name channel layout specification string
* @param[out] channel_layout parsed channel layout (0 if unknown)
* @param[out] nb_channels number of channels
*
* @return 0 on success, AVERROR(EINVAL) if the parsing fails.
*/
int av_get_extended_channel_layout(const char *name, uint64_t* channel_layout, int* nb_channels);
/**
* Return a description of a channel layout.
* If nb_channels is <= 0, it is guessed from the channel_layout.
*
 * @param buf buffer in which to write the string describing the channel layout
* @param buf_size size in bytes of the buffer
*/
void av_get_channel_layout_string(char *buf, int buf_size, int nb_channels, uint64_t channel_layout);
struct AVBPrint;
/**
* Append a description of a channel layout to a bprint buffer.
*/
void av_bprint_channel_layout(struct AVBPrint *bp, int nb_channels, uint64_t channel_layout);
/**
* Return the number of channels in the channel layout.
*/
int av_get_channel_layout_nb_channels(uint64_t channel_layout);
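/*
 * Usage sketch: parsing a layout name such as "5.1(side)", counting its
 * channels and formatting a human-readable description back into buf.
 * The helper name is illustrative.
 */
static inline int describe_layout_example(const char *name,
                                          char *buf, int buf_size)
{
    uint64_t layout = av_get_channel_layout(name);
    int nb_channels;
    if (!layout)
        return -1; /* unknown layout name */
    nb_channels = av_get_channel_layout_nb_channels(layout);
    av_get_channel_layout_string(buf, buf_size, nb_channels, layout);
    return nb_channels;
}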
/**
* Return default channel layout for a given number of channels.
*/
int64_t av_get_default_channel_layout(int nb_channels);
/**
* Get the index of a channel in channel_layout.
*
* @param channel a channel layout describing exactly one channel which must be
* present in channel_layout.
*
* @return index of channel in channel_layout on success, a negative AVERROR
* on error.
*/
int av_get_channel_layout_channel_index(uint64_t channel_layout,
uint64_t channel);
/**
* Get the channel with the given index in channel_layout.
*/
uint64_t av_channel_layout_extract_channel(uint64_t channel_layout, int index);
/**
* Get the name of a given channel.
*
* @return channel name on success, NULL on error.
*/
const char *av_get_channel_name(uint64_t channel);
/**
* Get the description of a given channel.
*
* @param channel a channel layout with a single channel
* @return channel description on success, NULL on error
*/
const char *av_get_channel_description(uint64_t channel);
/**
* Get the value and name of a standard channel layout.
*
* @param[in] index index in an internal list, starting at 0
* @param[out] layout channel layout mask
* @param[out] name name of the layout
* @return 0 if the layout exists,
* <0 if index is beyond the limits
*/
int av_get_standard_channel_layout(unsigned index, uint64_t *layout,
const char **name);
/**
* @}
* @}
*/
#endif /* AVUTIL_CHANNEL_LAYOUT_H */

View file

@ -1,560 +0,0 @@
/*
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* common internal and external API header
*/
#ifndef AVUTIL_COMMON_H
#define AVUTIL_COMMON_H
#if defined(__cplusplus) && !defined(__STDC_CONSTANT_MACROS) && !defined(UINT64_C)
#error missing -D__STDC_CONSTANT_MACROS / #define __STDC_CONSTANT_MACROS
#endif
#include <errno.h>
#include <inttypes.h>
#include <limits.h>
#include <math.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "attributes.h"
#include "macros.h"
#include "version.h"
#include "libavutil/avconfig.h"
#if AV_HAVE_BIGENDIAN
# define AV_NE(be, le) (be)
#else
# define AV_NE(be, le) (le)
#endif
//rounded division & shift
#define RSHIFT(a,b) ((a) > 0 ? ((a) + ((1<<(b))>>1))>>(b) : ((a) + ((1<<(b))>>1)-1)>>(b))
/* assume b>0 */
#define ROUNDED_DIV(a,b) (((a)>0 ? (a) + ((b)>>1) : (a) - ((b)>>1))/(b))
/* Fast a/(1<<b) rounded toward +inf. Assume a>=0 and b>=0 */
#define AV_CEIL_RSHIFT(a,b) (!av_builtin_constant_p(b) ? -((-(a)) >> (b)) \
: ((a) + (1<<(b)) - 1) >> (b))
/* Backwards compat. */
#define FF_CEIL_RSHIFT AV_CEIL_RSHIFT
#define FFUDIV(a,b) (((a)>0 ?(a):(a)-(b)+1) / (b))
#define FFUMOD(a,b) ((a)-(b)*FFUDIV(a,b))
/**
 * Absolute value. Note: INT_MIN / INT64_MIN result in undefined behavior as they
 * are not representable as absolute values of their type. This is the same
 * as with *abs().
* @see FFNABS()
*/
#define FFABS(a) ((a) >= 0 ? (a) : (-(a)))
#define FFSIGN(a) ((a) > 0 ? 1 : -1)
/**
 * Negative absolute value.
 * This works for all integers of all types.
 * As with many macros, this evaluates its argument twice; it thus must not
 * have a side effect, that is, FFNABS(x++) has undefined behavior.
*/
#define FFNABS(a) ((a) <= 0 ? (a) : (-(a)))
/**
* Comparator.
* For two numerical expressions x and y, gives 1 if x > y, -1 if x < y, and 0
* if x == y. This is useful for instance in a qsort comparator callback.
* Furthermore, compilers are able to optimize this to branchless code, and
* there is no risk of overflow with signed types.
 * As with many macros, this evaluates its argument multiple times; it thus
 * must not have a side effect.
*/
#define FFDIFFSIGN(x,y) (((x)>(y)) - ((x)<(y)))
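/*
 * Usage sketch: the qsort() comparator mentioned above, written with
 * FFDIFFSIGN so it cannot overflow the way "a - b" can for large ints.
 * The function name is illustrative; qsort() comes from <stdlib.h>,
 * already included above.
 */
static inline int cmp_int_example(const void *a, const void *b)
{
    return FFDIFFSIGN(*(const int *)a, *(const int *)b);
}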
#define FFMAX(a,b) ((a) > (b) ? (a) : (b))
#define FFMAX3(a,b,c) FFMAX(FFMAX(a,b),c)
#define FFMIN(a,b) ((a) > (b) ? (b) : (a))
#define FFMIN3(a,b,c) FFMIN(FFMIN(a,b),c)
#define FFSWAP(type,a,b) do{type SWAP_tmp= b; b= a; a= SWAP_tmp;}while(0)
#define FF_ARRAY_ELEMS(a) (sizeof(a) / sizeof((a)[0]))
/* misc math functions */
#ifdef HAVE_AV_CONFIG_H
# include "config.h"
# include "intmath.h"
#endif
/* Pull in unguarded fallback defines at the end of this file. */
#include "common.h"
#ifndef av_log2
av_const int av_log2(unsigned v);
#endif
#ifndef av_log2_16bit
av_const int av_log2_16bit(unsigned v);
#endif
/**
* Clip a signed integer value into the amin-amax range.
* @param a value to clip
* @param amin minimum value of the clip range
* @param amax maximum value of the clip range
* @return clipped value
*/
static av_always_inline av_const int av_clip_c(int a, int amin, int amax)
{
#if defined(HAVE_AV_CONFIG_H) && defined(ASSERT_LEVEL) && ASSERT_LEVEL >= 2
if (amin > amax) abort();
#endif
if (a < amin) return amin;
else if (a > amax) return amax;
else return a;
}
/**
* Clip a signed 64bit integer value into the amin-amax range.
* @param a value to clip
* @param amin minimum value of the clip range
* @param amax maximum value of the clip range
* @return clipped value
*/
static av_always_inline av_const int64_t av_clip64_c(int64_t a, int64_t amin, int64_t amax)
{
#if defined(HAVE_AV_CONFIG_H) && defined(ASSERT_LEVEL) && ASSERT_LEVEL >= 2
if (amin > amax) abort();
#endif
if (a < amin) return amin;
else if (a > amax) return amax;
else return a;
}
/**
* Clip a signed integer value into the 0-255 range.
* @param a value to clip
* @return clipped value
*/
static av_always_inline av_const uint8_t av_clip_uint8_c(int a)
{
if (a&(~0xFF)) return (~a)>>31;
else return a;
}
/**
* Clip a signed integer value into the -128,127 range.
* @param a value to clip
* @return clipped value
*/
static av_always_inline av_const int8_t av_clip_int8_c(int a)
{
if ((a+0x80U) & ~0xFF) return (a>>31) ^ 0x7F;
else return a;
}
/**
* Clip a signed integer value into the 0-65535 range.
* @param a value to clip
* @return clipped value
*/
static av_always_inline av_const uint16_t av_clip_uint16_c(int a)
{
if (a&(~0xFFFF)) return (~a)>>31;
else return a;
}
/**
* Clip a signed integer value into the -32768,32767 range.
* @param a value to clip
* @return clipped value
*/
static av_always_inline av_const int16_t av_clip_int16_c(int a)
{
if ((a+0x8000U) & ~0xFFFF) return (a>>31) ^ 0x7FFF;
else return a;
}
/**
* Clip a signed 64-bit integer value into the -2147483648,2147483647 range.
* @param a value to clip
* @return clipped value
*/
static av_always_inline av_const int32_t av_clipl_int32_c(int64_t a)
{
if ((a+0x80000000u) & ~UINT64_C(0xFFFFFFFF)) return (int32_t)((a>>63) ^ 0x7FFFFFFF);
else return (int32_t)a;
}
/**
* Clip a signed integer into the -(2^p),(2^p-1) range.
* @param a value to clip
* @param p bit position to clip at
* @return clipped value
*/
static av_always_inline av_const int av_clip_intp2_c(int a, int p)
{
if (((unsigned)a + (1 << p)) & ~((2 << p) - 1))
return (a >> 31) ^ ((1 << p) - 1);
else
return a;
}
/**
* Clip a signed integer to an unsigned power of two range.
* @param a value to clip
* @param p bit position to clip at
* @return clipped value
*/
static av_always_inline av_const unsigned av_clip_uintp2_c(int a, int p)
{
if (a & ~((1<<p) - 1)) return (~a) >> 31 & ((1<<p) - 1);
else return a;
}
/**
 * Clear high bits from an unsigned integer, starting at a specific bit position
* @param a value to clip
* @param p bit position to clip at
* @return clipped value
*/
static av_always_inline av_const unsigned av_mod_uintp2_c(unsigned a, unsigned p)
{
return a & ((1 << p) - 1);
}
/**
* Add two signed 32-bit values with saturation.
*
* @param a one value
* @param b another value
* @return sum with signed saturation
*/
static av_always_inline int av_sat_add32_c(int a, int b)
{
return av_clipl_int32((int64_t)a + b);
}
/**
* Add a doubled value to another value with saturation at both stages.
*
* @param a first value
* @param b value doubled and added to a
* @return sum sat(a + sat(2*b)) with signed saturation
*/
static av_always_inline int av_sat_dadd32_c(int a, int b)
{
return av_sat_add32(a, av_sat_add32(b, b));
}
/**
* Subtract two signed 32-bit values with saturation.
*
* @param a one value
* @param b another value
* @return difference with signed saturation
*/
static av_always_inline int av_sat_sub32_c(int a, int b)
{
return av_clipl_int32((int64_t)a - b);
}
/**
* Subtract a doubled value from another value with saturation at both stages.
*
* @param a first value
* @param b value doubled and subtracted from a
* @return difference sat(a - sat(2*b)) with signed saturation
*/
static av_always_inline int av_sat_dsub32_c(int a, int b)
{
return av_sat_sub32(a, av_sat_add32(b, b));
}
/**
* Clip a float value into the amin-amax range.
* @param a value to clip
* @param amin minimum value of the clip range
* @param amax maximum value of the clip range
* @return clipped value
*/
static av_always_inline av_const float av_clipf_c(float a, float amin, float amax)
{
#if defined(HAVE_AV_CONFIG_H) && defined(ASSERT_LEVEL) && ASSERT_LEVEL >= 2
if (amin > amax) abort();
#endif
if (a < amin) return amin;
else if (a > amax) return amax;
else return a;
}
/**
* Clip a double value into the amin-amax range.
* @param a value to clip
* @param amin minimum value of the clip range
* @param amax maximum value of the clip range
* @return clipped value
*/
static av_always_inline av_const double av_clipd_c(double a, double amin, double amax)
{
#if defined(HAVE_AV_CONFIG_H) && defined(ASSERT_LEVEL) && ASSERT_LEVEL >= 2
if (amin > amax) abort();
#endif
if (a < amin) return amin;
else if (a > amax) return amax;
else return a;
}
/** Compute ceil(log2(x)).
* @param x value used to compute ceil(log2(x))
* @return computed ceiling of log2(x)
*/
static av_always_inline av_const int av_ceil_log2_c(int x)
{
return av_log2((x - 1) << 1);
}
/**
* Count number of bits set to one in x
* @param x value to count bits of
* @return the number of bits set to one in x
*/
static av_always_inline av_const int av_popcount_c(uint32_t x)
{
x -= (x >> 1) & 0x55555555;
x = (x & 0x33333333) + ((x >> 2) & 0x33333333);
x = (x + (x >> 4)) & 0x0F0F0F0F;
x += x >> 8;
return (x + (x >> 16)) & 0x3F;
}
/**
* Count number of bits set to one in x
* @param x value to count bits of
* @return the number of bits set to one in x
*/
static av_always_inline av_const int av_popcount64_c(uint64_t x)
{
return av_popcount((uint32_t)x) + av_popcount((uint32_t)(x >> 32));
}
static av_always_inline av_const int av_parity_c(uint32_t v)
{
return av_popcount(v) & 1;
}
#define MKTAG(a,b,c,d) ((a) | ((b) << 8) | ((c) << 16) | ((unsigned)(d) << 24))
#define MKBETAG(a,b,c,d) ((d) | ((c) << 8) | ((b) << 16) | ((unsigned)(a) << 24))
/**
* Convert a UTF-8 character (up to 4 bytes) to its 32-bit UCS-4 encoded form.
*
* @param val Output value, must be an lvalue of type uint32_t.
* @param GET_BYTE Expression reading one byte from the input.
* Evaluated up to 7 times (4 for the currently
* assigned Unicode range). With a memory buffer
* input, this could be *ptr++.
* @param ERROR Expression to be evaluated on invalid input,
* typically a goto statement.
*
* @warning ERROR should not contain a loop control statement which
* could interact with the internal while loop, and should force an
* exit from the macro code (e.g. through a goto or a return) in order
* to prevent undefined results.
*/
#define GET_UTF8(val, GET_BYTE, ERROR)\
val= (GET_BYTE);\
{\
uint32_t top = (val & 128) >> 1;\
if ((val & 0xc0) == 0x80 || val >= 0xFE)\
ERROR\
while (val & top) {\
int tmp= (GET_BYTE) - 128;\
if(tmp>>6)\
ERROR\
val= (val<<6) + tmp;\
top <<= 5;\
}\
val &= (top << 1) - 1;\
}
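/*
 * Usage sketch: counting the code points of a NUL-terminated UTF-8 string
 * with GET_UTF8, using a goto for the ERROR argument as documented above.
 * The helper name is illustrative.
 */
static inline int utf8_codepoint_count_example(const char *s)
{
    const uint8_t *p = (const uint8_t *)s;
    int count = 0;
    while (*p) {
        uint32_t cp;
        GET_UTF8(cp, *p++, goto fail;)
        (void)cp; /* the decoded code point */
        count++;
    }
    return count;
fail:
    return -1;
}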
/**
* Convert a UTF-16 character (2 or 4 bytes) to its 32-bit UCS-4 encoded form.
*
* @param val Output value, must be an lvalue of type uint32_t.
* @param GET_16BIT Expression returning two bytes of UTF-16 data converted
* to native byte order. Evaluated one or two times.
* @param ERROR Expression to be evaluated on invalid input,
* typically a goto statement.
*/
#define GET_UTF16(val, GET_16BIT, ERROR)\
val = GET_16BIT;\
{\
unsigned int hi = val - 0xD800;\
if (hi < 0x800) {\
val = GET_16BIT - 0xDC00;\
if (val > 0x3FFU || hi > 0x3FFU)\
ERROR\
val += (hi<<10) + 0x10000;\
}\
}\
/**
* @def PUT_UTF8(val, tmp, PUT_BYTE)
* Convert a 32-bit Unicode character to its UTF-8 encoded form (up to 4 bytes long).
* @param val is an input-only argument and should be of type uint32_t. It holds
* a UCS-4 encoded Unicode character that is to be converted to UTF-8. If
* val is given as a function it is executed only once.
* @param tmp is a temporary variable and should be of type uint8_t. It
* represents an intermediate value during conversion that is to be
* output by PUT_BYTE.
* @param PUT_BYTE writes the converted UTF-8 bytes to any proper destination.
* It could be a function or a statement, and uses tmp as the input byte.
* For example, PUT_BYTE could be "*output++ = tmp;" PUT_BYTE will be
* executed up to 4 times for values in the valid UTF-8 range and up to
* 7 times in the general case, depending on the length of the converted
* Unicode character.
*/
#define PUT_UTF8(val, tmp, PUT_BYTE)\
{\
int bytes, shift;\
uint32_t in = val;\
if (in < 0x80) {\
tmp = in;\
PUT_BYTE\
} else {\
bytes = (av_log2(in) + 4) / 5;\
shift = (bytes - 1) * 6;\
tmp = (256 - (256 >> bytes)) | (in >> shift);\
PUT_BYTE\
while (shift >= 6) {\
shift -= 6;\
tmp = 0x80 | ((in >> shift) & 0x3f);\
PUT_BYTE\
}\
}\
}
/**
* @def PUT_UTF16(val, tmp, PUT_16BIT)
* Convert a 32-bit Unicode character to its UTF-16 encoded form (2 or 4 bytes).
* @param val is an input-only argument and should be of type uint32_t. It holds
* a UCS-4 encoded Unicode character that is to be converted to UTF-16. If
* val is given as a function it is executed only once.
* @param tmp is a temporary variable and should be of type uint16_t. It
* represents an intermediate value during conversion that is to be
* output by PUT_16BIT.
 * @param PUT_16BIT writes the converted UTF-16 data to any proper destination
 * in desired endianness. It could be a function or a statement, and uses tmp
 * as the input; for example, PUT_16BIT could be "*output++ = tmp;"
 * PUT_16BIT will be executed 1 or 2 times depending on the input character.
*/
#define PUT_UTF16(val, tmp, PUT_16BIT)\
    {\
        uint32_t in = val;\
        if (in < 0x10000) {\
            tmp = in;\
            PUT_16BIT\
        } else {\
            tmp = 0xD800 | ((in - 0x10000) >> 10);\
            PUT_16BIT\
            tmp = 0xDC00 | ((in - 0x10000) & 0x3FF);\
            PUT_16BIT\
        }\
    }\

#include "mem.h"
#ifdef HAVE_AV_CONFIG_H
# include "internal.h"
#endif /* HAVE_AV_CONFIG_H */
#endif /* AVUTIL_COMMON_H */
/*
* The following definitions are outside the multiple inclusion guard
* to ensure they are immediately available in intmath.h.
*/
#ifndef av_ceil_log2
# define av_ceil_log2 av_ceil_log2_c
#endif
#ifndef av_clip
# define av_clip av_clip_c
#endif
#ifndef av_clip64
# define av_clip64 av_clip64_c
#endif
#ifndef av_clip_uint8
# define av_clip_uint8 av_clip_uint8_c
#endif
#ifndef av_clip_int8
# define av_clip_int8 av_clip_int8_c
#endif
#ifndef av_clip_uint16
# define av_clip_uint16 av_clip_uint16_c
#endif
#ifndef av_clip_int16
# define av_clip_int16 av_clip_int16_c
#endif
#ifndef av_clipl_int32
# define av_clipl_int32 av_clipl_int32_c
#endif
#ifndef av_clip_intp2
# define av_clip_intp2 av_clip_intp2_c
#endif
#ifndef av_clip_uintp2
# define av_clip_uintp2 av_clip_uintp2_c
#endif
#ifndef av_mod_uintp2
# define av_mod_uintp2 av_mod_uintp2_c
#endif
#ifndef av_sat_add32
# define av_sat_add32 av_sat_add32_c
#endif
#ifndef av_sat_dadd32
# define av_sat_dadd32 av_sat_dadd32_c
#endif
#ifndef av_sat_sub32
# define av_sat_sub32 av_sat_sub32_c
#endif
#ifndef av_sat_dsub32
# define av_sat_dsub32 av_sat_dsub32_c
#endif
#ifndef av_clipf
# define av_clipf av_clipf_c
#endif
#ifndef av_clipd
# define av_clipd av_clipd_c
#endif
#ifndef av_popcount
# define av_popcount av_popcount_c
#endif
#ifndef av_popcount64
# define av_popcount64 av_popcount64_c
#endif
#ifndef av_parity
# define av_parity av_parity_c
#endif
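A minimal usage sketch for this header's GET_UTF8/PUT_UTF16 macros, assuming the header (and stdint.h/stddef.h) is already included. The helper name, buffer handling and return convention are illustrative only, not part of libavutil:

/* Hypothetical helper: transcode a NUL-terminated UTF-8 string into a
 * native-endian UTF-16 buffer. Returns the number of UTF-16 code units
 * written, or -1 on invalid input or buffer overflow. */
static int utf8_to_utf16(const char *src, uint16_t *dst, size_t dst_len)
{
    const uint8_t *s = (const uint8_t *)src;
    uint16_t *out = dst, *end = dst + dst_len;

    while (*s) {
        uint32_t cp;
        uint16_t tmp;
        /* The ERROR argument must force an exit from the macro, as the
         * GET_UTF8 warning above requires. */
        GET_UTF8(cp, *s++, return -1;)
        PUT_UTF16(cp, tmp, if (out == end) return -1; *out++ = tmp;)
    }
    return (int)(out - dst);
}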
View file
@@ -1,130 +0,0 @@
/*
* Copyright (c) 2000, 2001, 2002 Fabrice Bellard
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_CPU_H
#define AVUTIL_CPU_H
#include <stddef.h>
#include "attributes.h"
#define AV_CPU_FLAG_FORCE 0x80000000 /* force usage of selected flags (OR) */
/* lower 16 bits - CPU features */
#define AV_CPU_FLAG_MMX 0x0001 ///< standard MMX
#define AV_CPU_FLAG_MMXEXT 0x0002 ///< SSE integer functions or AMD MMX ext
#define AV_CPU_FLAG_MMX2 0x0002 ///< SSE integer functions or AMD MMX ext
#define AV_CPU_FLAG_3DNOW 0x0004 ///< AMD 3DNOW
#define AV_CPU_FLAG_SSE 0x0008 ///< SSE functions
#define AV_CPU_FLAG_SSE2 0x0010 ///< PIV SSE2 functions
#define AV_CPU_FLAG_SSE2SLOW 0x40000000 ///< SSE2 supported, but usually not faster
///< than regular MMX/SSE (e.g. Core1)
#define AV_CPU_FLAG_3DNOWEXT 0x0020 ///< AMD 3DNowExt
#define AV_CPU_FLAG_SSE3 0x0040 ///< Prescott SSE3 functions
#define AV_CPU_FLAG_SSE3SLOW 0x20000000 ///< SSE3 supported, but usually not faster
///< than regular MMX/SSE (e.g. Core1)
#define AV_CPU_FLAG_SSSE3 0x0080 ///< Conroe SSSE3 functions
#define AV_CPU_FLAG_SSSE3SLOW 0x4000000 ///< SSSE3 supported, but usually not faster
#define AV_CPU_FLAG_ATOM 0x10000000 ///< Atom processor, some SSSE3 instructions are slower
#define AV_CPU_FLAG_SSE4 0x0100 ///< Penryn SSE4.1 functions
#define AV_CPU_FLAG_SSE42 0x0200 ///< Nehalem SSE4.2 functions
#define AV_CPU_FLAG_AESNI 0x80000 ///< Advanced Encryption Standard functions
#define AV_CPU_FLAG_AVX 0x4000 ///< AVX functions: requires OS support even if YMM registers aren't used
#define AV_CPU_FLAG_AVXSLOW 0x8000000 ///< AVX supported, but slow when using YMM registers (e.g. Bulldozer)
#define AV_CPU_FLAG_XOP 0x0400 ///< Bulldozer XOP functions
#define AV_CPU_FLAG_FMA4 0x0800 ///< Bulldozer FMA4 functions
#define AV_CPU_FLAG_CMOV 0x1000 ///< supports cmov instruction
#define AV_CPU_FLAG_AVX2 0x8000 ///< AVX2 functions: requires OS support even if YMM registers aren't used
#define AV_CPU_FLAG_FMA3 0x10000 ///< Haswell FMA3 functions
#define AV_CPU_FLAG_BMI1 0x20000 ///< Bit Manipulation Instruction Set 1
#define AV_CPU_FLAG_BMI2 0x40000 ///< Bit Manipulation Instruction Set 2
#define AV_CPU_FLAG_AVX512 0x100000 ///< AVX-512 functions: requires OS support even if YMM/ZMM registers aren't used
#define AV_CPU_FLAG_ALTIVEC 0x0001 ///< standard
#define AV_CPU_FLAG_VSX 0x0002 ///< ISA 2.06
#define AV_CPU_FLAG_POWER8 0x0004 ///< ISA 2.07
#define AV_CPU_FLAG_ARMV5TE (1 << 0)
#define AV_CPU_FLAG_ARMV6 (1 << 1)
#define AV_CPU_FLAG_ARMV6T2 (1 << 2)
#define AV_CPU_FLAG_VFP (1 << 3)
#define AV_CPU_FLAG_VFPV3 (1 << 4)
#define AV_CPU_FLAG_NEON (1 << 5)
#define AV_CPU_FLAG_ARMV8 (1 << 6)
#define AV_CPU_FLAG_VFP_VM (1 << 7) ///< VFPv2 vector mode, deprecated in ARMv7-A and unavailable in various CPU implementations
#define AV_CPU_FLAG_SETEND (1 <<16)
/**
* Return the flags which specify extensions supported by the CPU.
* The returned value is affected by av_force_cpu_flags() if that was used
* before. So av_get_cpu_flags() can easily be used in an application to
* detect the enabled cpu flags.
*/
int av_get_cpu_flags(void);
/**
* Disables cpu detection and forces the specified flags.
* -1 is a special case that disables forcing of specific flags.
*/
void av_force_cpu_flags(int flags);
/**
* Set a mask on flags returned by av_get_cpu_flags().
* This function is mainly useful for testing.
 * Please use av_force_cpu_flags() and av_get_cpu_flags() instead, which are more flexible.
*/
attribute_deprecated void av_set_cpu_flags_mask(int mask);
/**
* Parse CPU flags from a string.
*
* The returned flags contain the specified flags as well as related unspecified flags.
*
* This function exists only for compatibility with libav.
* Please use av_parse_cpu_caps() when possible.
* @return a combination of AV_CPU_* flags, negative on error.
*/
attribute_deprecated
int av_parse_cpu_flags(const char *s);
/**
* Parse CPU caps from a string and update the given AV_CPU_* flags based on that.
*
* @return negative on error.
*/
int av_parse_cpu_caps(unsigned *flags, const char *s);
/**
* @return the number of logical CPU cores present.
*/
int av_cpu_count(void);
/**
* Get the maximum data alignment that may be required by FFmpeg.
*
* Note that this is affected by the build configuration and the CPU flags mask,
* so e.g. if the CPU supports AVX, but libavutil has been built with
* --disable-avx or the AV_CPU_FLAG_AVX flag has been disabled through
* av_set_cpu_flags_mask(), then this function will behave as if AVX is not
* present.
*/
size_t av_cpu_max_align(void);
#endif /* AVUTIL_CPU_H */
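A short usage sketch for the detection functions declared above, assuming this header is included; the function name and the specific flag checks are illustrative only:

#include <stdio.h>

/* Hypothetical startup check: report core count, required alignment and
 * which SIMD code path could be selected. */
static void report_cpu_features(void)
{
    int flags = av_get_cpu_flags();

    printf("logical cores : %d\n", av_cpu_count());
    printf("max alignment : %zu bytes\n", av_cpu_max_align());
    if (flags & AV_CPU_FLAG_AVX2)
        printf("using the AVX2 path\n");
    else if (flags & AV_CPU_FLAG_SSE2)
        printf("falling back to SSE2\n");
    else
        printf("using the generic C path\n");
}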
View file
@@ -1,100 +0,0 @@
/*
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* @ingroup lavu_crc32
* Public header for CRC hash function implementation.
*/
#ifndef AVUTIL_CRC_H
#define AVUTIL_CRC_H
#include <stdint.h>
#include <stddef.h>
#include "attributes.h"
#include "version.h"
/**
* @defgroup lavu_crc32 CRC
* @ingroup lavu_hash
* CRC (Cyclic Redundancy Check) hash function implementation.
*
* This module supports numerous CRC polynomials, in addition to the most
* widely used CRC-32-IEEE. See @ref AVCRCId for a list of available
* polynomials.
*
* @{
*/
typedef uint32_t AVCRC;
typedef enum {
AV_CRC_8_ATM,
AV_CRC_16_ANSI,
AV_CRC_16_CCITT,
AV_CRC_32_IEEE,
AV_CRC_32_IEEE_LE, /*< reversed bitorder version of AV_CRC_32_IEEE */
AV_CRC_16_ANSI_LE, /*< reversed bitorder version of AV_CRC_16_ANSI */
AV_CRC_24_IEEE,
AV_CRC_8_EBU,
AV_CRC_MAX, /*< Not part of public API! Do not use outside libavutil. */
}AVCRCId;
/**
* Initialize a CRC table.
* @param ctx must be an array of size sizeof(AVCRC)*257 or sizeof(AVCRC)*1024
* @param le If 1, the lowest bit represents the coefficient for the highest
* exponent of the corresponding polynomial (both for poly and
* actual CRC).
* If 0, you must swap the CRC parameter and the result of av_crc
* if you need the standard representation (can be simplified in
* most cases to e.g. bswap16):
* av_bswap32(crc << (32-bits))
* @param bits number of bits for the CRC
* @param poly generator polynomial without the x**bits coefficient, in the
* representation as specified by le
* @param ctx_size size of ctx in bytes
* @return <0 on failure
*/
int av_crc_init(AVCRC *ctx, int le, int bits, uint32_t poly, int ctx_size);
/**
* Get an initialized standard CRC table.
* @param crc_id ID of a standard CRC
* @return a pointer to the CRC table or NULL on failure
*/
const AVCRC *av_crc_get_table(AVCRCId crc_id);
/**
* Calculate the CRC of a block.
* @param crc CRC of previous blocks if any or initial value for CRC
* @return CRC updated with the data from the given block
*
* @see av_crc_init() "le" parameter
*/
uint32_t av_crc(const AVCRC *ctx, uint32_t crc,
const uint8_t *buffer, size_t length) av_pure;
/**
* @}
*/
#endif /* AVUTIL_CRC_H */
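A usage sketch for the CRC API above. Computing a conventional zlib-style CRC-32 via the reflected AV_CRC_32_IEEE_LE table with an initial value and final XOR of 0xFFFFFFFF is an assumption worth checking against reference values for your data:

/* Hypothetical helper: CRC-32 of a byte buffer using a shared standard table. */
static uint32_t checksum32(const uint8_t *buf, size_t len)
{
    const AVCRC *table = av_crc_get_table(AV_CRC_32_IEEE_LE);
    if (!table)
        return 0;
    return av_crc(table, 0xFFFFFFFFu, buf, len) ^ 0xFFFFFFFFu;
}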
View file
@@ -1,77 +0,0 @@
/*
* DES encryption/decryption
* Copyright (c) 2007 Reimar Doeffinger
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_DES_H
#define AVUTIL_DES_H
#include <stdint.h>
/**
* @defgroup lavu_des DES
* @ingroup lavu_crypto
* @{
*/
typedef struct AVDES {
uint64_t round_keys[3][16];
int triple_des;
} AVDES;
/**
* Allocate an AVDES context.
*/
AVDES *av_des_alloc(void);
/**
* @brief Initializes an AVDES context.
*
* @param key_bits must be 64 or 192
* @param decrypt 0 for encryption/CBC-MAC, 1 for decryption
* @return zero on success, negative value otherwise
*/
int av_des_init(struct AVDES *d, const uint8_t *key, int key_bits, int decrypt);
/**
* @brief Encrypts / decrypts using the DES algorithm.
*
* @param count number of 8 byte blocks
* @param dst destination array, can be equal to src, must be 8-byte aligned
* @param src source array, can be equal to dst, must be 8-byte aligned, may be NULL
* @param iv initialization vector for CBC mode, if NULL then ECB will be used,
* must be 8-byte aligned
* @param decrypt 0 for encryption, 1 for decryption
*/
void av_des_crypt(struct AVDES *d, uint8_t *dst, const uint8_t *src, int count, uint8_t *iv, int decrypt);
/**
* @brief Calculates CBC-MAC using the DES algorithm.
*
* @param count number of 8 byte blocks
* @param dst destination array, can be equal to src, must be 8-byte aligned
* @param src source array, can be equal to dst, must be 8-byte aligned, may be NULL
*/
void av_des_mac(struct AVDES *d, uint8_t *dst, const uint8_t *src, int count);
/**
* @}
*/
#endif /* AVUTIL_DES_H */
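A usage sketch for the DES API above, with a single-DES key in ECB mode. The helper name and error handling are illustrative, and releasing the context with av_free() from mem.h is an assumption made here:

/* Hypothetical helper: encrypt nblocks 8-byte blocks with plain DES in ECB
 * mode (iv == NULL). dst may equal src; both must be 8-byte aligned. */
static int des_ecb_encrypt(const uint8_t key[8], uint8_t *dst,
                           const uint8_t *src, int nblocks)
{
    AVDES *d = av_des_alloc();
    if (!d)
        return -1;
    if (av_des_init(d, key, 64, 0) < 0) {   /* 64-bit key, encryption */
        av_free(d);
        return -1;
    }
    av_des_crypt(d, dst, src, nblocks, NULL, 0);
    av_free(d);
    return 0;
}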
View file
@@ -1,200 +0,0 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* Public dictionary API.
* @deprecated
* AVDictionary is provided for compatibility with libav. It is both in
* implementation as well as API inefficient. It does not scale and is
* extremely slow with large dictionaries.
 * It is recommended that new code use our tree container from tree.c/h
* where applicable, which uses AVL trees to achieve O(log n) performance.
*/
#ifndef AVUTIL_DICT_H
#define AVUTIL_DICT_H
#include <stdint.h>
#include "version.h"
/**
* @addtogroup lavu_dict AVDictionary
* @ingroup lavu_data
*
* @brief Simple key:value store
*
* @{
* Dictionaries are used for storing key:value pairs. To create
* an AVDictionary, simply pass an address of a NULL pointer to
* av_dict_set(). NULL can be used as an empty dictionary wherever
* a pointer to an AVDictionary is required.
* Use av_dict_get() to retrieve an entry or iterate over all
* entries and finally av_dict_free() to free the dictionary
* and all its contents.
*
@code
AVDictionary *d = NULL; // "create" an empty dictionary
AVDictionaryEntry *t = NULL;
av_dict_set(&d, "foo", "bar", 0); // add an entry
char *k = av_strdup("key"); // if your strings are already allocated,
char *v = av_strdup("value"); // you can avoid copying them like this
av_dict_set(&d, k, v, AV_DICT_DONT_STRDUP_KEY | AV_DICT_DONT_STRDUP_VAL);
while (t = av_dict_get(d, "", t, AV_DICT_IGNORE_SUFFIX)) {
<....> // iterate over all entries in d
}
av_dict_free(&d);
@endcode
*/
#define AV_DICT_MATCH_CASE 1 /**< Only get an entry with exact-case key match. Only relevant in av_dict_get(). */
#define AV_DICT_IGNORE_SUFFIX 2 /**< Return first entry in a dictionary whose first part corresponds to the search key,
ignoring the suffix of the found key string. Only relevant in av_dict_get(). */
#define AV_DICT_DONT_STRDUP_KEY 4 /**< Take ownership of a key that's been
allocated with av_malloc() or another memory allocation function. */
#define AV_DICT_DONT_STRDUP_VAL 8 /**< Take ownership of a value that's been
allocated with av_malloc() or another memory allocation function. */
#define AV_DICT_DONT_OVERWRITE 16 ///< Don't overwrite existing entries.
#define AV_DICT_APPEND 32 /**< If the entry already exists, append to it. Note that no
delimiter is added, the strings are simply concatenated. */
#define AV_DICT_MULTIKEY 64 /**< Allow storing several equal keys in the dictionary */
typedef struct AVDictionaryEntry {
char *key;
char *value;
} AVDictionaryEntry;
typedef struct AVDictionary AVDictionary;
/**
* Get a dictionary entry with matching key.
*
* The returned entry key or value must not be changed, or it will
* cause undefined behavior.
*
* To iterate through all the dictionary entries, you can set the matching key
* to the null string "" and set the AV_DICT_IGNORE_SUFFIX flag.
*
* @param prev Set to the previous matching element to find the next.
* If set to NULL the first matching element is returned.
* @param key matching key
* @param flags a collection of AV_DICT_* flags controlling how the entry is retrieved
* @return found entry or NULL in case no matching entry was found in the dictionary
*/
AVDictionaryEntry *av_dict_get(const AVDictionary *m, const char *key,
const AVDictionaryEntry *prev, int flags);
/**
* Get number of entries in dictionary.
*
* @param m dictionary
* @return number of entries in dictionary
*/
int av_dict_count(const AVDictionary *m);
/**
* Set the given entry in *pm, overwriting an existing entry.
*
* Note: If AV_DICT_DONT_STRDUP_KEY or AV_DICT_DONT_STRDUP_VAL is set,
* these arguments will be freed on error.
*
* Warning: Adding a new entry to a dictionary invalidates all existing entries
* previously returned with av_dict_get.
*
* @param pm pointer to a pointer to a dictionary struct. If *pm is NULL
* a dictionary struct is allocated and put in *pm.
* @param key entry key to add to *pm (will either be av_strduped or added as a new key depending on flags)
 * @param value entry value to add to *pm (will be av_strduped or added as-is depending on flags).
* Passing a NULL value will cause an existing entry to be deleted.
* @return >= 0 on success otherwise an error code <0
*/
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags);
/**
* Convenience wrapper for av_dict_set that converts the value to a string
* and stores it.
*
* Note: If AV_DICT_DONT_STRDUP_KEY is set, key will be freed on error.
*/
int av_dict_set_int(AVDictionary **pm, const char *key, int64_t value, int flags);
/**
* Parse the key/value pairs list and add the parsed entries to a dictionary.
*
* In case of failure, all the successfully set entries are stored in
* *pm. You may need to manually free the created dictionary.
*
* @param key_val_sep a 0-terminated list of characters used to separate
* key from value
* @param pairs_sep a 0-terminated list of characters used to separate
* two pairs from each other
* @param flags flags to use when adding to dictionary.
* AV_DICT_DONT_STRDUP_KEY and AV_DICT_DONT_STRDUP_VAL
* are ignored since the key/value tokens will always
* be duplicated.
* @return 0 on success, negative AVERROR code on failure
*/
int av_dict_parse_string(AVDictionary **pm, const char *str,
const char *key_val_sep, const char *pairs_sep,
int flags);
/**
* Copy entries from one AVDictionary struct into another.
* @param dst pointer to a pointer to a AVDictionary struct. If *dst is NULL,
* this function will allocate a struct for you and put it in *dst
* @param src pointer to source AVDictionary struct
* @param flags flags to use when setting entries in *dst
* @note metadata is read using the AV_DICT_IGNORE_SUFFIX flag
* @return 0 on success, negative AVERROR code on failure. If dst was allocated
* by this function, callers should free the associated memory.
*/
int av_dict_copy(AVDictionary **dst, const AVDictionary *src, int flags);
/**
* Free all the memory allocated for an AVDictionary struct
* and all keys and values.
*/
void av_dict_free(AVDictionary **m);
/**
* Get dictionary entries as a string.
*
* Create a string containing dictionary's entries.
* Such string may be passed back to av_dict_parse_string().
* @note String is escaped with backslashes ('\').
*
* @param[in] m dictionary
 * @param[out] buffer Pointer to buffer that will be allocated with string containing entries.
* Buffer must be freed by the caller when is no longer needed.
* @param[in] key_val_sep character used to separate key from value
* @param[in] pairs_sep character used to separate two pairs from each other
* @return >= 0 on success, negative on error
 * @warning Separators cannot be '\\' or '\0'. They also cannot be the same.
*/
int av_dict_get_string(const AVDictionary *m, char **buffer,
const char key_val_sep, const char pairs_sep);
/**
* @}
*/
#endif /* AVUTIL_DICT_H */
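Complementing the example embedded in the documentation above, a sketch of the parse/serialize round trip with av_dict_parse_string() and av_dict_get_string(). The separators and option names are arbitrary examples, and av_free() from mem.h is assumed to be available:

#include <stdio.h>

/* Hypothetical helper: parse "key=value:key=value" options, then dump the
 * dictionary back into a single allocated string. */
static void options_roundtrip(void)
{
    AVDictionary *opts = NULL;
    char *dump = NULL;

    if (av_dict_parse_string(&opts, "threads=4:preset=fast", "=", ":", 0) >= 0 &&
        av_dict_get_string(opts, &dump, '=', ':') >= 0) {
        printf("%d entries: %s\n", av_dict_count(opts), dump);
        av_free(dump);                /* the buffer is allocated for the caller */
    }
    av_dict_free(&opts);
}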
View file
@@ -1,114 +0,0 @@
/*
* Copyright (c) 2014 Vittorio Giovara <vittorio.giovara@gmail.com>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* Display matrix
*/
#ifndef AVUTIL_DISPLAY_H
#define AVUTIL_DISPLAY_H
#include <stdint.h>
#include "common.h"
/**
* @addtogroup lavu_video
* @{
*
* @defgroup lavu_video_display Display transformation matrix functions
* @{
*/
/**
* @addtogroup lavu_video_display
* The display transformation matrix specifies an affine transformation that
* should be applied to video frames for correct presentation. It is compatible
* with the matrices stored in the ISO/IEC 14496-12 container format.
*
* The data is a 3x3 matrix represented as a 9-element array:
*
* @code{.unparsed}
* | a b u |
* (a, b, u, c, d, v, x, y, w) -> | c d v |
* | x y w |
* @endcode
*
* All numbers are stored in native endianness, as 16.16 fixed-point values,
* except for u, v and w, which are stored as 2.30 fixed-point values.
*
* The transformation maps a point (p, q) in the source (pre-transformation)
* frame to the point (p', q') in the destination (post-transformation) frame as
* follows:
*
* @code{.unparsed}
* | a b u |
* (p, q, 1) . | c d v | = z * (p', q', 1)
* | x y w |
* @endcode
*
* The transformation can also be more explicitly written in components as
* follows:
*
* @code{.unparsed}
* p' = (a * p + c * q + x) / z;
* q' = (b * p + d * q + y) / z;
* z = u * p + v * q + w
* @endcode
*/
/**
* Extract the rotation component of the transformation matrix.
*
* @param matrix the transformation matrix
* @return the angle (in degrees) by which the transformation rotates the frame
* counterclockwise. The angle will be in range [-180.0, 180.0],
* or NaN if the matrix is singular.
*
* @note floating point numbers are inherently inexact, so callers are
* recommended to round the return value to nearest integer before use.
*/
double av_display_rotation_get(const int32_t matrix[9]);
/**
* Initialize a transformation matrix describing a pure counterclockwise
* rotation by the specified angle (in degrees).
*
* @param matrix an allocated transformation matrix (will be fully overwritten
* by this function)
* @param angle rotation angle in degrees.
*/
void av_display_rotation_set(int32_t matrix[9], double angle);
/**
* Flip the input matrix horizontally and/or vertically.
*
* @param matrix an allocated transformation matrix
* @param hflip whether the matrix should be flipped horizontally
* @param vflip whether the matrix should be flipped vertically
*/
void av_display_matrix_flip(int32_t matrix[9], int hflip, int vflip);
/**
* @}
* @}
*/
#endif /* AVUTIL_DISPLAY_H */
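A usage sketch for the three functions above, assuming this header is included; the 90-degree rotation and the horizontal flip are arbitrary values:

#include <stdio.h>

/* Hypothetical demo: build a pure 90-degree counterclockwise rotation,
 * mirror it horizontally, and read the resulting angle back. */
static void display_matrix_demo(void)
{
    int32_t matrix[9];

    av_display_rotation_set(matrix, 90.0);
    av_display_matrix_flip(matrix, 1, 0);
    printf("rotation after flip: %.1f degrees\n",
           av_display_rotation_get(matrix));
}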
View file
@@ -1,115 +0,0 @@
/*
* Copyright (c) 2014 Tim Walker <tdskywalker@gmail.com>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_DOWNMIX_INFO_H
#define AVUTIL_DOWNMIX_INFO_H
#include "frame.h"
/**
* @file
 * audio downmix metadata
*/
/**
* @addtogroup lavu_audio
* @{
*/
/**
* @defgroup downmix_info Audio downmix metadata
* @{
*/
/**
* Possible downmix types.
*/
enum AVDownmixType {
AV_DOWNMIX_TYPE_UNKNOWN, /**< Not indicated. */
AV_DOWNMIX_TYPE_LORO, /**< Lo/Ro 2-channel downmix (Stereo). */
AV_DOWNMIX_TYPE_LTRT, /**< Lt/Rt 2-channel downmix, Dolby Surround compatible. */
AV_DOWNMIX_TYPE_DPLII, /**< Lt/Rt 2-channel downmix, Dolby Pro Logic II compatible. */
AV_DOWNMIX_TYPE_NB /**< Number of downmix types. Not part of ABI. */
};
/**
* This structure describes optional metadata relevant to a downmix procedure.
*
* All fields are set by the decoder to the value indicated in the audio
* bitstream (if present), or to a "sane" default otherwise.
*/
typedef struct AVDownmixInfo {
/**
* Type of downmix preferred by the mastering engineer.
*/
enum AVDownmixType preferred_downmix_type;
/**
* Absolute scale factor representing the nominal level of the center
* channel during a regular downmix.
*/
double center_mix_level;
/**
* Absolute scale factor representing the nominal level of the center
* channel during an Lt/Rt compatible downmix.
*/
double center_mix_level_ltrt;
/**
* Absolute scale factor representing the nominal level of the surround
* channels during a regular downmix.
*/
double surround_mix_level;
/**
* Absolute scale factor representing the nominal level of the surround
* channels during an Lt/Rt compatible downmix.
*/
double surround_mix_level_ltrt;
/**
* Absolute scale factor representing the level at which the LFE data is
* mixed into L/R channels during downmixing.
*/
double lfe_mix_level;
} AVDownmixInfo;
/**
* Get a frame's AV_FRAME_DATA_DOWNMIX_INFO side data for editing.
*
* If the side data is absent, it is created and added to the frame.
*
* @param frame the frame for which the side data is to be obtained or created
*
* @return the AVDownmixInfo structure to be edited by the caller, or NULL if
* the structure cannot be allocated.
*/
AVDownmixInfo *av_downmix_info_update_side_data(AVFrame *frame);
/**
* @}
*/
/**
* @}
*/
#endif /* AVUTIL_DOWNMIX_INFO_H */
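A usage sketch for av_downmix_info_update_side_data(). The frame is assumed to come from the caller's decode loop, and the scale factors are placeholder values:

/* Hypothetical helper: attach Lo/Ro downmix metadata to a frame. */
static int tag_loro_downmix(AVFrame *frame)
{
    AVDownmixInfo *info = av_downmix_info_update_side_data(frame);
    if (!info)
        return -1;                        /* side data could not be allocated */

    info->preferred_downmix_type = AV_DOWNMIX_TYPE_LORO;
    info->center_mix_level       = 0.707; /* placeholder, roughly -3 dB */
    info->surround_mix_level     = 0.707;
    info->lfe_mix_level          = 0.0;
    return 0;
}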
View file
@@ -1,205 +0,0 @@
/**
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_ENCRYPTION_INFO_H
#define AVUTIL_ENCRYPTION_INFO_H
#include <stddef.h>
#include <stdint.h>
typedef struct AVSubsampleEncryptionInfo {
/** The number of bytes that are clear. */
unsigned int bytes_of_clear_data;
/**
* The number of bytes that are protected. If using pattern encryption,
* the pattern applies to only the protected bytes; if not using pattern
* encryption, all these bytes are encrypted.
*/
unsigned int bytes_of_protected_data;
} AVSubsampleEncryptionInfo;
/**
* This describes encryption info for a packet. This contains frame-specific
* info for how to decrypt the packet before passing it to the decoder.
*
* The size of this struct is not part of the public ABI.
*/
typedef struct AVEncryptionInfo {
/** The fourcc encryption scheme, in big-endian byte order. */
uint32_t scheme;
/**
* Only used for pattern encryption. This is the number of 16-byte blocks
* that are encrypted.
*/
uint32_t crypt_byte_block;
/**
* Only used for pattern encryption. This is the number of 16-byte blocks
* that are clear.
*/
uint32_t skip_byte_block;
/**
* The ID of the key used to encrypt the packet. This should always be
* 16 bytes long, but may be changed in the future.
*/
uint8_t *key_id;
uint32_t key_id_size;
/**
* The initialization vector. This may have been zero-filled to be the
* correct block size. This should always be 16 bytes long, but may be
* changed in the future.
*/
uint8_t *iv;
uint32_t iv_size;
/**
* An array of subsample encryption info specifying how parts of the sample
* are encrypted. If there are no subsamples, then the whole sample is
* encrypted.
*/
AVSubsampleEncryptionInfo *subsamples;
uint32_t subsample_count;
} AVEncryptionInfo;
/**
* This describes info used to initialize an encryption key system.
*
* The size of this struct is not part of the public ABI.
*/
typedef struct AVEncryptionInitInfo {
/**
* A unique identifier for the key system this is for, can be NULL if it
* is not known. This should always be 16 bytes, but may change in the
* future.
*/
uint8_t* system_id;
uint32_t system_id_size;
/**
* An array of key IDs this initialization data is for. All IDs are the
* same length. Can be NULL if there are no known key IDs.
*/
uint8_t** key_ids;
/** The number of key IDs. */
uint32_t num_key_ids;
/**
* The number of bytes in each key ID. This should always be 16, but may
* change in the future.
*/
uint32_t key_id_size;
/**
* Key-system specific initialization data. This data is copied directly
* from the file and the format depends on the specific key system. This
* can be NULL if there is no initialization data; in that case, there
* will be at least one key ID.
*/
uint8_t* data;
uint32_t data_size;
/**
* An optional pointer to the next initialization info in the list.
*/
struct AVEncryptionInitInfo *next;
} AVEncryptionInitInfo;
/**
* Allocates an AVEncryptionInfo structure and sub-pointers to hold the given
* number of subsamples. This will allocate pointers for the key ID, IV,
* and subsample entries, set the size members, and zero-initialize the rest.
*
* @param subsample_count The number of subsamples.
* @param key_id_size The number of bytes in the key ID, should be 16.
* @param iv_size The number of bytes in the IV, should be 16.
*
* @return The new AVEncryptionInfo structure, or NULL on error.
*/
AVEncryptionInfo *av_encryption_info_alloc(uint32_t subsample_count, uint32_t key_id_size, uint32_t iv_size);
/**
* Allocates an AVEncryptionInfo structure with a copy of the given data.
* @return The new AVEncryptionInfo structure, or NULL on error.
*/
AVEncryptionInfo *av_encryption_info_clone(const AVEncryptionInfo *info);
/**
* Frees the given encryption info object. This MUST NOT be used to free the
 * side-data data pointer; use the normal side-data methods for that.
*/
void av_encryption_info_free(AVEncryptionInfo *info);
/**
* Creates a copy of the AVEncryptionInfo that is contained in the given side
* data. The resulting object should be passed to av_encryption_info_free()
* when done.
*
* @return The new AVEncryptionInfo structure, or NULL on error.
*/
AVEncryptionInfo *av_encryption_info_get_side_data(const uint8_t *side_data, size_t side_data_size);
/**
* Allocates and initializes side data that holds a copy of the given encryption
* info. The resulting pointer should be either freed using av_free or given
* to av_packet_add_side_data().
*
* @return The new side-data pointer, or NULL.
*/
uint8_t *av_encryption_info_add_side_data(
const AVEncryptionInfo *info, size_t *side_data_size);
/**
* Allocates an AVEncryptionInitInfo structure and sub-pointers to hold the
* given sizes. This will allocate pointers and set all the fields.
*
* @return The new AVEncryptionInitInfo structure, or NULL on error.
*/
AVEncryptionInitInfo *av_encryption_init_info_alloc(
uint32_t system_id_size, uint32_t num_key_ids, uint32_t key_id_size, uint32_t data_size);
/**
* Frees the given encryption init info object. This MUST NOT be used to free
 * the side-data data pointer; use the normal side-data methods for that.
*/
void av_encryption_init_info_free(AVEncryptionInitInfo* info);
/**
* Creates a copy of the AVEncryptionInitInfo that is contained in the given
* side data. The resulting object should be passed to
* av_encryption_init_info_free() when done.
*
* @return The new AVEncryptionInitInfo structure, or NULL on error.
*/
AVEncryptionInitInfo *av_encryption_init_info_get_side_data(
const uint8_t* side_data, size_t side_data_size);
/**
* Allocates and initializes side data that holds a copy of the given encryption
* init info. The resulting pointer should be either freed using av_free or
* given to av_packet_add_side_data().
*
* @return The new side-data pointer, or NULL.
*/
uint8_t *av_encryption_init_info_add_side_data(
const AVEncryptionInitInfo *info, size_t *side_data_size);
#endif /* AVUTIL_ENCRYPTION_INFO_H */
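A usage sketch for the allocation and side-data helpers above. The 'cenc' scheme tag, the subsample sizes and the 16-byte key ID/IV are placeholder values, and attaching the result with av_packet_add_side_data() is left to the caller as suggested by the documentation:

#include <string.h>

/* Hypothetical helper: describe a packet with two subsamples and serialize
 * the description as packet side data. Returns the new side-data buffer,
 * or NULL on allocation failure. */
static uint8_t *make_encryption_side_data(const uint8_t key_id[16],
                                          const uint8_t iv[16],
                                          size_t *side_data_size)
{
    uint8_t *side_data;
    AVEncryptionInfo *info = av_encryption_info_alloc(2, 16, 16);
    if (!info)
        return NULL;

    info->scheme = 0x63656E63;                    /* "cenc", big-endian fourcc */
    memcpy(info->key_id, key_id, 16);
    memcpy(info->iv, iv, 16);
    info->subsamples[0].bytes_of_clear_data     = 16;
    info->subsamples[0].bytes_of_protected_data = 240;
    info->subsamples[1].bytes_of_clear_data     = 0;
    info->subsamples[1].bytes_of_protected_data = 512;

    side_data = av_encryption_info_add_side_data(info, side_data_size);
    av_encryption_info_free(info);
    return side_data;
}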