Add audio playback capture method
Add a new method to capture audio playback.

It requires Android 13 (where the Shell app has MODIFY_AUDIO_ROUTING permission).

The main benefit is that it supports keeping audio playing on the device (implemented in a further commit).

Fixes #4380 <https://github.com/Genymobile/scrcpy/issues/4380>
PR #5102 <https://github.com/Genymobile/scrcpy/pull/5102>

Co-authored-by: Simon Chan <1330321+yume-chan@users.noreply.github.com>
commit a10f8cd798
parent 53c6eb66ea

9 changed files with 182 additions and 9 deletions
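With this change, the playback source can be requested from the client command line, for example:

    scrcpy --audio-source=playback

The completion scripts, man page and --help text below are updated accordingly.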
@@ -111,7 +111,7 @@ _scrcpy() {
             return
             ;;
         --audio-source)
-            COMPREPLY=($(compgen -W 'output mic' -- "$cur"))
+            COMPREPLY=($(compgen -W 'output mic playback' -- "$cur"))
             return
             ;;
         --camera-facing)

@@ -14,7 +14,7 @@ arguments=(
     '--audio-codec=[Select the audio codec]:codec:(opus aac flac raw)'
     '--audio-codec-options=[Set a list of comma-separated key\:type=value options for the device audio encoder]'
     '--audio-encoder=[Use a specific MediaCodec audio encoder]'
-    '--audio-source=[Select the audio source]:source:(output mic)'
+    '--audio-source=[Select the audio source]:source:(output mic playback)'
     '--audio-output-buffer=[Configure the size of the SDL audio output buffer (in milliseconds)]'
     {-b,--video-bit-rate=}'[Encode the video at the given bit-rate]'
     '--camera-ar=[Select the camera size by its aspect ratio]'

@@ -57,7 +57,13 @@ The available encoders can be listed by \fB\-\-list\-encoders\fR.
 
 .TP
 .BI "\-\-audio\-source " source
-Select the audio source (output or mic).
+Select the audio source (output, mic or playback).
+
+The "output" source forwards the whole audio output, and disables playback on the device.
+
+The "playback" source captures the audio playback (Android apps can opt-out, so the whole output is not necessarily captured).
+
+The "mic" source captures the microphone.
 
 Default is output.
 

@@ -189,7 +189,13 @@ static const struct sc_option options[] = {
         .longopt_id = OPT_AUDIO_SOURCE,
         .longopt = "audio-source",
         .argdesc = "source",
-        .text = "Select the audio source (output or mic).\n"
+        .text = "Select the audio source (output, mic or playback).\n"
+                "The \"output\" source forwards the whole audio output, and "
+                "disables playback on the device.\n"
+                "The \"playback\" source captures the audio playback (Android "
+                "apps can opt-out, so the whole output is not necessarily "
+                "captured).\n"
+                "The \"mic\" source captures the microphone.\n"
                 "Default is output.",
     },
     {
@@ -1931,7 +1937,13 @@ parse_audio_source(const char *optarg, enum sc_audio_source *source) {
         return true;
     }
 
-    LOGE("Unsupported audio source: %s (expected output or mic)", optarg);
+    if (!strcmp(optarg, "playback")) {
+        *source = SC_AUDIO_SOURCE_PLAYBACK;
+        return true;
+    }
+
+    LOGE("Unsupported audio source: %s (expected output, mic or playback)",
+         optarg);
     return false;
 }
 

@@ -59,6 +59,7 @@ enum sc_audio_source {
     SC_AUDIO_SOURCE_AUTO, // OUTPUT for video DISPLAY, MIC for video CAMERA
     SC_AUDIO_SOURCE_OUTPUT,
     SC_AUDIO_SOURCE_MIC,
+    SC_AUDIO_SOURCE_PLAYBACK,
 };
 
 enum sc_camera_facing {

@@ -203,6 +203,21 @@ sc_server_get_camera_facing_name(enum sc_camera_facing camera_facing) {
     }
 }
 
+static const char *
+sc_server_get_audio_source_name(enum sc_audio_source audio_source) {
+    switch (audio_source) {
+        case SC_AUDIO_SOURCE_OUTPUT:
+            return "output";
+        case SC_AUDIO_SOURCE_MIC:
+            return "mic";
+        case SC_AUDIO_SOURCE_PLAYBACK:
+            return "playback";
+        default:
+            assert(!"unexpected audio source");
+            return NULL;
+    }
+}
+
 static sc_pid
 execute_server(struct sc_server *server,
                const struct sc_server_params *params) {
@@ -273,8 +288,9 @@ execute_server(struct sc_server *server,
         assert(params->video_source == SC_VIDEO_SOURCE_CAMERA);
         ADD_PARAM("video_source=camera");
     }
-    if (params->audio_source == SC_AUDIO_SOURCE_MIC) {
-        ADD_PARAM("audio_source=mic");
+    if (params->audio_source != SC_AUDIO_SOURCE_OUTPUT) {
+        ADD_PARAM("audio_source=%s",
+                  sc_server_get_audio_source_name(params->audio_source));
     }
     if (params->max_size) {
         ADD_PARAM("max_size=%" PRIu16, params->max_size);

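In effect, the client now forwards any non-default audio source to the server process as a key=value argument (for example audio_source=playback) instead of special-casing only the mic source; on the server side, AudioSource.findByName() (last hunk of this commit) can map that string back to the corresponding enum constant.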
@@ -4,7 +4,9 @@ import com.genymobile.scrcpy.audio.AudioCapture;
 import com.genymobile.scrcpy.audio.AudioCodec;
 import com.genymobile.scrcpy.audio.AudioDirectCapture;
 import com.genymobile.scrcpy.audio.AudioEncoder;
+import com.genymobile.scrcpy.audio.AudioPlaybackCapture;
 import com.genymobile.scrcpy.audio.AudioRawRecorder;
+import com.genymobile.scrcpy.audio.AudioSource;
 import com.genymobile.scrcpy.control.ControlChannel;
 import com.genymobile.scrcpy.control.Controller;
 import com.genymobile.scrcpy.control.DeviceMessage;
@@ -164,7 +166,8 @@ public final class Server {
 
         if (audio) {
             AudioCodec audioCodec = options.getAudioCodec();
-            AudioCapture audioCapture = new AudioDirectCapture(options.getAudioSource());
+            AudioSource audioSource = options.getAudioSource();
+            AudioCapture audioCapture = audioSource.isDirect() ? new AudioDirectCapture(audioSource) : new AudioPlaybackCapture();
             Streamer audioStreamer = new Streamer(connection.getAudioFd(), audioCodec, options.getSendCodecMeta(), options.getSendFrameMeta());
             AsyncProcessor audioRecorder;
             if (audioCodec == AudioCodec.RAW) {

@@ -0,0 +1,130 @@
+package com.genymobile.scrcpy.audio;
+
+import com.genymobile.scrcpy.FakeContext;
+import com.genymobile.scrcpy.util.Ln;
+
+import android.annotation.SuppressLint;
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.media.AudioAttributes;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioRecord;
+import android.media.MediaCodec;
+import android.os.Build;
+
+import java.lang.reflect.Method;
+import java.nio.ByteBuffer;
+
+public final class AudioPlaybackCapture implements AudioCapture {
+
+    private AudioRecord recorder;
+    private AudioRecordReader reader;
+
+    @SuppressLint("PrivateApi")
+    private AudioRecord createAudioRecord() throws AudioCaptureException {
+        // See <https://github.com/Genymobile/scrcpy/issues/4380>
+        try {
+            Class<?> audioMixingRuleClass = Class.forName("android.media.audiopolicy.AudioMixingRule");
+            Class<?> audioMixingRuleBuilderClass = Class.forName("android.media.audiopolicy.AudioMixingRule$Builder");
+
+            // AudioMixingRule.Builder audioMixingRuleBuilder = new AudioMixingRule.Builder();
+            Object audioMixingRuleBuilder = audioMixingRuleBuilderClass.getConstructor().newInstance();
+
+            // audioMixingRuleBuilder.setTargetMixRole(AudioMixingRule.MIX_ROLE_PLAYERS);
+            int mixRolePlayersConstant = audioMixingRuleClass.getField("MIX_ROLE_PLAYERS").getInt(null);
+            Method setTargetMixRoleMethod = audioMixingRuleBuilderClass.getMethod("setTargetMixRole", int.class);
+            setTargetMixRoleMethod.invoke(audioMixingRuleBuilder, mixRolePlayersConstant);
+
+            AudioAttributes attributes = new AudioAttributes.Builder().setUsage(AudioAttributes.USAGE_MEDIA).build();
+
+            // audioMixingRuleBuilder.addMixRule(AudioMixingRule.RULE_MATCH_ATTRIBUTE_USAGE, attributes);
+            int ruleMatchAttributeUsageConstant = audioMixingRuleClass.getField("RULE_MATCH_ATTRIBUTE_USAGE").getInt(null);
+            Method addMixRuleMethod = audioMixingRuleBuilderClass.getMethod("addMixRule", int.class, Object.class);
+            addMixRuleMethod.invoke(audioMixingRuleBuilder, ruleMatchAttributeUsageConstant, attributes);
+
+            // AudioMixingRule audioMixingRule = builder.build();
+            Object audioMixingRule = audioMixingRuleBuilderClass.getMethod("build").invoke(audioMixingRuleBuilder);
+
+            // audioMixingRuleBuilder.voiceCommunicationCaptureAllowed(true);
+            Method voiceCommunicationCaptureAllowedMethod = audioMixingRuleBuilderClass.getMethod("voiceCommunicationCaptureAllowed", boolean.class);
+            voiceCommunicationCaptureAllowedMethod.invoke(audioMixingRuleBuilder, true);
+
+            Class<?> audioMixClass = Class.forName("android.media.audiopolicy.AudioMix");
+            Class<?> audioMixBuilderClass = Class.forName("android.media.audiopolicy.AudioMix$Builder");
+
+            // AudioMix.Builder audioMixBuilder = new AudioMix.Builder(audioMixingRule);
+            Object audioMixBuilder = audioMixBuilderClass.getConstructor(audioMixingRuleClass).newInstance(audioMixingRule);
+
+            // audioMixBuilder.setFormat(createAudioFormat());
+            Method setFormat = audioMixBuilder.getClass().getMethod("setFormat", AudioFormat.class);
+            setFormat.invoke(audioMixBuilder, AudioConfig.createAudioFormat());
+
+            int routeFlags = audioMixClass.getField("ROUTE_FLAG_LOOP_BACK").getInt(null);
+
+            // audioMixBuilder.setRouteFlags(routeFlag);
+            Method setRouteFlags = audioMixBuilder.getClass().getMethod("setRouteFlags", int.class);
+            setRouteFlags.invoke(audioMixBuilder, routeFlags);
+
+            // AudioMix audioMix = audioMixBuilder.build();
+            Object audioMix = audioMixBuilderClass.getMethod("build").invoke(audioMixBuilder);
+
+            Class<?> audioPolicyClass = Class.forName("android.media.audiopolicy.AudioPolicy");
+            Class<?> audioPolicyBuilderClass = Class.forName("android.media.audiopolicy.AudioPolicy$Builder");
+
+            // AudioPolicy.Builder audioPolicyBuilder = new AudioPolicy.Builder();
+            Object audioPolicyBuilder = audioPolicyBuilderClass.getConstructor(Context.class).newInstance(FakeContext.get());
+
+            // audioPolicyBuilder.addMix(audioMix);
+            Method addMixMethod = audioPolicyBuilderClass.getMethod("addMix", audioMixClass);
+            addMixMethod.invoke(audioPolicyBuilder, audioMix);
+
+            // AudioPolicy audioPolicy = audioPolicyBuilder.build();
+            Object audioPolicy = audioPolicyBuilderClass.getMethod("build").invoke(audioPolicyBuilder);
+
+            // AudioManager.registerAudioPolicyStatic(audioPolicy);
+            Method registerAudioPolicyStaticMethod = AudioManager.class.getDeclaredMethod("registerAudioPolicyStatic", audioPolicyClass);
+            registerAudioPolicyStaticMethod.setAccessible(true);
+            int result = (int) registerAudioPolicyStaticMethod.invoke(null, audioPolicy);
+            if (result != 0) {
+                throw new RuntimeException("registerAudioPolicy() returned " + result);
+            }
+
+            // audioPolicy.createAudioRecordSink(audioPolicy);
+            Method createAudioRecordSinkClass = audioPolicyClass.getMethod("createAudioRecordSink", audioMixClass);
+            return (AudioRecord) createAudioRecordSinkClass.invoke(audioPolicy, audioMix);
+        } catch (Exception e) {
+            Ln.e("Could not capture audio playback", e);
+            throw new AudioCaptureException();
+        }
+    }
+
+    @Override
+    public void checkCompatibility() throws AudioCaptureException {
+        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.TIRAMISU) {
+            Ln.w("Audio disabled: audio playback capture source not supported before Android 13");
+            throw new AudioCaptureException();
+        }
+    }
+
+    @Override
+    public void start() throws AudioCaptureException {
+        recorder = createAudioRecord();
+        recorder.startRecording();
+        reader = new AudioRecordReader(recorder);
+    }
+
+    @Override
+    public void stop() {
+        if (recorder != null) {
+            // Will call .stop() if necessary, without throwing an IllegalStateException
+            recorder.release();
+        }
+    }
+
+    @Override
+    @TargetApi(Build.VERSION_CODES.N)
+    public int read(ByteBuffer outDirectBuffer, MediaCodec.BufferInfo outBufferInfo) {
+        return reader.read(outDirectBuffer, outBufferInfo);
+    }
+}
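For readability, the reflection chain in createAudioRecord() above corresponds roughly to the following sketch, assembled from the commit's own inline comments. It is only illustrative: the android.media.audiopolicy classes are hidden system APIs, so this form does not compile against the public SDK, which is exactly why the committed code goes through reflection.

    // Illustrative only: AudioMixingRule, AudioMix and AudioPolicy are hidden APIs.
    AudioMixingRule.Builder ruleBuilder = new AudioMixingRule.Builder();
    ruleBuilder.setTargetMixRole(AudioMixingRule.MIX_ROLE_PLAYERS);
    AudioAttributes attributes = new AudioAttributes.Builder().setUsage(AudioAttributes.USAGE_MEDIA).build();
    ruleBuilder.addMixRule(AudioMixingRule.RULE_MATCH_ATTRIBUTE_USAGE, attributes);
    AudioMixingRule rule = ruleBuilder.build();
    ruleBuilder.voiceCommunicationCaptureAllowed(true); // invoked after build() in the reflective code above

    AudioMix.Builder mixBuilder = new AudioMix.Builder(rule);
    mixBuilder.setFormat(AudioConfig.createAudioFormat());
    mixBuilder.setRouteFlags(AudioMix.ROUTE_FLAG_LOOP_BACK); // loop the matched playback back into a record sink
    AudioMix audioMix = mixBuilder.build();

    AudioPolicy.Builder policyBuilder = new AudioPolicy.Builder(FakeContext.get());
    policyBuilder.addMix(audioMix);
    AudioPolicy audioPolicy = policyBuilder.build();

    // Requires MODIFY_AUDIO_ROUTING (held by the Shell app on Android 13+); returns 0 on success
    AudioManager.registerAudioPolicyStatic(audioPolicy);

    // The returned AudioRecord reads the looped-back playback stream
    AudioRecord recorder = audioPolicy.createAudioRecordSink(audioMix);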
@@ -2,7 +2,8 @@ package com.genymobile.scrcpy.audio;
 
 public enum AudioSource {
     OUTPUT("output"),
-    MIC("mic");
+    MIC("mic"),
+    PLAYBACK("playback");
 
     private final String name;
 
@@ -10,6 +11,10 @@ public enum AudioSource {
         this.name = name;
     }
 
+    public boolean isDirect() {
+        return this != PLAYBACK;
+    }
+
     public static AudioSource findByName(String name) {
         for (AudioSource audioSource : AudioSource.values()) {
             if (name.equals(audioSource.name)) {
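A hypothetical snippet (not part of the commit) showing how the new constant and helper tie into the Server change above:

    AudioSource source = AudioSource.findByName("playback"); // -> AudioSource.PLAYBACK
    boolean direct = source.isDirect();                      // false, so Server picks AudioPlaybackCapture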