Merge branch 'master' of gitlab.futo.org:videostreaming/grayjay

commit b9239b6177
Author: Kelvin
Date:   2025-07-07 16:18:35 +02:00
2 changed files with 134 additions and 124 deletions

StateCasting.kt

@@ -438,130 +438,108 @@ class StateCasting {
         _castId.incrementAndGet()
     }
 
-    fun castIfAvailable(contentResolver: ContentResolver, video: IPlatformVideoDetails, videoSource: IVideoSource?, audioSource: IAudioSource?, subtitleSource: ISubtitleSource?, ms: Long = -1, speed: Double?, onLoadingEstimate: ((Int) -> Unit)? = null, onLoading: ((Boolean) -> Unit)? = null): Boolean {
-        val ad = activeDevice ?: return false;
-        if (ad.connectionState != CastConnectionState.CONNECTED) {
-            return false;
-        }
-
-        val resumePosition = if (ms > 0L) (ms.toDouble() / 1000.0) else 0.0;
-        val castId = _castId.incrementAndGet()
-
-        var sourceCount = 0;
-        if (videoSource != null) sourceCount++;
-        if (audioSource != null) sourceCount++;
-        if (subtitleSource != null) sourceCount++;
-
-        if (sourceCount < 1) {
-            throw Exception("At least one source should be specified.");
-        }
-
-        if (sourceCount > 1) {
-            if (videoSource is LocalVideoSource || audioSource is LocalAudioSource || subtitleSource is LocalSubtitleSource) {
-                if (ad is AirPlayCastingDevice) {
-                    Logger.i(TAG, "Casting as local HLS");
-                    castLocalHls(video, videoSource as LocalVideoSource?, audioSource as LocalAudioSource?, subtitleSource as LocalSubtitleSource?, resumePosition, speed);
-                } else {
-                    Logger.i(TAG, "Casting as local DASH");
-                    castLocalDash(video, videoSource as LocalVideoSource?, audioSource as LocalAudioSource?, subtitleSource as LocalSubtitleSource?, resumePosition, speed);
-                }
-            } else {
-                StateApp.instance.scope.launch(Dispatchers.IO) {
-                    try {
-                        val isRawDash = videoSource is JSDashManifestRawSource || audioSource is JSDashManifestRawAudioSource
-                        if (isRawDash) {
-                            Logger.i(TAG, "Casting as raw DASH");
-                            try {
-                                castDashRaw(contentResolver, video, videoSource as JSDashManifestRawSource?, audioSource as JSDashManifestRawAudioSource?, subtitleSource, resumePosition, speed, castId, onLoadingEstimate, onLoading);
-                            } catch (e: Throwable) {
-                                Logger.e(TAG, "Failed to start casting DASH raw videoSource=${videoSource} audioSource=${audioSource}.", e);
-                            }
-                        } else {
-                            if (ad is FCastCastingDevice) {
-                                Logger.i(TAG, "Casting as DASH direct");
-                                castDashDirect(contentResolver, video, videoSource as IVideoUrlSource?, audioSource as IAudioUrlSource?, subtitleSource, resumePosition, speed);
-                            } else if (ad is AirPlayCastingDevice) {
-                                Logger.i(TAG, "Casting as HLS indirect");
-                                castHlsIndirect(contentResolver, video, videoSource as IVideoUrlSource?, audioSource as IAudioUrlSource?, subtitleSource, resumePosition, speed);
-                            } else {
-                                Logger.i(TAG, "Casting as DASH indirect");
-                                castDashIndirect(contentResolver, video, videoSource as IVideoUrlSource?, audioSource as IAudioUrlSource?, subtitleSource, resumePosition, speed);
-                            }
-                        }
-                    } catch (e: Throwable) {
-                        Logger.e(TAG, "Failed to start casting DASH videoSource=${videoSource} audioSource=${audioSource}.", e);
-                    }
-                }
-            }
-        } else {
-            val proxyStreams = Settings.instance.casting.alwaysProxyRequests;
-            val url = getLocalUrl(ad);
-            val id = UUID.randomUUID();
-
-            if (videoSource is IVideoUrlSource) {
-                val videoPath = "/video-${id}"
-                val videoUrl = if(proxyStreams) url + videoPath else videoSource.getVideoUrl();
-                Logger.i(TAG, "Casting as singular video");
-                ad.loadVideo(if (video.isLive) "LIVE" else "BUFFERED", videoSource.container, videoUrl, resumePosition, video.duration.toDouble(), speed);
-            } else if (audioSource is IAudioUrlSource) {
-                val audioPath = "/audio-${id}"
-                val audioUrl = if(proxyStreams) url + audioPath else audioSource.getAudioUrl();
-                Logger.i(TAG, "Casting as singular audio");
-                ad.loadVideo(if (video.isLive) "LIVE" else "BUFFERED", audioSource.container, audioUrl, resumePosition, video.duration.toDouble(), speed);
-            } else if(videoSource is IHLSManifestSource) {
-                if (proxyStreams || ad is ChromecastCastingDevice) {
-                    Logger.i(TAG, "Casting as proxied HLS");
-                    castProxiedHls(video, videoSource.url, videoSource.codec, resumePosition, speed);
-                } else {
-                    Logger.i(TAG, "Casting as non-proxied HLS");
-                    ad.loadVideo(if (video.isLive) "LIVE" else "BUFFERED", videoSource.container, videoSource.url, resumePosition, video.duration.toDouble(), speed);
-                }
-            } else if(audioSource is IHLSManifestAudioSource) {
-                if (proxyStreams || ad is ChromecastCastingDevice) {
-                    Logger.i(TAG, "Casting as proxied audio HLS");
-                    castProxiedHls(video, audioSource.url, audioSource.codec, resumePosition, speed);
-                } else {
-                    Logger.i(TAG, "Casting as non-proxied audio HLS");
-                    ad.loadVideo(if (video.isLive) "LIVE" else "BUFFERED", audioSource.container, audioSource.url, resumePosition, video.duration.toDouble(), speed);
-                }
-            } else if (videoSource is LocalVideoSource) {
-                Logger.i(TAG, "Casting as local video");
-                castLocalVideo(video, videoSource, resumePosition, speed);
-            } else if (audioSource is LocalAudioSource) {
-                Logger.i(TAG, "Casting as local audio");
-                castLocalAudio(video, audioSource, resumePosition, speed);
-            } else if (videoSource is JSDashManifestRawSource) {
-                Logger.i(TAG, "Casting as JSDashManifestRawSource video");
-                StateApp.instance.scope.launch(Dispatchers.IO) {
-                    try {
-                        castDashRaw(contentResolver, video, videoSource as JSDashManifestRawSource?, null, null, resumePosition, speed, castId, onLoadingEstimate, onLoading);
-                    } catch (e: Throwable) {
-                        Logger.e(TAG, "Failed to start casting DASH raw videoSource=${videoSource}.", e);
-                    }
-                }
-            } else if (audioSource is JSDashManifestRawAudioSource) {
-                Logger.i(TAG, "Casting as JSDashManifestRawSource audio");
-                StateApp.instance.scope.launch(Dispatchers.IO) {
-                    try {
-                        castDashRaw(contentResolver, video, null, audioSource as JSDashManifestRawAudioSource?, null, resumePosition, speed, castId, onLoadingEstimate, onLoading);
-                    } catch (e: Throwable) {
-                        Logger.e(TAG, "Failed to start casting DASH raw audioSource=${audioSource}.", e);
-                    }
-                }
-            } else {
-                var str = listOf(
-                    if(videoSource != null) "Video: ${videoSource::class.java.simpleName}" else null,
-                    if(audioSource != null) "Audio: ${audioSource::class.java.simpleName}" else null,
-                    if(subtitleSource != null) "Subtitles: ${subtitleSource::class.java.simpleName}" else null
-                ).filterNotNull().joinToString(", ");
-                throw UnsupportedCastException(str);
-            }
-        }
-
-        return true;
+    suspend fun castIfAvailable(contentResolver: ContentResolver, video: IPlatformVideoDetails, videoSource: IVideoSource?, audioSource: IAudioSource?, subtitleSource: ISubtitleSource?, ms: Long = -1, speed: Double?, onLoadingEstimate: ((Int) -> Unit)? = null, onLoading: ((Boolean) -> Unit)? = null): Boolean {
+        return withContext(Dispatchers.IO) {
+            val ad = activeDevice ?: return@withContext false;
+            if (ad.connectionState != CastConnectionState.CONNECTED) {
+                return@withContext false;
+            }
+
+            val resumePosition = if (ms > 0L) (ms.toDouble() / 1000.0) else 0.0;
+            val castId = _castId.incrementAndGet()
+
+            var sourceCount = 0;
+            if (videoSource != null) sourceCount++;
+            if (audioSource != null) sourceCount++;
+            if (subtitleSource != null) sourceCount++;
+
+            if (sourceCount < 1) {
+                throw Exception("At least one source should be specified.");
+            }
+
+            if (sourceCount > 1) {
+                if (videoSource is LocalVideoSource || audioSource is LocalAudioSource || subtitleSource is LocalSubtitleSource) {
+                    if (ad is AirPlayCastingDevice) {
+                        Logger.i(TAG, "Casting as local HLS");
+                        castLocalHls(video, videoSource as LocalVideoSource?, audioSource as LocalAudioSource?, subtitleSource as LocalSubtitleSource?, resumePosition, speed);
+                    } else {
+                        Logger.i(TAG, "Casting as local DASH");
+                        castLocalDash(video, videoSource as LocalVideoSource?, audioSource as LocalAudioSource?, subtitleSource as LocalSubtitleSource?, resumePosition, speed);
+                    }
+                } else {
+                    val isRawDash = videoSource is JSDashManifestRawSource || audioSource is JSDashManifestRawAudioSource
+                    if (isRawDash) {
+                        Logger.i(TAG, "Casting as raw DASH");
+                        castDashRaw(contentResolver, video, videoSource as JSDashManifestRawSource?, audioSource as JSDashManifestRawAudioSource?, subtitleSource, resumePosition, speed, castId, onLoadingEstimate, onLoading);
+                    } else {
+                        if (ad is FCastCastingDevice) {
+                            Logger.i(TAG, "Casting as DASH direct");
+                            castDashDirect(contentResolver, video, videoSource as IVideoUrlSource?, audioSource as IAudioUrlSource?, subtitleSource, resumePosition, speed);
+                        } else if (ad is AirPlayCastingDevice) {
+                            Logger.i(TAG, "Casting as HLS indirect");
+                            castHlsIndirect(contentResolver, video, videoSource as IVideoUrlSource?, audioSource as IAudioUrlSource?, subtitleSource, resumePosition, speed);
+                        } else {
+                            Logger.i(TAG, "Casting as DASH indirect");
+                            castDashIndirect(contentResolver, video, videoSource as IVideoUrlSource?, audioSource as IAudioUrlSource?, subtitleSource, resumePosition, speed);
+                        }
+                    }
+                }
+            } else {
+                val proxyStreams = Settings.instance.casting.alwaysProxyRequests;
+                val url = getLocalUrl(ad);
+                val id = UUID.randomUUID();
+
+                if (videoSource is IVideoUrlSource) {
+                    val videoPath = "/video-${id}"
+                    val videoUrl = if(proxyStreams) url + videoPath else videoSource.getVideoUrl();
+                    Logger.i(TAG, "Casting as singular video");
+                    ad.loadVideo(if (video.isLive) "LIVE" else "BUFFERED", videoSource.container, videoUrl, resumePosition, video.duration.toDouble(), speed);
+                } else if (audioSource is IAudioUrlSource) {
+                    val audioPath = "/audio-${id}"
+                    val audioUrl = if(proxyStreams) url + audioPath else audioSource.getAudioUrl();
+                    Logger.i(TAG, "Casting as singular audio");
+                    ad.loadVideo(if (video.isLive) "LIVE" else "BUFFERED", audioSource.container, audioUrl, resumePosition, video.duration.toDouble(), speed);
+                } else if(videoSource is IHLSManifestSource) {
+                    if (proxyStreams || ad is ChromecastCastingDevice) {
+                        Logger.i(TAG, "Casting as proxied HLS");
+                        castProxiedHls(video, videoSource.url, videoSource.codec, resumePosition, speed);
+                    } else {
+                        Logger.i(TAG, "Casting as non-proxied HLS");
+                        ad.loadVideo(if (video.isLive) "LIVE" else "BUFFERED", videoSource.container, videoSource.url, resumePosition, video.duration.toDouble(), speed);
+                    }
+                } else if(audioSource is IHLSManifestAudioSource) {
+                    if (proxyStreams || ad is ChromecastCastingDevice) {
+                        Logger.i(TAG, "Casting as proxied audio HLS");
+                        castProxiedHls(video, audioSource.url, audioSource.codec, resumePosition, speed);
+                    } else {
+                        Logger.i(TAG, "Casting as non-proxied audio HLS");
+                        ad.loadVideo(if (video.isLive) "LIVE" else "BUFFERED", audioSource.container, audioSource.url, resumePosition, video.duration.toDouble(), speed);
+                    }
+                } else if (videoSource is LocalVideoSource) {
+                    Logger.i(TAG, "Casting as local video");
+                    castLocalVideo(video, videoSource, resumePosition, speed);
+                } else if (audioSource is LocalAudioSource) {
+                    Logger.i(TAG, "Casting as local audio");
+                    castLocalAudio(video, audioSource, resumePosition, speed);
+                } else if (videoSource is JSDashManifestRawSource) {
+                    Logger.i(TAG, "Casting as JSDashManifestRawSource video");
+                    castDashRaw(contentResolver, video, videoSource as JSDashManifestRawSource?, null, null, resumePosition, speed, castId, onLoadingEstimate, onLoading);
+                } else if (audioSource is JSDashManifestRawAudioSource) {
+                    Logger.i(TAG, "Casting as JSDashManifestRawSource audio");
+                    castDashRaw(contentResolver, video, null, audioSource as JSDashManifestRawAudioSource?, null, resumePosition, speed, castId, onLoadingEstimate, onLoading);
+                } else {
+                    var str = listOf(
+                        if(videoSource != null) "Video: ${videoSource::class.java.simpleName}" else null,
+                        if(audioSource != null) "Audio: ${audioSource::class.java.simpleName}" else null,
+                        if(subtitleSource != null) "Subtitles: ${subtitleSource::class.java.simpleName}" else null
+                    ).filterNotNull().joinToString(", ");
+                    throw UnsupportedCastException(str);
+                }
+            }
+
+            return@withContext true;
+        }
     }
 
     fun resumeVideo(): Boolean {
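Since castIfAvailable is now a suspend function that moves its work onto Dispatchers.IO, it can no longer be invoked synchronously from UI code; callers have to launch a coroutine and hop back to the main thread for UI updates. A minimal caller sketch (illustrative only: someScope and the callback bodies are placeholders, the signature and StateCasting.instance come from the diff above; the real caller added in VideoDetailView.kt below additionally handles plugin reloads):

    someScope.launch(Dispatchers.IO) {  // e.g. a Fragment's lifecycleScope
        // Returns false when no casting device is connected.
        val started = StateCasting.instance.castIfAvailable(
            context.contentResolver, video, videoSource, audioSource, subtitleSource,
            resumePositionMs, speed,
            onLoading = { loading -> /* update the cast loading indicator */ }
        )
        if (started) {
            withContext(Dispatchers.Main) { /* reflect the casting state in the UI */ }
        }
    }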

VideoDetailView.kt

@@ -4,6 +4,7 @@ import android.app.PictureInPictureParams
 import android.app.RemoteAction
 import android.content.ClipData
 import android.content.ClipboardManager
+import android.content.ContentResolver
 import android.content.Context
 import android.content.Intent
 import android.content.res.Configuration
@@ -79,7 +80,9 @@ import com.futo.platformplayer.api.media.models.video.IPlatformVideoDetails
 import com.futo.platformplayer.api.media.models.video.SerializedPlatformVideo
 import com.futo.platformplayer.api.media.platforms.js.JSClient
 import com.futo.platformplayer.api.media.platforms.js.SourcePluginConfig
+import com.futo.platformplayer.api.media.platforms.js.models.JSVideo
 import com.futo.platformplayer.api.media.platforms.js.models.JSVideoDetails
+import com.futo.platformplayer.api.media.platforms.js.models.sources.JSSource
 import com.futo.platformplayer.api.media.structures.IPager
 import com.futo.platformplayer.casting.CastConnectionState
 import com.futo.platformplayer.casting.StateCasting
@@ -1900,17 +1903,46 @@ class VideoDetailView : ConstraintLayout {
     }
 
     private fun loadCurrentVideoCast(video: IPlatformVideoDetails, videoSource: IVideoSource?, audioSource: IAudioSource?, subtitleSource: ISubtitleSource?, resumePositionMs: Long, speed: Double?) {
         Logger.i(TAG, "loadCurrentVideoCast(video=$video, videoSource=$videoSource, audioSource=$audioSource, resumePositionMs=$resumePositionMs)")
+        castIfAvailable(context.contentResolver, video, videoSource, audioSource, subtitleSource, resumePositionMs, speed)
+    }
 
-        val castSucceeded = StateCasting.instance.castIfAvailable(context.contentResolver, video, videoSource, audioSource, subtitleSource, resumePositionMs, speed, onLoading = {
-            _cast.setLoading(it)
-        }, onLoadingEstimate = {
-            _cast.setLoading(it)
-        })
-
-        if (castSucceeded) {
-            _cast.setVideoDetails(video, resumePositionMs / 1000);
-            setCastEnabled(true);
-        } else throw IllegalStateException("Disconnected cast during loading");
+    private fun castIfAvailable(contentResolver: ContentResolver, video: IPlatformVideoDetails, videoSource: IVideoSource?, audioSource: IAudioSource?, subtitleSource: ISubtitleSource?, resumePositionMs: Long, speed: Double?) {
+        fragment.lifecycleScope.launch(Dispatchers.IO) {
+            try {
+                val plugin = if (videoSource is JSSource) videoSource.getUnderlyingPlugin()
+                else if (audioSource is JSSource) audioSource.getUnderlyingPlugin()
+                else null
+                val startId = plugin?.getUnderlyingPlugin()?.runtimeId
+
+                try {
+                    val castingSucceeded = StateCasting.instance.castIfAvailable(contentResolver, video, videoSource, audioSource, subtitleSource, resumePositionMs, speed, onLoading = {
+                        _cast.setLoading(it)
+                    }, onLoadingEstimate = {
+                        _cast.setLoading(it)
+                    })
+
+                    if (castingSucceeded) {
+                        withContext(Dispatchers.Main) {
+                            _cast.setVideoDetails(video, resumePositionMs / 1000);
+                            setCastEnabled(true);
+                        }
+                    }
+                } catch (e: ScriptReloadRequiredException) {
+                    Log.i(TAG, "Reload required exception", e)
+
+                    if (plugin == null)
+                        throw e
+                    if (startId != -1 && plugin.getUnderlyingPlugin().runtimeId != startId)
+                        throw e
+                    StatePlatform.instance.handleReloadRequired(e, {
+                        fetchVideo()
+                    });
+                }
+            } catch (e: Throwable) {
+                Logger.e(TAG, "loadCurrentVideoCast", e)
+            }
+        }
     }
 
     //Events
@@ -2423,7 +2455,7 @@ class VideoDetailView : ConstraintLayout {
         val d = StateCasting.instance.activeDevice;
         if (d != null && d.connectionState == CastConnectionState.CONNECTED)
-            StateCasting.instance.castIfAvailable(context.contentResolver, video, videoSource, _lastAudioSource, _lastSubtitleSource, (d.expectedCurrentTime * 1000.0).toLong(), d.speed);
+            castIfAvailable(context.contentResolver, video, videoSource, _lastAudioSource, _lastSubtitleSource, (d.expectedCurrentTime * 1000.0).toLong(), d.speed);
         else if(!_player.swapSources(videoSource, _lastAudioSource, true, true, true))
             _player.hideControls(false); //TODO: Disable player?
@@ -2438,7 +2470,7 @@ class VideoDetailView : ConstraintLayout {
         val d = StateCasting.instance.activeDevice;
         if (d != null && d.connectionState == CastConnectionState.CONNECTED)
-            StateCasting.instance.castIfAvailable(context.contentResolver, video, _lastVideoSource, audioSource, _lastSubtitleSource, (d.expectedCurrentTime * 1000.0).toLong(), d.speed);
+            castIfAvailable(context.contentResolver, video, _lastVideoSource, audioSource, _lastSubtitleSource, (d.expectedCurrentTime * 1000.0).toLong(), d.speed)
         else(!_player.swapSources(_lastVideoSource, audioSource, true, true, true))
             _player.hideControls(false); //TODO: Disable player?
@@ -2454,7 +2486,7 @@ class VideoDetailView : ConstraintLayout {
         val d = StateCasting.instance.activeDevice;
         if (d != null && d.connectionState == CastConnectionState.CONNECTED)
-            StateCasting.instance.castIfAvailable(context.contentResolver, video, _lastVideoSource, _lastAudioSource, toSet, (d.expectedCurrentTime * 1000.0).toLong(), d.speed);
+            castIfAvailable(context.contentResolver, video, _lastVideoSource, _lastAudioSource, toSet, (d.expectedCurrentTime * 1000.0).toLong(), d.speed);
         else
             _player.swapSubtitles(fragment.lifecycleScope, toSet);
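The reload guard in the new private castIfAvailable wrapper of VideoDetailView boils down to the pattern below (a commented sketch of the logic in the hunk above, not code from the commit; attemptCast() stands in for the suspend StateCasting.instance.castIfAvailable call, the other names are taken from the diff):

    try {
        attemptCast()
    } catch (e: ScriptReloadRequiredException) {
        // Rethrow when the source did not come from a JS plugin, or when that
        // plugin's runtime was already swapped out since the attempt started.
        if (plugin == null)
            throw e
        if (startId != -1 && plugin.getUnderlyingPlugin().runtimeId != startId)
            throw e
        // Otherwise let StatePlatform reload the plugin and re-fetch the video.
        StatePlatform.instance.handleReloadRequired(e, {
            fetchVideo()
        });
    }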