diff --git a/Ryujinx.Audio/Adpcm/AdpcmDecoder.cs b/Ryujinx.Audio/Adpcm/AdpcmDecoder.cs new file mode 100644 index 0000000000..c207bd69b1 --- /dev/null +++ b/Ryujinx.Audio/Adpcm/AdpcmDecoder.cs @@ -0,0 +1,91 @@ +namespace Ryujinx.Audio.Adpcm +{ + public static class AdpcmDecoder + { + private const int SamplesPerFrame = 14; + private const int BytesPerFrame = 8; + + public static short[] Decode(byte[] Buffer, AdpcmDecoderContext Context) + { + int Samples = GetSamplesCountFromSize(Buffer.Length); + + short[] Pcm = new short[Samples * 2]; + + short History0 = Context.History0; + short History1 = Context.History1; + + int InputOffset = 0; + int OutputOffset = 0; + + while (InputOffset < Buffer.Length) + { + byte Header = Buffer[InputOffset++]; + + int Scale = 0x800 << (Header & 0xf); + + int CoeffIndex = Header >> 4; + + short Coeff0 = Context.Coefficients[CoeffIndex * 2 + 0]; + short Coeff1 = Context.Coefficients[CoeffIndex * 2 + 1]; + + int FrameSamples = SamplesPerFrame; + + if (FrameSamples > Samples) + { + FrameSamples = Samples; + } + + int Value = 0; + + for (int SampleIndex = 0; SampleIndex < FrameSamples; SampleIndex++) + { + int Sample; + + if ((SampleIndex & 1) == 0) + { + Value = Buffer[InputOffset++]; + + Sample = (Value << 24) >> 28; + } + else + { + Sample = (Value << 28) >> 28; + } + + int Prediction = Coeff0 * History0 + Coeff1 * History1; + + Sample = (Sample * Scale + Prediction + 0x400) >> 11; + + short SaturatedSample = DspUtils.Saturate(Sample); + + History1 = History0; + History0 = SaturatedSample; + + Pcm[OutputOffset++] = SaturatedSample; + Pcm[OutputOffset++] = SaturatedSample; + } + + Samples -= FrameSamples; + } + + Context.History0 = History0; + Context.History1 = History1; + + return Pcm; + } + + public static long GetSizeFromSamplesCount(int SamplesCount) + { + int Frames = SamplesCount / SamplesPerFrame; + + return Frames * BytesPerFrame; + } + + public static int GetSamplesCountFromSize(long Size) + { + int Frames = (int)(Size / BytesPerFrame); + + return Frames * SamplesPerFrame; + } + } +} \ No newline at end of file diff --git a/Ryujinx.Audio/Adpcm/AdpcmDecoderContext.cs b/Ryujinx.Audio/Adpcm/AdpcmDecoderContext.cs new file mode 100644 index 0000000000..91730333c8 --- /dev/null +++ b/Ryujinx.Audio/Adpcm/AdpcmDecoderContext.cs @@ -0,0 +1,10 @@ +namespace Ryujinx.Audio.Adpcm +{ + public class AdpcmDecoderContext + { + public short[] Coefficients; + + public short History0; + public short History1; + } +} \ No newline at end of file diff --git a/Ryujinx.Audio/DspUtils.cs b/Ryujinx.Audio/DspUtils.cs new file mode 100644 index 0000000000..c048161dae --- /dev/null +++ b/Ryujinx.Audio/DspUtils.cs @@ -0,0 +1,16 @@ +namespace Ryujinx.Audio.Adpcm +{ + public static class DspUtils + { + public static short Saturate(int Value) + { + if (Value > short.MaxValue) + Value = short.MaxValue; + + if (Value < short.MinValue) + Value = short.MinValue; + + return (short)Value; + } + } +} \ No newline at end of file diff --git a/Ryujinx.Audio/IAalOutput.cs b/Ryujinx.Audio/IAalOutput.cs index f9978ee4d9..7632fa9822 100644 --- a/Ryujinx.Audio/IAalOutput.cs +++ b/Ryujinx.Audio/IAalOutput.cs @@ -14,7 +14,7 @@ namespace Ryujinx.Audio long[] GetReleasedBuffers(int Track, int MaxCount); - void AppendBuffer(int Track, long Tag, byte[] Buffer); + void AppendBuffer(int Track, long Tag, T[] Buffer) where T : struct; void Start(int Track); void Stop(int Track); diff --git a/Ryujinx.Audio/OpenAL/OpenALAudioOut.cs b/Ryujinx.Audio/OpenAL/OpenALAudioOut.cs index 2860dc2e2d..1a443cbba8 100644 --- 
a/Ryujinx.Audio/OpenAL/OpenALAudioOut.cs +++ b/Ryujinx.Audio/OpenAL/OpenALAudioOut.cs @@ -3,6 +3,7 @@ using OpenTK.Audio.OpenAL; using System; using System.Collections.Concurrent; using System.Collections.Generic; +using System.Runtime.InteropServices; using System.Threading; namespace Ryujinx.Audio.OpenAL @@ -309,13 +310,15 @@ namespace Ryujinx.Audio.OpenAL return null; } - public void AppendBuffer(int Track, long Tag, byte[] Buffer) + public void AppendBuffer<T>(int Track, long Tag, T[] Buffer) where T : struct { if (Tracks.TryGetValue(Track, out Track Td)) { int BufferId = Td.AppendBuffer(Tag); - AL.BufferData(BufferId, Td.Format, Buffer, Buffer.Length, Td.SampleRate); + int Size = Buffer.Length * Marshal.SizeOf<T>(); + + AL.BufferData(BufferId, Td.Format, Buffer, Size, Td.SampleRate); AL.SourceQueueBuffer(Td.SourceId, BufferId); @@ -366,7 +369,5 @@ namespace Ryujinx.Audio.OpenAL return PlaybackState.Stopped; } - - } } \ No newline at end of file diff --git a/Ryujinx.HLE/OsHle/Services/Aud/BehaviorIn.cs b/Ryujinx.HLE/OsHle/Services/Aud/BehaviorIn.cs new file mode 100644 index 0000000000..cf6871d58d --- /dev/null +++ b/Ryujinx.HLE/OsHle/Services/Aud/BehaviorIn.cs @@ -0,0 +1,11 @@ +using System.Runtime.InteropServices; + +namespace Ryujinx.HLE.OsHle.Services.Aud { + [StructLayout(LayoutKind.Sequential, Size = 0x10, Pack = 4)] + struct BehaviorIn + { + public long Unknown0; + public long Unknown8; + } +} \ No newline at end of file diff --git a/Ryujinx.HLE/OsHle/Services/Aud/BiquadFilter.cs b/Ryujinx.HLE/OsHle/Services/Aud/BiquadFilter.cs new file mode 100644 index 0000000000..7abaee514f --- /dev/null +++ b/Ryujinx.HLE/OsHle/Services/Aud/BiquadFilter.cs @@ -0,0 +1,14 @@ +using System.Runtime.InteropServices; + +namespace Ryujinx.HLE.OsHle.Services.Aud { + [StructLayout(LayoutKind.Sequential, Size = 0xc, Pack = 2)] + struct BiquadFilter + { + public short B0; + public short B1; + public short B2; + public short A1; + public short A2; + } +} diff --git a/Ryujinx.HLE/OsHle/Services/Aud/IAudioRenderer.cs b/Ryujinx.HLE/OsHle/Services/Aud/IAudioRenderer.cs index bd9188c341..25b787cfda 100644 --- a/Ryujinx.HLE/OsHle/Services/Aud/IAudioRenderer.cs +++ b/Ryujinx.HLE/OsHle/Services/Aud/IAudioRenderer.cs @@ -1,7 +1,10 @@ using ChocolArm64.Memory; +using Ryujinx.Audio; +using Ryujinx.Audio.Adpcm; using Ryujinx.HLE.Logging; using Ryujinx.HLE.OsHle.Handles; using Ryujinx.HLE.OsHle.Ipc; +using Ryujinx.HLE.OsHle.Utilities; using System; using System.Collections.Generic; using System.Runtime.InteropServices; @@ -10,15 +13,36 @@ namespace Ryujinx.HLE.OsHle.Services.Aud { class IAudioRenderer : IpcService, IDisposable { + private const int DeviceChannelsCount = 2; + + //This is the number of samples that are going to be appended //each time that RequestUpdateAudioRenderer is called. Ideally, //this value should be neither too small (to avoid the player //starving due to running out of samples) nor too large (to avoid //high latency). //Additionally, due to ADPCM having 14 samples per frame, this value + //needs to be a multiple of 14.
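To make the comment above concrete: the 770 samples chosen just below are 55 ADPCM frames of 14 samples, and at the 48 kHz track rate opened further down they correspond to roughly 16 ms of audio per RequestUpdateAudioRenderer call. A quick check of those numbers (a standalone sketch; the names are local to this example, not part of the patch):

```cs
using System;

// Sanity-check the constant chosen below: it must stay frame-aligned for ADPCM,
// and it directly determines how much audio each update call appends.
const int AdpcmSamplesPerFrame  = 14;
const int MixBufferSamplesCount = 770;
const int SampleRate            = 48000;

bool frameAligned  = MixBufferSamplesCount % AdpcmSamplesPerFrame == 0; // 770 = 55 * 14
double msPerUpdate = MixBufferSamplesCount * 1000.0 / SampleRate;       // ~16.04 ms

Console.WriteLine($"frame aligned: {frameAligned}, audio per update: {msPerUpdate:F2} ms");
```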
+ private const int MixBufferSamplesCount = 770; + private Dictionary<int, ServiceProcessRequest> m_Commands; public override IReadOnlyDictionary<int, ServiceProcessRequest> Commands => m_Commands; private KEvent UpdateEvent; + private AMemory Memory; + + private IAalOutput AudioOut; + private AudioRendererParameter Params; - public IAudioRenderer(AudioRendererParameter Params) + private int Track; + + private MemoryPoolContext[] MemoryPools; + + private VoiceContext[] Voices; + + public IAudioRenderer(AMemory Memory, IAalOutput AudioOut, AudioRendererParameter Params) { m_Commands = new Dictionary<int, ServiceProcessRequest>() { @@ -30,7 +54,43 @@ namespace Ryujinx.HLE.OsHle.Services.Aud UpdateEvent = new KEvent(); - this.Params = Params; + this.Memory = Memory; + this.AudioOut = AudioOut; + this.Params = Params; + + Track = AudioOut.OpenTrack(48000, 2, AudioCallback, out _); + + MemoryPools = CreateArray<MemoryPoolContext>(Params.EffectCount + Params.VoiceCount * 4); + + Voices = CreateArray<VoiceContext>(Params.VoiceCount); + + InitializeAudioOut(); + } + + private void AudioCallback() + { + UpdateEvent.WaitEvent.Set(); + } + + private static T[] CreateArray<T>(int Size) where T : new() + { + T[] Output = new T[Size]; + + for (int Index = 0; Index < Size; Index++) + { + Output[Index] = new T(); + } + + return Output; + } + + private void InitializeAudioOut() + { + AppendMixedBuffer(0); + AppendMixedBuffer(1); + AppendMixedBuffer(2); + + AudioOut.Start(Track); } public long RequestUpdateAudioRenderer(ServiceCtx Context) @@ -42,58 +102,96 @@ namespace Ryujinx.HLE.OsHle.Services.Aud long InputPosition = Context.Request.SendBuff[0].Position; - UpdateDataHeader InputDataHeader = AMemoryHelper.Read<UpdateDataHeader>(Context.Memory, InputPosition); + StructReader Reader = new StructReader(Context.Memory, InputPosition); + StructWriter Writer = new StructWriter(Context.Memory, OutputPosition); - UpdateDataHeader OutputDataHeader = new UpdateDataHeader(); + UpdateDataHeader InputHeader = Reader.Read<UpdateDataHeader>(); + + Reader.Read<BehaviorIn>(InputHeader.BehaviorSize); + + MemoryPoolIn[] MemoryPoolsIn = Reader.Read<MemoryPoolIn>(InputHeader.MemoryPoolsSize); + + for (int Index = 0; Index < MemoryPoolsIn.Length; Index++) + { + MemoryPoolIn MemoryPool = MemoryPoolsIn[Index]; + + if (MemoryPool.State == MemoryPoolState.RequestAttach) + { + MemoryPools[Index].OutStatus.State = MemoryPoolState.Attached; + } + else if (MemoryPool.State == MemoryPoolState.RequestDetach) + { + MemoryPools[Index].OutStatus.State = MemoryPoolState.Detached; + } + } + + Reader.Read<VoiceChannelResourceIn>(InputHeader.VoiceResourceSize); + + VoiceIn[] VoicesIn = Reader.Read<VoiceIn>(InputHeader.VoicesSize); + + for (int Index = 0; Index < VoicesIn.Length; Index++) + { + VoiceIn Voice = VoicesIn[Index]; + + Voices[Index].SetAcquireState(Voice.Acquired != 0); + + if (Voice.Acquired == 0) + { + continue; + } + + if (Voice.FirstUpdate != 0) + { + Voices[Index].AdpcmCtx = GetAdpcmDecoderContext( + Voice.AdpcmCoeffsPosition, + Voice.AdpcmCoeffsSize); + + Voices[Index].SampleFormat = Voice.SampleFormat; + Voices[Index].ChannelsCount = Voice.ChannelsCount; + Voices[Index].BufferIndex = Voice.BaseWaveBufferIndex; + } + + Voices[Index].WaveBuffers[0] = Voice.WaveBuffer0; + Voices[Index].WaveBuffers[1] = Voice.WaveBuffer1; + Voices[Index].WaveBuffers[2] = Voice.WaveBuffer2; + Voices[Index].WaveBuffers[3] = Voice.WaveBuffer3; + Voices[Index].Volume = Voice.Volume; + Voices[Index].PlayState = Voice.PlayState; + } + + UpdateAudio(); + + UpdateDataHeader OutputHeader = new UpdateDataHeader(); int UpdateHeaderSize = Marshal.SizeOf<UpdateDataHeader>(); - OutputDataHeader.Revision = Params.Revision; - OutputDataHeader.BehaviorSize = 0xb0; -
OutputDataHeader.MemoryPoolsSize = (Params.EffectCount + Params.VoiceCount * 4) * 0x10; - OutputDataHeader.VoicesSize = Params.VoiceCount * 0x10; - OutputDataHeader.EffectsSize = Params.EffectCount * 0x10; - OutputDataHeader.SinksSize = Params.SinkCount * 0x20; - OutputDataHeader.PerformanceManagerSize = 0x10; - OutputDataHeader.TotalSize = UpdateHeaderSize + - OutputDataHeader.BehaviorSize + - OutputDataHeader.MemoryPoolsSize + - OutputDataHeader.VoicesSize + - OutputDataHeader.EffectsSize + - OutputDataHeader.SinksSize + - OutputDataHeader.PerformanceManagerSize; + OutputHeader.Revision = Params.Revision; + OutputHeader.BehaviorSize = 0xb0; + OutputHeader.MemoryPoolsSize = (Params.EffectCount + Params.VoiceCount * 4) * 0x10; + OutputHeader.VoicesSize = Params.VoiceCount * 0x10; + OutputHeader.EffectsSize = Params.EffectCount * 0x10; + OutputHeader.SinksSize = Params.SinkCount * 0x20; + OutputHeader.PerformanceManagerSize = 0x10; + OutputHeader.TotalSize = UpdateHeaderSize + + OutputHeader.BehaviorSize + + OutputHeader.MemoryPoolsSize + + OutputHeader.VoicesSize + + OutputHeader.EffectsSize + + OutputHeader.SinksSize + + OutputHeader.PerformanceManagerSize; - AMemoryHelper.Write(Context.Memory, OutputPosition, OutputDataHeader); + Writer.Write(OutputHeader); - int InMemoryPoolOffset = UpdateHeaderSize + InputDataHeader.BehaviorSize; - - int OutMemoryPoolOffset = UpdateHeaderSize; - - for (int Offset = 0; Offset < OutputDataHeader.MemoryPoolsSize; Offset += 0x10, InMemoryPoolOffset += 0x20) + foreach (MemoryPoolContext MemoryPool in MemoryPools) { - MemoryPoolState PoolState = (MemoryPoolState)Context.Memory.ReadInt32(InputPosition + InMemoryPoolOffset + 0x10); - - //TODO: Figure out what the other values does. - if (PoolState == MemoryPoolState.RequestAttach) - { - Context.Memory.WriteInt32(OutputPosition + OutMemoryPoolOffset + Offset, (int)MemoryPoolState.Attached); - } - else if (PoolState == MemoryPoolState.RequestDetach) - { - Context.Memory.WriteInt32(OutputPosition + OutMemoryPoolOffset + Offset, (int)MemoryPoolState.Detached); - } + Writer.Write(MemoryPool.OutStatus); } - int OutVoicesOffset = OutMemoryPoolOffset + OutputDataHeader.MemoryPoolsSize; - - for (int Offset = 0; Offset < OutputDataHeader.VoicesSize; Offset += 0x10) + foreach (VoiceContext Voice in Voices) { - Context.Memory.WriteInt32(OutputPosition + OutVoicesOffset + Offset + 8, (int)VoicePlaybackState.Finished); + Writer.Write(Voice.OutStatus); } - //TODO: We shouldn't be signaling this here. 
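To put the response sizes built above in concrete terms, here is a worked example with made-up parameter counts (the 0x10/0x20 strides, the 0xb0 behavior size, and the 0x10 performance block come from the code above; the counts themselves are hypothetical):

```cs
using System;

// Hypothetical counts, purely for illustrating the output buffer layout.
int effectCount = 4;
int voiceCount  = 24;
int sinkCount   = 1;

int behaviorSize    = 0xb0;                                  // 176 bytes
int memoryPoolsSize = (effectCount + voiceCount * 4) * 0x10; // 100 pools -> 0x640
int voicesSize      = voiceCount  * 0x10;                    // 0x180
int effectsSize     = effectCount * 0x10;                    // 0x40
int sinksSize       = sinkCount   * 0x20;                    // 0x20
int performanceSize = 0x10;

int sizeWithoutHeader = behaviorSize + memoryPoolsSize + voicesSize +
                        effectsSize  + sinksSize       + performanceSize;

// TotalSize additionally includes Marshal.SizeOf<UpdateDataHeader>().
Console.WriteLine($"payload without header: 0x{sizeWithoutHeader:x}"); // 0x8e0
```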
- UpdateEvent.WaitEvent.Set(); - return 0; } @@ -120,6 +218,85 @@ namespace Ryujinx.HLE.OsHle.Services.Aud return 0; } + private AdpcmDecoderContext GetAdpcmDecoderContext(long Position, long Size) + { + if (Size == 0) + { + return null; + } + + AdpcmDecoderContext Context = new AdpcmDecoderContext(); + + Context.Coefficients = new short[Size >> 1]; + + for (int Offset = 0; Offset < Size; Offset += 2) + { + Context.Coefficients[Offset >> 1] = Memory.ReadInt16(Position + Offset); + } + + return Context; + } + + private void UpdateAudio() + { + long[] Released = AudioOut.GetReleasedBuffers(Track, 2); + + for (int Index = 0; Index < Released.Length; Index++) + { + AppendMixedBuffer(Released[Index]); + } + } + + private void AppendMixedBuffer(long Tag) + { + int[] MixBuffer = new int[MixBufferSamplesCount * DeviceChannelsCount]; + + foreach (VoiceContext Voice in Voices) + { + if (!Voice.Playing) + { + continue; + } + + int OutOffset = 0; + + int PendingSamples = MixBufferSamplesCount; + + while (PendingSamples > 0) + { + short[] Samples = Voice.GetBufferData(Memory, PendingSamples, out int ReturnedSamples); + + if (ReturnedSamples == 0) + { + break; + } + + PendingSamples -= ReturnedSamples; + + for (int Offset = 0; Offset < Samples.Length; Offset++) + { + int Sample = (int)(Samples[Offset] * Voice.Volume); + + MixBuffer[OutOffset++] += Sample; + } + } + } + + AudioOut.AppendBuffer(Track, Tag, GetFinalBuffer(MixBuffer)); + } + + private static short[] GetFinalBuffer(int[] Buffer) + { + short[] Output = new short[Buffer.Length]; + + for (int Offset = 0; Offset < Buffer.Length; Offset++) + { + Output[Offset] = DspUtils.Saturate(Buffer[Offset]); + } + + return Output; + } + public void Dispose() { Dispose(true); diff --git a/Ryujinx.HLE/OsHle/Services/Aud/IAudioRendererManager.cs b/Ryujinx.HLE/OsHle/Services/Aud/IAudioRendererManager.cs index a7daeedd58..43c20277c9 100644 --- a/Ryujinx.HLE/OsHle/Services/Aud/IAudioRendererManager.cs +++ b/Ryujinx.HLE/OsHle/Services/Aud/IAudioRendererManager.cs @@ -1,7 +1,7 @@ +using Ryujinx.Audio; using Ryujinx.HLE.Logging; using Ryujinx.HLE.OsHle.Ipc; using System.Collections.Generic; -using System.Runtime.InteropServices; namespace Ryujinx.HLE.OsHle.Services.Aud { @@ -28,7 +28,7 @@ namespace Ryujinx.HLE.OsHle.Services.Aud public long OpenAudioRenderer(ServiceCtx Context) { - //Same buffer as GetAudioRendererWorkBufferSize is receive here. 
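The mixing path added above (AppendMixedBuffer feeding GetFinalBuffer) follows a common pattern: every playing voice is accumulated into a 32-bit buffer so intermediate sums cannot wrap, and the result is saturated back to 16 bits only once at the end. A simplified, self-contained sketch of that idea, ignoring stereo interleaving and wave-buffer fetching (names are illustrative, not from the patch):

```cs
using System;

// Mix several PCM16 voices into one buffer, clamping only after accumulation --
// the same accumulate-then-saturate approach as AppendMixedBuffer/GetFinalBuffer.
static short[] MixVoices(short[][] voices, float[] volumes, int samples)
{
    int[] mix = new int[samples];

    for (int v = 0; v < voices.Length; v++)
    {
        for (int i = 0; i < samples && i < voices[v].Length; i++)
        {
            mix[i] += (int)(voices[v][i] * volumes[v]);
        }
    }

    short[] output = new short[samples];

    for (int i = 0; i < samples; i++)
    {
        output[i] = (short)Math.Clamp(mix[i], short.MinValue, short.MaxValue);
    }

    return output;
}

short[] a = { 1000, -2000, 30000 };
short[] b = {  500,   500, 30000 };

short[] mixed = MixVoices(new[] { a, b }, new[] { 1.0f, 1.0f }, 3);

// mixed[2] would be 60000 without clamping; it saturates to short.MaxValue (32767) instead.
Console.WriteLine(string.Join(", ", mixed));
```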
+ IAalOutput AudioOut = Context.Ns.AudioOut; AudioRendererParameter Params = new AudioRendererParameter(); @@ -46,7 +46,7 @@ namespace Ryujinx.HLE.OsHle.Services.Aud Params.Unknown2C = Context.RequestData.ReadInt32(); Params.Revision = Context.RequestData.ReadInt32(); - MakeObject(Context, new IAudioRenderer(Params)); + MakeObject(Context, new IAudioRenderer(Context.Memory, AudioOut, Params)); return 0; } diff --git a/Ryujinx.HLE/OsHle/Services/Aud/MemoryPoolContext.cs b/Ryujinx.HLE/OsHle/Services/Aud/MemoryPoolContext.cs new file mode 100644 index 0000000000..eed248bfed --- /dev/null +++ b/Ryujinx.HLE/OsHle/Services/Aud/MemoryPoolContext.cs @@ -0,0 +1,12 @@ +namespace Ryujinx.HLE.OsHle.Services.Aud +{ + class MemoryPoolContext + { + public MemoryPoolOut OutStatus; + + public MemoryPoolContext() + { + OutStatus.State = MemoryPoolState.Detached; + } + } +} diff --git a/Ryujinx.HLE/OsHle/Services/Aud/MemoryPoolIn.cs b/Ryujinx.HLE/OsHle/Services/Aud/MemoryPoolIn.cs new file mode 100644 index 0000000000..1dba97b95b --- /dev/null +++ b/Ryujinx.HLE/OsHle/Services/Aud/MemoryPoolIn.cs @@ -0,0 +1,14 @@ +using System.Runtime.InteropServices; + +namespace Ryujinx.HLE.OsHle.Services.Aud +{ + [StructLayout(LayoutKind.Sequential, Size = 0x20, Pack = 4)] + struct MemoryPoolIn + { + public long Address; + public long Size; + public MemoryPoolState State; + public int Unknown14; + public long Unknown18; + } +} diff --git a/Ryujinx.HLE/OsHle/Services/Aud/MemoryPoolOut.cs b/Ryujinx.HLE/OsHle/Services/Aud/MemoryPoolOut.cs new file mode 100644 index 0000000000..a3c268339e --- /dev/null +++ b/Ryujinx.HLE/OsHle/Services/Aud/MemoryPoolOut.cs @@ -0,0 +1,12 @@ +using System.Runtime.InteropServices; + +namespace Ryujinx.HLE.OsHle.Services.Aud +{ + [StructLayout(LayoutKind.Sequential, Size = 0x10, Pack = 4)] + struct MemoryPoolOut + { + public MemoryPoolState State; + public int Unknown14; + public long Unknown18; + } +} diff --git a/Ryujinx.HLE/OsHle/Services/Aud/PlayState.cs b/Ryujinx.HLE/OsHle/Services/Aud/PlayState.cs new file mode 100644 index 0000000000..bae86abd16 --- /dev/null +++ b/Ryujinx.HLE/OsHle/Services/Aud/PlayState.cs @@ -0,0 +1,9 @@ +namespace Ryujinx.HLE.OsHle.Services.Aud +{ + enum PlayState : byte + { + Playing = 0, + Stopped = 1, + Paused = 2 + } +} diff --git a/Ryujinx.HLE/OsHle/Services/Aud/SampleFormat.cs b/Ryujinx.HLE/OsHle/Services/Aud/SampleFormat.cs new file mode 100644 index 0000000000..06ab492996 --- /dev/null +++ b/Ryujinx.HLE/OsHle/Services/Aud/SampleFormat.cs @@ -0,0 +1,13 @@ +namespace Ryujinx.HLE.OsHle.Services.Aud +{ + enum SampleFormat : byte + { + Invalid = 0, + PcmInt8 = 1, + PcmInt16 = 2, + PcmInt24 = 3, + PcmInt32 = 4, + PcmFloat = 5, + Adpcm = 6 + } +} \ No newline at end of file diff --git a/Ryujinx.HLE/OsHle/Services/Aud/VoiceChannelResourceIn.cs b/Ryujinx.HLE/OsHle/Services/Aud/VoiceChannelResourceIn.cs new file mode 100644 index 0000000000..5005b8596b --- /dev/null +++ b/Ryujinx.HLE/OsHle/Services/Aud/VoiceChannelResourceIn.cs @@ -0,0 +1,10 @@ +using System.Runtime.InteropServices; + +namespace Ryujinx.HLE.OsHle.Services.Aud +{ + [StructLayout(LayoutKind.Sequential, Size = 0x70, Pack = 1)] + struct VoiceChannelResourceIn + { + //??? 
+ } +} diff --git a/Ryujinx.HLE/OsHle/Services/Aud/VoiceContext.cs b/Ryujinx.HLE/OsHle/Services/Aud/VoiceContext.cs new file mode 100644 index 0000000000..75f7315f76 --- /dev/null +++ b/Ryujinx.HLE/OsHle/Services/Aud/VoiceContext.cs @@ -0,0 +1,168 @@ +using ChocolArm64.Memory; +using Ryujinx.Audio.Adpcm; +using System; + +namespace Ryujinx.HLE.OsHle.Services.Aud +{ + class VoiceContext + { + private bool Acquired; + + public int ChannelsCount; + public int BufferIndex; + public long Offset; + + public float Volume; + + public PlayState PlayState; + + public SampleFormat SampleFormat; + + public AdpcmDecoderContext AdpcmCtx; + + public WaveBuffer[] WaveBuffers; + + public VoiceOut OutStatus; + + public bool Playing => Acquired && PlayState == PlayState.Playing; + + public VoiceContext() + { + WaveBuffers = new WaveBuffer[4]; + } + + public void SetAcquireState(bool NewState) + { + if (Acquired && !NewState) + { + //Release. + Reset(); + } + + Acquired = NewState; + } + + private void Reset() + { + BufferIndex = 0; + Offset = 0; + + OutStatus.PlayedSamplesCount = 0; + OutStatus.PlayedWaveBuffersCount = 0; + OutStatus.VoiceDropsCount = 0; + } + + public short[] GetBufferData(AMemory Memory, int MaxSamples, out int Samples) + { + WaveBuffer Wb = WaveBuffers[BufferIndex]; + + long Position = Wb.Position + Offset; + + long MaxSize = Wb.Size - Offset; + + long Size = GetSizeFromSamplesCount(MaxSamples); + + if (Size > MaxSize) + { + Size = MaxSize; + } + + Samples = GetSamplesCountFromSize(Size); + + OutStatus.PlayedSamplesCount += Samples; + + Offset += Size; + + if (Offset == Wb.Size) + { + Offset = 0; + + if (Wb.Looping == 0) + { + BufferIndex = (BufferIndex + 1) & 3; + } + + OutStatus.PlayedWaveBuffersCount++; + } + + return Decode(Memory.ReadBytes(Position, Size)); + } + + private long GetSizeFromSamplesCount(int SamplesCount) + { + if (SampleFormat == SampleFormat.PcmInt16) + { + return SamplesCount * sizeof(short) * ChannelsCount; + } + else if (SampleFormat == SampleFormat.Adpcm) + { + return AdpcmDecoder.GetSizeFromSamplesCount(SamplesCount); + } + else + { + throw new InvalidOperationException(); + } + } + + private int GetSamplesCountFromSize(long Size) + { + if (SampleFormat == SampleFormat.PcmInt16) + { + return (int)(Size / (sizeof(short) * ChannelsCount)); + } + else if (SampleFormat == SampleFormat.Adpcm) + { + return AdpcmDecoder.GetSamplesCountFromSize(Size); + } + else + { + throw new InvalidOperationException(); + } + } + + private short[] Decode(byte[] Buffer) + { + if (SampleFormat == SampleFormat.PcmInt16) + { + int Samples = GetSamplesCountFromSize(Buffer.Length); + + short[] Output = new short[Samples * 2]; + + if (ChannelsCount == 1) + { + //Duplicate samples to convert the mono stream to stereo. 
+ for (int Offset = 0; Offset < Buffer.Length; Offset += 2) + { + short Sample = GetShort(Buffer, Offset); + + Output[Offset + 0] = Sample; + Output[Offset + 1] = Sample; + } + } + else + { + for (int Offset = 0; Offset < Buffer.Length; Offset += 2) + { + Output[Offset >> 1] = GetShort(Buffer, Offset); + } + } + + return Output; + } + else if (SampleFormat == SampleFormat.Adpcm) + { + return AdpcmDecoder.Decode(Buffer, AdpcmCtx); + } + else + { + throw new InvalidOperationException(); + } + } + + private static short GetShort(byte[] Buffer, int Offset) + { + return (short)((Buffer[Offset + 0] << 0) | + (Buffer[Offset + 1] << 8)); + } + } +} diff --git a/Ryujinx.HLE/OsHle/Services/Aud/VoiceIn.cs b/Ryujinx.HLE/OsHle/Services/Aud/VoiceIn.cs new file mode 100644 index 0000000000..285148f8de --- /dev/null +++ b/Ryujinx.HLE/OsHle/Services/Aud/VoiceIn.cs @@ -0,0 +1,49 @@ +using System.Runtime.InteropServices; + +namespace Ryujinx.HLE.OsHle.Services.Aud +{ + [StructLayout(LayoutKind.Sequential, Size = 0x170, Pack = 1)] + struct VoiceIn + { + public int VoiceSlot; + public int NodeId; + + public byte FirstUpdate; + public byte Acquired; + + public PlayState PlayState; + + public SampleFormat SampleFormat; + + public int SampleRate; + + public int Priority; + + public int Unknown14; + + public int ChannelsCount; + + public float Pitch; + public float Volume; + + public BiquadFilter BiquadFilter0; + public BiquadFilter BiquadFilter1; + + public int AppendedWaveBuffersCount; + + public int BaseWaveBufferIndex; + + public int Unknown44; + + public long AdpcmCoeffsPosition; + public long AdpcmCoeffsSize; + + public int VoiceDestination; + public int Padding; + + public WaveBuffer WaveBuffer0; + public WaveBuffer WaveBuffer1; + public WaveBuffer WaveBuffer2; + public WaveBuffer WaveBuffer3; + } +} diff --git a/Ryujinx.HLE/OsHle/Services/Aud/VoiceOut.cs b/Ryujinx.HLE/OsHle/Services/Aud/VoiceOut.cs new file mode 100644 index 0000000000..86e8d24cba --- /dev/null +++ b/Ryujinx.HLE/OsHle/Services/Aud/VoiceOut.cs @@ -0,0 +1,12 @@ +using System.Runtime.InteropServices; + +namespace Ryujinx.HLE.OsHle.Services.Aud +{ + [StructLayout(LayoutKind.Sequential, Size = 0x10, Pack = 4)] + struct VoiceOut + { + public long PlayedSamplesCount; + public int PlayedWaveBuffersCount; + public int VoiceDropsCount; //? 
+ } +} diff --git a/Ryujinx.HLE/OsHle/Services/Aud/VoiceState.cs b/Ryujinx.HLE/OsHle/Services/Aud/VoiceState.cs deleted file mode 100644 index 8b34332392..0000000000 --- a/Ryujinx.HLE/OsHle/Services/Aud/VoiceState.cs +++ /dev/null @@ -1,9 +0,0 @@ -namespace Ryujinx.HLE.OsHle.Services.Aud { - enum VoicePlaybackState : int - { - Playing = 0, - Finished = 1, - Paused = 2 - } -} diff --git a/Ryujinx.HLE/OsHle/Services/Aud/WaveBuffer.cs b/Ryujinx.HLE/OsHle/Services/Aud/WaveBuffer.cs new file mode 100644 index 0000000000..222249987e --- /dev/null +++ b/Ryujinx.HLE/OsHle/Services/Aud/WaveBuffer.cs @@ -0,0 +1,20 @@ +using System.Runtime.InteropServices; + +namespace Ryujinx.HLE.OsHle.Services.Aud { + [StructLayout(LayoutKind.Sequential, Size = 0x38, Pack = 1)] + struct WaveBuffer + { + public long Position; + public long Size; + public int FirstSampleOffset; + public int LastSampleOffset; + public byte Looping; + public byte LastBuffer; + public short Unknown1A; + public int Unknown1C; + public long AdpcmLoopContextPosition; + public long AdpcmLoopContextSize; + public long Unknown30; + } +} diff --git a/Ryujinx.HLE/OsHle/Utilities/StructReader.cs b/Ryujinx.HLE/OsHle/Utilities/StructReader.cs new file mode 100644 index 0000000000..e218288b6e --- /dev/null +++ b/Ryujinx.HLE/OsHle/Utilities/StructReader.cs @@ -0,0 +1,45 @@ +using ChocolArm64.Memory; +using System.Runtime.InteropServices; + +namespace Ryujinx.HLE.OsHle.Utilities { + class StructReader + { + private AMemory Memory; + + public long Position { get; private set; } + + public StructReader(AMemory Memory, long Position) + { + this.Memory = Memory; + this.Position = Position; + } + + public T Read<T>() where T : struct + { + T Value = AMemoryHelper.Read<T>(Memory, Position); + + Position += Marshal.SizeOf<T>(); + + return Value; + } + + public T[] Read<T>(int Size) where T : struct + { + int StructSize = Marshal.SizeOf<T>(); + + int Count = Size / StructSize; + + T[] Output = new T[Count]; + + for (int Index = 0; Index < Count; Index++) + { + Output[Index] = AMemoryHelper.Read<T>(Memory, Position); + + Position += StructSize; + } + + return Output; + } + } +} diff --git a/Ryujinx.HLE/OsHle/Utilities/StructWriter.cs b/Ryujinx.HLE/OsHle/Utilities/StructWriter.cs new file mode 100644 index 0000000000..7daa95fb63 --- /dev/null +++ b/Ryujinx.HLE/OsHle/Utilities/StructWriter.cs @@ -0,0 +1,25 @@ +using ChocolArm64.Memory; +using System.Runtime.InteropServices; + +namespace Ryujinx.HLE.OsHle.Utilities { + class StructWriter + { + private AMemory Memory; + + public long Position { get; private set; } + + public StructWriter(AMemory Memory, long Position) + { + this.Memory = Memory; + this.Position = Position; + } + + public void Write<T>(T Value) where T : struct + { + AMemoryHelper.Write(Memory, Position, Value); + + Position += Marshal.SizeOf<T>(); + } + } +}
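For reference, a minimal usage sketch of the new Ryujinx.Audio.Adpcm types, assuming zero-filled coefficients and input data; in the renderer these come from guest memory via GetAdpcmDecoderContext and VoiceContext.GetBufferData:

```cs
using System;
using Ryujinx.Audio.Adpcm;

// Hypothetical caller: decode 10 ADPCM frames (140 samples) into interleaved stereo PCM16.
AdpcmDecoderContext context = new AdpcmDecoderContext()
{
    Coefficients = new short[16] // 8 predictor coefficient pairs; zeroed here only for illustration
};

byte[] encoded = new byte[(int)AdpcmDecoder.GetSizeFromSamplesCount(140)]; // 10 frames * 8 bytes

short[] pcm = AdpcmDecoder.Decode(encoded, context);

// Decode writes each sample to both channels, so 140 input samples become 280 shorts,
// and History0/History1 persist in the context for the next wave buffer.
Console.WriteLine($"{encoded.Length} ADPCM bytes -> {pcm.Length} PCM16 values");
```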