Haydn: Part 1 (#2007)

* Haydn: Part 1

Based on my reverse engineering of the audio services from firmware 11.0.0.

As always, the core implementation is under LGPLv3, for the same reasons as Amadeus.

This lays the foundations of a more flexible audio system while making audout & audin accurate.

This brings the following improvements:
- Complete reimplementation of audout and audin.
- Audin currently only has a dummy backend.
- Dramatically reduces CPU usage, by up to 50% in common cases (SoundIO and OpenAL).
- The Audio Renderer can now output to 5.1 devices when supported.
- The Audio Renderer initializes its backend on demand instead of keeping two alive at all times.
- All backend implementations are now in their own projects.
- Ryujinx.Audio.Renderer was renamed to Ryujinx.Audio and refactored accordingly.

As a note, games that have issues with OpenAL have not improved and will not, due to OpenAL's design
(playback stops when buffers finish playing, which can cause audible "pops" when buffers are very small).

* Update for hexkyz's latest edits on Switchbrew

* audren: Rollback channel configuration changes

* Address gdkchan's comments

* Fix typo in OpenAL backend driver

* Address last comments

* Fix a nit

* Address gdkchan's comments
This commit is contained in:
Mary
2021-02-26 01:11:56 +01:00
committed by GitHub
parent 1c49089ff0
commit f556c80d02
249 changed files with 5614 additions and 2712 deletions

View File

@@ -0,0 +1,137 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using System;
using System.Threading;
namespace Ryujinx.Audio
{
/// <summary>
/// Manages the audio input and output systems.
/// </summary>
public class AudioManager : IDisposable
{
/// <summary>
/// Lock used to control the waiters registration.
/// </summary>
private object _lock = new object();
/// <summary>
/// Events signaled when the driver played audio buffers.
/// </summary>
private ManualResetEvent[] _updateRequiredEvents;
/// <summary>
/// Action to execute when the driver played audio buffers.
/// </summary>
private Action[] _actions;
/// <summary>
/// The worker thread in charge of handling sessions update.
/// </summary>
private Thread _workerThread;
/// <summary>
/// Create a new <see cref="AudioManager"/>.
/// </summary>
public AudioManager()
{
_updateRequiredEvents = new ManualResetEvent[2];
_actions = new Action[2];
// Termination event.
_updateRequiredEvents[1] = new ManualResetEvent(false);
_workerThread = new Thread(Update)
{
Name = "AudioManager.Worker"
};
}
/// <summary>
/// Start the <see cref="AudioManager"/>.
/// </summary>
public void Start()
{
if (_workerThread.IsAlive)
{
throw new InvalidOperationException();
}
_workerThread.Start();
}
/// <summary>
/// Initialize update handlers.
/// </summary>
/// <param name="updatedRequiredEvent ">The driver event that will get signaled by the device driver when an audio buffer finished playing/being captured</param>
/// <param name="outputCallback">The callback to call when an audio buffer finished playing</param>
/// <param name="inputCallback">The callback to call when an audio buffer was captured</param>
public void Initialize(ManualResetEvent updatedRequiredEvent, Action outputCallback, Action inputCallback)
{
lock (_lock)
{
_updateRequiredEvents[0] = updatedRequiredEvent;
_actions[0] = outputCallback;
_actions[1] = inputCallback;
}
}
/// <summary>
/// Entrypoint of the <see cref="_workerThread"/> in charge of updating the <see cref="AudioManager"/>.
/// </summary>
private void Update()
{
while (true)
{
int index = WaitHandle.WaitAny(_updateRequiredEvents);
// Last index is here to indicate thread termination.
if (index + 1 == _updateRequiredEvents.Length)
{
break;
}
lock (_lock)
{
foreach (Action action in _actions)
{
action?.Invoke();
}
_updateRequiredEvents[0].Reset();
}
}
}
public void Dispose()
{
Dispose(true);
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
_updateRequiredEvents[1].Set();
_workerThread.Join();
_updateRequiredEvents[1].Dispose();
}
}
}
}
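
For context, here is a minimal sketch of how a frontend could wire the manager above to a device driver; the driver instance and the two update callbacks it feeds are assumed placeholders rather than the exact call sites used by the HLE services.

AudioManager manager = new AudioManager();

// The event comes from the active backend driver and is signaled whenever a buffer
// finished playing or being captured.
manager.Initialize(
    deviceDriver.GetUpdateRequiredEvent(), // assumed IHardwareDeviceDriver instance
    () => audioOutputManager.Update(),     // assumed output-side update callback
    () => audioInputManager.Update());     // assumed input-side update callback

manager.Start();

// On emulation shutdown:
manager.Dispose();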

View File

@@ -0,0 +1,43 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Common;
using System;
namespace Ryujinx.Audio.Backends.Common
{
public static class BackendHelper
{
public static int GetSampleSize(SampleFormat format)
{
return format switch
{
SampleFormat.PcmInt8 => sizeof(byte),
SampleFormat.PcmInt16 => sizeof(ushort),
SampleFormat.PcmInt24 => 3,
SampleFormat.PcmInt32 => sizeof(int),
SampleFormat.PcmFloat => sizeof(float),
_ => throw new ArgumentException($"{format}"),
};
}
public static int GetSampleCount(SampleFormat format, int channelCount, int bufferSize)
{
return bufferSize / GetSampleSize(format) / channelCount;
}
}
}
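
As a quick illustration of the helper above, with assumed example values: a 10 ms buffer of 48 kHz stereo PCM16 occupies 480 frames * 2 channels * 2 bytes = 1920 bytes, which GetSampleCount maps back to 480 samples per channel.

int bufferSize = 1920; // 10 ms of 48 kHz stereo PCM16
int sampleCount = BackendHelper.GetSampleCount(SampleFormat.PcmInt16, 2, bufferSize);
// sampleCount == 480 (bufferSize / sample size / channel count)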

View File

@@ -0,0 +1,183 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Common;
using System;
namespace Ryujinx.Audio.Backends.Common
{
/// <summary>
/// A ring buffer that grows when data written to it is too big to fit.
/// </summary>
public class DynamicRingBuffer
{
private const int RingBufferAlignment = 2048;
private object _lock = new object();
private byte[] _buffer;
private int _size;
private int _headOffset;
private int _tailOffset;
public int Length => _size;
public DynamicRingBuffer(int initialCapacity = RingBufferAlignment)
{
_buffer = new byte[initialCapacity];
}
public void Clear()
{
_size = 0;
_headOffset = 0;
_tailOffset = 0;
}
public void Clear(int size)
{
lock (_lock)
{
if (size > _size)
{
size = _size;
}
if (size == 0)
{
return;
}
_headOffset = (_headOffset + size) % _buffer.Length;
_size -= size;
if (_size == 0)
{
_headOffset = 0;
_tailOffset = 0;
}
}
}
private void SetCapacityLocked(int capacity)
{
byte[] buffer = new byte[capacity];
if (_size > 0)
{
if (_headOffset < _tailOffset)
{
Buffer.BlockCopy(_buffer, _headOffset, buffer, 0, _size);
}
else
{
Buffer.BlockCopy(_buffer, _headOffset, buffer, 0, _buffer.Length - _headOffset);
Buffer.BlockCopy(_buffer, 0, buffer, _buffer.Length - _headOffset, _tailOffset);
}
}
_buffer = buffer;
_headOffset = 0;
_tailOffset = _size;
}
public void Write<T>(T[] buffer, int index, int count)
{
if (count == 0)
{
return;
}
lock (_lock)
{
if ((_size + count) > _buffer.Length)
{
SetCapacityLocked(BitUtils.AlignUp(_size + count, RingBufferAlignment));
}
if (_headOffset < _tailOffset)
{
int tailLength = _buffer.Length - _tailOffset;
if (tailLength >= count)
{
Buffer.BlockCopy(buffer, index, _buffer, _tailOffset, count);
}
else
{
Buffer.BlockCopy(buffer, index, _buffer, _tailOffset, tailLength);
Buffer.BlockCopy(buffer, index + tailLength, _buffer, 0, count - tailLength);
}
}
else
{
Buffer.BlockCopy(buffer, index, _buffer, _tailOffset, count);
}
_size += count;
_tailOffset = (_tailOffset + count) % _buffer.Length;
}
}
public int Read<T>(T[] buffer, int index, int count)
{
lock (_lock)
{
if (count > _size)
{
count = _size;
}
if (count == 0)
{
return 0;
}
if (_headOffset < _tailOffset)
{
Buffer.BlockCopy(_buffer, _headOffset, buffer, index, count);
}
else
{
int tailLength = _buffer.Length - _headOffset;
if (tailLength >= count)
{
Buffer.BlockCopy(_buffer, _headOffset, buffer, index, count);
}
else
{
Buffer.BlockCopy(_buffer, _headOffset, buffer, index, tailLength);
Buffer.BlockCopy(_buffer, 0, buffer, index + tailLength, count - tailLength);
}
}
_size -= count;
_headOffset = (_headOffset + count) % _buffer.Length;
if (_size == 0)
{
_headOffset = 0;
_tailOffset = 0;
}
return count;
}
}
}
}
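
A short usage sketch of the ring buffer above, with hypothetical sizes. Note that count is a byte count, since the copies go through Buffer.BlockCopy, and that writes larger than the current capacity trigger a reallocation aligned to RingBufferAlignment.

DynamicRingBuffer ring = new DynamicRingBuffer(); // starts with a 2048 byte backing store

byte[] produced = new byte[4096];
ring.Write(produced, 0, produced.Length); // grows the backing store to 4096 bytes

byte[] consumed = new byte[1024];
int read = ring.Read(consumed, 0, consumed.Length); // read == 1024, ring.Length == 3072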

View File

@@ -0,0 +1,89 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using Ryujinx.Memory;
namespace Ryujinx.Audio.Backends.Common
{
public abstract class HardwareDeviceSessionOutputBase : IHardwareDeviceSession
{
public IVirtualMemoryManager MemoryManager { get; }
public SampleFormat RequestedSampleFormat { get; }
public uint RequestedSampleRate { get; }
public uint RequestedChannelCount { get; }
public HardwareDeviceSessionOutputBase(IVirtualMemoryManager memoryManager, SampleFormat requestedSampleFormat, uint requestedSampleRate, uint requestedChannelCount)
{
MemoryManager = memoryManager;
RequestedSampleFormat = requestedSampleFormat;
RequestedSampleRate = requestedSampleRate;
RequestedChannelCount = requestedChannelCount;
}
private byte[] GetBufferSamples(AudioBuffer buffer)
{
if (buffer.DataPointer == 0)
{
return null;
}
byte[] data = new byte[buffer.DataSize];
MemoryManager.Read(buffer.DataPointer, data);
return data;
}
protected ulong GetSampleCount(AudioBuffer buffer)
{
return (ulong)BackendHelper.GetSampleCount(RequestedSampleFormat, (int)RequestedChannelCount, (int)buffer.DataSize);
}
public abstract void Dispose();
public abstract void PrepareToClose();
public abstract void QueueBuffer(AudioBuffer buffer);
public abstract void SetVolume(float volume);
public abstract float GetVolume();
public abstract void Start();
public abstract void Stop();
public abstract ulong GetPlayedSampleCount();
public abstract bool WasBufferFullyConsumed(AudioBuffer buffer);
public virtual bool RegisterBuffer(AudioBuffer buffer)
{
return RegisterBuffer(buffer, GetBufferSamples(buffer));
}
public virtual bool RegisterBuffer(AudioBuffer buffer, byte[] samples)
{
if (samples == null)
{
return false;
}
if (buffer.Data == null)
{
buffer.Data = samples;
}
return true;
}
public virtual void UnregisterBuffer(AudioBuffer buffer) { }
}
}

View File

@@ -0,0 +1,146 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Backends.Common;
using Ryujinx.Audio.Backends.Dummy;
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using Ryujinx.Common.Logging;
using Ryujinx.Memory;
using System;
using System.Threading;
using static Ryujinx.Audio.Integration.IHardwareDeviceDriver;
namespace Ryujinx.Audio.Backends.CompatLayer
{
public class CompatLayerHardwareDeviceDriver : IHardwareDeviceDriver
{
private IHardwareDeviceDriver _realDriver;
public CompatLayerHardwareDeviceDriver(IHardwareDeviceDriver realDevice)
{
_realDriver = realDevice;
}
public void Dispose()
{
_realDriver.Dispose();
}
public ManualResetEvent GetUpdateRequiredEvent()
{
return _realDriver.GetUpdateRequiredEvent();
}
private uint SelectHardwareChannelCount(uint targetChannelCount)
{
if (_realDriver.SupportsChannelCount(targetChannelCount))
{
return targetChannelCount;
}
return targetChannelCount switch
{
6 => SelectHardwareChannelCount(2),
2 => SelectHardwareChannelCount(1),
1 => throw new ArgumentException("No valid channel configuration found!"),
_ => throw new ArgumentException($"Invalid targetChannelCount {targetChannelCount}")
};
}
public IHardwareDeviceSession OpenDeviceSession(Direction direction, IVirtualMemoryManager memoryManager, SampleFormat sampleFormat, uint sampleRate, uint channelCount)
{
if (channelCount == 0)
{
channelCount = 2;
}
if (sampleRate == 0)
{
sampleRate = Constants.TargetSampleRate;
}
if (!_realDriver.SupportsDirection(direction))
{
if (direction == Direction.Input)
{
Logger.Warning?.Print(LogClass.Audio, "The selected audio backend doesn't support audio input, fallback to dummy...");
return new DummyHardwareDeviceSessionInput(this, memoryManager, sampleFormat, sampleRate, channelCount);
}
throw new NotImplementedException();
}
uint hardwareChannelCount = SelectHardwareChannelCount(channelCount);
IHardwareDeviceSession realSession = _realDriver.OpenDeviceSession(direction, memoryManager, sampleFormat, sampleRate, hardwareChannelCount);
if (hardwareChannelCount == channelCount)
{
return realSession;
}
if (direction == Direction.Input)
{
Logger.Warning?.Print(LogClass.Audio, $"The selected audio backend doesn't support the requested audio input configuration, fallback to dummy...");
// TODO: We currently don't support audio input upsampling/downsampling, implement this.
realSession.Dispose();
return new DummyHardwareDeviceSessionInput(this, memoryManager, sampleFormat, sampleRate, channelCount);
}
// It must be a HardwareDeviceSessionOutputBase.
if (realSession is not HardwareDeviceSessionOutputBase realSessionOutputBase)
{
throw new InvalidOperationException($"Real driver session class type isn't based on {typeof(HardwareDeviceSessionOutputBase).Name}.");
}
// If we need to do post processing before sending to the hardware device, wrap around it.
return new CompatLayerHardwareDeviceSession(realSessionOutputBase, channelCount);
}
public bool SupportsChannelCount(uint channelCount)
{
return channelCount == 1 || channelCount == 2 || channelCount == 6;
}
public bool SupportsSampleFormat(SampleFormat sampleFormat)
{
// TODO: More formats.
return sampleFormat == SampleFormat.PcmInt16;
}
public bool SupportsSampleRate(uint sampleRate)
{
// TODO: More sample rates.
return sampleRate == Constants.TargetSampleRate;
}
public IHardwareDeviceDriver GetRealDeviceDriver()
{
return _realDriver;
}
public bool SupportsDirection(Direction direction)
{
return direction == Direction.Input || direction == Direction.Output;
}
}
}
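
A usage sketch of the compatibility layer above, with an assumed backend driver and memory manager: when the guest asks for a 5.1 session on a backend that only supports stereo, SelectHardwareChannelCount walks the 6 -> 2 -> 1 fallback chain and the returned session is wrapped so buffers are downmixed before reaching the hardware.

// someBackendDriver and memoryManager are assumed to exist (e.g. a SoundIO or OpenAL driver).
IHardwareDeviceDriver driver = new CompatLayerHardwareDeviceDriver(someBackendDriver);

IHardwareDeviceSession session = driver.OpenDeviceSession(
    Direction.Output,
    memoryManager,
    SampleFormat.PcmInt16,
    Constants.TargetSampleRate,
    channelCount: 6);

// On a stereo-only backend, "session" is a CompatLayerHardwareDeviceSession that downmixes
// the guest's 5.1 PCM16 samples; otherwise the real session is returned as-is.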

View File

@@ -0,0 +1,140 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Backends.Common;
using Ryujinx.Audio.Common;
using System;
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.Backends.CompatLayer
{
class CompatLayerHardwareDeviceSession : HardwareDeviceSessionOutputBase
{
private HardwareDeviceSessionOutputBase _realSession;
private uint _userChannelCount;
public CompatLayerHardwareDeviceSession(HardwareDeviceSessionOutputBase realSession, uint userChannelCount) : base(realSession.MemoryManager, realSession.RequestedSampleFormat, realSession.RequestedSampleRate, userChannelCount)
{
_realSession = realSession;
_userChannelCount = userChannelCount;
}
public override void Dispose()
{
_realSession.Dispose();
}
public override ulong GetPlayedSampleCount()
{
return _realSession.GetPlayedSampleCount();
}
public override float GetVolume()
{
return _realSession.GetVolume();
}
public override void PrepareToClose()
{
_realSession.PrepareToClose();
}
public override void QueueBuffer(AudioBuffer buffer)
{
_realSession.QueueBuffer(buffer);
}
public override bool RegisterBuffer(AudioBuffer buffer, byte[] samples)
{
if (RequestedSampleFormat != SampleFormat.PcmInt16)
{
throw new NotImplementedException("Downmixing formats other than PCM16 is not supported.");
}
if (samples == null)
{
return false;
}
short[] downmixedBufferPCM16;
ReadOnlySpan<short> samplesPCM16 = MemoryMarshal.Cast<byte, short>(samples);
if (_userChannelCount == 6)
{
downmixedBufferPCM16 = Downmixing.DownMixSurroundToStereo(samplesPCM16);
if (_realSession.RequestedChannelCount == 1)
{
downmixedBufferPCM16 = Downmixing.DownMixStereoToMono(downmixedBufferPCM16);
}
}
else if (_userChannelCount == 2 && _realSession.RequestedChannelCount == 1)
{
downmixedBufferPCM16 = Downmixing.DownMixStereoToMono(samplesPCM16);
}
else
{
throw new NotImplementedException($"Downmixing from {_userChannelCount} to {_realSession.RequestedChannelCount} not implemented.");
}
byte[] downmixedBuffer = MemoryMarshal.Cast<short, byte>(downmixedBufferPCM16).ToArray();
AudioBuffer fakeBuffer = new AudioBuffer
{
BufferTag = buffer.BufferTag,
DataPointer = buffer.DataPointer,
DataSize = (ulong)downmixedBuffer.Length
};
bool result = _realSession.RegisterBuffer(fakeBuffer, downmixedBuffer);
if (result)
{
buffer.Data = fakeBuffer.Data;
buffer.DataSize = fakeBuffer.DataSize;
}
return result;
}
public override void SetVolume(float volume)
{
_realSession.SetVolume(volume);
}
public override void Start()
{
_realSession.Start();
}
public override void Stop()
{
_realSession.Stop();
}
public override void UnregisterBuffer(AudioBuffer buffer)
{
_realSession.UnregisterBuffer(buffer);
}
public override bool WasBufferFullyConsumed(AudioBuffer buffer)
{
return _realSession.WasBufferFullyConsumed(buffer);
}
}
}

View File

@@ -1,8 +1,25 @@
using System;
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
namespace Ryujinx.Audio
namespace Ryujinx.Audio.Backends.CompatLayer
{
public static class Downmixing
{

View File

@@ -0,0 +1,96 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using Ryujinx.Memory;
using System.Threading;
using static Ryujinx.Audio.Integration.IHardwareDeviceDriver;
namespace Ryujinx.Audio.Backends.Dummy
{
public class DummyHardwareDeviceDriver : IHardwareDeviceDriver
{
private ManualResetEvent _updateRequiredEvent;
public DummyHardwareDeviceDriver()
{
_updateRequiredEvent = new ManualResetEvent(false);
}
public IHardwareDeviceSession OpenDeviceSession(Direction direction, IVirtualMemoryManager memoryManager, SampleFormat sampleFormat, uint sampleRate, uint channelCount)
{
if (sampleRate == 0)
{
sampleRate = Constants.TargetSampleRate;
}
if (channelCount == 0)
{
channelCount = 2;
}
if (direction == Direction.Output)
{
return new DummyHardwareDeviceSessionOutput(this, memoryManager, sampleFormat, sampleRate, channelCount);
}
else
{
return new DummyHardwareDeviceSessionInput(this, memoryManager, sampleFormat, sampleRate, channelCount);
}
}
public ManualResetEvent GetUpdateRequiredEvent()
{
return _updateRequiredEvent;
}
public void Dispose()
{
Dispose(true);
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
// NOTE: The _updateRequiredEvent will be disposed somewhere else.
}
}
public bool SupportsSampleRate(uint sampleRate)
{
return true;
}
public bool SupportsSampleFormat(SampleFormat sampleFormat)
{
return true;
}
public bool SupportsDirection(Direction direction)
{
return direction == Direction.Output || direction == Direction.Input;
}
public bool SupportsChannelCount(uint channelCount)
{
return channelCount == 1 || channelCount == 2 || channelCount == 6;
}
}
}

View File

@@ -0,0 +1,84 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using Ryujinx.Memory;
using System;
namespace Ryujinx.Audio.Backends.Dummy
{
class DummyHardwareDeviceSessionInput : IHardwareDeviceSession
{
private float _volume;
private IHardwareDeviceDriver _manager;
private IVirtualMemoryManager _memoryManager;
public DummyHardwareDeviceSessionInput(IHardwareDeviceDriver manager, IVirtualMemoryManager memoryManager, SampleFormat requestedSampleFormat, uint requestedSampleRate, uint requestedChannelCount)
{
_volume = 1.0f;
_manager = manager;
_memoryManager = memoryManager;
}
public void Dispose()
{
// Nothing to do.
}
public ulong GetPlayedSampleCount()
{
// Not implemented for input.
throw new NotSupportedException();
}
public float GetVolume()
{
return _volume;
}
public void PrepareToClose() { }
public void QueueBuffer(AudioBuffer buffer)
{
_memoryManager.Fill(buffer.DataPointer, buffer.DataSize, 0);
_manager.GetUpdateRequiredEvent().Set();
}
public bool RegisterBuffer(AudioBuffer buffer)
{
return buffer.DataPointer != 0;
}
public void SetVolume(float volume)
{
_volume = volume;
}
public void Start() { }
public void Stop() { }
public void UnregisterBuffer(AudioBuffer buffer) { }
public bool WasBufferFullyConsumed(AudioBuffer buffer)
{
return true;
}
}
}

View File

@@ -0,0 +1,79 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Backends.Common;
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using Ryujinx.Memory;
using System.Threading;
namespace Ryujinx.Audio.Backends.Dummy
{
internal class DummyHardwareDeviceSessionOutput : HardwareDeviceSessionOutputBase
{
private float _volume;
private IHardwareDeviceDriver _manager;
private ulong _playedSampleCount;
public DummyHardwareDeviceSessionOutput(IHardwareDeviceDriver manager, IVirtualMemoryManager memoryManager, SampleFormat requestedSampleFormat, uint requestedSampleRate, uint requestedChannelCount) : base(memoryManager, requestedSampleFormat, requestedSampleRate, requestedChannelCount)
{
_volume = 1.0f;
_manager = manager;
}
public override void Dispose()
{
// Nothing to do.
}
public override ulong GetPlayedSampleCount()
{
return Interlocked.Read(ref _playedSampleCount);
}
public override float GetVolume()
{
return _volume;
}
public override void PrepareToClose() { }
public override void QueueBuffer(AudioBuffer buffer)
{
Interlocked.Add(ref _playedSampleCount, GetSampleCount(buffer));
_manager.GetUpdateRequiredEvent().Set();
}
public override void SetVolume(float volume)
{
_volume = volume;
}
public override void Start() { }
public override void Stop() { }
public override void UnregisterBuffer(AudioBuffer buffer) { }
public override bool WasBufferFullyConsumed(AudioBuffer buffer)
{
return true;
}
}
}

View File

@@ -0,0 +1,53 @@
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Integration;
namespace Ryujinx.Audio.Common
{
/// <summary>
/// Represent an audio buffer that will be used by an <see cref="IHardwareDeviceSession"/>.
/// </summary>
public class AudioBuffer
{
/// <summary>
/// Unique tag of this buffer.
/// </summary>
/// <remarks>Unique per session</remarks>
public ulong BufferTag;
/// <summary>
/// Pointer to the user samples.
/// </summary>
public ulong DataPointer;
/// <summary>
/// Size of the user samples region.
/// </summary>
public ulong DataSize;
/// <summary>
/// The timestamp at which the buffer was played.
/// </summary>
/// <remarks>Not used but useful for debugging</remarks>
public ulong PlayedTimestamp;
/// <summary>
/// The user samples.
/// </summary>
public byte[] Data;
}
}

View File

@@ -0,0 +1,532 @@
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Integration;
using Ryujinx.Common;
using System;
using System.Diagnostics;
namespace Ryujinx.Audio.Common
{
/// <summary>
/// An audio device session.
/// </summary>
class AudioDeviceSession : IDisposable
{
/// <summary>
/// The volume of the <see cref="AudioDeviceSession"/>.
/// </summary>
private float _volume;
/// <summary>
/// The state of the <see cref="AudioDeviceSession"/>.
/// </summary>
private AudioDeviceState _state;
/// <summary>
/// Array of all buffers currently used or released.
/// </summary>
private AudioBuffer[] _buffers;
/// <summary>
/// The server index inside <see cref="_buffers"/> (appended but not queued to device driver).
/// </summary>
private uint _serverBufferIndex;
/// <summary>
/// The hardware index inside <see cref="_buffers"/> (queued to device driver).
/// </summary>
private uint _hardwareBufferIndex;
/// <summary>
/// The released index inside <see cref="_buffers"/> (released by the device driver).
/// </summary>
private uint _releasedBufferIndex;
/// <summary>
/// The count of buffer appended (server side).
/// </summary>
private uint _bufferAppendedCount;
/// <summary>
/// The count of buffer registered (driver side).
/// </summary>
private uint _bufferRegisteredCount;
/// <summary>
/// The count of buffer released (released by the driver side).
/// </summary>
private uint _bufferReleasedCount;
/// <summary>
/// The released buffer event.
/// </summary>
private IWritableEvent _bufferEvent;
/// <summary>
/// The session on the device driver.
/// </summary>
private IHardwareDeviceSession _hardwareDeviceSession;
/// <summary>
/// Max number of buffers that can be registered to the device driver at a time.
/// </summary>
private uint _bufferRegisteredLimit;
/// <summary>
/// Create a new <see cref="AudioDeviceSession"/>.
/// </summary>
/// <param name="deviceSession">The device driver session associated</param>
/// <param name="bufferEvent">The release buffer event</param>
/// <param name="bufferRegisteredLimit">The max number of buffers that can be registered to the device driver at a time</param>
public AudioDeviceSession(IHardwareDeviceSession deviceSession, IWritableEvent bufferEvent, uint bufferRegisteredLimit = 4)
{
_bufferEvent = bufferEvent;
_hardwareDeviceSession = deviceSession;
_bufferRegisteredLimit = bufferRegisteredLimit;
_buffers = new AudioBuffer[Constants.AudioDeviceBufferCountMax];
_serverBufferIndex = 0;
_hardwareBufferIndex = 0;
_releasedBufferIndex = 0;
_bufferAppendedCount = 0;
_bufferRegisteredCount = 0;
_bufferReleasedCount = 0;
_volume = 1.0f;
_state = AudioDeviceState.Stopped;
}
/// <summary>
/// Get the released buffer event.
/// </summary>
/// <returns>The released buffer event</returns>
public IWritableEvent GetBufferEvent()
{
return _bufferEvent;
}
/// <summary>
/// Get the state of the session.
/// </summary>
/// <returns>The state of the session</returns>
public AudioDeviceState GetState()
{
Debug.Assert(_state == AudioDeviceState.Started || _state == AudioDeviceState.Stopped);
return _state;
}
/// <summary>
/// Get the total buffer count (server + driver + released).
/// </summary>
/// <returns>Return the total buffer count</returns>
private uint GetTotalBufferCount()
{
uint bufferCount = _bufferAppendedCount + _bufferRegisteredCount + _bufferReleasedCount;
Debug.Assert(bufferCount <= Constants.AudioDeviceBufferCountMax);
return bufferCount;
}
/// <summary>
/// Register a new <see cref="AudioBuffer"/> on the server side.
/// </summary>
/// <param name="buffer">The <see cref="AudioBuffer"/> to register</param>
/// <returns>True if the operation succeeded</returns>
private bool RegisterBuffer(AudioBuffer buffer)
{
if (GetTotalBufferCount() == Constants.AudioDeviceBufferCountMax)
{
return false;
}
_buffers[_serverBufferIndex] = buffer;
_serverBufferIndex = (_serverBufferIndex + 1) % Constants.AudioDeviceBufferCountMax;
_bufferAppendedCount++;
return true;
}
/// <summary>
/// Flush server buffers to hardware.
/// </summary>
private void FlushToHardware()
{
uint bufferToFlushCount = Math.Min(Math.Min(_bufferAppendedCount, 4), _bufferRegisteredLimit - _bufferRegisteredCount);
AudioBuffer[] buffersToFlush = new AudioBuffer[bufferToFlushCount];
uint hardwareBufferIndex = _hardwareBufferIndex;
for (int i = 0; i < buffersToFlush.Length; i++)
{
buffersToFlush[i] = _buffers[_hardwareBufferIndex];
_bufferAppendedCount--;
_bufferRegisteredCount++;
hardwareBufferIndex = (hardwareBufferIndex + 1) % Constants.AudioDeviceBufferCountMax;
}
_hardwareBufferIndex = hardwareBufferIndex;
for (int i = 0; i < buffersToFlush.Length; i++)
{
_hardwareDeviceSession.QueueBuffer(buffersToFlush[i]);
}
}
/// <summary>
/// Get the current index of the <see cref="AudioBuffer"/> playing on the driver side.
/// </summary>
/// <param name="playingIndex">The output index of the <see cref="AudioBuffer"/> playing on the driver side</param>
/// <returns>True if any buffer is playing</returns>
private bool TryGetPlayingBufferIndex(out uint playingIndex)
{
if (_bufferRegisteredCount > 0)
{
playingIndex = (_hardwareBufferIndex - _bufferRegisteredCount) % Constants.AudioDeviceBufferCountMax;
return true;
}
playingIndex = 0;
return false;
}
/// <summary>
/// Try to pop the <see cref="AudioBuffer"/> playing on the driver side.
/// </summary>
/// <param name="buffer">The output <see cref="AudioBuffer"/> playing on the driver side</param>
/// <returns>True if any buffer is playing</returns>
private bool TryPopPlayingBuffer(out AudioBuffer buffer)
{
if (_bufferRegisteredCount > 0)
{
uint bufferIndex = (_hardwareBufferIndex - _bufferRegisteredCount) % Constants.AudioDeviceBufferCountMax;
buffer = _buffers[bufferIndex];
_buffers[bufferIndex] = null;
_bufferRegisteredCount--;
return true;
}
buffer = null;
return false;
}
/// <summary>
/// Try to pop a <see cref="AudioBuffer"/> released by the driver side.
/// </summary>
/// <param name="buffer">The output <see cref="AudioBuffer"/> released by the driver side</param>
/// <returns>True if any buffer has been released</returns>
public bool TryPopReleasedBuffer(out AudioBuffer buffer)
{
if (_bufferReleasedCount > 0)
{
uint bufferIndex = (_releasedBufferIndex - _bufferReleasedCount) % Constants.AudioDeviceBufferCountMax;
buffer = _buffers[bufferIndex];
_buffers[bufferIndex] = null;
_bufferReleasedCount--;
return true;
}
buffer = null;
return false;
}
/// <summary>
/// Release a <see cref="AudioBuffer"/>.
/// </summary>
/// <param name="buffer">The <see cref="AudioBuffer"/> to release</param>
private void ReleaseBuffer(AudioBuffer buffer)
{
buffer.PlayedTimestamp = (ulong)PerformanceCounter.ElapsedNanoseconds;
_bufferRegisteredCount--;
_bufferReleasedCount++;
_releasedBufferIndex = (_releasedBufferIndex + 1) % Constants.AudioDeviceBufferCountMax;
}
/// <summary>
/// Update the released buffers.
/// </summary>
/// <param name="updateForStop">True if the session is currently stopping</param>
private void UpdateReleaseBuffers(bool updateForStop = false)
{
bool wasAnyBuffersReleased = false;
while (TryGetPlayingBufferIndex(out uint playingIndex))
{
if (!updateForStop && !_hardwareDeviceSession.WasBufferFullyConsumed(_buffers[playingIndex]))
{
break;
}
if (updateForStop)
{
_hardwareDeviceSession.UnregisterBuffer(_buffers[playingIndex]);
}
ReleaseBuffer(_buffers[playingIndex]);
wasAnyBuffersReleased = true;
}
if (wasAnyBuffersReleased)
{
_bufferEvent.Signal();
}
}
/// <summary>
/// Append a new <see cref="AudioBuffer"/>.
/// </summary>
/// <param name="buffer">The <see cref="AudioBuffer"/> to append</param>
/// <returns>True if the buffer was appended</returns>
public bool AppendBuffer(AudioBuffer buffer)
{
if (_hardwareDeviceSession.RegisterBuffer(buffer))
{
if (RegisterBuffer(buffer))
{
FlushToHardware();
return true;
}
_hardwareDeviceSession.UnregisterBuffer(buffer);
}
return false;
}
public bool AppendUacBuffer(AudioBuffer buffer, uint handle)
{
// NOTE: On hardware, there is another RegisterBuffer method taking a handle.
// This variant of the call always returns false (stubbed?); as a result, this logic will never succeed.
return false;
}
/// <summary>
/// Start the audio session.
/// </summary>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success</returns>
public ResultCode Start()
{
if (_state == AudioDeviceState.Started)
{
return ResultCode.OperationFailed;
}
_hardwareDeviceSession.Start();
_state = AudioDeviceState.Started;
FlushToHardware();
_hardwareDeviceSession.SetVolume(_volume);
return ResultCode.Success;
}
/// <summary>
/// Stop the audio session.
/// </summary>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success</returns>
public ResultCode Stop()
{
if (_state == AudioDeviceState.Started)
{
_hardwareDeviceSession.Stop();
UpdateReleaseBuffers(true);
_state = AudioDeviceState.Stopped;
}
return ResultCode.Success;
}
/// <summary>
/// Get the volume of the session.
/// </summary>
/// <returns>The volume of the session</returns>
public float GetVolume()
{
return _hardwareDeviceSession.GetVolume();
}
/// <summary>
/// Set the volume of the session.
/// </summary>
/// <param name="volume">The new volume to set</param>
public void SetVolume(float volume)
{
_volume = volume;
if (_state == AudioDeviceState.Started)
{
_hardwareDeviceSession.SetVolume(volume);
}
}
/// <summary>
/// Get the count of buffer currently in use (server + driver side).
/// </summary>
/// <returns>The count of buffer currently in use</returns>
public uint GetBufferCount()
{
return _bufferAppendedCount + _bufferRegisteredCount;
}
/// <summary>
/// Check if a buffer is present.
/// </summary>
/// <param name="bufferTag">The unique tag of the buffer</param>
/// <returns>Return true if a buffer is present</returns>
public bool ContainsBuffer(ulong bufferTag)
{
uint bufferIndex = (_releasedBufferIndex - _bufferReleasedCount) % Constants.AudioDeviceBufferCountMax;
for (int i = 0; i < GetTotalBufferCount(); i++)
{
if (_buffers[bufferIndex].BufferTag == bufferTag)
{
return true;
}
bufferIndex = (bufferIndex + 1) % Constants.AudioDeviceBufferCountMax;
}
return false;
}
/// <summary>
/// Get the count of sample played in this session.
/// </summary>
/// <returns>The count of sample played in this session</returns>
public ulong GetPlayedSampleCount()
{
if (_state == AudioDeviceState.Stopped)
{
return 0;
}
else
{
return _hardwareDeviceSession.GetPlayedSampleCount();
}
}
/// <summary>
/// Flush all buffers to the initial state.
/// </summary>
/// <returns>True if any buffer was flushed</returns>
public bool FlushBuffers()
{
if (_state == AudioDeviceState.Stopped)
{
return false;
}
uint bufferCount = GetBufferCount();
while (TryPopReleasedBuffer(out AudioBuffer buffer))
{
_hardwareDeviceSession.UnregisterBuffer(buffer);
}
while (TryPopPlayingBuffer(out AudioBuffer buffer))
{
_hardwareDeviceSession.UnregisterBuffer(buffer);
}
if (_bufferRegisteredCount == 0 || (_bufferReleasedCount + _bufferAppendedCount) > Constants.AudioDeviceBufferCountMax)
{
return false;
}
_bufferReleasedCount += _bufferAppendedCount;
_releasedBufferIndex = (_releasedBufferIndex + _bufferAppendedCount) % Constants.AudioDeviceBufferCountMax;
_bufferAppendedCount = 0;
_hardwareBufferIndex = _serverBufferIndex;
if (bufferCount > 0)
{
_bufferEvent.Signal();
}
return true;
}
/// <summary>
/// Update the session.
/// </summary>
public void Update()
{
if (_state == AudioDeviceState.Started)
{
UpdateReleaseBuffers();
FlushToHardware();
}
}
public void Dispose()
{
Dispose(true);
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
// Tell the hardware session that we are ending.
_hardwareDeviceSession.PrepareToClose();
// Unregister all buffers
while (TryPopReleasedBuffer(out AudioBuffer buffer))
{
_hardwareDeviceSession.UnregisterBuffer(buffer);
}
while (TryPopPlayingBuffer(out AudioBuffer buffer))
{
_hardwareDeviceSession.UnregisterBuffer(buffer);
}
// Finally dispose hardware session.
_hardwareDeviceSession.Dispose();
_bufferEvent.Signal();
}
}
}
}
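
To summarize the buffer lifecycle implemented above (the class is internal to Ryujinx.Audio, so this is an illustrative sketch with assumed hardwareSession and releaseEvent instances rather than public API): buffers are appended on the server side, flushed to the driver, then released and handed back to the guest.

AudioDeviceSession session = new AudioDeviceSession(hardwareSession, releaseEvent);

AudioBuffer buffer = new AudioBuffer
{
    BufferTag = 1,
    DataPointer = guestBufferAddress, // assumed guest address
    DataSize = 0x1000
};

session.AppendBuffer(buffer); // register with the driver and flush to hardware
session.Start();              // start the hardware session and apply the volume

// Called from the AudioManager worker when the driver signals its update event:
session.Update();             // release fully consumed buffers and signal releaseEvent

while (session.TryPopReleasedBuffer(out AudioBuffer released))
{
    // Return released.BufferTag to the guest.
}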

View File

@@ -0,0 +1,35 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
namespace Ryujinx.Audio.Common
{
/// <summary>
/// Audio device state.
/// </summary>
public enum AudioDeviceState : uint
{
/// <summary>
/// The audio device is started.
/// </summary>
Started,
/// <summary>
/// The audio device is stopped.
/// </summary>
Stopped
}
}

View File

@@ -0,0 +1,46 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.Common
{
/// <summary>
/// Audio user input configuration.
/// </summary>
[StructLayout(LayoutKind.Sequential, Pack = 1)]
public struct AudioInputConfiguration
{
/// <summary>
/// The target sample rate of the user.
/// </summary>
/// <remarks>Only 48000Hz is considered valid, other sample rates will be refused.</remarks>
public uint SampleRate;
/// <summary>
/// The target channel count of the user.
/// </summary>
/// <remarks>Only Stereo and Surround are considered valid, other configurations will be refused.</remarks>
/// <remarks>Not used in audin.</remarks>
public ushort ChannelCount;
/// <summary>
/// Reserved/unused.
/// </summary>
private ushort _reserved;
}
}

View File

@@ -0,0 +1,54 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Common.Memory;
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.Common
{
/// <summary>
/// Audio system output configuration.
/// </summary>
[StructLayout(LayoutKind.Sequential, Pack = 1)]
public struct AudioOutputConfiguration
{
/// <summary>
/// The target sample rate of the system.
/// </summary>
public uint SampleRate;
/// <summary>
/// The target channel count of the system.
/// </summary>
public uint ChannelCount;
/// <summary>
/// The target sample format of the system.
/// </summary>
public SampleFormat SampleFormat;
/// <summary>
/// Reserved/unused.
/// </summary>
private Array3<byte> _padding;
/// <summary>
/// The initial audio system state.
/// </summary>
public AudioDeviceState AudioOutState;
}
}

View File

@@ -0,0 +1,53 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.Common
{
/// <summary>
/// Audio user buffer.
/// </summary>
[StructLayout(LayoutKind.Sequential, Pack = 1)]
public struct AudioUserBuffer
{
/// <summary>
/// Pointer to the next buffer (ignored).
/// </summary>
public ulong NextBuffer;
/// <summary>
/// Pointer to the user samples.
/// </summary>
public ulong Data;
/// <summary>
/// Capacity of the buffer (unused).
/// </summary>
public ulong Capacity;
/// <summary>
/// Size of the user samples region.
/// </summary>
public ulong DataSize;
/// <summary>
/// Offset in the user samples region (unused).
/// </summary>
public ulong DataOffset;
}
}

View File

@@ -0,0 +1,60 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
namespace Ryujinx.Audio.Common
{
/// <summary>
/// Sample format definition.
/// </summary>
public enum SampleFormat : byte
{
/// <summary>
/// Invalid sample format.
/// </summary>
Invalid = 0,
/// <summary>
/// PCM8 sample format. (unsupported)
/// </summary>
PcmInt8 = 1,
/// <summary>
/// PCM16 sample format.
/// </summary>
PcmInt16 = 2,
/// <summary>
/// PCM24 sample format. (unsupported)
/// </summary>
PcmInt24 = 3,
/// <summary>
/// PCM32 sample format.
/// </summary>
PcmInt32 = 4,
/// <summary>
/// PCM Float sample format.
/// </summary>
PcmFloat = 5,
/// <summary>
/// ADPCM sample format. (Also known as GC-ADPCM)
/// </summary>
Adpcm = 6
}
}

Ryujinx.Audio/Constants.cs
View File

@@ -0,0 +1,192 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
namespace Ryujinx.Audio
{
/// <summary>
/// Define constants used by the audio system.
/// </summary>
public static class Constants
{
/// <summary>
/// The default device output name.
/// </summary>
public const string DefaultDeviceOutputName = "DeviceOut";
/// <summary>
/// The default device input name.
/// </summary>
public const string DefaultDeviceInputName = "BuiltInHeadset";
/// <summary>
/// The maximum number of channels supported. (6 channels for 5.1 surround)
/// </summary>
public const int ChannelCountMax = 6;
/// <summary>
/// The maximum number of channels supported per voice.
/// </summary>
public const int VoiceChannelCountMax = ChannelCountMax;
/// <summary>
/// The maximum count of mix buffers supported per operation (volumes, mix effects, ...)
/// </summary>
public const int MixBufferCountMax = 24;
/// <summary>
/// The maximum count of wavebuffer per voice.
/// </summary>
public const int VoiceWaveBufferCount = 4;
/// <summary>
/// The maximum count of biquad filter per voice.
/// </summary>
public const int VoiceBiquadFilterCount = 2;
/// <summary>
/// The lowest priority that a voice can have.
/// </summary>
public const int VoiceLowestPriority = 0xFF;
/// <summary>
/// The highest priority that a voice can have.
/// </summary>
/// <remarks>Voices with the highest priority will not be dropped if a voice drop needs to occur.</remarks>
public const int VoiceHighestPriority = 0;
/// <summary>
/// Maximum <see cref="Common.BehaviourParameter.ErrorInfo"/> that can be returned by <see cref="Parameter.BehaviourErrorInfoOutStatus"/>.
/// </summary>
public const int MaxErrorInfos = 10;
/// <summary>
/// Default alignment for buffers.
/// </summary>
public const int BufferAlignment = 0x40;
/// <summary>
/// Alignment required for the work buffer.
/// </summary>
public const int WorkBufferAlignment = 0x1000;
/// <summary>
/// Alignment required for every performance metrics frame.
/// </summary>
public const int PerformanceMetricsPerFramesSizeAlignment = 0x100;
/// <summary>
/// The id of the final mix.
/// </summary>
public const int FinalMixId = 0;
/// <summary>
/// The id defining an unused mix id.
/// </summary>
public const int UnusedMixId = int.MaxValue;
/// <summary>
/// The id defining an unused splitter id as a signed integer.
/// </summary>
public const int UnusedSplitterIdInt = -1;
/// <summary>
/// The id defining an unused splitter id.
/// </summary>
public const uint UnusedSplitterId = uint.MaxValue;
/// <summary>
/// The id of invalid/unused node id.
/// </summary>
public const int InvalidNodeId = -268435456;
/// <summary>
/// The index considered invalid for processing order.
/// </summary>
public const int InvalidProcessingOrder = -1;
/// <summary>
/// The maximum number of audio renderer sessions allowed to be created system wide.
/// </summary>
public const int AudioRendererSessionCountMax = 2;
/// <summary>
/// The maximum number of audio output sessions allowed to be created system wide.
/// </summary>
public const int AudioOutSessionCountMax = 12;
/// <summary>
/// The maximum number of audio input sessions allowed to be created system wide.
/// </summary>
public const int AudioInSessionCountMax = 4;
/// <summary>
/// Maximum buffers supported by one audio device session.
/// </summary>
public const int AudioDeviceBufferCountMax = 32;
/// <summary>
/// The target sample rate of the audio renderer. (48kHz)
/// </summary>
public const uint TargetSampleRate = 48000;
/// <summary>
/// The target sample size of the audio renderer. (PCM16)
/// </summary>
public const int TargetSampleSize = sizeof(ushort);
/// <summary>
/// The target sample count per audio renderer update.
/// </summary>
public const int TargetSampleCount = 240;
/// <summary>
/// The size of an upsampler entry to process upsampling to <see cref="TargetSampleRate"/>.
/// </summary>
public const int UpSampleEntrySize = TargetSampleCount * VoiceChannelCountMax;
/// <summary>
/// The target audio latency computed from <see cref="TargetSampleRate"/> and <see cref="TargetSampleCount"/>.
/// </summary>
public const int AudioProcessorMaxUpdateTimeTarget = 1000000000 / ((int)TargetSampleRate / TargetSampleCount); // 5.00 ms
/// <summary>
/// The maximum update time of the DSP on original hardware.
/// </summary>
public const int AudioProcessorMaxUpdateTime = 5760000; // 5.76 ms
/// <summary>
/// The maximum update time per audio renderer session.
/// </summary>
public const int AudioProcessorMaxUpdateTimePerSessions = AudioProcessorMaxUpdateTime / AudioRendererSessionCountMax;
/// <summary>
/// Guest timer frequency used for system ticks.
/// </summary>
public const int TargetTimerFrequency = 19200000;
/// <summary>
/// The default coefficients used for standard 5.1 surround to stereo downmixing.
/// </summary>
public static float[] DefaultSurroundToStereoCoefficients = new float[4]
{
1.0f,
0.707f,
0.251f,
0.707f,
};
}
}
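
The DefaultSurroundToStereoCoefficients entries correspond, in order, to the front, center, LFE and rear contributions. In floating-point form the mixdown they describe looks roughly like the sketch below (channel naming is assumed; the compat layer applies the equivalent in fixed point on PCM16 data).

float[] c = Constants.DefaultSurroundToStereoCoefficients; // { front, center, lfe, back }

float left  = c[0] * frontLeft  + c[1] * center + c[2] * lfe + c[3] * backLeft;
float right = c[0] * frontRight + c[1] * center + c[2] * lfe + c[3] * backRight;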

View File

@@ -1,56 +0,0 @@
using System;
namespace Ryujinx.Audio
{
public interface IAalOutput : IDisposable
{
bool SupportsChannelCount(int channels);
private int SelectHardwareChannelCount(int targetChannelCount)
{
if (SupportsChannelCount(targetChannelCount))
{
return targetChannelCount;
}
return targetChannelCount switch
{
6 => SelectHardwareChannelCount(2),
2 => SelectHardwareChannelCount(1),
1 => throw new ArgumentException("No valid channel configuration found!"),
_ => throw new ArgumentException($"Invalid targetChannelCount {targetChannelCount}"),
};
}
int OpenTrack(int sampleRate, int channels, ReleaseCallback callback)
{
return OpenHardwareTrack(sampleRate, SelectHardwareChannelCount(channels), channels, callback);
}
int OpenHardwareTrack(int sampleRate, int hardwareChannels, int virtualChannels, ReleaseCallback callback);
void CloseTrack(int trackId);
bool ContainsBuffer(int trackId, long bufferTag);
long[] GetReleasedBuffers(int trackId, int maxCount);
void AppendBuffer<T>(int trackId, long bufferTag, T[] buffer) where T : struct;
void Start(int trackId);
void Stop(int trackId);
uint GetBufferCount(int trackId);
ulong GetPlayedSampleCount(int trackId);
bool FlushBuffers(int trackId);
float GetVolume(int trackId);
void SetVolume(int trackId, float volume);
PlaybackState GetState(int trackId);
}
}

View File

@@ -0,0 +1,262 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using Ryujinx.Common.Logging;
using Ryujinx.Memory;
using System;
using System.Diagnostics;
namespace Ryujinx.Audio.Input
{
/// <summary>
/// The audio input manager.
/// </summary>
public class AudioInputManager : IDisposable
{
private object _lock = new object();
/// <summary>
/// Lock used for session allocation.
/// </summary>
private object _sessionLock = new object();
/// <summary>
/// The session ids allocation table.
/// </summary>
private int[] _sessionIds;
/// <summary>
/// The device driver.
/// </summary>
private IHardwareDeviceDriver _deviceDriver;
/// <summary>
/// The events linked to each session.
/// </summary>
private IWritableEvent[] _sessionsBufferEvents;
/// <summary>
/// The <see cref="AudioInputSystem"/> session instances.
/// </summary>
private AudioInputSystem[] _sessions;
/// <summary>
/// The count of active sessions.
/// </summary>
private int _activeSessionCount;
/// <summary>
/// Create a new <see cref="AudioInputManager"/>.
/// </summary>
public AudioInputManager()
{
_sessionIds = new int[Constants.AudioInSessionCountMax];
_sessions = new AudioInputSystem[Constants.AudioInSessionCountMax];
_activeSessionCount = 0;
for (int i = 0; i < _sessionIds.Length; i++)
{
_sessionIds[i] = i;
}
}
/// <summary>
/// Initialize the <see cref="AudioInputManager"/>.
/// </summary>
/// <param name="deviceDriver">The device driver.</param>
/// <param name="sessionRegisterEvents">The events associated to each session.</param>
public void Initialize(IHardwareDeviceDriver deviceDriver, IWritableEvent[] sessionRegisterEvents)
{
_deviceDriver = deviceDriver;
_sessionsBufferEvents = sessionRegisterEvents;
}
/// <summary>
/// Acquire a new session id.
/// </summary>
/// <returns>A new session id.</returns>
private int AcquireSessionId()
{
lock (_sessionLock)
{
int index = _activeSessionCount;
Debug.Assert(index < _sessionIds.Length);
int sessionId = _sessionIds[index];
_sessionIds[index] = -1;
_activeSessionCount++;
Logger.Info?.Print(LogClass.AudioRenderer, $"Registered new input ({sessionId})");
return sessionId;
}
}
/// <summary>
/// Release a given <paramref name="sessionId"/>.
/// </summary>
/// <param name="sessionId">The session id to release.</param>
private void ReleaseSessionId(int sessionId)
{
lock (_sessionLock)
{
Debug.Assert(_activeSessionCount > 0);
int newIndex = --_activeSessionCount;
_sessionIds[newIndex] = sessionId;
}
Logger.Info?.Print(LogClass.AudioRenderer, $"Unregistered input ({sessionId})");
}
/// <summary>
/// Used to update all audio input sessions.
/// </summary>
public void Update()
{
lock (_sessionLock)
{
foreach (AudioInputSystem input in _sessions)
{
input?.Update();
}
}
}
/// <summary>
/// Register a new <see cref="AudioInputSystem"/>.
/// </summary>
/// <param name="input">The <see cref="AudioInputSystem"/> to register.</param>
private void Register(AudioInputSystem input)
{
lock (_sessionLock)
{
_sessions[input.GetSessionId()] = input;
}
}
/// <summary>
/// Unregister a given <see cref="AudioInputSystem"/>.
/// </summary>
/// <param name="input">The <see cref="AudioInputSystem"/> to unregister.</param>
internal void Unregister(AudioInputSystem input)
{
lock (_sessionLock)
{
int sessionId = input.GetSessionId();
_sessions[input.GetSessionId()] = null;
ReleaseSessionId(sessionId);
}
}
/// <summary>
/// Get the names of all audio inputs.
/// </summary>
/// <param name="filtered">If true, filter out disconnected devices</param>
/// <returns>The list of all audio input names</returns>
public string[] ListAudioIns(bool filtered)
{
if (filtered)
{
// TODO: Detect if the driver supports audio input
}
return new string[] { Constants.DefaultDeviceInputName };
}
/// <summary>
/// Open a new <see cref="AudioInputSystem"/>.
/// </summary>
/// <param name="outputDeviceName">The output device name selected by the <see cref="AudioInputSystem"/></param>
/// <param name="outputConfiguration">The output audio configuration selected by the <see cref="AudioInputSystem"/></param>
/// <param name="obj">The new <see cref="AudioInputSystem"/></param>
/// <param name="memoryManager">The memory manager that will be used for all guest memory operations</param>
/// <param name="inputDeviceName">The input device name wanted by the user</param>
/// <param name="sampleFormat">The sample format to use</param>
/// <param name="parameter">The user configuration</param>
/// <param name="appletResourceUserId">The applet resource user id of the application</param>
/// <param name="processHandle">The process handle of the application</param>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success</returns>
public ResultCode OpenAudioIn(out string outputDeviceName,
out AudioOutputConfiguration outputConfiguration,
out AudioInputSystem obj,
IVirtualMemoryManager memoryManager,
string inputDeviceName,
SampleFormat sampleFormat,
ref AudioInputConfiguration parameter,
ulong appletResourceUserId,
uint processHandle)
{
int sessionId = AcquireSessionId();
_sessionsBufferEvents[sessionId].Clear();
IHardwareDeviceSession deviceSession = _deviceDriver.OpenDeviceSession(IHardwareDeviceDriver.Direction.Input, memoryManager, sampleFormat, parameter.SampleRate, parameter.ChannelCount);
AudioInputSystem audioIn = new AudioInputSystem(this, _lock, deviceSession, _sessionsBufferEvents[sessionId]);
ResultCode result = audioIn.Initialize(inputDeviceName, sampleFormat, ref parameter, sessionId);
if (result == ResultCode.Success)
{
outputDeviceName = audioIn.DeviceName;
outputConfiguration = new AudioOutputConfiguration
{
ChannelCount = audioIn.ChannelCount,
SampleFormat = audioIn.SampleFormat,
SampleRate = audioIn.SampleRate,
AudioOutState = audioIn.GetState(),
};
obj = audioIn;
Register(audioIn);
}
else
{
ReleaseSessionId(sessionId);
obj = null;
outputDeviceName = null;
outputConfiguration = default;
}
return result;
}
public void Dispose()
{
Dispose(true);
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
// Nothing to do here.
}
}
}
}
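To make the flow above concrete, here is a hedged caller-side sketch of opening an input session through this manager. The device driver, event array, memory manager and literal values are assumptions of the example, and AudioInputConfiguration is assumed to expose settable SampleRate/ChannelCount fields as IsConfigurationValid suggests; none of this is code from the PR.

// Hypothetical caller (e.g. an HLE audin service wrapper), illustration only.
AudioInputManager manager = new AudioInputManager();

manager.Initialize(deviceDriver, sessionRegisterEvents); // Assumed to exist in the caller.

AudioInputConfiguration parameter = new AudioInputConfiguration
{
    SampleRate   = 0, // 0 lets Initialize fall back to Constants.TargetSampleRate.
    ChannelCount = 2
};

ResultCode result = manager.OpenAudioIn(out string deviceName,
                                        out AudioOutputConfiguration configuration,
                                        out AudioInputSystem session,
                                        memoryManager,          // Assumed IVirtualMemoryManager.
                                        "",                     // Empty name selects the default input device.
                                        SampleFormat.PcmInt16,
                                        ref parameter,
                                        0,                      // appletResourceUserId
                                        0);                     // processHandle

if (result == ResultCode.Success)
{
    session.Start();
}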

View File

@ -0,0 +1,400 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using System;
namespace Ryujinx.Audio.Input
{
/// <summary>
/// Audio input system.
/// </summary>
public class AudioInputSystem : IDisposable
{
/// <summary>
/// The session id associated to the <see cref="AudioInputSystem"/>.
/// </summary>
private int _sessionId;
/// <summary>
/// The device session used by the <see cref="AudioInputSystem"/>.
/// </summary>
private AudioDeviceSession _session;
/// <summary>
/// The target device name of the <see cref="AudioInputSystem"/>.
/// </summary>
public string DeviceName { get; private set; }
/// <summary>
/// The target sample rate of the <see cref="AudioInputSystem"/>.
/// </summary>
public uint SampleRate { get; private set; }
/// <summary>
/// The target channel count of the <see cref="AudioInputSystem"/>.
/// </summary>
public uint ChannelCount { get; private set; }
/// <summary>
/// The target sample format of the <see cref="AudioInputSystem"/>.
/// </summary>
public SampleFormat SampleFormat { get; private set; }
/// <summary>
/// The <see cref="AudioInputManager"/> owning this.
/// </summary>
private AudioInputManager _manager;
/// <summary>
/// The lock of the parent.
/// </summary>
private object _parentLock;
/// <summary>
/// Create a new <see cref="AudioInputSystem"/>.
/// </summary>
/// <param name="manager">The manager instance</param>
/// <param name="parentLock">The lock of the manager</param>
/// <param name="deviceSession">The hardware device session</param>
/// <param name="bufferEvent">The buffer release event of the audio input</param>
public AudioInputSystem(AudioInputManager manager, object parentLock, IHardwareDeviceSession deviceSession, IWritableEvent bufferEvent)
{
_manager = manager;
_parentLock = parentLock;
_session = new AudioDeviceSession(deviceSession, bufferEvent);
}
/// <summary>
/// Get the default device name on the system.
/// </summary>
/// <returns>The default device name on the system.</returns>
private static string GetDeviceDefaultName()
{
return Constants.DefaultDeviceInputName;
}
/// <summary>
/// Check if a given configuration and device name is valid on the system.
/// </summary>
/// <param name="configuration">The configuration to check.</param>
/// <param name="deviceName">The device name to check.</param>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success.</returns>
private static ResultCode IsConfigurationValid(ref AudioInputConfiguration configuration, string deviceName)
{
if (deviceName.Length != 0 && !deviceName.Equals(GetDeviceDefaultName()))
{
return ResultCode.DeviceNotFound;
}
else if (configuration.SampleRate != 0 && configuration.SampleRate != Constants.TargetSampleRate)
{
return ResultCode.UnsupportedSampleRate;
}
else if (configuration.ChannelCount != 0 && configuration.ChannelCount != 1 && configuration.ChannelCount != 2 && configuration.ChannelCount != 6)
{
return ResultCode.UnsupportedChannelConfiguration;
}
return ResultCode.Success;
}
/// <summary>
/// Get the released buffer event.
/// </summary>
/// <returns>The released buffer event</returns>
public IWritableEvent RegisterBufferEvent()
{
lock (_parentLock)
{
return _session.GetBufferEvent();
}
}
/// <summary>
/// Update the <see cref="AudioInputSystem"/>.
/// </summary>
public void Update()
{
lock (_parentLock)
{
_session.Update();
}
}
/// <summary>
/// Get the id of this session.
/// </summary>
/// <returns>The id of this session</returns>
public int GetSessionId()
{
return _sessionId;
}
/// <summary>
/// Initialize the <see cref="AudioInputSystem"/>.
/// </summary>
/// <param name="inputDeviceName">The input device name wanted by the user</param>
/// <param name="sampleFormat">The sample format to use</param>
/// <param name="parameter">The user configuration</param>
/// <param name="sessionId">The session id associated to this <see cref="AudioInputSystem"/></param>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success.</returns>
public ResultCode Initialize(string inputDeviceName, SampleFormat sampleFormat, ref AudioInputConfiguration parameter, int sessionId)
{
_sessionId = sessionId;
ResultCode result = IsConfigurationValid(ref parameter, inputDeviceName);
if (result == ResultCode.Success)
{
if (inputDeviceName.Length == 0)
{
DeviceName = GetDeviceDefaultName();
}
else
{
DeviceName = inputDeviceName;
}
if (parameter.ChannelCount == 6)
{
ChannelCount = 6;
}
else
{
ChannelCount = 2;
}
SampleFormat = sampleFormat;
SampleRate = Constants.TargetSampleRate;
}
return result;
}
/// <summary>
/// Append a new audio buffer to the audio input.
/// </summary>
/// <param name="bufferTag">The unique tag of this buffer.</param>
/// <param name="userBuffer">The buffer informations.</param>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success.</returns>
public ResultCode AppendBuffer(ulong bufferTag, ref AudioUserBuffer userBuffer)
{
lock (_parentLock)
{
AudioBuffer buffer = new AudioBuffer
{
BufferTag = bufferTag,
DataPointer = userBuffer.Data,
DataSize = userBuffer.DataSize
};
if (_session.AppendBuffer(buffer))
{
return ResultCode.Success;
}
return ResultCode.BufferRingFull;
}
}
/// <summary>
/// Append a new audio buffer to the audio input.
/// </summary>
/// <remarks>This is broken by design, only added for completeness.</remarks>
/// <param name="bufferTag">The unique tag of this buffer.</param>
/// <param name="userBuffer">The buffer informations.</param>
/// <param name="handle">Some unknown handle.</param>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success.</returns>
public ResultCode AppendUacBuffer(ulong bufferTag, ref AudioUserBuffer userBuffer, uint handle)
{
lock (_parentLock)
{
AudioBuffer buffer = new AudioBuffer
{
BufferTag = bufferTag,
DataPointer = userBuffer.Data,
DataSize = userBuffer.DataSize
};
if (_session.AppendUacBuffer(buffer, handle))
{
return ResultCode.Success;
}
return ResultCode.BufferRingFull;
}
}
/// <summary>
/// Get the released buffers.
/// </summary>
/// <param name="releasedBuffers">The span to write the released buffer tags to</param>
/// <param name="releasedCount">The count of released buffers</param>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success.</returns>
public ResultCode GetReleasedBuffers(Span<ulong> releasedBuffers, out uint releasedCount)
{
releasedCount = 0;
// Ensure that the first entry is set to zero if no entries are returned.
if (releasedBuffers.Length > 0)
{
releasedBuffers[0] = 0;
}
lock (_parentLock)
{
for (int i = 0; i < releasedBuffers.Length; i++)
{
if (!_session.TryPopReleasedBuffer(out AudioBuffer buffer))
{
break;
}
releasedBuffers[i] = buffer.BufferTag;
releasedCount++;
}
}
return ResultCode.Success;
}
/// <summary>
/// Get the current state of the <see cref="AudioInputSystem"/>.
/// </summary>
/// <returns>Return the current state of the <see cref="AudioInputSystem"/></returns>
public AudioDeviceState GetState()
{
lock (_parentLock)
{
return _session.GetState();
}
}
/// <summary>
/// Start the audio session.
/// </summary>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success</returns>
public ResultCode Start()
{
lock (_parentLock)
{
return _session.Start();
}
}
/// <summary>
/// Stop the audio session.
/// </summary>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success</returns>
public ResultCode Stop()
{
lock (_parentLock)
{
return _session.Stop();
}
}
/// <summary>
/// Get the volume of the session.
/// </summary>
/// <returns>The volume of the session</returns>
public float GetVolume()
{
lock (_parentLock)
{
return _session.GetVolume();
}
}
/// <summary>
/// Set the volume of the session.
/// </summary>
/// <param name="volume">The new volume to set</param>
public void SetVolume(float volume)
{
lock (_parentLock)
{
_session.SetVolume(volume);
}
}
/// <summary>
/// Get the count of buffers currently in use (server + driver side).
/// </summary>
/// <returns>The count of buffers currently in use</returns>
public uint GetBufferCount()
{
lock (_parentLock)
{
return _session.GetBufferCount();
}
}
/// <summary>
/// Check if a buffer is present.
/// </summary>
/// <param name="bufferTag">The unique tag of the buffer</param>
/// <returns>Return true if a buffer is present</returns>
public bool ContainsBuffer(ulong bufferTag)
{
lock (_parentLock)
{
return _session.ContainsBuffer(bufferTag);
}
}
/// <summary>
/// Get the count of samples played in this session.
/// </summary>
/// <returns>The count of samples played in this session</returns>
public ulong GetPlayedSampleCount()
{
lock (_parentLock)
{
return _session.GetPlayedSampleCount();
}
}
/// <summary>
/// Flush all buffers to the initial state.
/// </summary>
/// <returns>True if any buffers were flushed</returns>
public bool FlushBuffers()
{
lock (_parentLock)
{
return _session.FlushBuffers();
}
}
public void Dispose()
{
Dispose(true);
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
_session.Dispose();
_manager.Unregister(this);
}
}
}
}
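The buffer lifecycle implied by AppendBuffer, Update and GetReleasedBuffers can be summarized with a hedged sketch. The guest address, size and tag values are made up for illustration, "session" stands for an AudioInputSystem returned by AudioInputManager.OpenAudioIn, and AudioUserBuffer is assumed to be object-initializable with the Data/DataSize fields referenced above.

// Hypothetical lifecycle of one capture buffer, illustration only.
AudioUserBuffer userBuffer = new AudioUserBuffer
{
    Data     = 0x1000UL, // Guest address of the sample buffer (made up).
    DataSize = 0x2000UL  // Size of the sample buffer in bytes (made up).
};

ulong bufferTag = 1;

if (session.AppendBuffer(bufferTag, ref userBuffer) == ResultCode.Success)
{
    session.Start();
}

// Later, typically driven by AudioInputManager.Update() once the driver signaled progress:
session.Update();

Span<ulong> released = stackalloc ulong[4];

session.GetReleasedBuffers(released, out uint releasedCount);

for (uint i = 0; i < releasedCount; i++)
{
    // released[i] is the tag of a buffer the backend has finished capturing into.
}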

View File

@ -0,0 +1,82 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Common;
using System;
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.Integration
{
public class HardwareDeviceImpl : IHardwareDevice
{
private IHardwareDeviceSession _session;
private uint _channelCount;
private uint _sampleRate;
private uint _currentBufferTag;
private byte[] _buffer;
public HardwareDeviceImpl(IHardwareDeviceDriver deviceDriver, uint channelCount, uint sampleRate)
{
_session = deviceDriver.OpenDeviceSession(IHardwareDeviceDriver.Direction.Output, null, SampleFormat.PcmInt16, sampleRate, channelCount);
_channelCount = channelCount;
_sampleRate = sampleRate;
_currentBufferTag = 0;
_buffer = new byte[Constants.TargetSampleCount * channelCount * sizeof(ushort)];
_session.Start();
}
public void AppendBuffer(ReadOnlySpan<short> data, uint channelCount)
{
data.CopyTo(MemoryMarshal.Cast<byte, short>(_buffer));
_session.QueueBuffer(new AudioBuffer
{
DataPointer = _currentBufferTag++,
Data = _buffer,
DataSize = (ulong)_buffer.Length,
});
_currentBufferTag = _currentBufferTag % 4;
}
public uint GetChannelCount()
{
return _channelCount;
}
public uint GetSampleRate()
{
return _sampleRate;
}
public void Dispose()
{
Dispose(true);
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
_session.Dispose();
}
}
}
}
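The constructor above sizes the intermediate buffer for exactly one chunk of Constants.TargetSampleCount PCM16 frames (TargetSampleCount * channelCount * 2 bytes), so each AppendBuffer call is expected to carry one such chunk. A hedged usage sketch, where the driver instance and the 240-frame value are assumptions of the example:

// Illustrative only: pushing one chunk of interleaved stereo PCM16 samples.
const int assumedTargetSampleCount = 240; // Stand-in for Constants.TargetSampleCount.
const uint channelCount = 2;

using HardwareDeviceImpl device = new HardwareDeviceImpl(deviceDriver, channelCount, 48000);

short[] chunk = new short[assumedTargetSampleCount * channelCount]; // frames * channels samples.

device.AppendBuffer(chunk, channelCount);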

View File

@ -0,0 +1,60 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using System;
using System.Diagnostics;
namespace Ryujinx.Audio.Integration
{
/// <summary>
/// Represent a hardware device used in <see cref="Renderer.Dsp.Command.DeviceSinkCommand"/>.
/// </summary>
public interface IHardwareDevice : IDisposable
{
/// <summary>
/// Get the supported sample rate of this device.
/// </summary>
/// <returns>The supported sample rate of this device.</returns>
uint GetSampleRate();
/// <summary>
/// Get the channel count supported by this device.
/// </summary>
/// <returns>The channel count supported by this device.</returns>
uint GetChannelCount();
/// <summary>
/// Appends new PCM16 samples to the device.
/// </summary>
/// <param name="data">The new PCM16 samples.</param>
/// <param name="channelCount">The number of channels.</param>
void AppendBuffer(ReadOnlySpan<short> data, uint channelCount);
/// <summary>
/// Check if the audio renderer needs to perform downmixing.
/// </summary>
/// <returns>True if downmixing is needed.</returns>
public bool NeedDownmixing()
{
uint channelCount = GetChannelCount();
Debug.Assert(channelCount > 0 && channelCount <= Constants.ChannelCountMax);
return channelCount != Constants.ChannelCountMax;
}
}
}
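A short hedged sketch of how a sink might use NeedDownmixing before appending samples; the interface members come from the definition above, while the surrounding helper and its parameters are assumptions for illustration.

using Ryujinx.Audio.Integration;
using System;

// Illustrative only: choose between 5.1 data and already-downmixed stereo data.
static void Output(IHardwareDevice device, ReadOnlySpan<short> surroundSamples, ReadOnlySpan<short> stereoSamples)
{
    if (device.NeedDownmixing())
    {
        // The device exposes fewer channels than Constants.ChannelCountMax.
        device.AppendBuffer(stereoSamples, device.GetChannelCount());
    }
    else
    {
        device.AppendBuffer(surroundSamples, device.GetChannelCount());
    }
}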

View File

@ -0,0 +1,50 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Common;
using Ryujinx.Memory;
using System;
using System.Threading;
namespace Ryujinx.Audio.Integration
{
/// <summary>
/// Represent a hardware device driver used in <see cref="Output.AudioOutputSystem"/>.
/// </summary>
public interface IHardwareDeviceDriver : IDisposable
{
public enum Direction
{
Input,
Output
}
IHardwareDeviceSession OpenDeviceSession(Direction direction, IVirtualMemoryManager memoryManager, SampleFormat sampleFormat, uint sampleRate, uint channelCount);
ManualResetEvent GetUpdateRequiredEvent();
bool SupportsDirection(Direction direction);
bool SupportsSampleRate(uint sampleRate);
bool SupportsSampleFormat(SampleFormat sampleFormat);
bool SupportsChannelCount(uint channelCount);
IHardwareDeviceDriver GetRealDeviceDriver()
{
return this;
}
}
}
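Since this PR moves every backend implementation into its own project behind this interface, a minimal no-op driver sketch may help show what a backend has to provide. This is an illustration only, not the dummy backend shipped in the PR; the class and member names other than the two interfaces are assumptions.

using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using Ryujinx.Memory;
using System.Threading;

// Hypothetical no-op backend driver and session, illustration only.
public class NullHardwareDeviceDriver : IHardwareDeviceDriver
{
    private readonly ManualResetEvent _updateRequiredEvent = new ManualResetEvent(false);

    public IHardwareDeviceSession OpenDeviceSession(IHardwareDeviceDriver.Direction direction,
        IVirtualMemoryManager memoryManager, SampleFormat sampleFormat, uint sampleRate, uint channelCount)
    {
        return new NullHardwareDeviceSession();
    }

    public ManualResetEvent GetUpdateRequiredEvent() => _updateRequiredEvent;

    public bool SupportsDirection(IHardwareDeviceDriver.Direction direction) => true;
    public bool SupportsSampleRate(uint sampleRate) => true;
    public bool SupportsSampleFormat(SampleFormat sampleFormat) => true;
    public bool SupportsChannelCount(uint channelCount) => true;

    public void Dispose() => _updateRequiredEvent.Dispose();

    private class NullHardwareDeviceSession : IHardwareDeviceSession
    {
        private float _volume = 1.0f;

        public bool RegisterBuffer(AudioBuffer buffer) => true;
        public void UnregisterBuffer(AudioBuffer buffer) { }
        public void QueueBuffer(AudioBuffer buffer) { } // Samples are silently dropped.
        public bool WasBufferFullyConsumed(AudioBuffer buffer) => true;
        public void SetVolume(float volume) => _volume = volume;
        public float GetVolume() => _volume;
        public ulong GetPlayedSampleCount() => 0;
        public void Start() { }
        public void Stop() { }
        public void PrepareToClose() { }
        public void Dispose() { }
    }
}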

View File

@ -0,0 +1,45 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Common;
using System;
namespace Ryujinx.Audio.Integration
{
public interface IHardwareDeviceSession : IDisposable
{
bool RegisterBuffer(AudioBuffer buffer);
void UnregisterBuffer(AudioBuffer buffer);
void QueueBuffer(AudioBuffer buffer);
bool WasBufferFullyConsumed(AudioBuffer buffer);
void SetVolume(float volume);
float GetVolume();
ulong GetPlayedSampleCount();
void Start();
void Stop();
void PrepareToClose();
}
}

View File

@ -0,0 +1,35 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
namespace Ryujinx.Audio.Integration
{
/// <summary>
/// Represent a writable event with manual clear.
/// </summary>
public interface IWritableEvent
{
/// <summary>
/// Signal the event.
/// </summary>
void Signal();
/// <summary>
/// Clear the signaled state of the event.
/// </summary>
void Clear();
}
}
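As a hedged sketch, an IWritableEvent can be backed by a plain ManualResetEvent; the HLE layer is expected to provide its own implementation that signals a guest-visible event instead, so this class name and approach are only an assumption for illustration.

using Ryujinx.Audio.Integration;
using System.Threading;

// Illustrative implementation of IWritableEvent over a ManualResetEvent.
public class ManualResetWritableEvent : IWritableEvent
{
    private readonly ManualResetEvent _event = new ManualResetEvent(false);

    public void Signal()
    {
        _event.Set();
    }

    public void Clear()
    {
        _event.Reset();
    }
}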

Ryujinx.Audio/LICENSE.txt
View File

@ -0,0 +1,165 @@
GNU LESSER GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
This version of the GNU Lesser General Public License incorporates
the terms and conditions of version 3 of the GNU General Public
License, supplemented by the additional permissions listed below.
0. Additional Definitions.
As used herein, "this License" refers to version 3 of the GNU Lesser
General Public License, and the "GNU GPL" refers to version 3 of the GNU
General Public License.
"The Library" refers to a covered work governed by this License,
other than an Application or a Combined Work as defined below.
An "Application" is any work that makes use of an interface provided
by the Library, but which is not otherwise based on the Library.
Defining a subclass of a class defined by the Library is deemed a mode
of using an interface provided by the Library.
A "Combined Work" is a work produced by combining or linking an
Application with the Library. The particular version of the Library
with which the Combined Work was made is also called the "Linked
Version".
The "Minimal Corresponding Source" for a Combined Work means the
Corresponding Source for the Combined Work, excluding any source code
for portions of the Combined Work that, considered in isolation, are
based on the Application, and not on the Linked Version.
The "Corresponding Application Code" for a Combined Work means the
object code and/or source code for the Application, including any data
and utility programs needed for reproducing the Combined Work from the
Application, but excluding the System Libraries of the Combined Work.
1. Exception to Section 3 of the GNU GPL.
You may convey a covered work under sections 3 and 4 of this License
without being bound by section 3 of the GNU GPL.
2. Conveying Modified Versions.
If you modify a copy of the Library, and, in your modifications, a
facility refers to a function or data to be supplied by an Application
that uses the facility (other than as an argument passed when the
facility is invoked), then you may convey a copy of the modified
version:
a) under this License, provided that you make a good faith effort to
ensure that, in the event an Application does not supply the
function or data, the facility still operates, and performs
whatever part of its purpose remains meaningful, or
b) under the GNU GPL, with none of the additional permissions of
this License applicable to that copy.
3. Object Code Incorporating Material from Library Header Files.
The object code form of an Application may incorporate material from
a header file that is part of the Library. You may convey such object
code under terms of your choice, provided that, if the incorporated
material is not limited to numerical parameters, data structure
layouts and accessors, or small macros, inline functions and templates
(ten or fewer lines in length), you do both of the following:
a) Give prominent notice with each copy of the object code that the
Library is used in it and that the Library and its use are
covered by this License.
b) Accompany the object code with a copy of the GNU GPL and this license
document.
4. Combined Works.
You may convey a Combined Work under terms of your choice that,
taken together, effectively do not restrict modification of the
portions of the Library contained in the Combined Work and reverse
engineering for debugging such modifications, if you also do each of
the following:
a) Give prominent notice with each copy of the Combined Work that
the Library is used in it and that the Library and its use are
covered by this License.
b) Accompany the Combined Work with a copy of the GNU GPL and this license
document.
c) For a Combined Work that displays copyright notices during
execution, include the copyright notice for the Library among
these notices, as well as a reference directing the user to the
copies of the GNU GPL and this license document.
d) Do one of the following:
0) Convey the Minimal Corresponding Source under the terms of this
License, and the Corresponding Application Code in a form
suitable for, and under terms that permit, the user to
recombine or relink the Application with a modified version of
the Linked Version to produce a modified Combined Work, in the
manner specified by section 6 of the GNU GPL for conveying
Corresponding Source.
1) Use a suitable shared library mechanism for linking with the
Library. A suitable mechanism is one that (a) uses at run time
a copy of the Library already present on the user's computer
system, and (b) will operate properly with a modified version
of the Library that is interface-compatible with the Linked
Version.
e) Provide Installation Information, but only if you would otherwise
be required to provide such information under section 6 of the
GNU GPL, and only to the extent that such information is
necessary to install and execute a modified version of the
Combined Work produced by recombining or relinking the
Application with a modified version of the Linked Version. (If
you use option 4d0, the Installation Information must accompany
the Minimal Corresponding Source and Corresponding Application
Code. If you use option 4d1, you must provide the Installation
Information in the manner specified by section 6 of the GNU GPL
for conveying Corresponding Source.)
5. Combined Libraries.
You may place library facilities that are a work based on the
Library side by side in a single library together with other library
facilities that are not Applications and are not covered by this
License, and convey such a combined library under terms of your
choice, if you do both of the following:
a) Accompany the combined library with a copy of the same work based
on the Library, uncombined with any other library facilities,
conveyed under the terms of this License.
b) Give prominent notice with the combined library that part of it
is a work based on the Library, and explaining where to find the
accompanying uncombined form of the same work.
6. Revised Versions of the GNU Lesser General Public License.
The Free Software Foundation may publish revised and/or new versions
of the GNU Lesser General Public License from time to time. Such new
versions will be similar in spirit to the present version, but may
differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the
Library as you received it specifies that a certain numbered version
of the GNU Lesser General Public License "or any later version"
applies to it, you have the option of following the terms and
conditions either of that published version or of any later version
published by the Free Software Foundation. If the Library as you
received it does not specify a version number of the GNU Lesser
General Public License, you may choose any version of the GNU Lesser
General Public License ever published by the Free Software Foundation.
If the Library as you received it specifies that a proxy can decide
whether future versions of the GNU Lesser General Public License shall
apply, that proxy's public statement of acceptance of any version is
permanent authorization for you to choose that version for the
Library.

View File

@ -1,38 +0,0 @@
using System;
using System.Runtime.InteropServices;
namespace SoundIOSharp
{
public static class MarshalEx
{
public static double ReadDouble(IntPtr handle, int offset = 0)
{
return BitConverter.Int64BitsToDouble(Marshal.ReadInt64(handle, offset));
}
public static void WriteDouble(IntPtr handle, double value)
{
WriteDouble(handle, 0, value);
}
public static void WriteDouble(IntPtr handle, int offset, double value)
{
Marshal.WriteInt64(handle, offset, BitConverter.DoubleToInt64Bits(value));
}
public static float ReadFloat(IntPtr handle, int offset = 0)
{
return BitConverter.Int32BitsToSingle(Marshal.ReadInt32(handle, offset));
}
public static void WriteFloat(IntPtr handle, float value)
{
WriteFloat(handle, 0, value);
}
public static void WriteFloat(IntPtr handle, int offset, float value)
{
Marshal.WriteInt32(handle, offset, BitConverter.SingleToInt32Bits(value));
}
}
}

View File

@ -1,386 +0,0 @@
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
namespace SoundIOSharp
{
public class SoundIO : IDisposable
{
Pointer<SoundIo> handle;
public SoundIO()
{
handle = Natives.soundio_create();
}
internal SoundIO(Pointer<SoundIo> handle)
{
this.handle = handle;
}
public void Dispose ()
{
foreach (var h in allocated_hglobals)
{
Marshal.FreeHGlobal(h);
}
Natives.soundio_destroy(handle);
}
// Equality (based on handle)
public override bool Equals(object other)
{
var d = other as SoundIO;
return d != null && this.handle == d.handle;
}
public override int GetHashCode()
{
return (int)(IntPtr)handle;
}
public static bool operator == (SoundIO obj1, SoundIO obj2)
{
return obj1 is null ? obj2 is null : obj1.Equals(obj2);
}
public static bool operator != (SoundIO obj1, SoundIO obj2)
{
return obj1 is null ? obj2 is object : !obj1.Equals(obj2);
}
// fields
// FIXME: this should be taken care in more centralized/decent manner... we don't want to write
// this kind of code anywhere we need string marshaling.
List<IntPtr> allocated_hglobals = new List<IntPtr>();
public string ApplicationName {
get { return Marshal.PtrToStringAnsi(Marshal.ReadIntPtr(handle, app_name_offset)); }
set
{
unsafe
{
var existing = Marshal.ReadIntPtr(handle, app_name_offset);
if (allocated_hglobals.Contains (existing))
{
allocated_hglobals.Remove(existing);
Marshal.FreeHGlobal(existing);
}
var ptr = Marshal.StringToHGlobalAnsi(value);
Marshal.WriteIntPtr(handle, app_name_offset, ptr);
allocated_hglobals.Add(ptr);
}
}
}
static readonly int app_name_offset = (int)Marshal.OffsetOf<SoundIo>("app_name");
public SoundIOBackend CurrentBackend
{
get { return (SoundIOBackend)Marshal.ReadInt32(handle, current_backend_offset); }
}
static readonly int current_backend_offset = (int)Marshal.OffsetOf<SoundIo>("current_backend");
// emit_rtprio_warning
public Action EmitRealtimePriorityWarning
{
get { return emit_rtprio_warning; }
set
{
emit_rtprio_warning = value;
// Marshal the realtime-priority warning callback itself, not the devices-change callback.
var ptr = Marshal.GetFunctionPointerForDelegate(emit_rtprio_warning);
Marshal.WriteIntPtr(handle, emit_rtprio_warning_offset, ptr);
}
}
static readonly int emit_rtprio_warning_offset = (int)Marshal.OffsetOf<SoundIo>("emit_rtprio_warning");
Action emit_rtprio_warning;
// jack_error_callback
public Action<string> JackErrorCallback
{
get { return jack_error_callback; }
set
{
jack_error_callback = value;
if (value == null)
{
jack_error_callback = null;
}
else
{
jack_error_callback_native = msg => jack_error_callback(msg);
}
var ptr = Marshal.GetFunctionPointerForDelegate(jack_error_callback_native);
Marshal.WriteIntPtr(handle, jack_error_callback_offset, ptr);
}
}
static readonly int jack_error_callback_offset = (int)Marshal.OffsetOf<SoundIo>("jack_error_callback");
Action<string> jack_error_callback;
delegate void jack_error_delegate(string message);
jack_error_delegate jack_error_callback_native;
// jack_info_callback
public Action<string> JackInfoCallback
{
get { return jack_info_callback; }
set
{
jack_info_callback = value;
if (value == null)
{
jack_info_callback = null;
}
else
{
jack_info_callback_native = msg => jack_info_callback(msg);
}
var ptr = Marshal.GetFunctionPointerForDelegate(jack_info_callback_native);
Marshal.WriteIntPtr(handle, jack_info_callback_offset, ptr);
}
}
static readonly int jack_info_callback_offset = (int)Marshal.OffsetOf<SoundIo>("jack_info_callback");
Action<string> jack_info_callback;
delegate void jack_info_delegate(string message);
jack_info_delegate jack_info_callback_native;
// on_backend_disconnect
public Action<int> OnBackendDisconnect
{
get { return on_backend_disconnect; }
set
{
on_backend_disconnect = value;
if (value == null)
{
on_backend_disconnect_native = null;
}
else
{
on_backend_disconnect_native = (sio, err) => on_backend_disconnect(err);
}
var ptr = Marshal.GetFunctionPointerForDelegate(on_backend_disconnect_native);
Marshal.WriteIntPtr(handle, on_backend_disconnect_offset, ptr);
}
}
static readonly int on_backend_disconnect_offset = (int)Marshal.OffsetOf<SoundIo>("on_backend_disconnect");
Action<int> on_backend_disconnect;
delegate void on_backend_disconnect_delegate(IntPtr handle, int errorCode);
on_backend_disconnect_delegate on_backend_disconnect_native;
// on_devices_change
public Action OnDevicesChange
{
get { return on_devices_change; }
set
{
on_devices_change = value;
if (value == null)
{
on_devices_change_native = null;
}
else
{
on_devices_change_native = sio => on_devices_change();
}
var ptr = Marshal.GetFunctionPointerForDelegate(on_devices_change_native);
Marshal.WriteIntPtr(handle, on_devices_change_offset, ptr);
}
}
static readonly int on_devices_change_offset = (int)Marshal.OffsetOf<SoundIo>("on_devices_change");
Action on_devices_change;
delegate void on_devices_change_delegate(IntPtr handle);
on_devices_change_delegate on_devices_change_native;
// on_events_signal
public Action OnEventsSignal
{
get { return on_events_signal; }
set
{
on_events_signal = value;
if (value == null)
{
on_events_signal_native = null;
}
else
{
on_events_signal_native = sio => on_events_signal();
}
var ptr = Marshal.GetFunctionPointerForDelegate(on_events_signal_native);
Marshal.WriteIntPtr(handle, on_events_signal_offset, ptr);
}
}
static readonly int on_events_signal_offset = (int)Marshal.OffsetOf<SoundIo>("on_events_signal");
Action on_events_signal;
delegate void on_events_signal_delegate(IntPtr handle);
on_events_signal_delegate on_events_signal_native;
// functions
public int BackendCount
{
get { return Natives.soundio_backend_count(handle); }
}
public int InputDeviceCount
{
get { return Natives.soundio_input_device_count(handle); }
}
public int OutputDeviceCount
{
get { return Natives.soundio_output_device_count(handle); }
}
public int DefaultInputDeviceIndex
{
get { return Natives.soundio_default_input_device_index(handle); }
}
public int DefaultOutputDeviceIndex
{
get { return Natives.soundio_default_output_device_index(handle); }
}
public SoundIOBackend GetBackend(int index)
{
return (SoundIOBackend)Natives.soundio_get_backend(handle, index);
}
public SoundIODevice GetInputDevice(int index)
{
return new SoundIODevice(Natives.soundio_get_input_device(handle, index));
}
public SoundIODevice GetOutputDevice(int index)
{
return new SoundIODevice(Natives.soundio_get_output_device(handle, index));
}
public void Connect()
{
var ret = (SoundIoError)Natives.soundio_connect(handle);
if (ret != SoundIoError.SoundIoErrorNone)
{
throw new SoundIOException(ret);
}
}
public void ConnectBackend(SoundIOBackend backend)
{
var ret = (SoundIoError)Natives.soundio_connect_backend(handle, (SoundIoBackend)backend);
if (ret != SoundIoError.SoundIoErrorNone)
{
throw new SoundIOException(ret);
}
}
public void Disconnect()
{
Natives.soundio_disconnect(handle);
}
public void FlushEvents()
{
Natives.soundio_flush_events(handle);
}
public void WaitEvents()
{
Natives.soundio_wait_events(handle);
}
public void Wakeup()
{
Natives.soundio_wakeup(handle);
}
public void ForceDeviceScan()
{
Natives.soundio_force_device_scan(handle);
}
public SoundIORingBuffer CreateRingBuffer(int capacity)
{
return new SoundIORingBuffer(Natives.soundio_ring_buffer_create(handle, capacity));
}
// static methods
public static string VersionString
{
get { return Marshal.PtrToStringAnsi(Natives.soundio_version_string()); }
}
public static int VersionMajor
{
get { return Natives.soundio_version_major(); }
}
public static int VersionMinor
{
get { return Natives.soundio_version_minor(); }
}
public static int VersionPatch
{
get { return Natives.soundio_version_patch(); }
}
public static string GetBackendName(SoundIOBackend backend)
{
return Marshal.PtrToStringAnsi(Natives.soundio_backend_name((SoundIoBackend)backend));
}
public static bool HaveBackend(SoundIOBackend backend)
{
return Natives.soundio_have_backend((SoundIoBackend)backend);
}
public static int GetBytesPerSample(SoundIOFormat format)
{
return Natives.soundio_get_bytes_per_sample((SoundIoFormat)format);
}
public static int GetBytesPerFrame(SoundIOFormat format, int channelCount)
{
return Natives.soundio_get_bytes_per_frame((SoundIoFormat)format, channelCount);
}
public static int GetBytesPerSecond(SoundIOFormat format, int channelCount, int sampleRate)
{
return Natives.soundio_get_bytes_per_second((SoundIoFormat)format, channelCount, sampleRate);
}
public static string GetSoundFormatName(SoundIOFormat format)
{
return Marshal.PtrToStringAnsi(Natives.soundio_format_string((SoundIoFormat)format));
}
}
}
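For context on the removed SoundIOSharp wrapper, here is a hedged usage sketch of the context object above: connect to the best available backend, pump pending events, then grab the default output device. The flow mirrors the members listed in this class; any surrounding code is an assumption.

using SoundIOSharp;

// Illustrative only: basic libsoundio context setup through the wrapper.
using (var api = new SoundIO())
{
    api.Connect();
    api.FlushEvents();

    int index = api.DefaultOutputDeviceIndex;

    if (index >= 0)
    {
        SoundIODevice device = api.GetOutputDevice(index);

        // device.Name, device.SupportsFormat(...) etc. can be queried here.

        device.RemoveReference();
    }

    api.Disconnect();
}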

View File

@ -1,13 +0,0 @@
namespace SoundIOSharp
{
public enum SoundIOBackend
{
None,
Jack,
PulseAudio,
Alsa,
CoreAudio,
Wasapi,
Dummy
}
}

View File

@ -1,30 +0,0 @@
using System;
using System.Runtime.InteropServices;
namespace SoundIOSharp
{
public struct SoundIOChannelArea
{
internal SoundIOChannelArea(Pointer<SoundIoChannelArea> handle)
{
this.handle = handle;
}
Pointer<SoundIoChannelArea> handle;
public IntPtr Pointer
{
get { return Marshal.ReadIntPtr(handle, ptr_offset); }
set { Marshal.WriteIntPtr(handle, ptr_offset, value); }
}
static readonly int ptr_offset = (int)Marshal.OffsetOf<SoundIoChannelArea>("ptr");
public int Step
{
get { return Marshal.ReadInt32(handle, step_offset); }
}
static readonly int step_offset = (int)Marshal.OffsetOf<SoundIoChannelArea>("step");
}
}

View File

@ -1,34 +0,0 @@
using System;
using System.Runtime.InteropServices;
namespace SoundIOSharp
{
public struct SoundIOChannelAreas
{
static readonly int native_size = Marshal.SizeOf<SoundIoChannelArea>();
internal SoundIOChannelAreas(IntPtr head, int channelCount, int frameCount)
{
this.head = head;
this.channel_count = channelCount;
this.frame_count = frameCount;
}
IntPtr head;
int channel_count;
int frame_count;
public bool IsEmpty
{
get { return head == IntPtr.Zero; }
}
public SoundIOChannelArea GetArea(int channel)
{
return new SoundIOChannelArea(head + native_size * channel);
}
public int ChannelCount => channel_count;
public int FrameCount => frame_count;
}
}

View File

@ -1,75 +0,0 @@
namespace SoundIOSharp
{
public enum SoundIOChannelId
{
Invalid,
FrontLeft,
FrontRight,
FrontCenter,
Lfe,
BackLeft,
BackRight,
FrontLeftCenter,
FrontRightCenter,
BackCenter,
SideLeft,
SideRight,
TopCenter,
TopFrontLeft,
TopFrontCenter,
TopFrontRight,
TopBackLeft,
TopBackCenter,
TopBackRight,
BackLeftCenter,
BackRightCenter,
FrontLeftWide,
FrontRightWide,
FrontLeftHigh,
FrontCenterHigh,
FrontRightHigh,
TopFrontLeftCenter,
TopFrontRightCenter,
TopSideLeft,
TopSideRight,
LeftLfe,
RightLfe,
Lfe2,
BottomCenter,
BottomLeftCenter,
BottomRightCenter,
MsMid,
MsSide,
AmbisonicW,
AmbisonicX,
AmbisonicY,
AmbisonicZ,
XyX,
XyY,
HeadphonesLeft,
HeadphonesRight,
ClickTrack,
ForeignLanguage,
HearingImpaired,
Narration,
Haptic,
DialogCentricMix,
Aux,
Aux0,
Aux1,
Aux2,
Aux3,
Aux4,
Aux5,
Aux6,
Aux7,
Aux8,
Aux9,
Aux10,
Aux11,
Aux12,
Aux13,
Aux14,
Aux15
}
}

View File

@ -1,116 +0,0 @@
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
namespace SoundIOSharp
{
public struct SoundIOChannelLayout
{
public static int BuiltInCount
{
get { return Natives.soundio_channel_layout_builtin_count(); }
}
public static SoundIOChannelLayout GetBuiltIn(int index)
{
return new SoundIOChannelLayout(Natives.soundio_channel_layout_get_builtin(index));
}
public static SoundIOChannelLayout GetDefault(int channelCount)
{
var handle = Natives.soundio_channel_layout_get_default(channelCount);
return new SoundIOChannelLayout (handle);
}
public static SoundIOChannelId ParseChannelId(string name)
{
var ptr = Marshal.StringToHGlobalAnsi(name);
try
{
return (SoundIOChannelId)Natives.soundio_parse_channel_id(ptr, name.Length);
}
finally
{
Marshal.FreeHGlobal(ptr);
}
}
// instance members
internal SoundIOChannelLayout(Pointer<SoundIoChannelLayout> handle)
{
this.handle = handle;
}
readonly Pointer<SoundIoChannelLayout> handle;
public bool IsNull
{
get { return handle.Handle == IntPtr.Zero; }
}
internal IntPtr Handle
{
get { return handle; }
}
public int ChannelCount
{
get { return IsNull ? 0 : Marshal.ReadInt32((IntPtr)handle + channel_count_offset); }
}
static readonly int channel_count_offset = (int)Marshal.OffsetOf<SoundIoChannelLayout>("channel_count");
public string Name
{
get { return IsNull ? null : Marshal.PtrToStringAnsi(Marshal.ReadIntPtr((IntPtr)handle + name_offset)); }
}
static readonly int name_offset = (int)Marshal.OffsetOf<SoundIoChannelLayout>("name");
public IEnumerable<SoundIOChannelId> Channels
{
get
{
if (IsNull) yield break;
for (int i = 0; i < 24; i++)
{
yield return (SoundIOChannelId)Marshal.ReadInt32((IntPtr)handle + channels_offset + sizeof(SoundIoChannelId) * i);
}
}
}
static readonly int channels_offset = (int)Marshal.OffsetOf<SoundIoChannelLayout>("channels");
public override bool Equals(object other)
{
if (!(other is SoundIOChannelLayout)) return false;
var s = (SoundIOChannelLayout) other;
return handle == s.handle || Natives.soundio_channel_layout_equal(handle, s.handle);
}
public override int GetHashCode()
{
return handle.GetHashCode();
}
public string DetectBuiltInName()
{
if (IsNull) throw new InvalidOperationException();
return Natives.soundio_channel_layout_detect_builtin(handle) ? Name : null;
}
public int FindChannel(SoundIOChannelId channel)
{
if (IsNull) throw new InvalidOperationException();
return Natives.soundio_channel_layout_find_channel(handle, (SoundIoChannelId)channel);
}
}
}

View File

@ -1,267 +0,0 @@
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
namespace SoundIOSharp
{
public class SoundIODevice
{
public static SoundIOChannelLayout BestMatchingChannelLayout(SoundIODevice device1, SoundIODevice device2)
{
var ptr1 = Marshal.ReadIntPtr(device1.handle, layouts_offset);
var ptr2 = Marshal.ReadIntPtr(device2.handle, layouts_offset);
return new SoundIOChannelLayout(Natives.soundio_best_matching_channel_layout(ptr1, device1.LayoutCount, ptr2, device2.LayoutCount));
}
internal SoundIODevice(Pointer<SoundIoDevice> handle)
{
this.handle = handle;
}
readonly Pointer<SoundIoDevice> handle;
// Equality (based on handle and native func)
public override bool Equals(object other)
{
var d = other as SoundIODevice;
return d != null && (this.handle == d.handle || Natives.soundio_device_equal (this.handle, d.handle));
}
public override int GetHashCode()
{
return (int)(IntPtr)handle;
}
public static bool operator == (SoundIODevice obj1, SoundIODevice obj2)
{
return obj1 is null ? obj2 is null : obj1.Equals(obj2);
}
public static bool operator != (SoundIODevice obj1, SoundIODevice obj2)
{
return obj1 is null ? obj2 is object : !obj1.Equals(obj2);
}
// fields
public SoundIODeviceAim Aim
{
get { return (SoundIODeviceAim)Marshal.ReadInt32(handle, aim_offset); }
}
static readonly int aim_offset = (int)Marshal.OffsetOf<SoundIoDevice>("aim");
public SoundIOFormat CurrentFormat
{
get { return (SoundIOFormat)Marshal.ReadInt32(handle, current_format_offset); }
}
static readonly int current_format_offset = (int)Marshal.OffsetOf<SoundIoDevice>("current_format");
public SoundIOChannelLayout CurrentLayout
{
get { return new SoundIOChannelLayout((IntPtr)handle + current_layout_offset); }
}
static readonly int current_layout_offset = (int)Marshal.OffsetOf<SoundIoDevice>("current_layout");
public int FormatCount
{
get { return Marshal.ReadInt32(handle, format_count_offset); }
}
static readonly int format_count_offset = (int)Marshal.OffsetOf<SoundIoDevice>("format_count");
public IEnumerable<SoundIOFormat> Formats
{
get
{
var ptr = Marshal.ReadIntPtr(handle, formats_offset);
for (int i = 0; i < FormatCount; i++)
{
yield return (SoundIOFormat)Marshal.ReadInt32(ptr, i);
}
}
}
static readonly int formats_offset = (int)Marshal.OffsetOf<SoundIoDevice>("formats");
public string Id
{
get { return Marshal.PtrToStringAnsi(Marshal.ReadIntPtr(handle, id_offset)); }
}
static readonly int id_offset = (int)Marshal.OffsetOf<SoundIoDevice>("id");
public bool IsRaw
{
get { return Marshal.ReadInt32(handle, is_raw_offset) != 0; }
}
static readonly int is_raw_offset = (int)Marshal.OffsetOf<SoundIoDevice>("is_raw");
public int LayoutCount
{
get { return Marshal.ReadInt32(handle, layout_count_offset); }
}
static readonly int layout_count_offset = (int)Marshal.OffsetOf<SoundIoDevice>("layout_count");
public IEnumerable<SoundIOChannelLayout> Layouts
{
get
{
var ptr = Marshal.ReadIntPtr (handle, layouts_offset);
for (int i = 0; i < LayoutCount; i++)
{
yield return new SoundIOChannelLayout(ptr + i * Marshal.SizeOf<SoundIoChannelLayout>());
}
}
}
static readonly int layouts_offset = (int)Marshal.OffsetOf<SoundIoDevice>("layouts");
public string Name
{
get { return Marshal.PtrToStringAnsi(Marshal.ReadIntPtr(handle, name_offset)); }
}
static readonly int name_offset = (int)Marshal.OffsetOf<SoundIoDevice>("name");
public int ProbeError
{
get { return Marshal.ReadInt32(handle, probe_error_offset); }
}
static readonly int probe_error_offset = (int)Marshal.OffsetOf<SoundIoDevice>("probe_error");
public int ReferenceCount
{
get { return Marshal.ReadInt32(handle, ref_count_offset); }
}
static readonly int ref_count_offset = (int)Marshal.OffsetOf<SoundIoDevice>("ref_count");
public int SampleRateCount
{
get { return Marshal.ReadInt32(handle, sample_rate_count_offset); }
}
static readonly int sample_rate_count_offset = (int)Marshal.OffsetOf<SoundIoDevice>("sample_rate_count");
public IEnumerable<SoundIOSampleRateRange> SampleRates
{
get
{
var ptr = Marshal.ReadIntPtr(handle, sample_rates_offset);
for (int i = 0; i < SampleRateCount; i++)
{
yield return new SoundIOSampleRateRange(Marshal.ReadInt32(ptr, i * 2), Marshal.ReadInt32(ptr, i * 2 + 1));
}
}
}
static readonly int sample_rates_offset = (int)Marshal.OffsetOf<SoundIoDevice>("sample_rates");
public double SoftwareLatencyCurrent
{
get { return MarshalEx.ReadDouble(handle, software_latency_current_offset); }
set { MarshalEx.WriteDouble(handle, software_latency_current_offset, value); }
}
static readonly int software_latency_current_offset = (int)Marshal.OffsetOf<SoundIoDevice>("software_latency_current");
public double SoftwareLatencyMin
{
get { return MarshalEx.ReadDouble(handle, software_latency_min_offset); }
set { MarshalEx.WriteDouble(handle, software_latency_min_offset, value); }
}
static readonly int software_latency_min_offset = (int)Marshal.OffsetOf<SoundIoDevice>("software_latency_min");
public double SoftwareLatencyMax
{
get { return MarshalEx.ReadDouble(handle, software_latency_max_offset); }
set { MarshalEx.WriteDouble(handle, software_latency_max_offset, value); }
}
static readonly int software_latency_max_offset = (int)Marshal.OffsetOf<SoundIoDevice>("software_latency_max");
public SoundIO SoundIO
{
get { return new SoundIO(Marshal.ReadIntPtr(handle, soundio_offset)); }
}
static readonly int soundio_offset = (int)Marshal.OffsetOf<SoundIoDevice>("soundio");
// functions
public void AddReference()
{
Natives.soundio_device_ref(handle);
}
public void RemoveReference()
{
Natives.soundio_device_unref(handle);
}
public void SortDeviceChannelLayouts()
{
Natives.soundio_device_sort_channel_layouts(handle);
}
public static readonly SoundIOFormat S16NE = BitConverter.IsLittleEndian ? SoundIOFormat.S16LE : SoundIOFormat.S16BE;
public static readonly SoundIOFormat U16NE = BitConverter.IsLittleEndian ? SoundIOFormat.U16LE : SoundIOFormat.U16BE;
public static readonly SoundIOFormat S24NE = BitConverter.IsLittleEndian ? SoundIOFormat.S24LE : SoundIOFormat.S24BE;
public static readonly SoundIOFormat U24NE = BitConverter.IsLittleEndian ? SoundIOFormat.U24LE : SoundIOFormat.U24BE;
public static readonly SoundIOFormat S32NE = BitConverter.IsLittleEndian ? SoundIOFormat.S32LE : SoundIOFormat.S32BE;
public static readonly SoundIOFormat U32NE = BitConverter.IsLittleEndian ? SoundIOFormat.U32LE : SoundIOFormat.U32BE;
public static readonly SoundIOFormat Float32NE = BitConverter.IsLittleEndian ? SoundIOFormat.Float32LE : SoundIOFormat.Float32BE;
public static readonly SoundIOFormat Float64NE = BitConverter.IsLittleEndian ? SoundIOFormat.Float64LE : SoundIOFormat.Float64BE;
public static readonly SoundIOFormat S16FE = !BitConverter.IsLittleEndian ? SoundIOFormat.S16LE : SoundIOFormat.S16BE;
public static readonly SoundIOFormat U16FE = !BitConverter.IsLittleEndian ? SoundIOFormat.U16LE : SoundIOFormat.U16BE;
public static readonly SoundIOFormat S24FE = !BitConverter.IsLittleEndian ? SoundIOFormat.S24LE : SoundIOFormat.S24BE;
public static readonly SoundIOFormat U24FE = !BitConverter.IsLittleEndian ? SoundIOFormat.U24LE : SoundIOFormat.U24BE;
public static readonly SoundIOFormat S32FE = !BitConverter.IsLittleEndian ? SoundIOFormat.S32LE : SoundIOFormat.S32BE;
public static readonly SoundIOFormat U32FE = !BitConverter.IsLittleEndian ? SoundIOFormat.U32LE : SoundIOFormat.U32BE;
public static readonly SoundIOFormat Float32FE = !BitConverter.IsLittleEndian ? SoundIOFormat.Float32LE : SoundIOFormat.Float32BE;
public static readonly SoundIOFormat Float64FE = !BitConverter.IsLittleEndian ? SoundIOFormat.Float64LE : SoundIOFormat.Float64BE;
public bool SupportsFormat(SoundIOFormat format)
{
return Natives.soundio_device_supports_format(handle, (SoundIoFormat)format);
}
public bool SupportsSampleRate(int sampleRate)
{
return Natives.soundio_device_supports_sample_rate(handle, sampleRate);
}
public bool SupportsChannelCount(int channelCount)
{
return Natives.soundio_device_supports_layout(handle, SoundIOChannelLayout.GetDefault(channelCount).Handle);
}
public int GetNearestSampleRate(int sampleRate)
{
return Natives.soundio_device_nearest_sample_rate(handle, sampleRate);
}
public SoundIOInStream CreateInStream()
{
return new SoundIOInStream(Natives.soundio_instream_create(handle));
}
public SoundIOOutStream CreateOutStream()
{
return new SoundIOOutStream(Natives.soundio_outstream_create(handle));
}
}
}
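A hedged probing sketch using the device members above: pick a native-endian format and a sample rate the device supports before creating an output stream. "device" is an assumed SoundIODevice obtained from SoundIO.GetOutputDevice; the subsequent stream configuration is only hinted at, since SoundIOOutStream is defined elsewhere.

// Illustrative only: probe capabilities before opening an output stream.
SoundIOFormat format = device.SupportsFormat(SoundIODevice.Float32NE)
    ? SoundIODevice.Float32NE
    : SoundIODevice.S16NE;

int sampleRate = device.SupportsSampleRate(48000)
    ? 48000
    : device.GetNearestSampleRate(48000);

SoundIOOutStream stream = device.CreateOutStream();
// The chosen format and sampleRate would then be assigned to the stream before opening it.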

View File

@ -1,8 +0,0 @@
namespace SoundIOSharp
{
public enum SoundIODeviceAim // soundio.h (228, 6)
{
Input,
Output
}
}

View File

@ -1,10 +0,0 @@
using System;
using System.Runtime.InteropServices;
namespace SoundIOSharp
{
public class SoundIOException : Exception
{
internal SoundIOException(SoundIoError errorCode) : base (Marshal.PtrToStringAnsi(Natives.soundio_strerror((int) errorCode))) { }
}
}

View File

@ -1,25 +0,0 @@
namespace SoundIOSharp
{
public enum SoundIOFormat
{
Invalid,
S8,
U8,
S16LE,
S16BE,
U16LE,
U16BE,
S24LE,
S24BE,
U24LE,
U24BE,
S32LE,
S32BE,
U32LE,
U32BE,
Float32LE,
Float32BE,
Float64LE,
Float64BE
}
}

View File

@ -1,293 +0,0 @@
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
namespace SoundIOSharp
{
public class SoundIOInStream : IDisposable
{
internal SoundIOInStream(Pointer<SoundIoInStream> handle)
{
this.handle = handle;
}
Pointer<SoundIoInStream> handle;
public void Dispose()
{
Natives.soundio_instream_destroy(handle);
}
// Equality (based on handle)
public override bool Equals(object other)
{
var d = other as SoundIOInStream;
return d != null && (this.handle == d.handle);
}
public override int GetHashCode()
{
return (int)(IntPtr)handle;
}
public static bool operator == (SoundIOInStream obj1, SoundIOInStream obj2)
{
return obj1 is null ? obj2 is null : obj1.Equals(obj2);
}
public static bool operator != (SoundIOInStream obj1, SoundIOInStream obj2)
{
return obj1 is null ? obj2 is object : !obj1.Equals(obj2);
}
// fields
public SoundIODevice Device
{
get { return new SoundIODevice(Marshal.ReadIntPtr(handle, device_offset)); }
}
static readonly int device_offset = (int)Marshal.OffsetOf<SoundIoInStream>("device");
public SoundIOFormat Format
{
get { return (SoundIOFormat)Marshal.ReadInt32(handle, format_offset); }
set { Marshal.WriteInt32(handle, format_offset, (int) value); }
}
static readonly int format_offset = (int)Marshal.OffsetOf<SoundIoInStream>("format");
public int SampleRate
{
get { return Marshal.ReadInt32(handle, sample_rate_offset); }
set { Marshal.WriteInt32(handle, sample_rate_offset, value); }
}
static readonly int sample_rate_offset = (int)Marshal.OffsetOf<SoundIoInStream>("sample_rate");
public SoundIOChannelLayout Layout
{
get { return new SoundIOChannelLayout ((IntPtr) handle + layout_offset); }
set
{
unsafe
{
// Copy the requested layout into the stream's layout field (source argument first, destination second).
Buffer.MemoryCopy((void*)value.Handle, (void*)((IntPtr)handle + layout_offset), Marshal.SizeOf<SoundIoChannelLayout>(), Marshal.SizeOf<SoundIoChannelLayout>());
}
}
}
static readonly int layout_offset = (int)Marshal.OffsetOf<SoundIoInStream>("layout");
public double SoftwareLatency
{
get { return MarshalEx.ReadDouble(handle, software_latency_offset); }
set { MarshalEx.WriteDouble(handle, software_latency_offset, value); }
}
static readonly int software_latency_offset = (int)Marshal.OffsetOf<SoundIoInStream>("software_latency");
// error_callback
public Action ErrorCallback
{
get { return error_callback; }
set
{
error_callback = value;
error_callback_native = _ => error_callback();
var ptr = Marshal.GetFunctionPointerForDelegate(error_callback_native);
Marshal.WriteIntPtr(handle, error_callback_offset, ptr);
}
}
static readonly int error_callback_offset = (int)Marshal.OffsetOf<SoundIoInStream>("error_callback");
Action error_callback;
delegate void error_callback_delegate(IntPtr handle);
error_callback_delegate error_callback_native;
// read_callback
public Action<int,int> ReadCallback
{
get { return read_callback; }
set
{
read_callback = value;
read_callback_native = (_, minFrameCount, maxFrameCount) => read_callback(minFrameCount, maxFrameCount);
var ptr = Marshal.GetFunctionPointerForDelegate(read_callback_native);
Marshal.WriteIntPtr(handle, read_callback_offset, ptr);
}
}
static readonly int read_callback_offset = (int)Marshal.OffsetOf<SoundIoInStream>("read_callback");
Action<int, int> read_callback;
delegate void read_callback_delegate(IntPtr handle, int min, int max);
read_callback_delegate read_callback_native;
// overflow_callback
public Action OverflowCallback
{
get { return overflow_callback; }
set
{
overflow_callback = value;
overflow_callback_native = _ => overflow_callback();
var ptr = Marshal.GetFunctionPointerForDelegate(overflow_callback_native);
Marshal.WriteIntPtr(handle, overflow_callback_offset, ptr);
}
}
static readonly int overflow_callback_offset = (int)Marshal.OffsetOf<SoundIoInStream>("overflow_callback");
Action overflow_callback;
delegate void overflow_callback_delegate(IntPtr handle);
overflow_callback_delegate overflow_callback_native;
// FIXME: this should be taken care in more centralized/decent manner... we don't want to write
// this kind of code anywhere we need string marshaling.
List<IntPtr> allocated_hglobals = new List<IntPtr>();
public string Name
{
get { return Marshal.PtrToStringAnsi(Marshal.ReadIntPtr(handle, name_offset)); }
set
{
unsafe
{
var existing = Marshal.ReadIntPtr(handle, name_offset);
if (allocated_hglobals.Contains(existing))
{
allocated_hglobals.Remove(existing);
Marshal.FreeHGlobal(existing);
}
var ptr = Marshal.StringToHGlobalAnsi(value);
Marshal.WriteIntPtr(handle, name_offset, ptr);
allocated_hglobals.Add(ptr);
}
}
}
static readonly int name_offset = (int)Marshal.OffsetOf<SoundIoInStream>("name");
public bool NonTerminalHint
{
get { return Marshal.ReadInt32(handle, non_terminal_hint_offset) != 0; }
}
static readonly int non_terminal_hint_offset = (int)Marshal.OffsetOf<SoundIoInStream>("non_terminal_hint");
public int BytesPerFrame
{
get { return Marshal.ReadInt32(handle, bytes_per_frame_offset); }
}
static readonly int bytes_per_frame_offset = (int)Marshal.OffsetOf<SoundIoInStream>("bytes_per_frame");
public int BytesPerSample
{
get { return Marshal.ReadInt32(handle, bytes_per_sample_offset); }
}
static readonly int bytes_per_sample_offset = (int)Marshal.OffsetOf<SoundIoInStream>("bytes_per_sample");
public string LayoutErrorMessage
{
get
{
var code = (SoundIoError)Marshal.ReadInt32(handle, layout_error_offset);
return code == SoundIoError.SoundIoErrorNone ? null : Marshal.PtrToStringAnsi(Natives.soundio_strerror((int)code));
}
}
static readonly int layout_error_offset = (int)Marshal.OffsetOf<SoundIoInStream>("layout_error");
// functions
public void Open()
{
var ret = (SoundIoError)Natives.soundio_instream_open(handle);
if (ret != SoundIoError.SoundIoErrorNone)
{
throw new SoundIOException(ret);
}
}
public void Start()
{
var ret = (SoundIoError)Natives.soundio_instream_start(handle);
if (ret != SoundIoError.SoundIoErrorNone)
{
throw new SoundIOException(ret);
}
}
public SoundIOChannelAreas BeginRead(ref int frameCount)
{
IntPtr ptrs = default;
int nativeFrameCount = frameCount;
unsafe
{
var frameCountPtr = &nativeFrameCount;
var ptrptr = &ptrs;
var ret = (SoundIoError)Natives.soundio_instream_begin_read(handle, (IntPtr)ptrptr, (IntPtr)frameCountPtr);
frameCount = *frameCountPtr;
if (ret != SoundIoError.SoundIoErrorNone)
{
throw new SoundIOException(ret);
}
return new SoundIOChannelAreas(ptrs, Layout.ChannelCount, frameCount);
}
}
public void EndRead()
{
var ret = (SoundIoError)Natives.soundio_instream_end_read(handle);
if (ret != SoundIoError.SoundIoErrorNone)
{
throw new SoundIOException(ret);
}
}
public void Pause(bool pause)
{
var ret = (SoundIoError)Natives.soundio_instream_pause(handle, pause);
if (ret != SoundIoError.SoundIoErrorNone)
{
throw new SoundIOException(ret);
}
}
public double GetLatency()
{
unsafe
{
// Use a stack-allocated double so the native call has valid memory to write to.
double latency = 0;
var ret = (SoundIoError)Natives.soundio_instream_get_latency(handle, new IntPtr(&latency));
if (ret != SoundIoError.SoundIoErrorNone)
{
throw new SoundIOException(ret);
}
return latency;
}
}
}
}
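For context, a minimal capture sketch driven by the wrapper above could look like the following. This is a hedged sketch, not code from the change: `device.CreateInStream()` is an assumed factory on the `SoundIODevice` wrapper (it is defined elsewhere in the bindings and maps to `soundio_instream_create`), and the `Float32LE` member of the public `SoundIOFormat` enum is assumed to mirror the native format values; everything else uses `SoundIOInStream` members shown in this file.

// Hedged sketch: `device` is a SoundIODevice for an input device obtained elsewhere.
SoundIOInStream instream = device.CreateInStream(); // assumed factory
instream.Format = SoundIOFormat.Float32LE;           // assumed enum member name
instream.SampleRate = 48000;
instream.ReadCallback = (minFrameCount, maxFrameCount) =>
{
    int frameCount = maxFrameCount;
    SoundIOChannelAreas areas = instream.BeginRead(ref frameCount);
    // Copy `frameCount` frames out of `areas` here (e.g. into a ring buffer).
    instream.EndRead();
};
instream.Open();
instream.Start();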

View File

@ -1,331 +0,0 @@
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
namespace SoundIOSharp
{
public class SoundIOOutStream : IDisposable
{
internal SoundIOOutStream (Pointer<SoundIoOutStream> handle)
{
this.handle = handle;
}
Pointer<SoundIoOutStream> handle;
public void Dispose ()
{
Natives.soundio_outstream_destroy (handle);
}
// Equality (based on handle)
public override bool Equals (object other)
{
var d = other as SoundIOOutStream;
return d != null && (this.handle == d.handle);
}
public override int GetHashCode ()
{
return (int)(IntPtr)handle;
}
public static bool operator == (SoundIOOutStream obj1, SoundIOOutStream obj2)
{
return obj1 is null ? obj2 is null : obj1.Equals(obj2);
}
public static bool operator != (SoundIOOutStream obj1, SoundIOOutStream obj2)
{
return obj1 is null ? obj2 is object : !obj1.Equals(obj2);
}
// fields
public SoundIODevice Device
{
get { return new SoundIODevice(Marshal.ReadIntPtr(handle, device_offset)); }
}
static readonly int device_offset = (int)Marshal.OffsetOf<SoundIoOutStream>("device");
public SoundIOFormat Format
{
get { return (SoundIOFormat) Marshal.ReadInt32(handle, format_offset); }
set { Marshal.WriteInt32(handle, format_offset, (int) value); }
}
static readonly int format_offset = (int)Marshal.OffsetOf<SoundIoOutStream>("format");
public int SampleRate
{
get { return Marshal.ReadInt32(handle, sample_rate_offset); }
set { Marshal.WriteInt32(handle, sample_rate_offset, value); }
}
static readonly int sample_rate_offset = (int)Marshal.OffsetOf<SoundIoOutStream>("sample_rate");
public SoundIOChannelLayout Layout
{
get { unsafe { return new SoundIOChannelLayout((IntPtr) (void*)((IntPtr)handle + layout_offset)); } }
set
{
unsafe
{
Buffer.MemoryCopy((void*)value.Handle, (void*)((IntPtr)handle + layout_offset), Marshal.SizeOf<SoundIoChannelLayout>(), Marshal.SizeOf<SoundIoChannelLayout>());
}
}
}
static readonly int layout_offset = (int)Marshal.OffsetOf<SoundIoOutStream>("layout");
public double SoftwareLatency
{
get { return MarshalEx.ReadDouble (handle, software_latency_offset); }
set { MarshalEx.WriteDouble (handle, software_latency_offset, value); }
}
static readonly int software_latency_offset = (int)Marshal.OffsetOf<SoundIoOutStream>("software_latency");
public float Volume
{
get { return MarshalEx.ReadFloat(handle, volume_offset); }
set { MarshalEx.WriteFloat(handle, volume_offset, value); }
}
static readonly int volume_offset = (int)Marshal.OffsetOf<SoundIoOutStream>("volume");
// error_callback
public Action ErrorCallback
{
get { return error_callback; }
set
{
error_callback = value;
if (value == null)
{
error_callback_native = null;
}
else
{
error_callback_native = stream => error_callback();
}
var ptr = Marshal.GetFunctionPointerForDelegate(error_callback_native);
Marshal.WriteIntPtr(handle, error_callback_offset, ptr);
}
}
static readonly int error_callback_offset = (int)Marshal.OffsetOf<SoundIoOutStream>("error_callback");
Action error_callback;
delegate void error_callback_delegate (IntPtr handle);
error_callback_delegate error_callback_native;
// write_callback
public Action<int, int> WriteCallback
{
get { return write_callback; }
set
{
write_callback = value;
if (value == null)
{
write_callback_native = null;
}
else
{
write_callback_native = (h, frame_count_min, frame_count_max) => write_callback(frame_count_min, frame_count_max);
}
var ptr = Marshal.GetFunctionPointerForDelegate (write_callback_native);
Marshal.WriteIntPtr (handle, write_callback_offset, ptr);
}
}
static readonly int write_callback_offset = (int)Marshal.OffsetOf<SoundIoOutStream>("write_callback");
Action<int, int> write_callback;
delegate void write_callback_delegate(IntPtr handle, int min, int max);
write_callback_delegate write_callback_native;
// underflow_callback
public Action UnderflowCallback
{
get { return underflow_callback; }
set
{
underflow_callback = value;
if (value == null)
{
underflow_callback_native = null;
}
else
{
underflow_callback_native = h => underflow_callback();
}
var ptr = Marshal.GetFunctionPointerForDelegate (underflow_callback_native);
Marshal.WriteIntPtr (handle, underflow_callback_offset, ptr);
}
}
static readonly int underflow_callback_offset = (int)Marshal.OffsetOf<SoundIoOutStream>("underflow_callback");
Action underflow_callback;
delegate void underflow_callback_delegate(IntPtr handle);
underflow_callback_delegate underflow_callback_native;
// FIXME: this should be taken care of in a more centralized/decent manner... we don't want to write
// this kind of code everywhere we need string marshaling.
List<IntPtr> allocated_hglobals = new List<IntPtr>();
public string Name {
get { return Marshal.PtrToStringAnsi(Marshal.ReadIntPtr(handle, name_offset)); }
set
{
unsafe
{
var existing = Marshal.ReadIntPtr(handle, name_offset);
if (allocated_hglobals.Contains(existing))
{
allocated_hglobals.Remove(existing);
Marshal.FreeHGlobal(existing);
}
var ptr = Marshal.StringToHGlobalAnsi(value);
Marshal.WriteIntPtr(handle, name_offset, ptr);
allocated_hglobals.Add(ptr);
}
}
}
static readonly int name_offset = (int)Marshal.OffsetOf<SoundIoOutStream>("name");
public bool NonTerminalHint
{
get { return Marshal.ReadInt32(handle, non_terminal_hint_offset) != 0; }
}
static readonly int non_terminal_hint_offset = (int)Marshal.OffsetOf<SoundIoOutStream>("non_terminal_hint");
public int BytesPerFrame
{
get { return Marshal.ReadInt32(handle, bytes_per_frame_offset); }
}
static readonly int bytes_per_frame_offset = (int)Marshal.OffsetOf<SoundIoOutStream>("bytes_per_frame");
public int BytesPerSample
{
get { return Marshal.ReadInt32(handle, bytes_per_sample_offset); }
}
static readonly int bytes_per_sample_offset = (int)Marshal.OffsetOf<SoundIoOutStream>("bytes_per_sample");
public string LayoutErrorMessage
{
get
{
var code = (SoundIoError)Marshal.ReadInt32(handle, layout_error_offset);
return code == SoundIoError.SoundIoErrorNone ? null : Marshal.PtrToStringAnsi(Natives.soundio_strerror((int)code));
}
}
static readonly int layout_error_offset = (int)Marshal.OffsetOf<SoundIoOutStream> ("layout_error");
// functions
public void Open ()
{
var ret = (SoundIoError)Natives.soundio_outstream_open(handle);
if (ret != SoundIoError.SoundIoErrorNone)
{
throw new SoundIOException(ret);
}
}
public void Start ()
{
var ret = (SoundIoError)Natives.soundio_outstream_start(handle);
if (ret != SoundIoError.SoundIoErrorNone)
{
throw new SoundIOException(ret);
}
}
public SoundIOChannelAreas BeginWrite(ref int frameCount)
{
IntPtr ptrs = default;
int nativeFrameCount = frameCount;
unsafe
{
var frameCountPtr = &nativeFrameCount;
var ptrptr = &ptrs;
var ret = (SoundIoError)Natives.soundio_outstream_begin_write(handle, (IntPtr)ptrptr, (IntPtr)frameCountPtr);
frameCount = *frameCountPtr;
if (ret != SoundIoError.SoundIoErrorNone)
{
throw new SoundIOException(ret);
}
return new SoundIOChannelAreas(ptrs, Layout.ChannelCount, frameCount);
}
}
public void EndWrite ()
{
var ret = (SoundIoError)Natives.soundio_outstream_end_write(handle);
if (ret != SoundIoError.SoundIoErrorNone)
{
throw new SoundIOException(ret);
}
}
public void ClearBuffer ()
{
_ = Natives.soundio_outstream_clear_buffer(handle);
}
public void Pause (bool pause)
{
var ret = (SoundIoError)Natives.soundio_outstream_pause(handle, pause);
if (ret != SoundIoError.SoundIoErrorNone)
{
throw new SoundIOException(ret);
}
}
public double GetLatency ()
{
unsafe
{
// Use a stack-allocated double so the native call has valid memory to write to.
double latency = 0;
var ret = (SoundIoError)Natives.soundio_outstream_get_latency(handle, new IntPtr(&latency));
if (ret != SoundIoError.SoundIoErrorNone)
{
throw new SoundIOException(ret);
}
return latency;
}
}
public void SetVolume (double volume)
{
var ret = (SoundIoError)Natives.soundio_outstream_set_volume(handle, volume);
if (ret != SoundIoError.SoundIoErrorNone)
{
throw new SoundIOException(ret);
}
}
}
}
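A playback sketch mirrors the capture side. As before this is only a hedged illustration: `device.CreateOutStream()` is an assumed factory (only `soundio_outstream_create` is exposed through `Natives` here), and how `SoundIOChannelAreas` exposes per-channel pointers is defined in another file; the rest uses `SoundIOOutStream` members from this listing.

// Hedged sketch: `device` is a SoundIODevice for an output device obtained elsewhere.
SoundIOOutStream outstream = device.CreateOutStream(); // assumed factory
outstream.SampleRate = 48000;
outstream.WriteCallback = (frameCountMin, frameCountMax) =>
{
    int frameCount = frameCountMax;
    SoundIOChannelAreas areas = outstream.BeginWrite(ref frameCount);
    // Fill `frameCount` frames of PCM data into `areas` for each channel here.
    outstream.EndWrite();
};
outstream.Open();
outstream.Start();
outstream.SetVolume(1.0);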

View File

@ -1,58 +0,0 @@
using System;
namespace SoundIOSharp
{
public class SoundIORingBuffer : IDisposable
{
internal SoundIORingBuffer(IntPtr handle)
{
this.handle = handle;
}
IntPtr handle;
public int Capacity
{
get { return Natives.soundio_ring_buffer_capacity(handle); }
}
public void Clear()
{
Natives.soundio_ring_buffer_clear(handle);
}
public void Dispose()
{
Natives.soundio_ring_buffer_destroy(handle);
}
public int FillCount
{
get { return Natives.soundio_ring_buffer_fill_count(handle); }
}
public int FreeCount
{
get { return Natives.soundio_ring_buffer_free_count(handle); }
}
public IntPtr ReadPointer
{
get { return Natives.soundio_ring_buffer_read_ptr(handle); }
}
public IntPtr WritePointer
{
get { return Natives.soundio_ring_buffer_write_ptr(handle); }
}
public void AdvanceReadPointer(int count)
{
Natives.soundio_ring_buffer_advance_read_ptr(handle, count);
}
public void AdvanceWritePointer(int count)
{
Natives.soundio_ring_buffer_advance_write_ptr(handle, count);
}
}
}
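The ring buffer is a plain byte FIFO shared between the session side and the device callback: the producer may write at most `FreeCount` bytes at `WritePointer`, the consumer may read at most `FillCount` bytes at `ReadPointer`, and each side advances its pointer afterwards. A hedged sketch follows; `ring` is assumed to be a `SoundIORingBuffer` created through the `SoundIO` wrapper (which maps to `soundio_ring_buffer_create`), and `pcm` is an already-filled byte array.

using System;
using System.Runtime.InteropServices;

// Producer side: never write more than FreeCount bytes.
int toWrite = Math.Min(pcm.Length, ring.FreeCount);
Marshal.Copy(pcm, 0, ring.WritePointer, toWrite);
ring.AdvanceWritePointer(toWrite);

// Consumer side (e.g. inside the write callback): never read more than FillCount bytes.
int toRead = ring.FillCount;
byte[] chunk = new byte[toRead];
Marshal.Copy(ring.ReadPointer, chunk, 0, toRead);
ring.AdvanceReadPointer(toRead);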

View File

@ -1,15 +0,0 @@
using System;
namespace SoundIOSharp
{
public struct SoundIOSampleRateRange
{
internal SoundIOSampleRateRange(int min, int max)
{
Min = min;
Max = max;
}
public readonly int Min;
public readonly int Max;
}
}

View File

@ -1,643 +0,0 @@
// This source file is generated by nclang PInvokeGenerator.
using System;
using System.Runtime.InteropServices;
using delegate0 = SoundIOSharp.Delegates.delegate0;
using delegate1 = SoundIOSharp.Delegates.delegate1;
using delegate2 = SoundIOSharp.Delegates.delegate2;
using delegate3 = SoundIOSharp.Delegates.delegate3;
using delegate4 = SoundIOSharp.Delegates.delegate4;
using delegate5 = SoundIOSharp.Delegates.delegate5;
using delegate6 = SoundIOSharp.Delegates.delegate6;
using delegate7 = SoundIOSharp.Delegates.delegate7;
using delegate8 = SoundIOSharp.Delegates.delegate8;
using delegate9 = SoundIOSharp.Delegates.delegate9;
namespace SoundIOSharp
{
enum SoundIoError // soundio.h (72, 6)
{
SoundIoErrorNone = 0,
SoundIoErrorNoMem = 1,
SoundIoErrorInitAudioBackend = 2,
SoundIoErrorSystemResources = 3,
SoundIoErrorOpeningDevice = 4,
SoundIoErrorNoSuchDevice = 5,
SoundIoErrorInvalid = 6,
SoundIoErrorBackendUnavailable = 7,
SoundIoErrorStreaming = 8,
SoundIoErrorIncompatibleDevice = 9,
SoundIoErrorNoSuchClient = 10,
SoundIoErrorIncompatibleBackend = 11,
SoundIoErrorBackendDisconnected = 12,
SoundIoErrorInterrupted = 13,
SoundIoErrorUnderflow = 14,
SoundIoErrorEncodingString = 15,
}
enum SoundIoChannelId // soundio.h (106, 6)
{
SoundIoChannelIdInvalid = 0,
SoundIoChannelIdFrontLeft = 1,
SoundIoChannelIdFrontRight = 2,
SoundIoChannelIdFrontCenter = 3,
SoundIoChannelIdLfe = 4,
SoundIoChannelIdBackLeft = 5,
SoundIoChannelIdBackRight = 6,
SoundIoChannelIdFrontLeftCenter = 7,
SoundIoChannelIdFrontRightCenter = 8,
SoundIoChannelIdBackCenter = 9,
SoundIoChannelIdSideLeft = 10,
SoundIoChannelIdSideRight = 11,
SoundIoChannelIdTopCenter = 12,
SoundIoChannelIdTopFrontLeft = 13,
SoundIoChannelIdTopFrontCenter = 14,
SoundIoChannelIdTopFrontRight = 15,
SoundIoChannelIdTopBackLeft = 16,
SoundIoChannelIdTopBackCenter = 17,
SoundIoChannelIdTopBackRight = 18,
SoundIoChannelIdBackLeftCenter = 19,
SoundIoChannelIdBackRightCenter = 20,
SoundIoChannelIdFrontLeftWide = 21,
SoundIoChannelIdFrontRightWide = 22,
SoundIoChannelIdFrontLeftHigh = 23,
SoundIoChannelIdFrontCenterHigh = 24,
SoundIoChannelIdFrontRightHigh = 25,
SoundIoChannelIdTopFrontLeftCenter = 26,
SoundIoChannelIdTopFrontRightCenter = 27,
SoundIoChannelIdTopSideLeft = 28,
SoundIoChannelIdTopSideRight = 29,
SoundIoChannelIdLeftLfe = 30,
SoundIoChannelIdRightLfe = 31,
SoundIoChannelIdLfe2 = 32,
SoundIoChannelIdBottomCenter = 33,
SoundIoChannelIdBottomLeftCenter = 34,
SoundIoChannelIdBottomRightCenter = 35,
SoundIoChannelIdMsMid = 36,
SoundIoChannelIdMsSide = 37,
SoundIoChannelIdAmbisonicW = 38,
SoundIoChannelIdAmbisonicX = 39,
SoundIoChannelIdAmbisonicY = 40,
SoundIoChannelIdAmbisonicZ = 41,
SoundIoChannelIdXyX = 42,
SoundIoChannelIdXyY = 43,
SoundIoChannelIdHeadphonesLeft = 44,
SoundIoChannelIdHeadphonesRight = 45,
SoundIoChannelIdClickTrack = 46,
SoundIoChannelIdForeignLanguage = 47,
SoundIoChannelIdHearingImpaired = 48,
SoundIoChannelIdNarration = 49,
SoundIoChannelIdHaptic = 50,
SoundIoChannelIdDialogCentricMix = 51,
SoundIoChannelIdAux = 52,
SoundIoChannelIdAux0 = 53,
SoundIoChannelIdAux1 = 54,
SoundIoChannelIdAux2 = 55,
SoundIoChannelIdAux3 = 56,
SoundIoChannelIdAux4 = 57,
SoundIoChannelIdAux5 = 58,
SoundIoChannelIdAux6 = 59,
SoundIoChannelIdAux7 = 60,
SoundIoChannelIdAux8 = 61,
SoundIoChannelIdAux9 = 62,
SoundIoChannelIdAux10 = 63,
SoundIoChannelIdAux11 = 64,
SoundIoChannelIdAux12 = 65,
SoundIoChannelIdAux13 = 66,
SoundIoChannelIdAux14 = 67,
SoundIoChannelIdAux15 = 68,
}
enum SoundIoChannelLayoutId // soundio.h (189, 6)
{
SoundIoChannelLayoutIdMono = 0,
SoundIoChannelLayoutIdStereo = 1,
SoundIoChannelLayoutId2Point1 = 2,
SoundIoChannelLayoutId3Point0 = 3,
SoundIoChannelLayoutId3Point0Back = 4,
SoundIoChannelLayoutId3Point1 = 5,
SoundIoChannelLayoutId4Point0 = 6,
SoundIoChannelLayoutIdQuad = 7,
SoundIoChannelLayoutIdQuadSide = 8,
SoundIoChannelLayoutId4Point1 = 9,
SoundIoChannelLayoutId5Point0Back = 10,
SoundIoChannelLayoutId5Point0Side = 11,
SoundIoChannelLayoutId5Point1 = 12,
SoundIoChannelLayoutId5Point1Back = 13,
SoundIoChannelLayoutId6Point0Side = 14,
SoundIoChannelLayoutId6Point0Front = 15,
SoundIoChannelLayoutIdHexagonal = 16,
SoundIoChannelLayoutId6Point1 = 17,
SoundIoChannelLayoutId6Point1Back = 18,
SoundIoChannelLayoutId6Point1Front = 19,
SoundIoChannelLayoutId7Point0 = 20,
SoundIoChannelLayoutId7Point0Front = 21,
SoundIoChannelLayoutId7Point1 = 22,
SoundIoChannelLayoutId7Point1Wide = 23,
SoundIoChannelLayoutId7Point1WideBack = 24,
SoundIoChannelLayoutIdOctagonal = 25,
}
enum SoundIoBackend // soundio.h (218, 6)
{
SoundIoBackendNone = 0,
SoundIoBackendJack = 1,
SoundIoBackendPulseAudio = 2,
SoundIoBackendAlsa = 3,
SoundIoBackendCoreAudio = 4,
SoundIoBackendWasapi = 5,
SoundIoBackendDummy = 6,
}
enum SoundIoDeviceAim // soundio.h (228, 6)
{
SoundIoDeviceAimInput = 0,
SoundIoDeviceAimOutput = 1,
}
enum SoundIoFormat // soundio.h (235, 6)
{
SoundIoFormatInvalid = 0,
SoundIoFormatS8 = 1,
SoundIoFormatU8 = 2,
SoundIoFormatS16LE = 3,
SoundIoFormatS16BE = 4,
SoundIoFormatU16LE = 5,
SoundIoFormatU16BE = 6,
SoundIoFormatS24LE = 7,
SoundIoFormatS24BE = 8,
SoundIoFormatU24LE = 9,
SoundIoFormatU24BE = 10,
SoundIoFormatS32LE = 11,
SoundIoFormatS32BE = 12,
SoundIoFormatU32LE = 13,
SoundIoFormatU32BE = 14,
SoundIoFormatFloat32LE = 15,
SoundIoFormatFloat32BE = 16,
SoundIoFormatFloat64LE = 17,
SoundIoFormatFloat64BE = 18,
}
[StructLayout(LayoutKind.Sequential)]
struct SoundIoChannelLayout // soundio.h (306, 8)
{
[CTypeDetails("Pointer<byte>")] public System.IntPtr @name;
public int @channel_count;
[MarshalAs(UnmanagedType.ByValArray, SizeConst = 24)]
[CTypeDetails("ConstArrayOf<SoundIoChannelId>")] public SoundIoChannelId[] @channels;
}
[StructLayout(LayoutKind.Sequential)]
struct SoundIoSampleRateRange // soundio.h (313, 8)
{
public int @min;
public int @max;
}
[StructLayout(LayoutKind.Sequential)]
struct SoundIoChannelArea // soundio.h (319, 8)
{
[CTypeDetails("Pointer<byte>")] public System.IntPtr @ptr;
public int @step;
}
[StructLayout(LayoutKind.Sequential)]
struct SoundIo // soundio.h (328, 8)
{
[CTypeDetails("Pointer<void>")] public System.IntPtr @userdata;
[CTypeDetails("Pointer<void (SoundIo *)>")] public delegate0 @on_devices_change;
[CTypeDetails("Pointer<void (SoundIo *, int)>")] public delegate1 @on_backend_disconnect;
[CTypeDetails("Pointer<void (SoundIo *)>")] public Delegates.delegate0 @on_events_signal;
public SoundIoBackend @current_backend;
[CTypeDetails("Pointer<byte>")] public System.IntPtr @app_name;
[CTypeDetails("Pointer<void ()>")] public delegate2 @emit_rtprio_warning;
[CTypeDetails("Pointer<void (const char *)>")] public delegate3 @jack_info_callback;
[CTypeDetails("Pointer<void (const char *)>")] public Delegates.delegate3 @jack_error_callback;
}
[StructLayout(LayoutKind.Sequential)]
struct SoundIoDevice // soundio.h (387, 8)
{
[CTypeDetails("Pointer<SoundIo>")] public System.IntPtr @soundio;
[CTypeDetails("Pointer<byte>")] public System.IntPtr @id;
[CTypeDetails("Pointer<byte>")] public System.IntPtr @name;
public SoundIoDeviceAim @aim;
[CTypeDetails("Pointer<SoundIoChannelLayout>")] public System.IntPtr @layouts;
public int @layout_count;
public SoundIoChannelLayout @current_layout;
[CTypeDetails("Pointer<SoundIoFormat>")] public System.IntPtr @formats;
public int @format_count;
public SoundIoFormat @current_format;
[CTypeDetails("Pointer<SoundIoSampleRateRange>")] public System.IntPtr @sample_rates;
public int @sample_rate_count;
public int @sample_rate_current;
public double @software_latency_min;
public double @software_latency_max;
public double @software_latency_current;
public bool @is_raw;
public int @ref_count;
public int @probe_error;
}
[StructLayout(LayoutKind.Sequential)]
struct SoundIoOutStream // soundio.h (497, 8)
{
[CTypeDetails("Pointer<SoundIoDevice>")] public System.IntPtr @device;
public SoundIoFormat @format;
public int @sample_rate;
public SoundIoChannelLayout @layout;
public double @software_latency;
public float @volume;
[CTypeDetails("Pointer<void>")] public System.IntPtr @userdata;
[CTypeDetails("Pointer<void (SoundIoOutStream *, int, int)>")] public delegate4 @write_callback;
[CTypeDetails("Pointer<void (SoundIoOutStream *)>")] public delegate5 @underflow_callback;
[CTypeDetails("Pointer<void (SoundIoOutStream *, int)>")] public delegate6 @error_callback;
[CTypeDetails("Pointer<byte>")] public System.IntPtr @name;
public bool @non_terminal_hint;
public int @bytes_per_frame;
public int @bytes_per_sample;
public int @layout_error;
}
[StructLayout(LayoutKind.Sequential)]
struct SoundIoInStream // soundio.h (600, 8)
{
[CTypeDetails("Pointer<SoundIoDevice>")] public System.IntPtr @device;
public SoundIoFormat @format;
public int @sample_rate;
public SoundIoChannelLayout @layout;
public double @software_latency;
[CTypeDetails("Pointer<void>")] public System.IntPtr @userdata;
[CTypeDetails("Pointer<void (SoundIoInStream *, int, int)>")] public delegate7 @read_callback;
[CTypeDetails("Pointer<void (SoundIoInStream *)>")] public delegate8 @overflow_callback;
[CTypeDetails("Pointer<void (SoundIoInStream *, int)>")] public delegate9 @error_callback;
[CTypeDetails("Pointer<byte>")] public System.IntPtr @name;
public bool @non_terminal_hint;
public int @bytes_per_frame;
public int @bytes_per_sample;
public int @layout_error;
}
[StructLayout(LayoutKind.Sequential)]
struct SoundIoRingBuffer // soundio.h (1170, 8)
{
}
partial class Natives
{
const string LibraryName = "libsoundio";
// function soundio_version_string - soundio.h (682, 28)
[DllImport(LibraryName)]
internal static extern System.IntPtr soundio_version_string();
// function soundio_version_major - soundio.h (684, 20)
[DllImport(LibraryName)]
internal static extern int soundio_version_major();
// function soundio_version_minor - soundio.h (686, 20)
[DllImport(LibraryName)]
internal static extern int soundio_version_minor();
// function soundio_version_patch - soundio.h (688, 20)
[DllImport(LibraryName)]
internal static extern int soundio_version_patch();
// function soundio_create - soundio.h (694, 32)
[DllImport(LibraryName)]
internal static extern System.IntPtr soundio_create();
// function soundio_destroy - soundio.h (695, 21)
[DllImport(LibraryName)]
internal static extern void soundio_destroy([CTypeDetails("Pointer<SoundIo>")]System.IntPtr @soundio);
// function soundio_connect - soundio.h (705, 20)
[DllImport(LibraryName)]
internal static extern int soundio_connect([CTypeDetails("Pointer<SoundIo>")]System.IntPtr @soundio);
// function soundio_connect_backend - soundio.h (717, 20)
[DllImport(LibraryName)]
internal static extern int soundio_connect_backend([CTypeDetails("Pointer<SoundIo>")]System.IntPtr @soundio, SoundIoBackend @backend);
// function soundio_disconnect - soundio.h (718, 21)
[DllImport(LibraryName)]
internal static extern void soundio_disconnect([CTypeDetails("Pointer<SoundIo>")]System.IntPtr @soundio);
// function soundio_strerror - soundio.h (721, 28)
[DllImport(LibraryName)]
internal static extern System.IntPtr soundio_strerror(int @error);
// function soundio_backend_name - soundio.h (723, 28)
[DllImport(LibraryName)]
internal static extern System.IntPtr soundio_backend_name(SoundIoBackend @backend);
// function soundio_backend_count - soundio.h (726, 20)
[DllImport(LibraryName)]
internal static extern int soundio_backend_count([CTypeDetails("Pointer<SoundIo>")]System.IntPtr @soundio);
// function soundio_get_backend - soundio.h (729, 36)
[DllImport(LibraryName)]
internal static extern SoundIoBackend soundio_get_backend([CTypeDetails("Pointer<SoundIo>")]System.IntPtr @soundio, int @index);
// function soundio_have_backend - soundio.h (732, 21)
[DllImport(LibraryName)]
internal static extern bool soundio_have_backend(SoundIoBackend @backend);
// function soundio_flush_events - soundio.h (756, 21)
[DllImport(LibraryName)]
internal static extern void soundio_flush_events([CTypeDetails("Pointer<SoundIo>")]System.IntPtr @soundio);
// function soundio_wait_events - soundio.h (760, 21)
[DllImport(LibraryName)]
internal static extern void soundio_wait_events([CTypeDetails("Pointer<SoundIo>")]System.IntPtr @soundio);
// function soundio_wakeup - soundio.h (763, 21)
[DllImport(LibraryName)]
internal static extern void soundio_wakeup([CTypeDetails("Pointer<SoundIo>")]System.IntPtr @soundio);
// function soundio_force_device_scan - soundio.h (780, 21)
[DllImport(LibraryName)]
internal static extern void soundio_force_device_scan([CTypeDetails("Pointer<SoundIo>")]System.IntPtr @soundio);
// function soundio_channel_layout_equal - soundio.h (787, 21)
[DllImport(LibraryName)]
internal static extern bool soundio_channel_layout_equal([CTypeDetails("Pointer<SoundIoChannelLayout>")]System.IntPtr @a, [CTypeDetails("Pointer<SoundIoChannelLayout>")]System.IntPtr @b);
// function soundio_get_channel_name - soundio.h (791, 28)
[DllImport(LibraryName)]
internal static extern System.IntPtr soundio_get_channel_name(SoundIoChannelId @id);
// function soundio_parse_channel_id - soundio.h (795, 38)
[DllImport(LibraryName)]
internal static extern SoundIoChannelId soundio_parse_channel_id([CTypeDetails("Pointer<byte>")]System.IntPtr @str, int @str_len);
// function soundio_channel_layout_builtin_count - soundio.h (798, 20)
[DllImport(LibraryName)]
internal static extern int soundio_channel_layout_builtin_count();
// function soundio_channel_layout_get_builtin - soundio.h (803, 51)
[DllImport(LibraryName)]
internal static extern System.IntPtr soundio_channel_layout_get_builtin(int @index);
// function soundio_channel_layout_get_default - soundio.h (806, 51)
[DllImport(LibraryName)]
internal static extern System.IntPtr soundio_channel_layout_get_default(int @channel_count);
// function soundio_channel_layout_find_channel - soundio.h (809, 20)
[DllImport(LibraryName)]
internal static extern int soundio_channel_layout_find_channel([CTypeDetails("Pointer<SoundIoChannelLayout>")]System.IntPtr @layout, SoundIoChannelId @channel);
// function soundio_channel_layout_detect_builtin - soundio.h (814, 21)
[DllImport(LibraryName)]
internal static extern bool soundio_channel_layout_detect_builtin([CTypeDetails("Pointer<SoundIoChannelLayout>")]System.IntPtr @layout);
// function soundio_best_matching_channel_layout - soundio.h (819, 51)
[DllImport(LibraryName)]
internal static extern System.IntPtr soundio_best_matching_channel_layout([CTypeDetails("Pointer<SoundIoChannelLayout>")]System.IntPtr @preferred_layouts, int @preferred_layout_count, [CTypeDetails("Pointer<SoundIoChannelLayout>")]System.IntPtr @available_layouts, int @available_layout_count);
// function soundio_sort_channel_layouts - soundio.h (824, 21)
[DllImport(LibraryName)]
internal static extern void soundio_sort_channel_layouts([CTypeDetails("Pointer<SoundIoChannelLayout>")]System.IntPtr @layouts, int @layout_count);
// function soundio_get_bytes_per_sample - soundio.h (830, 20)
[DllImport(LibraryName)]
internal static extern int soundio_get_bytes_per_sample(SoundIoFormat @format);
// function soundio_get_bytes_per_frame - soundio.h (833, 19)
[DllImport(LibraryName)]
internal static extern int soundio_get_bytes_per_frame(SoundIoFormat @format, int @channel_count);
// function soundio_get_bytes_per_second - soundio.h (838, 19)
[DllImport(LibraryName)]
internal static extern int soundio_get_bytes_per_second(SoundIoFormat @format, int @channel_count, int @sample_rate);
// function soundio_format_string - soundio.h (845, 29)
[DllImport(LibraryName)]
internal static extern System.IntPtr soundio_format_string(SoundIoFormat @format);
// function soundio_input_device_count - soundio.h (861, 20)
[DllImport(LibraryName)]
internal static extern int soundio_input_device_count([CTypeDetails("Pointer<SoundIo>")]System.IntPtr @soundio);
// function soundio_output_device_count - soundio.h (864, 20)
[DllImport(LibraryName)]
internal static extern int soundio_output_device_count([CTypeDetails("Pointer<SoundIo>")]System.IntPtr @soundio);
// function soundio_get_input_device - soundio.h (870, 38)
[DllImport(LibraryName)]
internal static extern System.IntPtr soundio_get_input_device([CTypeDetails("Pointer<SoundIo>")]System.IntPtr @soundio, int @index);
// function soundio_get_output_device - soundio.h (875, 38)
[DllImport(LibraryName)]
internal static extern System.IntPtr soundio_get_output_device([CTypeDetails("Pointer<SoundIo>")]System.IntPtr @soundio, int @index);
// function soundio_default_input_device_index - soundio.h (880, 20)
[DllImport(LibraryName)]
internal static extern int soundio_default_input_device_index([CTypeDetails("Pointer<SoundIo>")]System.IntPtr @soundio);
// function soundio_default_output_device_index - soundio.h (885, 20)
[DllImport(LibraryName)]
internal static extern int soundio_default_output_device_index([CTypeDetails("Pointer<SoundIo>")]System.IntPtr @soundio);
// function soundio_device_ref - soundio.h (888, 21)
[DllImport(LibraryName)]
internal static extern void soundio_device_ref([CTypeDetails("Pointer<SoundIoDevice>")]System.IntPtr @device);
// function soundio_device_unref - soundio.h (891, 21)
[DllImport(LibraryName)]
internal static extern void soundio_device_unref([CTypeDetails("Pointer<SoundIoDevice>")]System.IntPtr @device);
// function soundio_device_equal - soundio.h (895, 21)
[DllImport(LibraryName)]
internal static extern bool soundio_device_equal([CTypeDetails("Pointer<SoundIoDevice>")]System.IntPtr @a, [CTypeDetails("Pointer<SoundIoDevice>")]System.IntPtr @b);
// function soundio_device_sort_channel_layouts - soundio.h (900, 21)
[DllImport(LibraryName)]
internal static extern void soundio_device_sort_channel_layouts([CTypeDetails("Pointer<SoundIoDevice>")]System.IntPtr @device);
// function soundio_device_supports_format - soundio.h (904, 21)
[DllImport(LibraryName)]
internal static extern bool soundio_device_supports_format([CTypeDetails("Pointer<SoundIoDevice>")]System.IntPtr @device, SoundIoFormat @format);
// function soundio_device_supports_layout - soundio.h (909, 21)
[DllImport(LibraryName)]
internal static extern bool soundio_device_supports_layout([CTypeDetails("Pointer<SoundIoDevice>")]System.IntPtr @device, [CTypeDetails("Pointer<SoundIoChannelLayout>")]System.IntPtr @layout);
// function soundio_device_supports_sample_rate - soundio.h (914, 21)
[DllImport(LibraryName)]
internal static extern bool soundio_device_supports_sample_rate([CTypeDetails("Pointer<SoundIoDevice>")]System.IntPtr @device, int @sample_rate);
// function soundio_device_nearest_sample_rate - soundio.h (919, 20)
[DllImport(LibraryName)]
internal static extern int soundio_device_nearest_sample_rate([CTypeDetails("Pointer<SoundIoDevice>")]System.IntPtr @device, int @sample_rate);
// function soundio_outstream_create - soundio.h (929, 41)
[DllImport(LibraryName)]
internal static extern System.IntPtr soundio_outstream_create([CTypeDetails("Pointer<SoundIoDevice>")]System.IntPtr @device);
// function soundio_outstream_destroy - soundio.h (931, 21)
[DllImport(LibraryName)]
internal static extern void soundio_outstream_destroy([CTypeDetails("Pointer<SoundIoOutStream>")]System.IntPtr @outstream);
// function soundio_outstream_open - soundio.h (954, 20)
[DllImport(LibraryName)]
internal static extern int soundio_outstream_open([CTypeDetails("Pointer<SoundIoOutStream>")]System.IntPtr @outstream);
// function soundio_outstream_start - soundio.h (965, 20)
[DllImport(LibraryName)]
internal static extern int soundio_outstream_start([CTypeDetails("Pointer<SoundIoOutStream>")]System.IntPtr @outstream);
// function soundio_outstream_begin_write - soundio.h (997, 20)
[DllImport(LibraryName)]
internal static extern int soundio_outstream_begin_write([CTypeDetails("Pointer<SoundIoOutStream>")]System.IntPtr @outstream, [CTypeDetails("Pointer<System.IntPtr>")]System.IntPtr @areas, [CTypeDetails("Pointer<int>")]System.IntPtr @frame_count);
// function soundio_outstream_end_write - soundio.h (1009, 20)
[DllImport(LibraryName)]
internal static extern int soundio_outstream_end_write([CTypeDetails("Pointer<SoundIoOutStream>")]System.IntPtr @outstream);
// function soundio_outstream_clear_buffer - soundio.h (1024, 20)
[DllImport(LibraryName)]
internal static extern int soundio_outstream_clear_buffer([CTypeDetails("Pointer<SoundIoOutStream>")]System.IntPtr @outstream);
// function soundio_outstream_pause - soundio.h (1045, 20)
[DllImport(LibraryName)]
internal static extern int soundio_outstream_pause([CTypeDetails("Pointer<SoundIoOutStream>")]System.IntPtr @outstream, bool @pause);
// function soundio_outstream_get_latency - soundio.h (1058, 20)
[DllImport(LibraryName)]
internal static extern int soundio_outstream_get_latency([CTypeDetails("Pointer<SoundIoOutStream>")]System.IntPtr @outstream, [CTypeDetails("Pointer<double>")]System.IntPtr @out_latency);
// function soundio_outstream_set_volume - soundio.h (1061, 20)
[DllImport(LibraryName)]
internal static extern int soundio_outstream_set_volume([CTypeDetails("Pointer<SoundIoOutStream>")]System.IntPtr @outstream, double @volume);
// function soundio_instream_create - soundio.h (1071, 40)
[DllImport(LibraryName)]
internal static extern System.IntPtr soundio_instream_create([CTypeDetails("Pointer<SoundIoDevice>")]System.IntPtr @device);
// function soundio_instream_destroy - soundio.h (1073, 21)
[DllImport(LibraryName)]
internal static extern void soundio_instream_destroy([CTypeDetails("Pointer<SoundIoInStream>")]System.IntPtr @instream);
// function soundio_instream_open - soundio.h (1093, 20)
[DllImport(LibraryName)]
internal static extern int soundio_instream_open([CTypeDetails("Pointer<SoundIoInStream>")]System.IntPtr @instream);
// function soundio_instream_start - soundio.h (1102, 20)
[DllImport(LibraryName)]
internal static extern int soundio_instream_start([CTypeDetails("Pointer<SoundIoInStream>")]System.IntPtr @instream);
// function soundio_instream_begin_read - soundio.h (1133, 20)
[DllImport(LibraryName)]
internal static extern int soundio_instream_begin_read([CTypeDetails("Pointer<SoundIoInStream>")]System.IntPtr @instream, [CTypeDetails("Pointer<System.IntPtr>")]System.IntPtr @areas, [CTypeDetails("Pointer<int>")]System.IntPtr @frame_count);
// function soundio_instream_end_read - soundio.h (1143, 20)
[DllImport(LibraryName)]
internal static extern int soundio_instream_end_read([CTypeDetails("Pointer<SoundIoInStream>")]System.IntPtr @instream);
// function soundio_instream_pause - soundio.h (1156, 20)
[DllImport(LibraryName)]
internal static extern int soundio_instream_pause([CTypeDetails("Pointer<SoundIoInStream>")]System.IntPtr @instream, bool @pause);
// function soundio_instream_get_latency - soundio.h (1166, 20)
[DllImport(LibraryName)]
internal static extern int soundio_instream_get_latency([CTypeDetails("Pointer<SoundIoInStream>")]System.IntPtr @instream, [CTypeDetails("Pointer<double>")]System.IntPtr @out_latency);
// function soundio_ring_buffer_create - soundio.h (1181, 42)
[DllImport(LibraryName)]
internal static extern System.IntPtr soundio_ring_buffer_create([CTypeDetails("Pointer<SoundIo>")]System.IntPtr @soundio, int @requested_capacity);
// function soundio_ring_buffer_destroy - soundio.h (1182, 21)
[DllImport(LibraryName)]
internal static extern void soundio_ring_buffer_destroy([CTypeDetails("Pointer<SoundIoRingBuffer>")]System.IntPtr @ring_buffer);
// function soundio_ring_buffer_capacity - soundio.h (1186, 20)
[DllImport(LibraryName)]
internal static extern int soundio_ring_buffer_capacity([CTypeDetails("Pointer<SoundIoRingBuffer>")]System.IntPtr @ring_buffer);
// function soundio_ring_buffer_write_ptr - soundio.h (1189, 22)
[DllImport(LibraryName)]
internal static extern System.IntPtr soundio_ring_buffer_write_ptr([CTypeDetails("Pointer<SoundIoRingBuffer>")]System.IntPtr @ring_buffer);
// function soundio_ring_buffer_advance_write_ptr - soundio.h (1191, 21)
[DllImport(LibraryName)]
internal static extern void soundio_ring_buffer_advance_write_ptr([CTypeDetails("Pointer<SoundIoRingBuffer>")]System.IntPtr @ring_buffer, int @count);
// function soundio_ring_buffer_read_ptr - soundio.h (1194, 22)
[DllImport(LibraryName)]
internal static extern System.IntPtr soundio_ring_buffer_read_ptr([CTypeDetails("Pointer<SoundIoRingBuffer>")]System.IntPtr @ring_buffer);
// function soundio_ring_buffer_advance_read_ptr - soundio.h (1196, 21)
[DllImport(LibraryName)]
internal static extern void soundio_ring_buffer_advance_read_ptr([CTypeDetails("Pointer<SoundIoRingBuffer>")]System.IntPtr @ring_buffer, int @count);
// function soundio_ring_buffer_fill_count - soundio.h (1199, 20)
[DllImport(LibraryName)]
internal static extern int soundio_ring_buffer_fill_count([CTypeDetails("Pointer<SoundIoRingBuffer>")]System.IntPtr @ring_buffer);
// function soundio_ring_buffer_free_count - soundio.h (1202, 20)
[DllImport(LibraryName)]
internal static extern int soundio_ring_buffer_free_count([CTypeDetails("Pointer<SoundIoRingBuffer>")]System.IntPtr @ring_buffer);
// function soundio_ring_buffer_clear - soundio.h (1205, 21)
[DllImport(LibraryName)]
internal static extern void soundio_ring_buffer_clear([CTypeDetails("Pointer<SoundIoRingBuffer>")]System.IntPtr @ring_buffer);
}
class Delegates
{
public delegate void delegate0(System.IntPtr p0);
public delegate void delegate1(System.IntPtr p0, int p1);
public delegate void delegate2();
public delegate void delegate3(System.IntPtr p0);
public delegate void delegate4(System.IntPtr p0, int p1, int p2);
public delegate void delegate5(System.IntPtr p0);
public delegate void delegate6(System.IntPtr p0, int p1);
public delegate void delegate7(System.IntPtr p0, int p1, int p2);
public delegate void delegate8(System.IntPtr p0);
public delegate void delegate9(System.IntPtr p0, int p1);
}
public struct Pointer<T>
{
public IntPtr Handle;
public static implicit operator IntPtr(Pointer<T> value) { return value.Handle; }
public static implicit operator Pointer<T>(IntPtr value) { return new Pointer<T>(value); }
public Pointer(IntPtr handle)
{
Handle = handle;
}
public override bool Equals(object obj)
{
return obj is Pointer<T> && this == (Pointer<T>)obj;
}
public override int GetHashCode()
{
return (int)Handle;
}
public static bool operator ==(Pointer<T> p1, Pointer<T> p2)
{
return p1.Handle == p2.Handle;
}
public static bool operator !=(Pointer<T> p1, Pointer<T> p2)
{
return p1.Handle != p2.Handle;
}
}
public struct ArrayOf<T> { }
public struct ConstArrayOf<T> { }
public class CTypeDetailsAttribute : Attribute
{
public CTypeDetailsAttribute(string value)
{
Value = value;
}
public string Value { get; set; }
}
}
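These generated entry points are internal and normally only reached through the wrappers above, but the raw lifecycle they encode is short. The sketch below is an assumption-laden illustration only usable from inside the SoundIOSharp assembly (the types are internal) and assumes libsoundio is present at runtime.

IntPtr api = Natives.soundio_create();
if (api == IntPtr.Zero)
{
    throw new OutOfMemoryException("soundio_create failed");
}

var err = (SoundIoError)Natives.soundio_connect(api);
if (err != SoundIoError.SoundIoErrorNone)
{
    throw new SoundIOException(err);
}

Natives.soundio_flush_events(api);            // required before enumerating devices
int outputCount = Natives.soundio_output_device_count(api);

Natives.soundio_disconnect(api);
Natives.soundio_destroy(api);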

View File

@ -0,0 +1,256 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using Ryujinx.Common.Logging;
using Ryujinx.Memory;
using System;
using System.Diagnostics;
namespace Ryujinx.Audio.Output
{
/// <summary>
/// The audio output manager.
/// </summary>
public class AudioOutputManager : IDisposable
{
private object _lock = new object();
/// <summary>
/// Lock used for session allocation.
/// </summary>
private object _sessionLock = new object();
/// <summary>
/// The session ids allocation table.
/// </summary>
private int[] _sessionIds;
/// <summary>
/// The device driver.
/// </summary>
private IHardwareDeviceDriver _deviceDriver;
/// <summary>
/// The events linked to each session.
/// </summary>
private IWritableEvent[] _sessionsBufferEvents;
/// <summary>
/// The <see cref="AudioOutputSystem"/> session instances.
/// </summary>
private AudioOutputSystem[] _sessions;
/// <summary>
/// The count of active sessions.
/// </summary>
private int _activeSessionCount;
/// <summary>
/// Create a new <see cref="AudioOutputManager"/>.
/// </summary>
public AudioOutputManager()
{
_sessionIds = new int[Constants.AudioOutSessionCountMax];
_sessions = new AudioOutputSystem[Constants.AudioOutSessionCountMax];
_activeSessionCount = 0;
for (int i = 0; i < _sessionIds.Length; i++)
{
_sessionIds[i] = i;
}
}
/// <summary>
/// Initialize the <see cref="AudioOutputManager"/>.
/// </summary>
/// <param name="deviceDriver">The device driver.</param>
/// <param name="sessionRegisterEvents">The events associated to each session.</param>
public void Initialize(IHardwareDeviceDriver deviceDriver, IWritableEvent[] sessionRegisterEvents)
{
_deviceDriver = deviceDriver;
_sessionsBufferEvents = sessionRegisterEvents;
}
/// <summary>
/// Acquire a new session id.
/// </summary>
/// <returns>A new session id.</returns>
private int AcquireSessionId()
{
lock (_sessionLock)
{
int index = _activeSessionCount;
Debug.Assert(index < _sessionIds.Length);
int sessionId = _sessionIds[index];
_sessionIds[index] = -1;
_activeSessionCount++;
Logger.Info?.Print(LogClass.AudioRenderer, $"Registered new output ({sessionId})");
return sessionId;
}
}
/// <summary>
/// Release a given <paramref name="sessionId"/>.
/// </summary>
/// <param name="sessionId">The session id to release.</param>
private void ReleaseSessionId(int sessionId)
{
lock (_sessionLock)
{
Debug.Assert(_activeSessionCount > 0);
int newIndex = --_activeSessionCount;
_sessionIds[newIndex] = sessionId;
}
Logger.Info?.Print(LogClass.AudioRenderer, $"Unregistered output ({sessionId})");
}
/// <summary>
/// Used to update the audio output systems.
/// </summary>
public void Update()
{
lock (_sessionLock)
{
foreach (AudioOutputSystem output in _sessions)
{
output?.Update();
}
}
}
/// <summary>
/// Register a new <see cref="AudioOutputSystem"/>.
/// </summary>
/// <param name="output">The <see cref="AudioOutputSystem"/> to register.</param>
private void Register(AudioOutputSystem output)
{
lock (_sessionLock)
{
_sessions[output.GetSessionId()] = output;
}
}
/// <summary>
/// Unregister an <see cref="AudioOutputSystem"/>.
/// </summary>
/// <param name="output">The <see cref="AudioOutputSystem"/> to unregister.</param>
internal void Unregister(AudioOutputSystem output)
{
lock (_sessionLock)
{
int sessionId = output.GetSessionId();
_sessions[output.GetSessionId()] = null;
ReleaseSessionId(sessionId);
}
}
/// <summary>
/// Get the list of all audio output names.
/// </summary>
/// <returns>The list of all audio output names</returns>
public string[] ListAudioOuts()
{
return new string[] { Constants.DefaultDeviceOutputName };
}
/// <summary>
/// Open a new <see cref="AudioOutputSystem"/>.
/// </summary>
/// <param name="outputDeviceName">The output device name selected by the <see cref="AudioOutputSystem"/></param>
/// <param name="outputConfiguration">The output audio configuration selected by the <see cref="AudioOutputSystem"/></param>
/// <param name="obj">The new <see cref="AudioOutputSystem"/></param>
/// <param name="memoryManager">The memory manager that will be used for all guest memory operations</param>
/// <param name="inputDeviceName">The input device name wanted by the user</param>
/// <param name="sampleFormat">The sample format to use</param>
/// <param name="parameter">The user configuration</param>
/// <param name="appletResourceUserId">The applet resource user id of the application</param>
/// <param name="processHandle">The process handle of the application</param>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success</returns>
public ResultCode OpenAudioOut(out string outputDeviceName,
out AudioOutputConfiguration outputConfiguration,
out AudioOutputSystem obj,
IVirtualMemoryManager memoryManager,
string inputDeviceName,
SampleFormat sampleFormat,
ref AudioInputConfiguration parameter,
ulong appletResourceUserId,
uint processHandle)
{
int sessionId = AcquireSessionId();
_sessionsBufferEvents[sessionId].Clear();
IHardwareDeviceSession deviceSession = _deviceDriver.OpenDeviceSession(IHardwareDeviceDriver.Direction.Output, memoryManager, sampleFormat, parameter.SampleRate, parameter.ChannelCount);
AudioOutputSystem audioOut = new AudioOutputSystem(this, _lock, deviceSession, _sessionsBufferEvents[sessionId]);
ResultCode result = audioOut.Initialize(inputDeviceName, sampleFormat, ref parameter, sessionId);
if (result == ResultCode.Success)
{
outputDeviceName = audioOut.DeviceName;
outputConfiguration = new AudioOutputConfiguration
{
ChannelCount = audioOut.ChannelCount,
SampleFormat = audioOut.SampleFormat,
SampleRate = audioOut.SampleRate,
AudioOutState = audioOut.GetState(),
};
obj = audioOut;
Register(audioOut);
}
else
{
ReleaseSessionId(sessionId);
obj = null;
outputDeviceName = null;
outputConfiguration = default;
}
return result;
}
public void Dispose()
{
Dispose(true);
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
// Nothing to do here.
}
}
}
}
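To show how the pieces above fit together, here is a hedged usage sketch of the manager. Obtaining `deviceDriver` (an `IHardwareDeviceDriver`), `bufferEvents` (an `IWritableEvent[]`) and `memoryManager` (an `IVirtualMemoryManager`) is the emulator's job and is not shown; the `AudioInputConfiguration` field names and `SampleFormat.PcmInt16` are assumptions based on how they are used in this file, not verified declarations.

AudioOutputManager manager = new AudioOutputManager();
manager.Initialize(deviceDriver, bufferEvents);

AudioInputConfiguration parameter = new AudioInputConfiguration
{
    SampleRate   = 0, // 0 lets the manager fall back to Constants.TargetSampleRate
    ChannelCount = 2
};

ResultCode result = manager.OpenAudioOut(out string deviceName,
                                         out AudioOutputConfiguration configuration,
                                         out AudioOutputSystem audioOut,
                                         memoryManager,
                                         "", // empty name selects the default output device
                                         SampleFormat.PcmInt16,
                                         ref parameter,
                                         appletResourceUserId: 0,
                                         processHandle: 0);

if (result == ResultCode.Success)
{
    audioOut.Start();
}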

View File

@ -0,0 +1,373 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Integration;
using System;
namespace Ryujinx.Audio.Output
{
/// <summary>
/// Audio output system.
/// </summary>
public class AudioOutputSystem : IDisposable
{
/// <summary>
/// The session id associated to the <see cref="AudioOutputSystem"/>.
/// </summary>
private int _sessionId;
/// <summary>
/// The session used by the <see cref="AudioOutputSystem"/>.
/// </summary>
private AudioDeviceSession _session;
/// <summary>
/// The target device name of the <see cref="AudioOutputSystem"/>.
/// </summary>
public string DeviceName { get; private set; }
/// <summary>
/// The target sample rate of the <see cref="AudioOutputSystem"/>.
/// </summary>
public uint SampleRate { get; private set; }
/// <summary>
/// The target channel count of the <see cref="AudioOutputSystem"/>.
/// </summary>
public uint ChannelCount { get; private set; }
/// <summary>
/// The target sample format of the <see cref="AudioOutputSystem"/>.
/// </summary>
public SampleFormat SampleFormat { get; private set; }
/// <summary>
/// The <see cref="AudioOutputManager"/> owning this.
/// </summary>
private AudioOutputManager _manager;
/// <summary>
/// The lock of the parent.
/// </summary>
private object _parentLock;
/// <summary>
/// Create a new <see cref="AudioOutputSystem"/>.
/// </summary>
/// <param name="manager">The manager instance</param>
/// <param name="parentLock">The lock of the manager</param>
/// <param name="deviceSession">The hardware device session</param>
/// <param name="bufferEvent">The buffer release event of the audio output</param>
public AudioOutputSystem(AudioOutputManager manager, object parentLock, IHardwareDeviceSession deviceSession, IWritableEvent bufferEvent)
{
_manager = manager;
_parentLock = parentLock;
_session = new AudioDeviceSession(deviceSession, bufferEvent);
}
/// <summary>
/// Get the default device name on the system.
/// </summary>
/// <returns>The default device name on the system.</returns>
private static string GetDeviceDefaultName()
{
return Constants.DefaultDeviceOutputName;
}
/// <summary>
/// Check if a given configuration and device name is valid on the system.
/// </summary>
/// <param name="configuration">The configuration to check.</param>
/// <param name="deviceName">The device name to check.</param>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success.</returns>
private static ResultCode IsConfigurationValid(ref AudioInputConfiguration configuration, string deviceName)
{
if (deviceName.Length != 0 && !deviceName.Equals(GetDeviceDefaultName()))
{
return ResultCode.DeviceNotFound;
}
else if (configuration.SampleRate != 0 && configuration.SampleRate != Constants.TargetSampleRate)
{
return ResultCode.UnsupportedSampleRate;
}
else if (configuration.ChannelCount != 0 && configuration.ChannelCount != 1 && configuration.ChannelCount != 2 && configuration.ChannelCount != 6)
{
return ResultCode.UnsupportedChannelConfiguration;
}
return ResultCode.Success;
}
/// <summary>
/// Get the released buffer event.
/// </summary>
/// <returns>The released buffer event</returns>
public IWritableEvent RegisterBufferEvent()
{
lock (_parentLock)
{
return _session.GetBufferEvent();
}
}
/// <summary>
/// Update the <see cref="AudioOutputSystem"/>.
/// </summary>
public void Update()
{
lock (_parentLock)
{
_session.Update();
}
}
/// <summary>
/// Get the id of this session.
/// </summary>
/// <returns>The id of this session</returns>
public int GetSessionId()
{
return _sessionId;
}
/// <summary>
/// Initialize the <see cref="AudioOutputSystem"/>.
/// </summary>
/// <param name="inputDeviceName">The input device name wanted by the user</param>
/// <param name="sampleFormat">The sample format to use</param>
/// <param name="parameter">The user configuration</param>
/// <param name="sessionId">The session id associated to this <see cref="AudioOutputSystem"/></param>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success.</returns>
public ResultCode Initialize(string inputDeviceName, SampleFormat sampleFormat, ref AudioInputConfiguration parameter, int sessionId)
{
_sessionId = sessionId;
ResultCode result = IsConfigurationValid(ref parameter, inputDeviceName);
if (result == ResultCode.Success)
{
if (inputDeviceName.Length == 0)
{
DeviceName = GetDeviceDefaultName();
}
else
{
DeviceName = inputDeviceName;
}
if (parameter.ChannelCount == 6)
{
ChannelCount = 6;
}
else
{
ChannelCount = 2;
}
SampleFormat = sampleFormat;
SampleRate = Constants.TargetSampleRate;
}
return result;
}
/// <summary>
/// Append a new audio buffer to the audio output.
/// </summary>
/// <param name="bufferTag">The unique tag of this buffer.</param>
/// <param name="userBuffer">The buffer informations.</param>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success.</returns>
public ResultCode AppendBuffer(ulong bufferTag, ref AudioUserBuffer userBuffer)
{
lock (_parentLock)
{
AudioBuffer buffer = new AudioBuffer
{
BufferTag = bufferTag,
DataPointer = userBuffer.Data,
DataSize = userBuffer.DataSize
};
if (_session.AppendBuffer(buffer))
{
return ResultCode.Success;
}
return ResultCode.BufferRingFull;
}
}
/// <summary>
/// Get the released buffers.
/// </summary>
/// <param name="releasedBuffers">The span to write the released buffer tags to</param>
/// <param name="releasedCount">The count of released buffers</param>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success.</returns>
public ResultCode GetReleasedBuffer(Span<ulong> releasedBuffers, out uint releasedCount)
{
releasedCount = 0;
// Ensure that the first entry is set to zero if no entries are returned.
if (releasedBuffers.Length > 0)
{
releasedBuffers[0] = 0;
}
lock (_parentLock)
{
for (int i = 0; i < releasedBuffers.Length; i++)
{
if (!_session.TryPopReleasedBuffer(out AudioBuffer buffer))
{
break;
}
releasedBuffers[i] = buffer.BufferTag;
releasedCount++;
}
}
return ResultCode.Success;
}
/// <summary>
/// Get the current state of the <see cref="AudioOutputSystem"/>.
/// </summary>
/// <returns>The current state of the <see cref="AudioOutputSystem"/></returns>
public AudioDeviceState GetState()
{
lock (_parentLock)
{
return _session.GetState();
}
}
/// <summary>
/// Start the audio session.
/// </summary>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success</returns>
public ResultCode Start()
{
lock (_parentLock)
{
return _session.Start();
}
}
/// <summary>
/// Stop the audio session.
/// </summary>
/// <returns>A <see cref="ResultCode"/> reporting an error or a success</returns>
public ResultCode Stop()
{
lock (_parentLock)
{
return _session.Stop();
}
}
/// <summary>
/// Get the volume of the session.
/// </summary>
/// <returns>The volume of the session</returns>
public float GetVolume()
{
lock (_parentLock)
{
return _session.GetVolume();
}
}
/// <summary>
/// Set the volume of the session.
/// </summary>
/// <param name="volume">The new volume to set</param>
public void SetVolume(float volume)
{
lock (_parentLock)
{
_session.SetVolume(volume);
}
}
/// <summary>
/// Get the count of buffers currently in use (server + driver side).
/// </summary>
/// <returns>The count of buffers currently in use</returns>
public uint GetBufferCount()
{
lock (_parentLock)
{
return _session.GetBufferCount();
}
}
/// <summary>
/// Check if a buffer is present.
/// </summary>
/// <param name="bufferTag">The unique tag of the buffer</param>
/// <returns>Return true if a buffer is present</returns>
public bool ContainsBuffer(ulong bufferTag)
{
lock (_parentLock)
{
return _session.ContainsBuffer(bufferTag);
}
}
/// <summary>
/// Get the count of samples played in this session.
/// </summary>
/// <returns>The count of samples played in this session</returns>
public ulong GetPlayedSampleCount()
{
lock (_parentLock)
{
return _session.GetPlayedSampleCount();
}
}
/// <summary>
/// Flush all buffers to the initial state.
/// </summary>
/// <returns>True if any buffers were flushed</returns>
public bool FlushBuffers()
{
lock (_parentLock)
{
return _session.FlushBuffers();
}
}
public void Dispose()
{
Dispose(true);
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
_session.Dispose();
_manager.Unregister(this);
}
}
}
}
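For completeness, a hedged sketch of the per-buffer loop a service implementation might drive against this class: `audioOut` is assumed to come from `AudioOutputManager.OpenAudioOut`, `guestBufferAddress`/`guestBufferSize` are assumed to describe PCM data already written to guest memory, and the `AudioUserBuffer` field names are inferred from how `AppendBuffer` reads them above.

AudioUserBuffer userBuffer = new AudioUserBuffer
{
    Data     = guestBufferAddress,
    DataSize = guestBufferSize
};

ResultCode result = audioOut.AppendBuffer(bufferTag: 1, ref userBuffer);

if (result == ResultCode.BufferRingFull)
{
    // Too many buffers in flight; retry after the buffer release event fires.
}

// Later, reclaim finished buffers so the guest can reuse them.
Span<ulong> released = stackalloc ulong[4];
audioOut.GetReleasedBuffer(released, out uint releasedCount);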

View File

@ -1,17 +0,0 @@
namespace Ryujinx.Audio
{
/// <summary>
/// The playback state of a track
/// </summary>
public enum PlaybackState
{
/// <summary>
/// The track is currently playing
/// </summary>
Playing = 0,
/// <summary>
/// The track is currently stopped
/// </summary>
Stopped = 1
}
}

View File

@ -1,4 +0,0 @@
namespace Ryujinx.Audio
{
public delegate void ReleaseCallback();
}

View File

@ -0,0 +1,30 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.Renderer.Common
{
[StructLayout(LayoutKind.Sequential, Pack = 1)]
public struct AuxiliaryBufferAddresses
{
public ulong SendBufferInfo;
public ulong SendBufferInfoBase;
public ulong ReturnBufferInfo;
public ulong ReturnBufferInfoBase;
}
}

View File

@ -0,0 +1,67 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// Represents the input parameter for <see cref="Server.BehaviourContext"/>.
/// </summary>
[StructLayout(LayoutKind.Sequential, Pack = 1)]
public struct BehaviourParameter
{
/// <summary>
/// The current audio renderer revision in use.
/// </summary>
public int UserRevision;
/// <summary>
/// Reserved/padding.
/// </summary>
private uint _padding;
/// <summary>
/// The flags controlling the behaviour of the audio renderer.
/// </summary>
/// <remarks>See <see cref="Server.BehaviourContext.UpdateFlags(ulong)"/> and <see cref="Server.BehaviourContext.IsMemoryPoolForceMappingEnabled"/>.</remarks>
public ulong Flags;
/// <summary>
/// Represents an error during <see cref="Server.AudioRenderSystem.Update(System.Memory{byte}, System.Memory{byte}, System.ReadOnlyMemory{byte})"/>.
/// </summary>
[StructLayout(LayoutKind.Sequential, Pack = 1)]
public struct ErrorInfo
{
/// <summary>
/// The error code to report.
/// </summary>
public ResultCode ErrorCode;
/// <summary>
/// Reserved/padding.
/// </summary>
private uint _padding;
/// <summary>
/// Extra information given with the <see cref="ResultCode"/>.
/// </summary>
/// <remarks>This is usually used to report a faulting CPU address when a <see cref="Server.MemoryPool.MemoryPoolState"/> mapping fails.</remarks>
public ulong ExtraErrorInfo;
}
}
}

View File

@ -0,0 +1,167 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Renderer.Utils;
using Ryujinx.Common;
using System;
using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// Represents an adjacency matrix.
/// </summary>
/// <remarks>This is used for splitter routing.</remarks>
public class EdgeMatrix
{
/// <summary>
/// Backing <see cref="BitArray"/> used for node connections.
/// </summary>
private BitArray _storage;
/// <summary>
/// The count of nodes of the current instance.
/// </summary>
private int _nodeCount;
/// <summary>
/// Get the work buffer size required by the <see cref="EdgeMatrix"/>.
/// </summary>
/// <param name="nodeCount">The count of nodes.</param>
/// <returns>The size required for the given <paramref name="nodeCount"/>.</returns>
public static int GetWorkBufferSize(int nodeCount)
{
int size = BitUtils.AlignUp(nodeCount * nodeCount, Constants.BufferAlignment);
return size / Unsafe.SizeOf<byte>();
}
/// <summary>
/// Initializes the <see cref="EdgeMatrix"/> instance with backing memory.
/// </summary>
/// <param name="edgeMatrixWorkBuffer">The backing memory.</param>
/// <param name="nodeCount">The count of nodes.</param>
public void Initialize(Memory<byte> edgeMatrixWorkBuffer, int nodeCount)
{
Debug.Assert(edgeMatrixWorkBuffer.Length >= GetWorkBufferSize(nodeCount));
_storage = new BitArray(edgeMatrixWorkBuffer);
_nodeCount = nodeCount;
_storage.Reset();
}
/// <summary>
/// Test if the bit at the given index is set.
/// </summary>
/// <param name="index">A bit index.</param>
/// <returns>Returns true if the bit at the given index is set.</returns>
public bool Test(int index)
{
return _storage.Test(index);
}
/// <summary>
/// Reset all bits in the storage.
/// </summary>
public void Reset()
{
_storage.Reset();
}
/// <summary>
/// Reset the bit at the given index.
/// </summary>
/// <param name="index">A bit index.</param>
public void Reset(int index)
{
_storage.Reset(index);
}
/// <summary>
/// Set the bit at the given index.
/// </summary>
/// <param name="index">A bit index.</param>
public void Set(int index)
{
_storage.Set(index);
}
/// <summary>
/// Connect a given source to a given destination.
/// </summary>
/// <param name="source">The source index.</param>
/// <param name="destination">The destination index.</param>
public void Connect(int source, int destination)
{
Debug.Assert(source < _nodeCount);
Debug.Assert(destination < _nodeCount);
_storage.Set(_nodeCount * source + destination);
}
/// <summary>
/// Check if the given source is connected to the given destination.
/// </summary>
/// <param name="source">The source index.</param>
/// <param name="destination">The destination index.</param>
/// <returns>Returns true if the given source is connected to the given destination.</returns>
public bool Connected(int source, int destination)
{
Debug.Assert(source < _nodeCount);
Debug.Assert(destination < _nodeCount);
return _storage.Test(_nodeCount * source + destination);
}
/// <summary>
/// Disconnect a given source from a given destination.
/// </summary>
/// <param name="source">The source index.</param>
/// <param name="destination">The destination index.</param>
public void Disconnect(int source, int destination)
{
Debug.Assert(source < _nodeCount);
Debug.Assert(destination < _nodeCount);
_storage.Reset(_nodeCount * source + destination);
}
/// <summary>
/// Remove all edges from a given source.
/// </summary>
/// <param name="source">The source index.</param>
public void RemoveEdges(int source)
{
for (int i = 0; i < _nodeCount; i++)
{
Disconnect(source, i);
}
}
/// <summary>
/// Get the total node count.
/// </summary>
/// <returns>The total node count.</returns>
public int GetNodeCount()
{
return _nodeCount;
}
}
}
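A minimal usage sketch of the EdgeMatrix above (illustrative only, not part of this change); the node count and routing are made-up values, and the byte[] backing buffer relies on its implicit conversion to Memory<byte>:

using Ryujinx.Audio.Renderer.Common;

// Illustrative only: a 4-node routing matrix backed by its own work buffer.
int nodeCount = 4;
byte[] workBuffer = new byte[EdgeMatrix.GetWorkBufferSize(nodeCount)];

EdgeMatrix edges = new EdgeMatrix();
edges.Initialize(workBuffer, nodeCount);

// Route node 0 to node 2, query the edge, then drop every edge leaving node 0.
edges.Connect(0, 2);
bool routed = edges.Connected(0, 2); // true
edges.RemoveEdges(0);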

View File

@ -0,0 +1,60 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// The type of an effect.
/// </summary>
public enum EffectType : byte
{
/// <summary>
/// Invalid effect.
/// </summary>
Invalid,
/// <summary>
/// Effect applying additional mixing capability.
/// </summary>
BufferMix,
/// <summary>
/// Effect applying custom user effect (via auxiliary buffers).
/// </summary>
AuxiliaryBuffer,
/// <summary>
/// Effect applying a delay.
/// </summary>
Delay,
/// <summary>
/// Effect applying a reverberation effect via a given preset.
/// </summary>
Reverb,
/// <summary>
/// Effect applying a 3D reverberation effect via a given preset.
/// </summary>
Reverb3d,
/// <summary>
/// Effect applying a biquad filter.
/// </summary>
BiquadFilter
}
}

View File

@ -0,0 +1,60 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// Represents the state of a memory pool.
/// </summary>
public enum MemoryPoolUserState : uint
{
/// <summary>
/// Invalid state.
/// </summary>
Invalid = 0,
/// <summary>
/// The memory pool is new. (client side only)
/// </summary>
New = 1,
/// <summary>
/// The user asked to detach the memory pool from the <see cref="Dsp.AudioProcessor"/>.
/// </summary>
RequestDetach = 2,
/// <summary>
/// The memory pool is detached from the <see cref="Dsp.AudioProcessor"/>.
/// </summary>
Detached = 3,
/// <summary>
/// The user asked to attach the memory pool to the <see cref="Dsp.AudioProcessor"/>.
/// </summary>
RequestAttach = 4,
/// <summary>
/// The memory pool is attached to the <see cref="Dsp.AudioProcessor"/>.
/// </summary>
Attached = 5,
/// <summary>
/// The memory pool is released. (client side only)
/// </summary>
Released = 6
}
}

View File

@ -0,0 +1,45 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// Helper for manipulating node ids.
/// </summary>
public static class NodeIdHelper
{
/// <summary>
/// Get the type of a node from a given node id.
/// </summary>
/// <param name="nodeId">Id of the node.</param>
/// <returns>The type of the node.</returns>
public static NodeIdType GetType(int nodeId)
{
return (NodeIdType)(nodeId >> 28);
}
/// <summary>
/// Get the base of a node from a given node id.
/// </summary>
/// <param name="nodeId">Id of the node.</param>
/// <returns>The base of the node.</returns>
public static int GetBase(int nodeId)
{
return (nodeId >> 16) & 0xFFF;
}
}
}
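A hedged worked example of the bit layout these helpers imply; the packed id below is illustrative, and the layout (type in bits 28-31, base in bits 16-27) is inferred from GetType/GetBase above rather than stated elsewhere in this change:

using Ryujinx.Audio.Renderer.Common;

// Illustrative only: pack a Mix node id with base 5, then read the fields back.
int nodeId = ((int)NodeIdType.Mix << 28) | (5 << 16);

NodeIdType type = NodeIdHelper.GetType(nodeId); // NodeIdType.Mix
int baseId = NodeIdHelper.GetBase(nodeId);      // 5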

View File

@ -0,0 +1,50 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// The type of a node.
/// </summary>
public enum NodeIdType : byte
{
/// <summary>
/// Invalid node id.
/// </summary>
Invalid = 0,
/// <summary>
/// Voice related node id. (data source, biquad filter, ...)
/// </summary>
Voice = 1,
/// <summary>
/// Mix related node id. (mix, effects, splitters, ...)
/// </summary>
Mix = 2,
/// <summary>
/// Sink related node id. (device &amp; circular buffer sink)
/// </summary>
Sink = 3,
/// <summary>
/// Performance monitoring related node id (performance commands)
/// </summary>
Performance = 15
}
}

View File

@ -0,0 +1,246 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Renderer.Utils;
using System;
using System.Diagnostics;
namespace Ryujinx.Audio.Renderer.Common
{
public class NodeStates
{
private class Stack
{
private Memory<int> _storage;
private int _index;
private int _nodeCount;
public void Reset(Memory<int> storage, int nodeCount)
{
Debug.Assert(storage.Length * sizeof(int) >= CalcBufferSize(nodeCount));
_storage = storage;
_index = 0;
_nodeCount = nodeCount;
}
public int GetCurrentCount()
{
return _index;
}
public void Push(int data)
{
Debug.Assert(_index + 1 <= _nodeCount);
_storage.Span[_index++] = data;
}
public int Pop()
{
Debug.Assert(_index > 0);
return _storage.Span[--_index];
}
public int Top()
{
return _storage.Span[_index - 1];
}
public static int CalcBufferSize(int nodeCount)
{
return nodeCount * sizeof(int);
}
}
private int _nodeCount;
private EdgeMatrix _discovered;
private EdgeMatrix _finished;
private Memory<int> _resultArray;
private Stack _stack;
private int _tsortResultIndex;
private enum NodeState : byte
{
Unknown,
Discovered,
Finished
}
public NodeStates()
{
_stack = new Stack();
_discovered = new EdgeMatrix();
_finished = new EdgeMatrix();
}
public static int GetWorkBufferSize(int nodeCount)
{
return Stack.CalcBufferSize(nodeCount * nodeCount) + 0xC * nodeCount + 2 * EdgeMatrix.GetWorkBufferSize(nodeCount);
}
public void Initialize(Memory<byte> nodeStatesWorkBuffer, int nodeCount)
{
int workBufferSize = GetWorkBufferSize(nodeCount);
Debug.Assert(nodeStatesWorkBuffer.Length >= workBufferSize);
_nodeCount = nodeCount;
int edgeMatrixWorkBufferSize = EdgeMatrix.GetWorkBufferSize(nodeCount);
_discovered.Initialize(nodeStatesWorkBuffer.Slice(0, edgeMatrixWorkBufferSize), nodeCount);
_finished.Initialize(nodeStatesWorkBuffer.Slice(edgeMatrixWorkBufferSize, edgeMatrixWorkBufferSize), nodeCount);
nodeStatesWorkBuffer = nodeStatesWorkBuffer.Slice(edgeMatrixWorkBufferSize * 2);
_resultArray = SpanMemoryManager<int>.Cast(nodeStatesWorkBuffer.Slice(0, sizeof(int) * nodeCount));
nodeStatesWorkBuffer = nodeStatesWorkBuffer.Slice(sizeof(int) * nodeCount);
Memory<int> stackWorkBuffer = SpanMemoryManager<int>.Cast(nodeStatesWorkBuffer.Slice(0, Stack.CalcBufferSize(nodeCount * nodeCount)));
_stack.Reset(stackWorkBuffer, nodeCount * nodeCount);
}
private void Reset()
{
_discovered.Reset();
_finished.Reset();
_tsortResultIndex = 0;
_resultArray.Span.Fill(-1);
}
private NodeState GetState(int index)
{
Debug.Assert(index < _nodeCount);
if (_discovered.Test(index))
{
Debug.Assert(!_finished.Test(index));
return NodeState.Discovered;
}
else if (_finished.Test(index))
{
Debug.Assert(!_discovered.Test(index));
return NodeState.Finished;
}
return NodeState.Unknown;
}
private void SetState(int index, NodeState state)
{
switch (state)
{
case NodeState.Unknown:
_discovered.Reset(index);
_finished.Reset(index);
break;
case NodeState.Discovered:
_discovered.Set(index);
_finished.Reset(index);
break;
case NodeState.Finished:
_finished.Set(index);
_discovered.Reset(index);
break;
}
}
private void PushTsortResult(int index)
{
Debug.Assert(index < _nodeCount);
_resultArray.Span[_tsortResultIndex++] = index;
}
public ReadOnlySpan<int> GetTsortResult()
{
return _resultArray.Span.Slice(0, _tsortResultIndex);
}
public bool Sort(EdgeMatrix edgeMatrix)
{
Reset();
if (_nodeCount <= 0)
{
return true;
}
for (int i = 0; i < _nodeCount; i++)
{
if (GetState(i) == NodeState.Unknown)
{
_stack.Push(i);
}
while (_stack.GetCurrentCount() > 0)
{
int topIndex = _stack.Top();
NodeState topState = GetState(topIndex);
if (topState == NodeState.Discovered)
{
SetState(topIndex, NodeState.Finished);
PushTsortResult(topIndex);
_stack.Pop();
}
else if (topState == NodeState.Finished)
{
_stack.Pop();
}
else
{
if (topState == NodeState.Unknown)
{
SetState(topIndex, NodeState.Discovered);
}
for (int j = 0; j < edgeMatrix.GetNodeCount(); j++)
{
if (edgeMatrix.Connected(topIndex, j))
{
NodeState jState = GetState(j);
if (jState == NodeState.Unknown)
{
_stack.Push(j);
}
// Found a loop, reset and propagate rejection.
else if (jState == NodeState.Discovered)
{
Reset();
return false;
}
}
}
}
}
}
return true;
}
}
}
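A minimal sketch (illustrative only, not part of this change) of driving the topological sort above with an EdgeMatrix; the node count and edges are made-up:

using Ryujinx.Audio.Renderer.Common;
using System;

// Illustrative only: topologically sort a 3-node graph 0 -> 1 -> 2.
int nodeCount = 3;

EdgeMatrix edges = new EdgeMatrix();
edges.Initialize(new byte[EdgeMatrix.GetWorkBufferSize(nodeCount)], nodeCount);
edges.Connect(0, 1);
edges.Connect(1, 2);

NodeStates states = new NodeStates();
states.Initialize(new byte[NodeStates.GetWorkBufferSize(nodeCount)], nodeCount);

if (states.Sort(edges))
{
    // DFS finish order: { 2, 1, 0 } for this graph (sinks first).
    ReadOnlySpan<int> order = states.GetTsortResult();
}
// Sort returns false when a cycle is detected.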

View File

@ -0,0 +1,34 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
namespace Ryujinx.Audio.Renderer.Common
{
public enum PerformanceDetailType : byte
{
Unknown,
PcmInt16,
Adpcm,
VolumeRamp,
BiquadFilter,
Mix,
Delay,
Aux,
Reverb,
Reverb3d,
PcmFloat
}
}

View File

@ -0,0 +1,28 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
namespace Ryujinx.Audio.Renderer.Common
{
public enum PerformanceEntryType : byte
{
Invalid,
Voice,
SubMix,
FinalMix,
Sink
}
}

View File

@ -0,0 +1,40 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// Common play state.
/// </summary>
public enum PlayState : byte
{
/// <summary>
/// The user requested the voice to be started.
/// </summary>
Start,
/// <summary>
/// The user requested the voice to be stopped.
/// </summary>
Stop,
/// <summary>
/// The user requested the voice to be paused.
/// </summary>
Pause
}
}

View File

@ -0,0 +1,50 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// Early reverb reflection.
/// </summary>
public enum ReverbEarlyMode : uint
{
/// <summary>
/// Room early reflection. (small acoustic space, fast reflection)
/// </summary>
Room,
/// <summary>
/// Chamber early reflection. (bigger than <see cref="Room"/>'s acoustic space, short reflection)
/// </summary>
Chamber,
/// <summary>
/// Hall early reflection. (large acoustic space, warm reflection)
/// </summary>
Hall,
/// <summary>
/// Cathedral early reflection. (very large acoustic space, pronounced bright reflection)
/// </summary>
Cathedral,
/// <summary>
/// No early reflection.
/// </summary>
Disabled
}
}

View File

@ -0,0 +1,55 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// Late reverb reflection.
/// </summary>
public enum ReverbLateMode : uint
{
/// <summary>
/// Room late reflection. (small acoustic space, fast reflection)
/// </summary>
Room,
/// <summary>
/// Hall late reflection. (large acoustic space, warm reflection)
/// </summary>
Hall,
/// <summary>
/// Classic plate late reflection. (clean distinctive reverb)
/// </summary>
Plate,
/// <summary>
/// Cathedral late reflection. (very large acoustic space, pronounced bright reflection)
/// </summary>
Cathedral,
/// <summary>
/// Do not apply any delay. (max delay)
/// </summary>
NoDelay,
/// <summary>
/// Max delay. (used for delay line limits)
/// </summary>
Limit = NoDelay
}
}

View File

@ -0,0 +1,40 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// The type of a sink.
/// </summary>
public enum SinkType : byte
{
/// <summary>
/// The sink is in an invalid state.
/// </summary>
Invalid,
/// <summary>
/// The sink is a device.
/// </summary>
Device,
/// <summary>
/// The sink is a circular buffer.
/// </summary>
CircularBuffer
}
}

View File

@ -0,0 +1,50 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// Update data header used for input and output of <see cref="Server.AudioRenderSystem.Update(System.Memory{byte}, System.Memory{byte}, System.ReadOnlyMemory{byte})"/>.
/// </summary>
public struct UpdateDataHeader
{
public int Revision;
public uint BehaviourSize;
public uint MemoryPoolsSize;
public uint VoicesSize;
public uint VoiceResourcesSize;
public uint EffectsSize;
public uint MixesSize;
public uint SinksSize;
public uint PerformanceBufferSize;
public uint Unknown24;
public uint RenderInfoSize;
private unsafe fixed int _reserved[4];
public uint TotalSize;
public void Initialize(int revision)
{
Revision = revision;
TotalSize = (uint)Unsafe.SizeOf<UpdateDataHeader>();
}
}
}

View File

@ -0,0 +1,121 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Common.Memory;
using Ryujinx.Common.Utilities;
using System;
using System.Runtime.InteropServices;
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// Represents the update state of a voice.
/// </summary>
/// <remarks>This is shared between the server and audio processor.</remarks>
[StructLayout(LayoutKind.Sequential, Pack = Align)]
public struct VoiceUpdateState
{
public const int Align = 0x10;
public const int BiquadStateOffset = 0x0;
public const int BiquadStateSize = 0x10;
/// <summary>
/// The state of the biquad filters of this voice.
/// </summary>
public Array2<BiquadFilterState> BiquadFilterState;
/// <summary>
/// The total number of samples that were played.
/// </summary>
/// <remarks>This is reset to 0 when a <see cref="WaveBuffer"/> finishes playing and <see cref="WaveBuffer.IsEndOfStream"/> is set.</remarks>
/// <remarks>This is reset to 0 when looping while <see cref="Parameter.VoiceInParameter.DecodingBehaviour.PlayedSampleCountResetWhenLooping"/> is set.</remarks>
public ulong PlayedSampleCount;
/// <summary>
/// The current sample offset in the <see cref="WaveBuffer"/> pointed to by <see cref="WaveBufferIndex"/>.
/// </summary>
public int Offset;
/// <summary>
/// The current index of the <see cref="WaveBuffer"/> in use.
/// </summary>
public uint WaveBufferIndex;
private WaveBufferValidArray _isWaveBufferValid;
/// <summary>
/// The total number of <see cref="WaveBuffer"/> consumed.
/// </summary>
public uint WaveBufferConsumed;
/// <summary>
/// Pitch used for Sample Rate Conversion.
/// </summary>
public Array8<short> Pitch;
public float Fraction;
/// <summary>
/// The ADPCM loop context when <see cref="SampleFormat.Adpcm"/> is in use.
/// </summary>
public AdpcmLoopContext LoopContext;
/// <summary>
/// The last samples after a mix ramp.
/// </summary>
/// <remarks>This is used for depop (to perform voice drop).</remarks>
public Array24<float> LastSamples;
/// <summary>
/// The current count of loops performed.
/// </summary>
public int LoopCount;
[StructLayout(LayoutKind.Sequential, Size = 1 * Constants.VoiceWaveBufferCount, Pack = 1)]
private struct WaveBufferValidArray { }
/// <summary>
/// Contains information about <see cref="WaveBuffer"/> validity.
/// </summary>
public Span<bool> IsWaveBufferValid => SpanHelpers.AsSpan<WaveBufferValidArray, bool>(ref _isWaveBufferValid);
/// <summary>
/// Mark the current <see cref="WaveBuffer"/> as played and switch to the next one.
/// </summary>
/// <param name="waveBuffer">The current <see cref="WaveBuffer"/></param>
/// <param name="waveBufferIndex">The wavebuffer index.</param>
/// <param name="waveBufferConsumed">The amount of wavebuffers consumed.</param>
/// <param name="playedSampleCount">The total count of sample played.</param>
public void MarkEndOfBufferWaveBufferProcessing(ref WaveBuffer waveBuffer, ref int waveBufferIndex, ref uint waveBufferConsumed, ref ulong playedSampleCount)
{
IsWaveBufferValid[waveBufferIndex++] = false;
LoopCount = 0;
waveBufferConsumed++;
if (waveBufferIndex >= Constants.VoiceWaveBufferCount)
{
waveBufferIndex = 0;
}
if (waveBuffer.IsEndOfStream)
{
playedSampleCount = 0;
}
}
}
}

View File

@ -0,0 +1,99 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using System.Runtime.InteropServices;
using DspAddr = System.UInt64;
namespace Ryujinx.Audio.Renderer.Common
{
/// <summary>
/// A wavebuffer used for data source commands.
/// </summary>
[StructLayout(LayoutKind.Sequential, Pack = 1)]
public struct WaveBuffer
{
/// <summary>
/// The DSP address of the sample data of the wavebuffer.
/// </summary>
public DspAddr Buffer;
/// <summary>
/// The DSP address of the context of the wavebuffer.
/// </summary>
/// <remarks>Only used by <see cref="SampleFormat.Adpcm"/>.</remarks>
public DspAddr Context;
/// <summary>
/// The size of the sample buffer data.
/// </summary>
public uint BufferSize;
/// <summary>
/// The size of the context buffer.
/// </summary>
public uint ContextSize;
/// <summary>
/// First sample to play on the wavebuffer.
/// </summary>
public uint StartSampleOffset;
/// <summary>
/// Last sample to play on the wavebuffer.
/// </summary>
public uint EndSampleOffset;
/// <summary>
/// First sample to play when looping the wavebuffer.
/// </summary>
/// <remarks>
/// If <see cref="LoopStartSampleOffset"/> or <see cref="LoopEndSampleOffset"/> is equal to zero,, it will default to <see cref="StartSampleOffset"/> and <see cref="EndSampleOffset"/>.
/// </remarks>
public uint LoopStartSampleOffset;
/// <summary>
/// Last sample to play when looping the wavebuffer.
/// </summary>
/// <remarks>
/// If <see cref="LoopStartSampleOffset"/> or <see cref="LoopEndSampleOffset"/> is equal to zero, it will default to <see cref="StartSampleOffset"/> and <see cref="EndSampleOffset"/>.
/// </remarks>
public uint LoopEndSampleOffset;
/// <summary>
/// The max loop count.
/// </summary>
public int LoopCount;
/// <summary>
/// Set to true if the wavebuffer is looping.
/// </summary>
[MarshalAs(UnmanagedType.I1)]
public bool Looping;
/// <summary>
/// Set to true if the wavebuffer is the end of stream.
/// </summary>
[MarshalAs(UnmanagedType.I1)]
public bool IsEndOfStream;
/// <summary>
/// Padding/Reserved.
/// </summary>
private ushort _padding;
}
}

View File

@ -0,0 +1,78 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Renderer.Utils;
using Ryujinx.Common;
using System;
using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Common
{
public class WorkBufferAllocator
{
public Memory<byte> BackingMemory { get; }
public ulong Offset { get; private set; }
public WorkBufferAllocator(Memory<byte> backingMemory)
{
BackingMemory = backingMemory;
}
public Memory<byte> Allocate(ulong size, int align)
{
Debug.Assert(align != 0);
if (size != 0)
{
ulong alignedOffset = BitUtils.AlignUp(Offset, align);
if (alignedOffset + size <= (ulong)BackingMemory.Length)
{
Memory<byte> result = BackingMemory.Slice((int)alignedOffset, (int)size);
Offset = alignedOffset + size;
// Clear the memory to be sure that it does not contain any garbage.
result.Span.Fill(0);
return result;
}
}
return Memory<byte>.Empty;
}
public Memory<T> Allocate<T>(ulong count, int align) where T: unmanaged
{
Memory<byte> allocatedMemory = Allocate((ulong)Unsafe.SizeOf<T>() * count, align);
if (allocatedMemory.IsEmpty)
{
return Memory<T>.Empty;
}
return SpanMemoryManager<T>.Cast(allocatedMemory);
}
public static ulong GetTargetSize<T>(ulong currentSize, ulong count, int align) where T: unmanaged
{
return BitUtils.AlignUp(currentSize, align) + (ulong)Unsafe.SizeOf<T>() * count;
}
}
}
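A minimal sketch (illustrative only) of carving aligned regions out of one backing buffer with the allocator above; the buffer size, request sizes and alignments are made-up values:

using Ryujinx.Audio.Renderer.Common;
using System;

// Illustrative only: carve two 0x10-aligned regions out of one backing buffer.
Memory<byte> backing = new byte[0x1000];
WorkBufferAllocator allocator = new WorkBufferAllocator(backing);

Memory<byte> scratch = allocator.Allocate(0x100, 0x10);
Memory<int> counters = allocator.Allocate<int>(32, 0x10);

// An empty result means the backing buffer was too small for the request.
bool ok = !scratch.IsEmpty && !counters.IsEmpty;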

View File

@ -0,0 +1,84 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using System.Diagnostics;
namespace Ryujinx.Audio.Renderer.Device
{
/// <summary>
/// Represents a virtual device used by IAudioDevice.
/// </summary>
public class VirtualDevice
{
/// <summary>
/// All the defined virtual devices.
/// </summary>
public static readonly VirtualDevice[] Devices = new VirtualDevice[4]
{
new VirtualDevice("AudioStereoJackOutput", 2),
new VirtualDevice("AudioBuiltInSpeakerOutput", 2),
new VirtualDevice("AudioTvOutput", 6),
new VirtualDevice("AudioUsbDeviceOutput", 2),
};
/// <summary>
/// The name of the <see cref="VirtualDevice"/>.
/// </summary>
public string Name { get; }
/// <summary>
/// The count of channels supported by the <see cref="VirtualDevice"/>.
/// </summary>
public uint ChannelCount { get; }
/// <summary>
/// The system master volume of the <see cref="VirtualDevice"/>.
/// </summary>
public float MasterVolume { get; private set; }
/// <summary>
/// Create a new <see cref="VirtualDevice"/> instance.
/// </summary>
/// <param name="name">The name of the <see cref="VirtualDevice"/>.</param>
/// <param name="channelCount">The count of channels supported by the <see cref="VirtualDevice"/>.</param>
private VirtualDevice(string name, uint channelCount)
{
Name = name;
ChannelCount = channelCount;
}
/// <summary>
/// Update the master volume of the <see cref="VirtualDevice"/>.
/// </summary>
/// <param name="volume">The new master volume.</param>
public void UpdateMasterVolume(float volume)
{
Debug.Assert(volume >= 0.0f && volume <= 1.0f);
MasterVolume = volume;
}
/// <summary>
/// Check if the <see cref="VirtualDevice"/> is a usb device.
/// </summary>
/// <returns>Returns true if the <see cref="VirtualDevice"/> is a usb device.</returns>
public bool IsUsbDevice()
{
return Name.Equals("AudioUsbDeviceOutput");
}
}
}

View File

@ -0,0 +1,44 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
namespace Ryujinx.Audio.Renderer.Device
{
/// <summary>
/// Represents a virtual device session used by IAudioDevice.
/// </summary>
public class VirtualDeviceSession
{
/// <summary>
/// The <see cref="VirtualDevice"/> associated to this session.
/// </summary>
public VirtualDevice Device { get; }
/// <summary>
/// The user volume of this session.
/// </summary>
public float Volume { get; set; }
/// <summary>
/// Create a new <see cref="VirtualDeviceSession"/> instance.
/// </summary>
/// <param name="virtualDevice">The <see cref="VirtualDevice"/> associated to this session.</param>
public VirtualDeviceSession(VirtualDevice virtualDevice)
{
Device = virtualDevice;
}
}
}

View File

@ -0,0 +1,79 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using System.Collections.Generic;
namespace Ryujinx.Audio.Renderer.Device
{
/// <summary>
/// Represents an instance containing a registry of <see cref="VirtualDeviceSession"/>.
/// </summary>
public class VirtualDeviceSessionRegistry
{
/// <summary>
/// The session registry, used to store the sessions of a given AppletResourceId.
/// </summary>
private Dictionary<ulong, VirtualDeviceSession[]> _sessionsRegistry = new Dictionary<ulong, VirtualDeviceSession[]>();
/// <summary>
/// The default <see cref="VirtualDevice"/>.
/// </summary>
/// <remarks>This is used when the USB device is the default one on older revisions.</remarks>
public VirtualDevice DefaultDevice => VirtualDevice.Devices[0];
/// <summary>
/// The current active <see cref="VirtualDevice"/>.
/// </summary>
// TODO: make this configurable
public VirtualDevice ActiveDevice = VirtualDevice.Devices[1];
/// <summary>
/// Get the associated <see cref="T:VirtualDeviceSession[]"/> from an AppletResourceId.
/// </summary>
/// <param name="resourceAppletId">The AppletResourceId used.</param>
/// <returns>The associated <see cref="T:VirtualDeviceSession[]"/> from an AppletResourceId.</returns>
public VirtualDeviceSession[] GetSessionByAppletResourceId(ulong resourceAppletId)
{
if (_sessionsRegistry.TryGetValue(resourceAppletId, out VirtualDeviceSession[] result))
{
return result;
}
result = CreateSessionsFromBehaviourContext();
_sessionsRegistry.Add(resourceAppletId, result);
return result;
}
/// <summary>
/// Create a new array of sessions for each <see cref="VirtualDevice"/>.
/// </summary>
/// <returns>A new array of sessions for each <see cref="VirtualDevice"/>.</returns>
private static VirtualDeviceSession[] CreateSessionsFromBehaviourContext()
{
VirtualDeviceSession[] virtualDeviceSession = new VirtualDeviceSession[VirtualDevice.Devices.Length];
for (int i = 0; i < virtualDeviceSession.Length; i++)
{
virtualDeviceSession[i] = new VirtualDeviceSession(VirtualDevice.Devices[i]);
}
return virtualDeviceSession;
}
}
}
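A minimal sketch (illustrative only) of fetching the per-applet sessions from the registry above; the applet resource id and volume are placeholder values:

using Ryujinx.Audio.Renderer.Device;

// Illustrative only: the applet resource id below is a placeholder value.
VirtualDeviceSessionRegistry registry = new VirtualDeviceSessionRegistry();
VirtualDeviceSession[] sessions = registry.GetSessionByAppletResourceId(0xCAFE);

// One session is created per virtual device; adjust the user volume of the first.
sessions[0].Volume = 0.75f;

uint channelCount = registry.ActiveDevice.ChannelCount; // 2 (built-in speaker)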

View File

@ -0,0 +1,219 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Renderer.Dsp.State;
using System;
using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp
{
public static class AdpcmHelper
{
private const int FixedPointPrecision = 11;
private const int SamplesPerFrame = 14;
private const int NibblesPerFrame = SamplesPerFrame + 2;
private const int BytesPerFrame = 8;
private const int BitsPerFrame = BytesPerFrame * 8;
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static uint GetAdpcmDataSize(int sampleCount)
{
Debug.Assert(sampleCount >= 0);
int frames = sampleCount / SamplesPerFrame;
int extraSize = 0;
if ((sampleCount % SamplesPerFrame) != 0)
{
extraSize = (sampleCount % SamplesPerFrame) / 2 + 1 + (sampleCount % 2);
}
return (uint)(BytesPerFrame * frames + extraSize);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int GetAdpcmOffsetFromSampleOffset(int sampleOffset)
{
Debug.Assert(sampleOffset >= 0);
return GetNibblesFromSampleCount(sampleOffset) / 2;
}
public static int NibbleToSample(int nibble)
{
int frames = nibble / NibblesPerFrame;
int extraNibbles = nibble % NibblesPerFrame;
int samples = SamplesPerFrame * frames;
return samples + extraNibbles - 2;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int GetNibblesFromSampleCount(int sampleCount)
{
byte headerSize = 0;
if ((sampleCount % SamplesPerFrame) != 0)
{
headerSize = 2;
}
return sampleCount % SamplesPerFrame + NibblesPerFrame * (sampleCount / SamplesPerFrame) + headerSize;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static short Saturate(int value)
{
if (value > short.MaxValue)
value = short.MaxValue;
if (value < short.MinValue)
value = short.MinValue;
return (short)value;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int Decode(Span<short> output, ReadOnlySpan<byte> input, int startSampleOffset, int endSampleOffset, int offset, int count, ReadOnlySpan<short> coefficients, ref AdpcmLoopContext loopContext)
{
if (input.IsEmpty || endSampleOffset < startSampleOffset)
{
return 0;
}
byte predScale = (byte)loopContext.PredScale;
byte scale = (byte)(predScale & 0xF);
byte coefficientIndex = (byte)((predScale >> 4) & 0xF);
short history0 = loopContext.History0;
short history1 = loopContext.History1;
short coefficient0 = coefficients[coefficientIndex * 2 + 0];
short coefficient1 = coefficients[coefficientIndex * 2 + 1];
int decodedCount = Math.Min(count, endSampleOffset - startSampleOffset - offset);
int nibbles = GetNibblesFromSampleCount(offset + startSampleOffset);
int remaining = decodedCount;
int outputBufferIndex = 0;
int inputIndex = 0;
ReadOnlySpan<byte> targetInput;
targetInput = input.Slice(nibbles / 2);
while (remaining > 0)
{
int samplesCount;
if (((uint)nibbles % NibblesPerFrame) == 0)
{
predScale = targetInput[inputIndex++];
scale = (byte)(predScale & 0xF);
coefficientIndex = (byte)((predScale >> 4) & 0xF);
coefficient0 = coefficients[coefficientIndex * 2 + 0];
coefficient1 = coefficients[coefficientIndex * 2 + 1];
nibbles += 2;
samplesCount = Math.Min(remaining, SamplesPerFrame);
}
else
{
samplesCount = 1;
}
int scaleFixedPoint = FixedPointHelper.ToFixed(1.0f, FixedPointPrecision) << scale;
if (samplesCount < SamplesPerFrame)
{
for (int i = 0; i < samplesCount; i++)
{
int value = targetInput[inputIndex];
int sample;
if ((nibbles & 1) != 0)
{
sample = (value << 28) >> 28;
inputIndex++;
}
else
{
sample = (value << 24) >> 28;
}
nibbles++;
int prediction = coefficient0 * history0 + coefficient1 * history1;
sample = FixedPointHelper.RoundUpAndToInt(sample * scaleFixedPoint + prediction, FixedPointPrecision);
short saturatedSample = Saturate(sample);
history1 = history0;
history0 = saturatedSample;
output[outputBufferIndex++] = saturatedSample;
remaining--;
}
}
else
{
for (int i = 0; i < SamplesPerFrame / 2; i++)
{
int value = targetInput[inputIndex];
int sample0;
int sample1;
sample0 = (value << 24) >> 28;
sample1 = (value << 28) >> 28;
inputIndex++;
int prediction0 = coefficient0 * history0 + coefficient1 * history1;
sample0 = FixedPointHelper.RoundUpAndToInt(sample0 * scaleFixedPoint + prediction0, FixedPointPrecision);
short saturatedSample0 = Saturate(sample0);
int prediction1 = coefficient0 * saturatedSample0 + coefficient1 * history0;
sample1 = FixedPointHelper.RoundUpAndToInt(sample1 * scaleFixedPoint + prediction1, FixedPointPrecision);
short saturatedSample1 = Saturate(sample1);
history1 = saturatedSample0;
history0 = saturatedSample1;
output[outputBufferIndex++] = saturatedSample0;
output[outputBufferIndex++] = saturatedSample1;
}
nibbles += SamplesPerFrame;
remaining -= SamplesPerFrame;
}
}
loopContext.PredScale = predScale;
loopContext.History0 = history0;
loopContext.History1 = history1;
return decodedCount;
}
}
}
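A hedged worked example of the frame arithmetic above (14 samples per 8-byte frame); the sample count is arbitrary:

using Ryujinx.Audio.Renderer.Dsp;

// Illustrative only: 30 samples = 2 full frames (28 samples, 16 bytes) plus
// 2 leftover samples, which need 1 header byte and 1 byte of nibbles.
uint dataSize = AdpcmHelper.GetAdpcmDataSize(30);                // 18 bytes
int nibbles = AdpcmHelper.GetNibblesFromSampleCount(30);         // 36 nibbles
int byteOffset = AdpcmHelper.GetAdpcmOffsetFromSampleOffset(30); // 18 bytes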

View File

@ -0,0 +1,255 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Integration;
using Ryujinx.Audio.Renderer.Dsp.Command;
using Ryujinx.Audio.Renderer.Utils;
using Ryujinx.Common;
using Ryujinx.Common.Logging;
using System;
using System.Threading;
namespace Ryujinx.Audio.Renderer.Dsp
{
public class AudioProcessor : IDisposable
{
private const int MaxBufferedFrames = 5;
private const int TargetBufferedFrames = 3;
private enum MailboxMessage : uint
{
Start,
Stop,
RenderStart,
RenderEnd
}
private class RendererSession
{
public CommandList CommandList;
public int RenderingLimit;
public ulong AppletResourceId;
}
private Mailbox<MailboxMessage> _mailbox;
private RendererSession[] _sessionCommandList;
private Thread _workerThread;
public IHardwareDevice[] OutputDevices { get; private set; }
private long _lastTime;
private long _playbackEnds;
private ManualResetEvent _event;
public AudioProcessor()
{
_event = new ManualResetEvent(false);
}
private static uint GetHardwareChannelCount(IHardwareDeviceDriver deviceDriver)
{
// Get the real device driver (in case the compat layer is on top of it).
deviceDriver = deviceDriver.GetRealDeviceDriver();
if (deviceDriver.SupportsChannelCount(6))
{
return 6;
}
else
{
// NOTE: We default to stereo as this will get downmixed to mono by the compat layer if it's not compatible.
return 2;
}
}
public void Start(IHardwareDeviceDriver deviceDriver)
{
OutputDevices = new IHardwareDevice[Constants.AudioRendererSessionCountMax];
// TODO: Before enabling this, we need up-mixing from stereo to 5.1.
// uint channelCount = GetHardwareChannelCount(deviceDriver);
uint channelCount = 2;
for (int i = 0; i < OutputDevices.Length; i++)
{
// TODO: Don't hardcode sample rate.
OutputDevices[i] = new HardwareDeviceImpl(deviceDriver, channelCount, Constants.TargetSampleRate);
}
_mailbox = new Mailbox<MailboxMessage>();
_sessionCommandList = new RendererSession[Constants.AudioRendererSessionCountMax];
_event.Reset();
_lastTime = PerformanceCounter.ElapsedNanoseconds;
StartThread();
_mailbox.SendMessage(MailboxMessage.Start);
if (_mailbox.ReceiveResponse() != MailboxMessage.Start)
{
throw new InvalidOperationException("Audio Processor Start response was invalid!");
}
}
public void Stop()
{
_mailbox.SendMessage(MailboxMessage.Stop);
if (_mailbox.ReceiveResponse() != MailboxMessage.Stop)
{
throw new InvalidOperationException("Audio Processor Stop response was invalid!");
}
foreach (IHardwareDevice device in OutputDevices)
{
device.Dispose();
}
}
public void Send(int sessionId, CommandList commands, int renderingLimit, ulong appletResourceId)
{
_sessionCommandList[sessionId] = new RendererSession
{
CommandList = commands,
RenderingLimit = renderingLimit,
AppletResourceId = appletResourceId
};
}
public void Signal()
{
_mailbox.SendMessage(MailboxMessage.RenderStart);
}
public void Wait()
{
if (_mailbox.ReceiveResponse() != MailboxMessage.RenderEnd)
{
throw new InvalidOperationException("Audio Processor Wait response was invalid!");
}
long increment = Constants.AudioProcessorMaxUpdateTimeTarget;
long timeNow = PerformanceCounter.ElapsedNanoseconds;
if (timeNow > _playbackEnds)
{
// Playback has restarted.
_playbackEnds = timeNow;
}
_playbackEnds += increment;
// The number of frames we are behind where the timer says we should be.
long framesBehind = (timeNow - _lastTime) / increment;
// The number of frames yet to play on the backend.
long bufferedFrames = (_playbackEnds - timeNow) / increment + framesBehind;
// If we've entered a situation where a lot of buffers will be queued on the backend,
// skip some audio frames so that playback can catch up.
if (bufferedFrames > MaxBufferedFrames)
{
// Skip a few frames so that we're not too far behind. (the target number of frames)
_lastTime += increment * (bufferedFrames - TargetBufferedFrames);
}
while (timeNow < _lastTime + increment)
{
_event.WaitOne(1);
timeNow = PerformanceCounter.ElapsedNanoseconds;
}
_lastTime += increment;
}
private void StartThread()
{
_workerThread = new Thread(Work)
{
Name = "AudioProcessor.Worker"
};
_workerThread.Start();
}
private void Work()
{
if (_mailbox.ReceiveMessage() != MailboxMessage.Start)
{
throw new InvalidOperationException("Audio Processor Start message was invalid!");
}
_mailbox.SendResponse(MailboxMessage.Start);
_mailbox.SendResponse(MailboxMessage.RenderEnd);
Logger.Info?.Print(LogClass.AudioRenderer, "Starting audio processor");
while (true)
{
MailboxMessage message = _mailbox.ReceiveMessage();
if (message == MailboxMessage.Stop)
{
break;
}
if (message == MailboxMessage.RenderStart)
{
long startTicks = PerformanceCounter.ElapsedNanoseconds;
for (int i = 0; i < _sessionCommandList.Length; i++)
{
if (_sessionCommandList[i] != null)
{
_sessionCommandList[i].CommandList.Process(OutputDevices[i]);
_sessionCommandList[i] = null;
}
}
long endTicks = PerformanceCounter.ElapsedNanoseconds;
long elapsedTime = endTicks - startTicks;
if (Constants.AudioProcessorMaxUpdateTime < elapsedTime)
{
Logger.Debug?.Print(LogClass.AudioRenderer, $"DSP too slow (exceeded by {elapsedTime - Constants.AudioProcessorMaxUpdateTime}ns)");
}
_mailbox.SendResponse(MailboxMessage.RenderEnd);
}
}
Logger.Info?.Print(LogClass.AudioRenderer, "Stopping audio processor");
_mailbox.SendResponse(MailboxMessage.Stop);
}
public void Dispose()
{
Dispose(true);
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
_event.Dispose();
}
}
}
}
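A minimal sketch (illustrative only) of the per-frame handshake the mailbox protocol above implies; the processor is assumed to have been started elsewhere with a device driver, and the command list, rendering limit and applet resource id are placeholders:

using Ryujinx.Audio.Renderer.Dsp;
using Ryujinx.Audio.Renderer.Dsp.Command;

static class AudioProcessorUsageSketch
{
    // Illustrative only: `processor` is assumed to be started, and `commands`
    // built by the server side; 352 and 0 are placeholder values.
    public static void RenderOnce(AudioProcessor processor, CommandList commands)
    {
        processor.Send(0, commands, 352, 0);
        processor.Signal(); // posts RenderStart to the worker mailbox
        processor.Wait();   // blocks on RenderEnd, then paces against the wall clock
    }
}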

View File

@ -0,0 +1,94 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Renderer.Common;
using System;
using static Ryujinx.Audio.Renderer.Parameter.VoiceInParameter;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class AdpcmDataSourceCommandVersion1 : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.AdpcmDataSourceVersion1;
public ulong EstimatedProcessingTime { get; set; }
public ushort OutputBufferIndex { get; }
public uint SampleRate { get; }
public float Pitch { get; }
public WaveBuffer[] WaveBuffers { get; }
public Memory<VoiceUpdateState> State { get; }
public ulong AdpcmParameter { get; }
public ulong AdpcmParameterSize { get; }
public DecodingBehaviour DecodingBehaviour { get; }
public AdpcmDataSourceCommandVersion1(ref Server.Voice.VoiceState serverState, Memory<VoiceUpdateState> state, ushort outputBufferIndex, int nodeId)
{
Enabled = true;
NodeId = nodeId;
OutputBufferIndex = outputBufferIndex;
SampleRate = serverState.SampleRate;
Pitch = serverState.Pitch;
WaveBuffers = new WaveBuffer[Constants.VoiceWaveBufferCount];
for (int i = 0; i < WaveBuffers.Length; i++)
{
ref Server.Voice.WaveBuffer voiceWaveBuffer = ref serverState.WaveBuffers[i];
WaveBuffers[i] = voiceWaveBuffer.ToCommon(1);
}
AdpcmParameter = serverState.DataSourceStateAddressInfo.GetReference(true);
AdpcmParameterSize = serverState.DataSourceStateAddressInfo.Size;
State = state;
DecodingBehaviour = serverState.DecodingBehaviour;
}
public void Process(CommandList context)
{
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
DataSourceHelper.WaveBufferInformation info = new DataSourceHelper.WaveBufferInformation()
{
State = State,
SourceSampleRate = SampleRate,
SampleFormat = SampleFormat.Adpcm,
Pitch = Pitch,
DecodingBehaviour = DecodingBehaviour,
WaveBuffers = WaveBuffers,
ExtraParameter = AdpcmParameter,
ExtraParameterSize = AdpcmParameterSize,
ChannelIndex = 0,
ChannelCount = 1,
};
DataSourceHelper.ProcessWaveBuffers(context.MemoryManager, outputBuffer, info, context.SampleRate, (int)context.SampleCount);
}
}
}

View File

@ -0,0 +1,205 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Renderer.Common;
using Ryujinx.Memory;
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using static Ryujinx.Audio.Renderer.Dsp.State.AuxiliaryBufferHeader;
using CpuAddress = System.UInt64;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class AuxiliaryBufferCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.AuxiliaryBuffer;
public ulong EstimatedProcessingTime { get; set; }
public uint InputBufferIndex { get; }
public uint OutputBufferIndex { get; }
public AuxiliaryBufferAddresses BufferInfo { get; }
public CpuAddress InputBuffer { get; }
public CpuAddress OutputBuffer { get; }
public uint CountMax { get; }
public uint UpdateCount { get; }
public uint WriteOffset { get; }
public bool IsEffectEnabled { get; }
public AuxiliaryBufferCommand(uint bufferOffset, byte inputBufferOffset, byte outputBufferOffset,
ref AuxiliaryBufferAddresses sendBufferInfo, bool isEnabled, uint countMax,
CpuAddress outputBuffer, CpuAddress inputBuffer, uint updateCount, uint writeOffset, int nodeId)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndex = bufferOffset + inputBufferOffset;
OutputBufferIndex = bufferOffset + outputBufferOffset;
BufferInfo = sendBufferInfo;
InputBuffer = inputBuffer;
OutputBuffer = outputBuffer;
CountMax = countMax;
UpdateCount = updateCount;
WriteOffset = writeOffset;
IsEffectEnabled = isEnabled;
}
private uint Read(IVirtualMemoryManager memoryManager, ulong bufferAddress, uint countMax, Span<int> outBuffer, uint count, uint readOffset, uint updateCount)
{
if (countMax == 0 || bufferAddress == 0)
{
return 0;
}
uint targetReadOffset = readOffset + AuxiliaryBufferInfo.GetReadOffset(memoryManager, BufferInfo.ReturnBufferInfo);
if (targetReadOffset > countMax)
{
return 0;
}
uint remaining = count;
uint outBufferOffset = 0;
while (remaining != 0)
{
uint countToWrite = Math.Min(countMax - targetReadOffset, remaining);
memoryManager.Read(bufferAddress + targetReadOffset * sizeof(int), MemoryMarshal.Cast<int, byte>(outBuffer.Slice((int)outBufferOffset, (int)countToWrite)));
targetReadOffset = (targetReadOffset + countToWrite) % countMax;
remaining -= countToWrite;
outBufferOffset += countToWrite;
}
if (updateCount != 0)
{
uint newReadOffset = (AuxiliaryBufferInfo.GetReadOffset(memoryManager, BufferInfo.ReturnBufferInfo) + updateCount) % countMax;
AuxiliaryBufferInfo.SetReadOffset(memoryManager, BufferInfo.ReturnBufferInfo, newReadOffset);
}
return count;
}
private uint Write(IVirtualMemoryManager memoryManager, ulong outBufferAddress, uint countMax, ReadOnlySpan<int> buffer, uint count, uint writeOffset, uint updateCount)
{
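// Write count samples into the guest ring buffer at the stored write offset (plus writeOffset),
// wrapping at countMax; the stored write offset is advanced by updateCount once the copy is done.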
if (countMax == 0 || outBufferAddress == 0)
{
return 0;
}
uint targetWriteOffset = writeOffset + AuxiliaryBufferInfo.GetWriteOffset(memoryManager, BufferInfo.SendBufferInfo);
if (targetWriteOffset > countMax)
{
return 0;
}
uint remaining = count;
uint inBufferOffset = 0;
while (remaining != 0)
{
uint countToWrite = Math.Min(countMax - targetWriteOffset, remaining);
memoryManager.Write(outBufferAddress + targetWriteOffset * sizeof(int), MemoryMarshal.Cast<int, byte>(buffer.Slice((int)inBufferOffset, (int)countToWrite)));
targetWriteOffset = (targetWriteOffset + countToWrite) % countMax;
remaining -= countToWrite;
inBufferOffset += countToWrite;
}
if (updateCount != 0)
{
uint newWriteOffset = (AuxiliaryBufferInfo.GetWriteOffset(memoryManager, BufferInfo.SendBufferInfo) + updateCount) % countMax;
AuxiliaryBufferInfo.SetWriteOffset(memoryManager, BufferInfo.SendBufferInfo, newWriteOffset);
}
return count;
}
public void Process(CommandList context)
{
Span<float> inputBuffer = context.GetBuffer((int)InputBufferIndex);
Span<float> outputBuffer = context.GetBuffer((int)OutputBufferIndex);
if (IsEffectEnabled)
{
Span<int> inputBufferInt = MemoryMarshal.Cast<float, int>(inputBuffer);
Span<int> outputBufferInt = MemoryMarshal.Cast<float, int>(outputBuffer);
// Convert the input data to the format expected by the user (int)
DataSourceHelper.ToInt(inputBufferInt, inputBuffer, outputBuffer.Length);
// Send the input to the user
Write(context.MemoryManager, OutputBuffer, CountMax, inputBufferInt, context.SampleCount, WriteOffset, UpdateCount);
// Convert back to float just in case it's reused
DataSourceHelper.ToFloat(inputBuffer, inputBufferInt, inputBuffer.Length);
// Retrieve the input from the user
uint readResult = Read(context.MemoryManager, InputBuffer, CountMax, outputBufferInt, context.SampleCount, WriteOffset, UpdateCount);
// Convert the outputBuffer back to the target format of the renderer (float)
DataSourceHelper.ToFloat(outputBuffer, outputBufferInt, outputBuffer.Length);
if (readResult != context.SampleCount)
{
outputBuffer.Slice((int)readResult, (int)context.SampleCount - (int)readResult).Fill(0);
}
}
else
{
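// Effect disabled: reset both ring buffer headers in guest memory and pass the input
// through to the output unchanged.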
ZeroFill(context.MemoryManager, BufferInfo.SendBufferInfo, Unsafe.SizeOf<AuxiliaryBufferInfo>());
ZeroFill(context.MemoryManager, BufferInfo.ReturnBufferInfo, Unsafe.SizeOf<AuxiliaryBufferInfo>());
if (InputBufferIndex != OutputBufferIndex)
{
inputBuffer.CopyTo(outputBuffer);
}
}
}
private static void ZeroFill(IVirtualMemoryManager memoryManager, ulong address, int size)
{
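// Clear the region 8 bytes at a time, then finish the remaining bytes one by one.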
ulong endAddress = address + (ulong)size;
while (address + 7UL < endAddress)
{
memoryManager.Write(address, 0UL);
address += 8;
}
while (address < endAddress)
{
memoryManager.Write(address, (byte)0);
address++;
}
}
}
}

View File

@ -0,0 +1,89 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Audio.Renderer.Parameter;
using System;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class BiquadFilterCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.BiquadFilter;
public ulong EstimatedProcessingTime { get; set; }
public BiquadFilterParameter Parameter { get; }
public Memory<BiquadFilterState> BiquadFilterState { get; }
public int InputBufferIndex { get; }
public int OutputBufferIndex { get; }
public bool NeedInitialization { get; }
public BiquadFilterCommand(int baseIndex, ref BiquadFilterParameter filter, Memory<BiquadFilterState> biquadFilterStateMemory, int inputBufferOffset, int outputBufferOffset, bool needInitialization, int nodeId)
{
Parameter = filter;
BiquadFilterState = biquadFilterStateMemory;
InputBufferIndex = baseIndex + inputBufferOffset;
OutputBufferIndex = baseIndex + outputBufferOffset;
NeedInitialization = needInitialization;
Enabled = true;
NodeId = nodeId;
}
private void ProcessBiquadFilter(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer, uint sampleCount)
{
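// Transposed direct form II biquad; the coefficients come in as Q14 fixed point and the
// denominator terms are added directly (they are assumed to be stored pre-negated).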
const int fixedPointPrecisionForParameter = 14;
float a0 = FixedPointHelper.ToFloat(Parameter.Numerator[0], fixedPointPrecisionForParameter);
float a1 = FixedPointHelper.ToFloat(Parameter.Numerator[1], fixedPointPrecisionForParameter);
float a2 = FixedPointHelper.ToFloat(Parameter.Numerator[2], fixedPointPrecisionForParameter);
float b1 = FixedPointHelper.ToFloat(Parameter.Denominator[0], fixedPointPrecisionForParameter);
float b2 = FixedPointHelper.ToFloat(Parameter.Denominator[1], fixedPointPrecisionForParameter);
ref BiquadFilterState state = ref BiquadFilterState.Span[0];
for (int i = 0; i < sampleCount; i++)
{
float input = inputBuffer[i];
float output = input * a0 + state.Z1;
state.Z1 = input * a1 + output * b1 + state.Z2;
state.Z2 = input * a2 + output * b2;
outputBuffer[i] = output;
}
}
public void Process(CommandList context)
{
Span<float> outputBuffer = context.GetBuffer(InputBufferIndex);
if (NeedInitialization)
{
BiquadFilterState.Span[0] = new BiquadFilterState();
}
ProcessBiquadFilter(outputBuffer, outputBuffer, context.SampleCount);
}
}
}

View File

@ -0,0 +1,91 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Renderer.Parameter.Sink;
using Ryujinx.Audio.Renderer.Server.MemoryPool;
using System;
using System.Diagnostics;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class CircularBufferSinkCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.CircularBufferSink;
public ulong EstimatedProcessingTime { get; set; }
public ushort[] Input { get; }
public uint InputCount { get; }
public ulong CircularBuffer { get; }
public ulong CircularBufferSize { get; }
public ulong CurrentOffset { get; }
public CircularBufferSinkCommand(uint bufferOffset, ref CircularBufferParameter parameter, ref AddressInfo circularBufferAddressInfo, uint currentOffset, int nodeId)
{
Enabled = true;
NodeId = nodeId;
Input = new ushort[Constants.ChannelCountMax];
InputCount = parameter.InputCount;
for (int i = 0; i < InputCount; i++)
{
Input[i] = (ushort)(bufferOffset + parameter.Input[i]);
}
CircularBuffer = circularBufferAddressInfo.GetReference(true);
CircularBufferSize = parameter.BufferSize;
CurrentOffset = currentOffset;
Debug.Assert(CircularBuffer != 0);
}
public void Process(CommandList context)
{
const int targetChannelCount = 2;
ulong currentOffset = CurrentOffset;
if (CircularBufferSize > 0)
{
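// Saturate every input mix buffer to signed 16-bit PCM and append it to the circular buffer
// one channel block at a time, wrapping back to the start when the end is reached.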
for (int i = 0; i < InputCount; i++)
{
ReadOnlySpan<float> inputBuffer = context.GetBuffer(Input[i]);
ulong targetOffset = CircularBuffer + currentOffset;
for (int y = 0; y < context.SampleCount; y++)
{
context.MemoryManager.Write(targetOffset + (ulong)y * targetChannelCount, PcmHelper.Saturate(inputBuffer[y]));
}
currentOffset += context.SampleCount * targetChannelCount;
if (currentOffset >= CircularBufferSize)
{
currentOffset = 0;
}
}
}
}
}
}

View File

@ -0,0 +1,41 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class ClearMixBufferCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.ClearMixBuffer;
public ulong EstimatedProcessingTime { get; set; }
public ClearMixBufferCommand(int nodeId)
{
Enabled = true;
NodeId = nodeId;
}
public void Process(CommandList context)
{
context.Buffers.Span.Fill(0);
}
}
}

View File

@ -0,0 +1,124 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Integration;
using Ryujinx.Audio.Renderer.Server;
using Ryujinx.Common;
using Ryujinx.Common.Logging;
using Ryujinx.Memory;
using System;
using System.Collections.Generic;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class CommandList
{
public ulong StartTime { get; private set; }
public ulong EndTime { get; private set; }
public uint SampleCount { get; }
public uint SampleRate { get; }
public Memory<float> Buffers { get; }
public uint BufferCount { get; }
public List<ICommand> Commands { get; }
public IVirtualMemoryManager MemoryManager { get; }
public IHardwareDevice OutputDevice { get; private set; }
public CommandList(AudioRenderSystem renderSystem) : this(renderSystem.MemoryManager,
renderSystem.GetMixBuffer(),
renderSystem.GetSampleCount(),
renderSystem.GetSampleRate(),
renderSystem.GetMixBufferCount(),
renderSystem.GetVoiceChannelCountMax())
{
}
public CommandList(IVirtualMemoryManager memoryManager, Memory<float> mixBuffer, uint sampleCount, uint sampleRate, uint mixBufferCount, uint voiceChannelCountMax)
{
SampleCount = sampleCount;
SampleRate = sampleRate;
BufferCount = mixBufferCount + voiceChannelCountMax;
Buffers = mixBuffer;
Commands = new List<ICommand>();
MemoryManager = memoryManager;
}
public void AddCommand(ICommand command)
{
Commands.Add(command);
}
public void AddCommand<T>(T command) where T : unmanaged, ICommand
{
throw new NotImplementedException();
}
public Memory<float> GetBufferMemory(int index)
{
return Buffers.Slice(index * (int)SampleCount, (int)SampleCount);
}
public Span<float> GetBuffer(int index)
{
return Buffers.Span.Slice(index * (int)SampleCount, (int)SampleCount);
}
public ulong GetTimeElapsedSinceDspStartedProcessing()
{
return (ulong)PerformanceCounter.ElapsedNanoseconds - StartTime;
}
public void Process(IHardwareDevice outputDevice)
{
OutputDevice = outputDevice;
StartTime = (ulong)PerformanceCounter.ElapsedNanoseconds;
foreach (ICommand command in Commands)
{
if (command.Enabled)
{
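// Commands that opt into metering are timed individually; a warning is logged when the
// real processing time exceeds the server-side estimate.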
bool shouldMeter = command.ShouldMeter();
long startTime = 0;
if (shouldMeter)
{
startTime = PerformanceCounter.ElapsedNanoseconds;
}
command.Process(this);
if (shouldMeter)
{
ulong effectiveElapsedTime = (ulong)(PerformanceCounter.ElapsedNanoseconds - startTime);
if (effectiveElapsedTime > command.EstimatedProcessingTime)
{
Logger.Warning?.Print(LogClass.AudioRenderer, $"Command {command.GetType().Name} took {effectiveElapsedTime}ns (expected {command.EstimatedProcessingTime}ns)");
}
}
}
}
EndTime = (ulong)PerformanceCounter.ElapsedNanoseconds;
}
}
}

View File

@ -0,0 +1,49 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public enum CommandType : byte
{
Invalid,
PcmInt16DataSourceVersion1,
PcmInt16DataSourceVersion2,
PcmFloatDataSourceVersion1,
PcmFloatDataSourceVersion2,
AdpcmDataSourceVersion1,
AdpcmDataSourceVersion2,
Volume,
VolumeRamp,
BiquadFilter,
Mix,
MixRamp,
MixRampGrouped,
DepopPrepare,
DepopForMixBuffers,
Delay,
Upsample,
DownMixSurroundToStereo,
AuxiliaryBuffer,
DeviceSink,
CircularBufferSink,
Reverb,
Reverb3d,
Performance,
ClearMixBuffer,
CopyMixBuffer
}
}

View File

@ -0,0 +1,52 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using System;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class CopyMixBufferCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.CopyMixBuffer;
public ulong EstimatedProcessingTime { get; set; }
public ushort InputBufferIndex { get; }
public ushort OutputBufferIndex { get; }
public CopyMixBufferCommand(uint inputBufferIndex, uint outputBufferIndex, int nodeId)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndex = (ushort)inputBufferIndex;
OutputBufferIndex = (ushort)outputBufferIndex;
}
public void Process(CommandList context)
{
ReadOnlySpan<float> inputBuffer = context.GetBuffer(InputBufferIndex);
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
inputBuffer.CopyTo(outputBuffer);
}
}
}

View File

@ -0,0 +1,127 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Renderer.Common;
using System;
using static Ryujinx.Audio.Renderer.Parameter.VoiceInParameter;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class DataSourceVersion2Command : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType { get; }
public ulong EstimatedProcessingTime { get; set; }
public ushort OutputBufferIndex { get; }
public uint SampleRate { get; }
public float Pitch { get; }
public WaveBuffer[] WaveBuffers { get; }
public Memory<VoiceUpdateState> State { get; }
public ulong ExtraParameter { get; }
public ulong ExtraParameterSize { get; }
public uint ChannelIndex { get; }
public uint ChannelCount { get; }
public DecodingBehaviour DecodingBehaviour { get; }
public SampleFormat SampleFormat { get; }
public SampleRateConversionQuality SrcQuality { get; }
public DataSourceVersion2Command(ref Server.Voice.VoiceState serverState, Memory<VoiceUpdateState> state, ushort outputBufferIndex, ushort channelIndex, int nodeId)
{
Enabled = true;
NodeId = nodeId;
ChannelIndex = channelIndex;
ChannelCount = serverState.ChannelsCount;
SampleFormat = serverState.SampleFormat;
SrcQuality = serverState.SrcQuality;
CommandType = GetCommandTypeBySampleFormat(SampleFormat);
OutputBufferIndex = (ushort)(channelIndex + outputBufferIndex);
SampleRate = serverState.SampleRate;
Pitch = serverState.Pitch;
WaveBuffers = new WaveBuffer[Constants.VoiceWaveBufferCount];
for (int i = 0; i < WaveBuffers.Length; i++)
{
ref Server.Voice.WaveBuffer voiceWaveBuffer = ref serverState.WaveBuffers[i];
WaveBuffers[i] = voiceWaveBuffer.ToCommon(2);
}
if (SampleFormat == SampleFormat.Adpcm)
{
ExtraParameter = serverState.DataSourceStateAddressInfo.GetReference(true);
ExtraParameterSize = serverState.DataSourceStateAddressInfo.Size;
}
State = state;
DecodingBehaviour = serverState.DecodingBehaviour;
}
private static CommandType GetCommandTypeBySampleFormat(SampleFormat sampleFormat)
{
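// Map the voice sample format to the matching version 2 data source command type.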
switch (sampleFormat)
{
case SampleFormat.Adpcm:
return CommandType.AdpcmDataSourceVersion2;
case SampleFormat.PcmInt16:
return CommandType.PcmInt16DataSourceVersion2;
case SampleFormat.PcmFloat:
return CommandType.PcmFloatDataSourceVersion2;
default:
throw new NotImplementedException($"{sampleFormat}");
}
}
public void Process(CommandList context)
{
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
DataSourceHelper.WaveBufferInformation info = new DataSourceHelper.WaveBufferInformation()
{
State = State,
SourceSampleRate = SampleRate,
SampleFormat = SampleFormat,
Pitch = Pitch,
DecodingBehaviour = DecodingBehaviour,
WaveBuffers = WaveBuffers,
ExtraParameter = ExtraParameter,
ExtraParameterSize = ExtraParameterSize,
ChannelIndex = (int)ChannelIndex,
ChannelCount = (int)ChannelCount,
SrcQuality = SrcQuality
};
DataSourceHelper.ProcessWaveBuffers(context.MemoryManager, outputBuffer, info, context.SampleRate, (int)context.SampleCount);
}
}
}

View File

@ -0,0 +1,272 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Audio.Renderer.Parameter.Effect;
using Ryujinx.Audio.Renderer.Server.Effect;
using System;
using System.Diagnostics;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class DelayCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.Delay;
public ulong EstimatedProcessingTime { get; set; }
public DelayParameter Parameter => _parameter;
public Memory<DelayState> State { get; }
public ulong WorkBuffer { get; }
public ushort[] OutputBufferIndices { get; }
public ushort[] InputBufferIndices { get; }
public bool IsEffectEnabled { get; }
private DelayParameter _parameter;
private const int FixedPointPrecision = 14;
public DelayCommand(uint bufferOffset, DelayParameter parameter, Memory<DelayState> state, bool isEnabled, ulong workBuffer, int nodeId)
{
Enabled = true;
NodeId = nodeId;
_parameter = parameter;
State = state;
WorkBuffer = workBuffer;
IsEffectEnabled = isEnabled;
InputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
OutputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
InputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Input[i]);
OutputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Output[i]);
}
}
private void ProcessDelayMono(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer, uint sampleCount)
{
ref DelayState state = ref State.Span[0];
float feedbackGain = FixedPointHelper.ToFloat(Parameter.FeedbackGain, FixedPointPrecision);
float inGain = FixedPointHelper.ToFloat(Parameter.InGain, FixedPointPrecision);
float dryGain = FixedPointHelper.ToFloat(Parameter.DryGain, FixedPointPrecision);
float outGain = FixedPointHelper.ToFloat(Parameter.OutGain, FixedPointPrecision);
for (int i = 0; i < sampleCount; i++)
{
float input = inputBuffer[i] * 64;
float delayLineValue = state.DelayLines[0].Read();
float lowPassResult = input * inGain + delayLineValue * feedbackGain * state.LowPassBaseGain + state.LowPassZ[0] * state.LowPassFeedbackGain;
state.LowPassZ[0] = lowPassResult;
state.DelayLines[0].Update(lowPassResult);
outputBuffer[i] = (input * dryGain + delayLineValue * outGain) / 64;
}
}
private void ProcessDelayStereo(Memory<float>[] outputBuffers, ReadOnlyMemory<float>[] inputBuffers, uint sampleCount)
{
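// Stereo delay: each channel feeds back into itself with the base gain and into the other
// channel with the cross gain before going through the one-pole low pass filter.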
ref DelayState state = ref State.Span[0];
float[] channelInput = new float[Parameter.ChannelCount];
float[] delayLineValues = new float[Parameter.ChannelCount];
float[] temp = new float[Parameter.ChannelCount];
float delayFeedbackBaseGain = state.DelayFeedbackBaseGain;
float delayFeedbackCrossGain = state.DelayFeedbackCrossGain;
float inGain = FixedPointHelper.ToFloat(Parameter.InGain, FixedPointPrecision);
float dryGain = FixedPointHelper.ToFloat(Parameter.DryGain, FixedPointPrecision);
float outGain = FixedPointHelper.ToFloat(Parameter.OutGain, FixedPointPrecision);
for (int i = 0; i < sampleCount; i++)
{
for (int j = 0; j < Parameter.ChannelCount; j++)
{
channelInput[j] = inputBuffers[j].Span[i] * 64;
delayLineValues[j] = state.DelayLines[j].Read();
}
temp[0] = channelInput[0] * inGain + delayLineValues[1] * delayFeedbackCrossGain + delayLineValues[0] * delayFeedbackBaseGain;
temp[1] = channelInput[1] * inGain + delayLineValues[0] * delayFeedbackCrossGain + delayLineValues[1] * delayFeedbackBaseGain;
for (int j = 0; j < Parameter.ChannelCount; j++)
{
float lowPassResult = state.LowPassFeedbackGain * state.LowPassZ[j] + temp[j] * state.LowPassBaseGain;
state.LowPassZ[j] = lowPassResult;
state.DelayLines[j].Update(lowPassResult);
outputBuffers[j].Span[i] = (channelInput[j] * dryGain + delayLineValues[j] * outGain) / 64;
}
}
}
private void ProcessDelayQuadraphonic(Memory<float>[] outputBuffers, ReadOnlyMemory<float>[] inputBuffers, uint sampleCount)
{
ref DelayState state = ref State.Span[0];
float[] channelInput = new float[Parameter.ChannelCount];
float[] delayLineValues = new float[Parameter.ChannelCount];
float[] temp = new float[Parameter.ChannelCount];
float delayFeedbackBaseGain = state.DelayFeedbackBaseGain;
float delayFeedbackCrossGain = state.DelayFeedbackCrossGain;
float inGain = FixedPointHelper.ToFloat(Parameter.InGain, FixedPointPrecision);
float dryGain = FixedPointHelper.ToFloat(Parameter.DryGain, FixedPointPrecision);
float outGain = FixedPointHelper.ToFloat(Parameter.OutGain, FixedPointPrecision);
for (int i = 0; i < sampleCount; i++)
{
for (int j = 0; j < Parameter.ChannelCount; j++)
{
channelInput[j] = inputBuffers[j].Span[i] * 64;
delayLineValues[j] = state.DelayLines[j].Read();
}
temp[0] = channelInput[0] * inGain + (delayLineValues[2] + delayLineValues[1]) * delayFeedbackCrossGain + delayLineValues[0] * delayFeedbackBaseGain;
temp[1] = channelInput[1] * inGain + (delayLineValues[0] + delayLineValues[3]) * delayFeedbackCrossGain + delayLineValues[1] * delayFeedbackBaseGain;
temp[2] = channelInput[2] * inGain + (delayLineValues[3] + delayLineValues[0]) * delayFeedbackCrossGain + delayLineValues[2] * delayFeedbackBaseGain;
temp[3] = channelInput[3] * inGain + (delayLineValues[1] + delayLineValues[2]) * delayFeedbackCrossGain + delayLineValues[3] * delayFeedbackBaseGain;
for (int j = 0; j < Parameter.ChannelCount; j++)
{
float lowPassResult = state.LowPassFeedbackGain * state.LowPassZ[j] + temp[j] * state.LowPassBaseGain;
state.LowPassZ[j] = lowPassResult;
state.DelayLines[j].Update(lowPassResult);
outputBuffers[j].Span[i] = (channelInput[j] * dryGain + delayLineValues[j] * outGain) / 64;
}
}
}
private void ProcessDelaySurround(Memory<float>[] outputBuffers, ReadOnlyMemory<float>[] inputBuffers, uint sampleCount)
{
ref DelayState state = ref State.Span[0];
float[] channelInput = new float[Parameter.ChannelCount];
float[] delayLineValues = new float[Parameter.ChannelCount];
float[] temp = new float[Parameter.ChannelCount];
float delayFeedbackBaseGain = state.DelayFeedbackBaseGain;
float delayFeedbackCrossGain = state.DelayFeedbackCrossGain;
float inGain = FixedPointHelper.ToFloat(Parameter.InGain, FixedPointPrecision);
float dryGain = FixedPointHelper.ToFloat(Parameter.DryGain, FixedPointPrecision);
float outGain = FixedPointHelper.ToFloat(Parameter.OutGain, FixedPointPrecision);
for (int i = 0; i < sampleCount; i++)
{
for (int j = 0; j < Parameter.ChannelCount; j++)
{
channelInput[j] = inputBuffers[j].Span[i] * 64;
delayLineValues[j] = state.DelayLines[j].Read();
}
temp[0] = channelInput[0] * inGain + (delayLineValues[2] + delayLineValues[4]) * delayFeedbackCrossGain + delayLineValues[0] * delayFeedbackBaseGain;
temp[1] = channelInput[1] * inGain + (delayLineValues[4] + delayLineValues[3]) * delayFeedbackCrossGain + delayLineValues[1] * delayFeedbackBaseGain;
temp[2] = channelInput[2] * inGain + (delayLineValues[3] + delayLineValues[0]) * delayFeedbackCrossGain + delayLineValues[2] * delayFeedbackBaseGain;
temp[3] = channelInput[3] * inGain + (delayLineValues[1] + delayLineValues[2]) * delayFeedbackCrossGain + delayLineValues[3] * delayFeedbackBaseGain;
temp[4] = channelInput[4] * inGain + (delayLineValues[0] + delayLineValues[1]) * delayFeedbackCrossGain + delayLineValues[4] * delayFeedbackBaseGain;
temp[5] = channelInput[5] * inGain + delayLineValues[5] * delayFeedbackBaseGain;
for (int j = 0; j < Parameter.ChannelCount; j++)
{
float lowPassResult = state.LowPassFeedbackGain * state.LowPassZ[j] + temp[j] * state.LowPassBaseGain;
state.LowPassZ[j] = lowPassResult;
state.DelayLines[j].Update(lowPassResult);
outputBuffers[j].Span[i] = (channelInput[j] * dryGain + delayLineValues[j] * outGain) / 64;
}
}
}
private void ProcessDelay(CommandList context)
{
Debug.Assert(Parameter.IsChannelCountValid());
if (IsEffectEnabled && Parameter.IsChannelCountValid())
{
ReadOnlyMemory<float>[] inputBuffers = new ReadOnlyMemory<float>[Parameter.ChannelCount];
Memory<float>[] outputBuffers = new Memory<float>[Parameter.ChannelCount];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
inputBuffers[i] = context.GetBufferMemory(InputBufferIndices[i]);
outputBuffers[i] = context.GetBufferMemory(OutputBufferIndices[i]);
}
switch (Parameter.ChannelCount)
{
case 1:
ProcessDelayMono(outputBuffers[0].Span, inputBuffers[0].Span, context.SampleCount);
break;
case 2:
ProcessDelayStereo(outputBuffers, inputBuffers, context.SampleCount);
break;
case 4:
ProcessDelayQuadraphonic(outputBuffers, inputBuffers, context.SampleCount);
break;
case 6:
ProcessDelaySurround(outputBuffers, inputBuffers, context.SampleCount);
break;
default:
throw new NotImplementedException($"{Parameter.ChannelCount}");
}
}
else
{
for (int i = 0; i < Parameter.ChannelCount; i++)
{
if (InputBufferIndices[i] != OutputBufferIndices[i])
{
context.GetBufferMemory(InputBufferIndices[i]).CopyTo(context.GetBufferMemory(OutputBufferIndices[i]));
}
}
}
}
public void Process(CommandList context)
{
ref DelayState state = ref State.Span[0];
if (IsEffectEnabled)
{
if (Parameter.Status == UsageState.Invalid)
{
state = new DelayState(ref _parameter, WorkBuffer);
}
else if (Parameter.Status == UsageState.New)
{
state.UpdateParameter(ref _parameter);
}
}
ProcessDelay(context);
}
}
}

View File

@ -0,0 +1,103 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using System;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class DepopForMixBuffersCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.DepopForMixBuffers;
public ulong EstimatedProcessingTime { get; set; }
public uint MixBufferOffset { get; }
public uint MixBufferCount { get; }
public float Decay { get; }
public Memory<float> DepopBuffer { get; }
private const int FixedPointPrecisionForDecay = 15;
public DepopForMixBuffersCommand(Memory<float> depopBuffer, uint bufferOffset, uint mixBufferCount, int nodeId, uint sampleRate)
{
Enabled = true;
NodeId = nodeId;
MixBufferOffset = bufferOffset;
MixBufferCount = mixBufferCount;
DepopBuffer = depopBuffer;
if (sampleRate == 48000)
{
Decay = 0.962189f;
}
else // if (sampleRate == 32000)
{
Decay = 0.943695f;
}
}
private float ProcessDepopMix(Span<float> buffer, float depopValue, uint sampleCount)
{
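// Apply the exponentially decaying depop value to each sample so the mix buffer fades
// smoothly instead of popping; the remaining value is returned and stored for the next update.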
if (depopValue <= 0)
{
for (int i = 0; i < sampleCount; i++)
{
depopValue = FloatingPointHelper.MultiplyRoundDown(Decay, depopValue);
buffer[i] -= depopValue;
}
return -depopValue;
}
else
{
for (int i = 0; i < sampleCount; i++)
{
depopValue = FloatingPointHelper.MultiplyRoundDown(Decay, depopValue);
buffer[i] += depopValue;
}
return depopValue;
}
}
public void Process(CommandList context)
{
uint bufferCount = Math.Min(MixBufferOffset + MixBufferCount, context.BufferCount);
for (int i = (int)MixBufferOffset; i < bufferCount; i++)
{
float depopValue = DepopBuffer.Span[i];
if (depopValue != 0)
{
Span<float> buffer = context.GetBuffer(i);
DepopBuffer.Span[i] = ProcessDepopMix(buffer, depopValue, context.SampleCount);
}
}
}
}
}

View File

@ -0,0 +1,72 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Renderer.Common;
using System;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class DepopPrepareCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.DepopPrepare;
public ulong EstimatedProcessingTime { get; set; }
public uint MixBufferCount { get; }
public ushort[] OutputBufferIndices { get; }
public Memory<VoiceUpdateState> State { get; }
public Memory<float> DepopBuffer { get; }
public DepopPrepareCommand(Memory<VoiceUpdateState> state, Memory<float> depopBuffer, uint mixBufferCount, uint bufferOffset, int nodeId, bool enabled)
{
Enabled = enabled;
NodeId = nodeId;
MixBufferCount = mixBufferCount;
OutputBufferIndices = new ushort[Constants.MixBufferCountMax];
for (int i = 0; i < Constants.MixBufferCountMax; i++)
{
OutputBufferIndices[i] = (ushort)(bufferOffset + i);
}
State = state;
DepopBuffer = depopBuffer;
}
public void Process(CommandList context)
{
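// Accumulate the last played sample of each mix buffer into the depop buffer and clear it;
// DepopForMixBuffersCommand fades that value out on the following updates.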
ref VoiceUpdateState state = ref State.Span[0];
for (int i = 0; i < MixBufferCount; i++)
{
if (state.LastSamples[i] != 0)
{
DepopBuffer.Span[OutputBufferIndices[i]] += state.LastSamples[i];
state.LastSamples[i] = 0;
}
}
}
}
}

View File

@ -0,0 +1,108 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Integration;
using Ryujinx.Audio.Renderer.Server.Sink;
using System;
using System.Runtime.CompilerServices;
using System.Text;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class DeviceSinkCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.DeviceSink;
public ulong EstimatedProcessingTime { get; set; }
public string DeviceName { get; }
public int SessionId { get; }
public uint InputCount { get; }
public ushort[] InputBufferIndices { get; }
public Memory<float> Buffers { get; }
public DeviceSinkCommand(uint bufferOffset, DeviceSink sink, int sessionId, Memory<float> buffers, int nodeId)
{
Enabled = true;
NodeId = nodeId;
DeviceName = Encoding.ASCII.GetString(sink.Parameter.DeviceName).TrimEnd('\0');
SessionId = sessionId;
InputCount = sink.Parameter.InputCount;
InputBufferIndices = new ushort[InputCount];
for (int i = 0; i < InputCount; i++)
{
InputBufferIndices[i] = (ushort)(bufferOffset + sink.Parameter.Input[i]);
}
if (sink.UpsamplerState != null)
{
Buffers = sink.UpsamplerState.OutputBuffer;
}
else
{
Buffers = buffers;
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private Span<float> GetBuffer(int index, int sampleCount)
{
return Buffers.Span.Slice(index * sampleCount, sampleCount);
}
public void Process(CommandList context)
{
IHardwareDevice device = context.OutputDevice;
if (device.GetSampleRate() == Constants.TargetSampleRate)
{
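// The device runs at the renderer sample rate: interleave the input mix buffers and saturate
// each float sample to signed 16-bit PCM before handing the frame to the output device.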
int channelCount = (int)device.GetChannelCount();
uint bufferCount = Math.Min(device.GetChannelCount(), InputCount);
const int sampleCount = Constants.TargetSampleCount;
short[] outputBuffer = new short[bufferCount * sampleCount];
for (int i = 0; i < bufferCount; i++)
{
ReadOnlySpan<float> inputBuffer = GetBuffer(InputBufferIndices[i], sampleCount);
for (int j = 0; j < sampleCount; j++)
{
outputBuffer[i + j * channelCount] = PcmHelper.Saturate(inputBuffer[j]);
}
}
device.AppendBuffer(outputBuffer, InputCount);
}
else
{
// TODO: support resampling for devices that only support a sample rate different from the renderer target rate
throw new NotImplementedException();
}
}
}
}

View File

@ -0,0 +1,89 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using System;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class DownMixSurroundToStereoCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.DownMixSurroundToStereo;
public ulong EstimatedProcessingTime { get; set; }
public ushort[] InputBufferIndices { get; }
public ushort[] OutputBufferIndices { get; }
public float[] Coefficients { get; }
public DownMixSurroundToStereoCommand(uint bufferOffset, Span<byte> inputBufferOffset, Span<byte> outputBufferOffset, ReadOnlySpan<float> downMixParameter, int nodeId)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
OutputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
for (int i = 0; i < Constants.VoiceChannelCountMax; i++)
{
InputBufferIndices[i] = (ushort)(bufferOffset + inputBufferOffset[i]);
OutputBufferIndices[i] = (ushort)(bufferOffset + outputBufferOffset[i]);
}
Coefficients = downMixParameter.ToArray();
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static float DownMixSurroundToStereo(ReadOnlySpan<float> coefficients, float back, float lfe, float center, float front)
{
return FloatingPointHelper.RoundUp(coefficients[3] * back + coefficients[2] * lfe + coefficients[1] * center + coefficients[0] * front);
}
public void Process(CommandList context)
{
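// Fold 5.1 down to stereo using the down-mix coefficients (ordered front, center, LFE, back),
// then clear the now unused output channels.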
ReadOnlySpan<float> frontLeft = context.GetBuffer(InputBufferIndices[0]);
ReadOnlySpan<float> frontRight = context.GetBuffer(InputBufferIndices[1]);
ReadOnlySpan<float> frontCenter = context.GetBuffer(InputBufferIndices[2]);
ReadOnlySpan<float> lowFrequency = context.GetBuffer(InputBufferIndices[3]);
ReadOnlySpan<float> backLeft = context.GetBuffer(InputBufferIndices[4]);
ReadOnlySpan<float> backRight = context.GetBuffer(InputBufferIndices[5]);
Span<float> stereoLeft = context.GetBuffer(OutputBufferIndices[0]);
Span<float> stereoRight = context.GetBuffer(OutputBufferIndices[1]);
Span<float> unused2 = context.GetBuffer(OutputBufferIndices[2]);
Span<float> unused3 = context.GetBuffer(OutputBufferIndices[3]);
Span<float> unused4 = context.GetBuffer(OutputBufferIndices[4]);
Span<float> unused5 = context.GetBuffer(OutputBufferIndices[5]);
for (int i = 0; i < context.SampleCount; i++)
{
stereoLeft[i] = DownMixSurroundToStereo(Coefficients, backLeft[i], lowFrequency[i], frontCenter[i], frontLeft[i]);
stereoRight[i] = DownMixSurroundToStereo(Coefficients, backRight[i], lowFrequency[i], frontCenter[i], frontRight[i]);
}
unused2.Fill(0);
unused3.Fill(0);
unused4.Fill(0);
unused5.Fill(0);
}
}
}

View File

@ -0,0 +1,37 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public interface ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType { get; }
public ulong EstimatedProcessingTime { get; }
public void Process(CommandList context);
public bool ShouldMeter()
{
return false;
}
}
}

View File

@ -0,0 +1,125 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class MixCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.Mix;
public ulong EstimatedProcessingTime { get; set; }
public ushort InputBufferIndex { get; }
public ushort OutputBufferIndex { get; }
public float Volume { get; }
public MixCommand(uint inputBufferIndex, uint outputBufferIndex, int nodeId, float volume)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndex = (ushort)inputBufferIndex;
OutputBufferIndex = (ushort)outputBufferIndex;
Volume = volume;
}
private void ProcessMixAvx(Span<float> outputMix, ReadOnlySpan<float> inputMix)
{
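// AVX path: multiply 8 samples at a time by the volume, take the ceiling and accumulate into
// the output; any remaining samples are handled by the scalar loop below.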
Vector256<float> volumeVec = Vector256.Create(Volume);
ReadOnlySpan<Vector256<float>> inputVec = MemoryMarshal.Cast<float, Vector256<float>>(inputMix);
Span<Vector256<float>> outputVec = MemoryMarshal.Cast<float, Vector256<float>>(outputMix);
int sisdStart = inputVec.Length * 8;
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = Avx.Add(outputVec[i], Avx.Ceiling(Avx.Multiply(inputVec[i], volumeVec)));
}
for (int i = sisdStart; i < inputMix.Length; i++)
{
outputMix[i] += FloatingPointHelper.MultiplyRoundUp(inputMix[i], Volume);
}
}
private void ProcessMixSse41(Span<float> outputMix, ReadOnlySpan<float> inputMix)
{
Vector128<float> volumeVec = Vector128.Create(Volume);
ReadOnlySpan<Vector128<float>> inputVec = MemoryMarshal.Cast<float, Vector128<float>>(inputMix);
Span<Vector128<float>> outputVec = MemoryMarshal.Cast<float, Vector128<float>>(outputMix);
int sisdStart = inputVec.Length * 4;
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = Sse.Add(outputVec[i], Sse41.Ceiling(Sse.Multiply(inputVec[i], volumeVec)));
}
for (int i = sisdStart; i < inputMix.Length; i++)
{
outputMix[i] += FloatingPointHelper.MultiplyRoundUp(inputMix[i], Volume);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessMixSlowPath(Span<float> outputMix, ReadOnlySpan<float> inputMix)
{
for (int i = 0; i < inputMix.Length; i++)
{
outputMix[i] += FloatingPointHelper.MultiplyRoundUp(inputMix[i], Volume);
}
}
private void ProcessMix(Span<float> outputMix, ReadOnlySpan<float> inputMix)
{
if (Avx.IsSupported)
{
ProcessMixAvx(outputMix, inputMix);
}
else if (Sse41.IsSupported)
{
ProcessMixSse41(outputMix, inputMix);
}
else
{
ProcessMixSlowPath(outputMix, inputMix);
}
}
public void Process(CommandList context)
{
ReadOnlySpan<float> inputBuffer = context.GetBuffer(InputBufferIndex);
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
ProcessMix(outputBuffer, inputBuffer);
}
}
}

View File

@ -0,0 +1,83 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Renderer.Common;
using System;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class MixRampCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.MixRamp;
public ulong EstimatedProcessingTime { get; set; }
public ushort InputBufferIndex { get; }
public ushort OutputBufferIndex { get; }
public float Volume0 { get; }
public float Volume1 { get; }
public Memory<VoiceUpdateState> State { get; }
public int LastSampleIndex { get; }
public MixRampCommand(float volume0, float volume1, uint inputBufferIndex, uint outputBufferIndex, int lastSampleIndex, Memory<VoiceUpdateState> state, int nodeId)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndex = (ushort)inputBufferIndex;
OutputBufferIndex = (ushort)outputBufferIndex;
Volume0 = volume0;
Volume1 = volume1;
State = state;
LastSampleIndex = lastSampleIndex;
}
private float ProcessMixRamp(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer, int sampleCount)
{
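// Ramp the volume linearly from Volume0 to Volume1 across the frame while mixing, and return
// the last mixed sample so it can be stored for depop handling.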
float ramp = (Volume1 - Volume0) / sampleCount;
float volume = Volume0;
float state = 0;
for (int i = 0; i < sampleCount; i++)
{
state = FloatingPointHelper.MultiplyRoundUp(inputBuffer[i], volume);
outputBuffer[i] += state;
volume += ramp;
}
return state;
}
public void Process(CommandList context)
{
ReadOnlySpan<float> inputBuffer = context.GetBuffer(InputBufferIndex);
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
State.Span[0].LastSamples[LastSampleIndex] = ProcessMixRamp(outputBuffer, inputBuffer, (int)context.SampleCount);
}
}
}

View File

@ -0,0 +1,106 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Renderer.Common;
using System;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class MixRampGroupedCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.MixRampGrouped;
public ulong EstimatedProcessingTime { get; set; }
public uint MixBufferCount { get; }
public ushort[] InputBufferIndices { get; }
public ushort[] OutputBufferIndices { get; }
public float[] Volume0 { get; }
public float[] Volume1 { get; }
public Memory<VoiceUpdateState> State { get; }
public MixRampGroupedCommand(uint mixBufferCount, uint inputBufferIndex, uint outputBufferIndex, Span<float> volume0, Span<float> volume1, Memory<VoiceUpdateState> state, int nodeId)
{
Enabled = true;
MixBufferCount = mixBufferCount;
NodeId = nodeId;
InputBufferIndices = new ushort[Constants.MixBufferCountMax];
OutputBufferIndices = new ushort[Constants.MixBufferCountMax];
Volume0 = new float[Constants.MixBufferCountMax];
Volume1 = new float[Constants.MixBufferCountMax];
for (int i = 0; i < mixBufferCount; i++)
{
InputBufferIndices[i] = (ushort)inputBufferIndex;
OutputBufferIndices[i] = (ushort)(outputBufferIndex + i);
Volume0[i] = volume0[i];
Volume1[i] = volume1[i];
}
State = state;
}
private float ProcessMixRampGrouped(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer, float volume0, float volume1, int sampleCount)
{
float ramp = (volume1 - volume0) / sampleCount;
float volume = volume0;
float state = 0;
for (int i = 0; i < sampleCount; i++)
{
state = FloatingPointHelper.MultiplyRoundUp(inputBuffer[i], volume);
outputBuffer[i] += state;
volume += ramp;
}
return state;
}
public void Process(CommandList context)
{
for (int i = 0; i < MixBufferCount; i++)
{
ReadOnlySpan<float> inputBuffer = context.GetBuffer(InputBufferIndices[i]);
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndices[i]);
float volume0 = Volume0[i];
float volume1 = Volume1[i];
ref VoiceUpdateState state = ref State.Span[0];
if (volume0 != 0 || volume1 != 0)
{
state.LastSamples[i] = ProcessMixRampGrouped(outputBuffer, inputBuffer, volume0, volume1, (int)context.SampleCount);
}
else
{
state.LastSamples[i] = 0;
}
}
}
}
}

View File

@ -0,0 +1,93 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Renderer.Common;
using System;
using static Ryujinx.Audio.Renderer.Parameter.VoiceInParameter;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class PcmFloatDataSourceCommandVersion1 : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.PcmFloatDataSourceVersion1;
public ulong EstimatedProcessingTime { get; set; }
public ushort OutputBufferIndex { get; }
public uint SampleRate { get; }
public uint ChannelIndex { get; }
public uint ChannelCount { get; }
public float Pitch { get; }
public WaveBuffer[] WaveBuffers { get; }
public Memory<VoiceUpdateState> State { get; }
public DecodingBehaviour DecodingBehaviour { get; }
public PcmFloatDataSourceCommandVersion1(ref Server.Voice.VoiceState serverState, Memory<VoiceUpdateState> state, ushort outputBufferIndex, ushort channelIndex, int nodeId)
{
Enabled = true;
NodeId = nodeId;
OutputBufferIndex = (ushort)(channelIndex + outputBufferIndex);
SampleRate = serverState.SampleRate;
ChannelIndex = channelIndex;
ChannelCount = serverState.ChannelsCount;
Pitch = serverState.Pitch;
WaveBuffers = new WaveBuffer[Constants.VoiceWaveBufferCount];
for (int i = 0; i < WaveBuffers.Length; i++)
{
ref Server.Voice.WaveBuffer voiceWaveBuffer = ref serverState.WaveBuffers[i];
WaveBuffers[i] = voiceWaveBuffer.ToCommon(1);
}
State = state;
DecodingBehaviour = serverState.DecodingBehaviour;
}
public void Process(CommandList context)
{
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
DataSourceHelper.WaveBufferInformation info = new DataSourceHelper.WaveBufferInformation()
{
State = State,
SourceSampleRate = SampleRate,
SampleFormat = SampleFormat.PcmFloat,
Pitch = Pitch,
DecodingBehaviour = DecodingBehaviour,
WaveBuffers = WaveBuffers,
ExtraParameter = 0,
ExtraParameterSize = 0,
ChannelIndex = (int)ChannelIndex,
ChannelCount = (int)ChannelCount,
};
DataSourceHelper.ProcessWaveBuffers(context.MemoryManager, outputBuffer, info, context.SampleRate, (int)context.SampleCount);
}
}
}

View File

@ -0,0 +1,93 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Renderer.Common;
using System;
using static Ryujinx.Audio.Renderer.Parameter.VoiceInParameter;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class PcmInt16DataSourceCommandVersion1 : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.PcmInt16DataSourceVersion1;
public ulong EstimatedProcessingTime { get; set; }
public ushort OutputBufferIndex { get; }
public uint SampleRate { get; }
public uint ChannelIndex { get; }
public uint ChannelCount { get; }
public float Pitch { get; }
public WaveBuffer[] WaveBuffers { get; }
public Memory<VoiceUpdateState> State { get; }
public DecodingBehaviour DecodingBehaviour { get; }
public PcmInt16DataSourceCommandVersion1(ref Server.Voice.VoiceState serverState, Memory<VoiceUpdateState> state, ushort outputBufferIndex, ushort channelIndex, int nodeId)
{
Enabled = true;
NodeId = nodeId;
OutputBufferIndex = (ushort)(channelIndex + outputBufferIndex);
SampleRate = serverState.SampleRate;
ChannelIndex = channelIndex;
ChannelCount = serverState.ChannelsCount;
Pitch = serverState.Pitch;
WaveBuffers = new WaveBuffer[Constants.VoiceWaveBufferCount];
for (int i = 0; i < WaveBuffers.Length; i++)
{
ref Server.Voice.WaveBuffer voiceWaveBuffer = ref serverState.WaveBuffers[i];
WaveBuffers[i] = voiceWaveBuffer.ToCommon(1);
}
State = state;
DecodingBehaviour = serverState.DecodingBehaviour;
}
public void Process(CommandList context)
{
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
DataSourceHelper.WaveBufferInformation info = new DataSourceHelper.WaveBufferInformation()
{
State = State,
SourceSampleRate = SampleRate,
SampleFormat = SampleFormat.PcmInt16,
Pitch = Pitch,
DecodingBehaviour = DecodingBehaviour,
WaveBuffers = WaveBuffers,
ExtraParameter = 0,
ExtraParameterSize = 0,
ChannelIndex = (int)ChannelIndex,
ChannelCount = (int)ChannelCount,
};
DataSourceHelper.ProcessWaveBuffers(context.MemoryManager, outputBuffer, info, context.SampleRate, (int)context.SampleCount);
}
}
}

View File

@ -0,0 +1,64 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Renderer.Server.Performance;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class PerformanceCommand : ICommand
{
public enum Type
{
Invalid,
Start,
End
}
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.Performance;
public ulong EstimatedProcessingTime { get; set; }
public PerformanceEntryAddresses PerformanceEntryAddresses { get; }
public Type PerformanceType { get; set; }
public PerformanceCommand(ref PerformanceEntryAddresses performanceEntryAddresses, Type performanceType, int nodeId)
{
Enabled = true;
PerformanceEntryAddresses = performanceEntryAddresses;
PerformanceType = performanceType;
NodeId = nodeId;
}
public void Process(CommandList context)
{
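// Record timestamps relative to the start of DSP processing; the End command also increments the entry count.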
if (PerformanceType == Type.Start)
{
PerformanceEntryAddresses.SetStartTime(context.GetTimeElapsedSinceDspStartedProcessing());
}
else if (PerformanceType == Type.End)
{
PerformanceEntryAddresses.SetProcessingTime(context.GetTimeElapsedSinceDspStartedProcessing());
PerformanceEntryAddresses.IncrementEntryCount();
}
}
}
}

@@ -0,0 +1,263 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Audio.Renderer.Parameter.Effect;
using Ryujinx.Audio.Renderer.Server.Effect;
using System;
using System.Diagnostics;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class Reverb3dCommand : ICommand
{
private static readonly int[] OutputEarlyIndicesTableMono = new int[20] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
private static readonly int[] TargetEarlyDelayLineIndicesTableMono = new int[20] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19 };
private static readonly int[] TargetOutputFeedbackIndicesTableMono = new int[1] { 0 };
private static readonly int[] OutputEarlyIndicesTableStereo = new int[20] { 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1 };
private static readonly int[] TargetEarlyDelayLineIndicesTableStereo = new int[20] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19 };
private static readonly int[] TargetOutputFeedbackIndicesTableStereo = new int[2] { 0, 1 };
private static readonly int[] OutputEarlyIndicesTableQuadraphonic = new int[20] { 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 1, 1, 1, 0, 0, 0, 0, 3, 3, 3 };
private static readonly int[] TargetEarlyDelayLineIndicesTableQuadraphonic = new int[20] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19 };
private static readonly int[] TargetOutputFeedbackIndicesTableQuadraphonic = new int[4] { 0, 1, 2, 3 };
private static readonly int[] OutputEarlyIndicesTableSurround = new int[40] { 4, 5, 0, 5, 0, 5, 1, 5, 1, 5, 1, 5, 1, 5, 2, 5, 2, 5, 2, 5, 1, 5, 1, 5, 1, 5, 0, 5, 0, 5, 0, 5, 0, 5, 3, 5, 3, 5, 3, 5 };
private static readonly int[] TargetEarlyDelayLineIndicesTableSurround = new int[40] { 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14, 15, 15, 16, 16, 17, 17, 18, 18, 19, 19 };
private static readonly int[] TargetOutputFeedbackIndicesTableSurround = new int[6] { 0, 1, 2, 3, -1, 3 };
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.Reverb3d;
public ulong EstimatedProcessingTime { get; set; }
public ushort InputBufferIndex { get; }
public ushort OutputBufferIndex { get; }
public Reverb3dParameter Parameter => _parameter;
public Memory<Reverb3dState> State { get; }
public ulong WorkBuffer { get; }
public ushort[] OutputBufferIndices { get; }
public ushort[] InputBufferIndices { get; }
public bool IsEffectEnabled { get; }
private Reverb3dParameter _parameter;
public Reverb3dCommand(uint bufferOffset, Reverb3dParameter parameter, Memory<Reverb3dState> state, bool isEnabled, ulong workBuffer, int nodeId)
{
Enabled = true;
IsEffectEnabled = isEnabled;
NodeId = nodeId;
_parameter = parameter;
State = state;
WorkBuffer = workBuffer;
InputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
OutputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
InputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Input[i]);
OutputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Output[i]);
}
}
private void ProcessReverb3dMono(Memory<float>[] outputBuffers, ReadOnlyMemory<float>[] inputBuffers, uint sampleCount)
{
ProcessReverb3dGeneric(outputBuffers, inputBuffers, sampleCount, OutputEarlyIndicesTableMono, TargetEarlyDelayLineIndicesTableMono, TargetOutputFeedbackIndicesTableMono);
}
private void ProcessReverb3dStereo(Memory<float>[] outputBuffers, ReadOnlyMemory<float>[] inputBuffers, uint sampleCount)
{
ProcessReverb3dGeneric(outputBuffers, inputBuffers, sampleCount, OutputEarlyIndicesTableStereo, TargetEarlyDelayLineIndicesTableStereo, TargetOutputFeedbackIndicesTableStereo);
}
private void ProcessReverb3dQuadraphonic(Memory<float>[] outputBuffers, ReadOnlyMemory<float>[] inputBuffers, uint sampleCount)
{
ProcessReverb3dGeneric(outputBuffers, inputBuffers, sampleCount, OutputEarlyIndicesTableQuadraphonic, TargetEarlyDelayLineIndicesTableQuadraphonic, TargetOutputFeedbackIndicesTableQuadraphonic);
}
private void ProcessReverb3dSurround(Memory<float>[] outputBuffers, ReadOnlyMemory<float>[] inputBuffers, uint sampleCount)
{
ProcessReverb3dGeneric(outputBuffers, inputBuffers, sampleCount, OutputEarlyIndicesTableSurround, TargetEarlyDelayLineIndicesTableSurround, TargetOutputFeedbackIndicesTableSurround);
}
private void ProcessReverb3dGeneric(Memory<float>[] outputBuffers, ReadOnlyMemory<float>[] inputBuffers, uint sampleCount, ReadOnlySpan<int> outputEarlyIndicesTable, ReadOnlySpan<int> targetEarlyDelayLineIndicesTable, ReadOnlySpan<int> targetOutputFeedbackIndicesTable)
{
const int delayLineSampleIndexOffset = 1;
ref Reverb3dState state = ref State.Span[0];
bool isMono = Parameter.ChannelCount == 1;
bool isSurround = Parameter.ChannelCount == 6;
float[] outputValues = new float[Constants.ChannelCountMax];
float[] channelInput = new float[Parameter.ChannelCount];
float[] feedbackValues = new float[4];
float[] feedbackOutputValues = new float[4];
float[] values = new float[4];
for (int sampleIndex = 0; sampleIndex < sampleCount; sampleIndex++)
{
outputValues.AsSpan().Fill(0);
float tapOut = state.PreDelayLine.TapUnsafe(state.ReflectionDelayTime, delayLineSampleIndexOffset);
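// Accumulate the early reflections by tapping the pre-delay line at each configured early delay time.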
for (int i = 0; i < targetEarlyDelayLineIndicesTable.Length; i++)
{
int earlyDelayIndex = targetEarlyDelayLineIndicesTable[i];
int outputIndex = outputEarlyIndicesTable[i];
float tempTapOut = state.PreDelayLine.TapUnsafe(state.EarlyDelayTime[earlyDelayIndex], delayLineSampleIndexOffset);
outputValues[outputIndex] += tempTapOut * state.EarlyGain[earlyDelayIndex];
}
float targetPreDelayValue = 0;
for (int channelIndex = 0; channelIndex < Parameter.ChannelCount; channelIndex++)
{
channelInput[channelIndex] = inputBuffers[channelIndex].Span[sampleIndex];
targetPreDelayValue += channelInput[channelIndex];
}
for (int i = 0; i < Parameter.ChannelCount; i++)
{
outputValues[i] *= state.EarlyReflectionsGain;
}
state.PreviousPreDelayValue = (targetPreDelayValue * state.TargetPreDelayGain) + (state.PreviousPreDelayValue * state.PreviousPreDelayGain);
state.PreDelayLine.Update(state.PreviousPreDelayValue);
for (int i = 0; i < state.FdnDelayLines.Length; i++)
{
float fdnValue = state.FdnDelayLines[i].Read();
float feedbackOutputValue = fdnValue * state.DecayDirectFdnGain[i] + state.PreviousFeedbackOutputDecayed[i];
state.PreviousFeedbackOutputDecayed[i] = (fdnValue * state.DecayCurrentFdnGain[i]) + (feedbackOutputValue * state.DecayCurrentOutputGain[i]);
feedbackOutputValues[i] = feedbackOutputValue;
}
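// Mix the four FDN outputs with a fixed feedback matrix (pairwise sums and differences) before they go through the decay delay lines.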
feedbackValues[0] = feedbackOutputValues[2] + feedbackOutputValues[1];
feedbackValues[1] = -feedbackOutputValues[0] - feedbackOutputValues[3];
feedbackValues[2] = feedbackOutputValues[0] - feedbackOutputValues[3];
feedbackValues[3] = feedbackOutputValues[1] - feedbackOutputValues[2];
for (int i = 0; i < state.DecayDelays1.Length; i++)
{
float temp = state.DecayDelays1[i].Update(tapOut * state.LateReverbGain + feedbackValues[i]);
values[i] = state.DecayDelays2[i].Update(temp);
state.FdnDelayLines[i].Update(values[i]);
}
for (int channelIndex = 0; channelIndex < targetOutputFeedbackIndicesTable.Length; channelIndex++)
{
int targetOutputFeedbackIndex = targetOutputFeedbackIndicesTable[channelIndex];
if (targetOutputFeedbackIndex >= 0)
{
outputBuffers[channelIndex].Span[sampleIndex] = (outputValues[channelIndex] + values[targetOutputFeedbackIndex] + channelInput[channelIndex] * state.DryGain);
}
}
if (isMono)
{
outputBuffers[0].Span[sampleIndex] += values[1];
}
if (isSurround)
{
outputBuffers[4].Span[sampleIndex] += (outputValues[4] + state.BackLeftDelayLine.Update((values[2] - values[3]) * 0.5f) + channelInput[4] * state.DryGain);
}
}
}
public void ProcessReverb3d(CommandList context)
{
Debug.Assert(Parameter.IsChannelCountValid());
if (IsEffectEnabled && Parameter.IsChannelCountValid())
{
ReadOnlyMemory<float>[] inputBuffers = new ReadOnlyMemory<float>[Parameter.ChannelCount];
Memory<float>[] outputBuffers = new Memory<float>[Parameter.ChannelCount];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
inputBuffers[i] = context.GetBufferMemory(InputBufferIndices[i]);
outputBuffers[i] = context.GetBufferMemory(OutputBufferIndices[i]);
}
switch (Parameter.ChannelCount)
{
case 1:
ProcessReverb3dMono(outputBuffers, inputBuffers, context.SampleCount);
break;
case 2:
ProcessReverb3dStereo(outputBuffers, inputBuffers, context.SampleCount);
break;
case 4:
ProcessReverb3dQuadraphonic(outputBuffers, inputBuffers, context.SampleCount);
break;
case 6:
ProcessReverb3dSurround(outputBuffers, inputBuffers, context.SampleCount);
break;
default:
throw new NotImplementedException($"{Parameter.ChannelCount}");
}
}
else
{
for (int i = 0; i < Parameter.ChannelCount; i++)
{
if (InputBufferIndices[i] != OutputBufferIndices[i])
{
context.GetBufferMemory(InputBufferIndices[i]).CopyTo(context.GetBufferMemory(OutputBufferIndices[i]));
}
}
}
}
public void Process(CommandList context)
{
ref Reverb3dState state = ref State.Span[0];
if (IsEffectEnabled)
{
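// An Invalid parameter status means the DSP state must be rebuilt from scratch; a New status only updates the existing state's parameters.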
if (Parameter.ParameterStatus == UsageState.Invalid)
{
state = new Reverb3dState(ref _parameter, WorkBuffer);
}
else if (Parameter.ParameterStatus == UsageState.New)
{
state.UpdateParameter(ref _parameter);
}
}
ProcessReverb3d(context);
}
}
}

@@ -0,0 +1,284 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Audio.Renderer.Parameter.Effect;
using System;
using System.Diagnostics;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class ReverbCommand : ICommand
{
private static readonly int[] OutputEarlyIndicesTableMono = new int[10] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
private static readonly int[] TargetEarlyDelayLineIndicesTableMono = new int[10] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
private static readonly int[] OutputIndicesTableMono = new int[4] { 0, 0, 0, 0 };
private static readonly int[] TargetOutputFeedbackIndicesTableMono = new int[4] { 0, 1, 2, 3 };
private static readonly int[] OutputEarlyIndicesTableStereo = new int[10] { 0, 0, 1, 1, 0, 1, 0, 0, 1, 1 };
private static readonly int[] TargetEarlyDelayLineIndicesTableStereo = new int[10] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
private static readonly int[] OutputIndicesTableStereo = new int[4] { 0, 0, 1, 1 };
private static readonly int[] TargetOutputFeedbackIndicesTableStereo = new int[4] { 2, 0, 3, 1 };
private static readonly int[] OutputEarlyIndicesTableQuadraphonic = new int[10] { 0, 0, 1, 1, 0, 1, 2, 2, 3, 3 };
private static readonly int[] TargetEarlyDelayLineIndicesTableQuadraphonic = new int[10] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
private static readonly int[] OutputIndicesTableQuadraphonic = new int[4] { 0, 1, 2, 3 };
private static readonly int[] TargetOutputFeedbackIndicesTableQuadraphonic = new int[4] { 0, 1, 2, 3 };
private static readonly int[] OutputEarlyIndicesTableSurround = new int[20] { 0, 5, 0, 5, 1, 5, 1, 5, 4, 5, 4, 5, 2, 5, 2, 5, 3, 5, 3, 5 };
private static readonly int[] TargetEarlyDelayLineIndicesTableSurround = new int[20] { 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9 };
private static readonly int[] OutputIndicesTableSurround = new int[Constants.ChannelCountMax] { 0, 1, 2, 3, 4, 5 };
private static readonly int[] TargetOutputFeedbackIndicesTableSurround = new int[Constants.ChannelCountMax] { 0, 1, 2, 3, -1, 3 };
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.Reverb;
public ulong EstimatedProcessingTime { get; set; }
public ReverbParameter Parameter => _parameter;
public Memory<ReverbState> State { get; }
public ulong WorkBuffer { get; }
public ushort[] OutputBufferIndices { get; }
public ushort[] InputBufferIndices { get; }
public bool IsLongSizePreDelaySupported { get; }
public bool IsEffectEnabled { get; }
private ReverbParameter _parameter;
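// Gain parameters arrive as fixed point values with 14 fractional bits and are converted to float in ProcessReverbGeneric.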
private const int FixedPointPrecision = 14;
public ReverbCommand(uint bufferOffset, ReverbParameter parameter, Memory<ReverbState> state, bool isEnabled, ulong workBuffer, int nodeId, bool isLongSizePreDelaySupported)
{
Enabled = true;
IsEffectEnabled = isEnabled;
NodeId = nodeId;
_parameter = parameter;
State = state;
WorkBuffer = workBuffer;
InputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
OutputBufferIndices = new ushort[Constants.VoiceChannelCountMax];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
InputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Input[i]);
OutputBufferIndices[i] = (ushort)(bufferOffset + Parameter.Output[i]);
}
IsLongSizePreDelaySupported = isLongSizePreDelaySupported;
}
private void ProcessReverbMono(Memory<float>[] outputBuffers, ReadOnlyMemory<float>[] inputBuffers, uint sampleCount)
{
ProcessReverbGeneric(outputBuffers,
inputBuffers,
sampleCount,
OutputEarlyIndicesTableMono,
TargetEarlyDelayLineIndicesTableMono,
TargetOutputFeedbackIndicesTableMono,
OutputIndicesTableMono);
}
private void ProcessReverbStereo(Memory<float>[] outputBuffers, ReadOnlyMemory<float>[] inputBuffers, uint sampleCount)
{
ProcessReverbGeneric(outputBuffers,
inputBuffers,
sampleCount,
OutputEarlyIndicesTableStereo,
TargetEarlyDelayLineIndicesTableStereo,
TargetOutputFeedbackIndicesTableStereo,
OutputIndicesTableStereo);
}
private void ProcessReverbQuadraphonic(Memory<float>[] outputBuffers, ReadOnlyMemory<float>[] inputBuffers, uint sampleCount)
{
ProcessReverbGeneric(outputBuffers,
inputBuffers,
sampleCount,
OutputEarlyIndicesTableQuadraphonic,
TargetEarlyDelayLineIndicesTableQuadraphonic,
TargetOutputFeedbackIndicesTableQuadraphonic,
OutputIndicesTableQuadraphonic);
}
private void ProcessReverbSurround(Memory<float>[] outputBuffers, ReadOnlyMemory<float>[] inputBuffers, uint sampleCount)
{
ProcessReverbGeneric(outputBuffers,
inputBuffers,
sampleCount,
OutputEarlyIndicesTableSurround,
TargetEarlyDelayLineIndicesTableSurround,
TargetOutputFeedbackIndicesTableSurround,
OutputIndicesTableSurround);
}
private void ProcessReverbGeneric(Memory<float>[] outputBuffers, ReadOnlyMemory<float>[] inputBuffers, uint sampleCount, ReadOnlySpan<int> outputEarlyIndicesTable, ReadOnlySpan<int> targetEarlyDelayLineIndicesTable, ReadOnlySpan<int> targetOutputFeedbackIndicesTable, ReadOnlySpan<int> outputIndicesTable)
{
ref ReverbState state = ref State.Span[0];
bool isSurround = Parameter.ChannelCount == 6;
float reverbGain = FixedPointHelper.ToFloat(Parameter.ReverbGain, FixedPointPrecision);
float lateGain = FixedPointHelper.ToFloat(Parameter.LateGain, FixedPointPrecision);
float outGain = FixedPointHelper.ToFloat(Parameter.OutGain, FixedPointPrecision);
float dryGain = FixedPointHelper.ToFloat(Parameter.DryGain, FixedPointPrecision);
float[] outputValues = new float[Constants.ChannelCountMax];
float[] feedbackValues = new float[4];
float[] feedbackOutputValues = new float[4];
float[] channelInput = new float[Parameter.ChannelCount];
for (int sampleIndex = 0; sampleIndex < sampleCount; sampleIndex++)
{
outputValues.AsSpan().Fill(0);
for (int i = 0; i < targetEarlyDelayLineIndicesTable.Length; i++)
{
int earlyDelayIndex = targetEarlyDelayLineIndicesTable[i];
int outputIndex = outputEarlyIndicesTable[i];
float tapOutput = state.PreDelayLine.TapUnsafe(state.EarlyDelayTime[earlyDelayIndex], 0);
outputValues[outputIndex] += tapOutput * state.EarlyGain[earlyDelayIndex];
}
if (isSurround)
{
outputValues[5] *= 0.2f;
}
float targetPreDelayValue = 0;
for (int channelIndex = 0; channelIndex < Parameter.ChannelCount; channelIndex++)
{
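// Input samples are scaled up by 64 here and scaled back down when the output is written, presumably to keep some precision headroom.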
channelInput[channelIndex] = inputBuffers[channelIndex].Span[sampleIndex] * 64;
targetPreDelayValue += channelInput[channelIndex] * reverbGain;
}
state.PreDelayLine.Update(targetPreDelayValue);
float lateValue = state.PreDelayLine.Tap(state.PreDelayLineDelayTime) * lateGain;
for (int i = 0; i < state.FdnDelayLines.Length; i++)
{
feedbackOutputValues[i] = state.FdnDelayLines[i].Read() * state.HighFrequencyDecayDirectGain[i] + state.PreviousFeedbackOutput[i] * state.HighFrequencyDecayPreviousGain[i];
state.PreviousFeedbackOutput[i] = feedbackOutputValues[i];
}
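// Same pairwise sum/difference feedback matrix as in Reverb3dCommand.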
feedbackValues[0] = feedbackOutputValues[2] + feedbackOutputValues[1];
feedbackValues[1] = -feedbackOutputValues[0] - feedbackOutputValues[3];
feedbackValues[2] = feedbackOutputValues[0] - feedbackOutputValues[3];
feedbackValues[3] = feedbackOutputValues[1] - feedbackOutputValues[2];
for (int i = 0; i < state.FdnDelayLines.Length; i++)
{
feedbackOutputValues[i] = state.DecayDelays[i].Update(feedbackValues[i] + lateValue);
state.FdnDelayLines[i].Update(feedbackOutputValues[i]);
}
for (int i = 0; i < targetOutputFeedbackIndicesTable.Length; i++)
{
int targetOutputFeedbackIndex = targetOutputFeedbackIndicesTable[i];
int outputIndex = outputIndicesTable[i];
if (targetOutputFeedbackIndex >= 0)
{
outputValues[outputIndex] += feedbackOutputValues[targetOutputFeedbackIndex];
}
}
if (isSurround)
{
outputValues[4] += state.BackLeftDelayLine.Update((feedbackOutputValues[2] - feedbackOutputValues[3]) * 0.5f);
}
for (int channelIndex = 0; channelIndex < Parameter.ChannelCount; channelIndex++)
{
outputBuffers[channelIndex].Span[sampleIndex] = (outputValues[channelIndex] * outGain + channelInput[channelIndex] * dryGain) / 64;
}
}
}
private void ProcessReverb(CommandList context)
{
Debug.Assert(Parameter.IsChannelCountValid());
if (IsEffectEnabled && Parameter.IsChannelCountValid())
{
ReadOnlyMemory<float>[] inputBuffers = new ReadOnlyMemory<float>[Parameter.ChannelCount];
Memory<float>[] outputBuffers = new Memory<float>[Parameter.ChannelCount];
for (int i = 0; i < Parameter.ChannelCount; i++)
{
inputBuffers[i] = context.GetBufferMemory(InputBufferIndices[i]);
outputBuffers[i] = context.GetBufferMemory(OutputBufferIndices[i]);
}
switch (Parameter.ChannelCount)
{
case 1:
ProcessReverbMono(outputBuffers, inputBuffers, context.SampleCount);
break;
case 2:
ProcessReverbStereo(outputBuffers, inputBuffers, context.SampleCount);
break;
case 4:
ProcessReverbQuadraphonic(outputBuffers, inputBuffers, context.SampleCount);
break;
case 6:
ProcessReverbSurround(outputBuffers, inputBuffers, context.SampleCount);
break;
default:
throw new NotImplementedException($"{Parameter.ChannelCount}");
}
}
else
{
for (int i = 0; i < Parameter.ChannelCount; i++)
{
if (InputBufferIndices[i] != OutputBufferIndices[i])
{
context.GetBufferMemory(InputBufferIndices[i]).CopyTo(context.GetBufferMemory(OutputBufferIndices[i]));
}
}
}
}
public void Process(CommandList context)
{
ref ReverbState state = ref State.Span[0];
if (IsEffectEnabled)
{
if (Parameter.Status == Server.Effect.UsageState.Invalid)
{
state = new ReverbState(ref _parameter, WorkBuffer, IsLongSizePreDelaySupported);
}
else if (Parameter.Status == Server.Effect.UsageState.New)
{
state.UpdateParameter(ref _parameter);
}
}
ProcessReverb(context);
}
}
}

@@ -0,0 +1,87 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Renderer.Server.Upsampler;
using System;
using System.Runtime.CompilerServices;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class UpsampleCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.Upsample;
public ulong EstimatedProcessingTime { get; set; }
public uint BufferCount { get; }
public uint InputBufferIndex { get; }
public uint InputSampleCount { get; }
public uint InputSampleRate { get; }
public UpsamplerState UpsamplerInfo { get; }
public Memory<float> OutBuffer { get; }
public UpsampleCommand(uint bufferOffset, UpsamplerState info, uint inputCount, Span<byte> inputBufferOffset, uint bufferCount, uint sampleCount, uint sampleRate, int nodeId)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndex = 0;
OutBuffer = info.OutputBuffer;
BufferCount = bufferCount;
InputSampleCount = sampleCount;
InputSampleRate = sampleRate;
info.SourceSampleCount = inputCount;
info.InputBufferIndices = new ushort[inputCount];
for (int i = 0; i < inputCount; i++)
{
info.InputBufferIndices[i] = (ushort)(bufferOffset + inputBufferOffset[i]);
}
UpsamplerInfo = info;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private Span<float> GetBuffer(int index, int sampleCount)
{
return UpsamplerInfo.OutputBuffer.Span.Slice(index * sampleCount, sampleCount);
}
public void Process(CommandList context)
{
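// Resample each input buffer from InputSampleRate to the renderer's fixed target rate (Constants.TargetSampleRate).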
float ratio = (float)InputSampleRate / Constants.TargetSampleRate;
uint bufferCount = Math.Min(BufferCount, UpsamplerInfo.SourceSampleCount);
for (int i = 0; i < bufferCount; i++)
{
Span<float> inputBuffer = context.GetBuffer(UpsamplerInfo.InputBufferIndices[i]);
Span<float> outputBuffer = GetBuffer(UpsamplerInfo.InputBufferIndices[i], (int)UpsamplerInfo.SampleCount);
float fraction = 0.0f;
ResamplerHelper.ResampleForUpsampler(outputBuffer, inputBuffer, ratio, ref fraction, (int)(InputSampleCount / ratio));
}
}
}
}

@@ -0,0 +1,125 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class VolumeCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.Volume;
public ulong EstimatedProcessingTime { get; set; }
public ushort InputBufferIndex { get; }
public ushort OutputBufferIndex { get; }
public float Volume { get; }
public VolumeCommand(float volume, uint bufferIndex, int nodeId)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndex = (ushort)bufferIndex;
OutputBufferIndex = (ushort)bufferIndex;
Volume = volume;
}
private void ProcessVolumeAvx(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer)
{
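// Multiply 8 samples at a time by the volume, rounding up; the scalar loop below handles any remaining tail samples.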
Vector256<float> volumeVec = Vector256.Create(Volume);
ReadOnlySpan<Vector256<float>> inputVec = MemoryMarshal.Cast<float, Vector256<float>>(inputBuffer);
Span<Vector256<float>> outputVec = MemoryMarshal.Cast<float, Vector256<float>>(outputBuffer);
int sisdStart = inputVec.Length * 8;
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = Avx.Ceiling(Avx.Multiply(inputVec[i], volumeVec));
}
for (int i = sisdStart; i < inputBuffer.Length; i++)
{
outputBuffer[i] = FloatingPointHelper.MultiplyRoundUp(inputBuffer[i], Volume);
}
}
private void ProcessVolumeSse41(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer)
{
Vector128<float> volumeVec = Vector128.Create(Volume);
ReadOnlySpan<Vector128<float>> inputVec = MemoryMarshal.Cast<float, Vector128<float>>(inputBuffer);
Span<Vector128<float>> outputVec = MemoryMarshal.Cast<float, Vector128<float>>(outputBuffer);
int sisdStart = inputVec.Length * 4;
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = Sse41.Ceiling(Sse.Multiply(inputVec[i], volumeVec));
}
for (int i = sisdStart; i < inputBuffer.Length; i++)
{
outputBuffer[i] = FloatingPointHelper.MultiplyRoundUp(inputBuffer[i], Volume);
}
}
private void ProcessVolume(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer)
{
if (Avx.IsSupported)
{
ProcessVolumeAvx(outputBuffer, inputBuffer);
}
else if (Sse41.IsSupported)
{
ProcessVolumeSse41(outputBuffer, inputBuffer);
}
else
{
ProcessVolumeSlowPath(outputBuffer, inputBuffer);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void ProcessVolumeSlowPath(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer)
{
for (int i = 0; i < outputBuffer.Length; i++)
{
outputBuffer[i] = FloatingPointHelper.MultiplyRoundUp(inputBuffer[i], Volume);
}
}
public void Process(CommandList context)
{
ReadOnlySpan<float> inputBuffer = context.GetBuffer(InputBufferIndex);
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
ProcessVolume(outputBuffer, inputBuffer);
}
}
}

@@ -0,0 +1,71 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using System;
namespace Ryujinx.Audio.Renderer.Dsp.Command
{
public class VolumeRampCommand : ICommand
{
public bool Enabled { get; set; }
public int NodeId { get; }
public CommandType CommandType => CommandType.VolumeRamp;
public ulong EstimatedProcessingTime { get; set; }
public ushort InputBufferIndex { get; }
public ushort OutputBufferIndex { get; }
public float Volume0 { get; }
public float Volume1 { get; }
public VolumeRampCommand(float volume0, float volume1, uint bufferIndex, int nodeId)
{
Enabled = true;
NodeId = nodeId;
InputBufferIndex = (ushort)bufferIndex;
OutputBufferIndex = (ushort)bufferIndex;
Volume0 = volume0;
Volume1 = volume1;
}
private void ProcessVolumeRamp(Span<float> outputBuffer, ReadOnlySpan<float> inputBuffer, int sampleCount)
{
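// Apply a per-sample linear ramp from Volume0 to Volume1 across the frame.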
float ramp = (Volume1 - Volume0) / sampleCount;
float volume = Volume0;
for (int i = 0; i < sampleCount; i++)
{
outputBuffer[i] = FloatingPointHelper.MultiplyRoundUp(inputBuffer[i], volume);
volume += ramp;
}
}
public void Process(CommandList context)
{
ReadOnlySpan<float> inputBuffer = context.GetBuffer(InputBufferIndex);
Span<float> outputBuffer = context.GetBuffer(OutputBufferIndex);
ProcessVolumeRamp(outputBuffer, inputBuffer, (int)context.SampleCount);
}
}
}

@@ -0,0 +1,409 @@
//
// Copyright (c) 2019-2021 Ryujinx
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
using Ryujinx.Audio.Common;
using Ryujinx.Audio.Renderer.Common;
using Ryujinx.Audio.Renderer.Dsp.State;
using Ryujinx.Common.Logging;
using Ryujinx.Memory;
using System;
using System.Buffers;
using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
using static Ryujinx.Audio.Renderer.Parameter.VoiceInParameter;
namespace Ryujinx.Audio.Renderer.Dsp
{
public static class DataSourceHelper
{
private const int FixedPointPrecision = 15;
public class WaveBufferInformation
{
public Memory<VoiceUpdateState> State;
public uint SourceSampleRate;
public SampleFormat SampleFormat;
public float Pitch;
public DecodingBehaviour DecodingBehaviour;
public WaveBuffer[] WaveBuffers;
public ulong ExtraParameter;
public ulong ExtraParameterSize;
public int ChannelIndex;
public int ChannelCount;
public SampleRateConversionQuality SrcQuality;
}
private static int GetPitchLimitBySrcQuality(SampleRateConversionQuality quality)
{
switch (quality)
{
case SampleRateConversionQuality.Default:
case SampleRateConversionQuality.Low:
return 4;
case SampleRateConversionQuality.High:
return 8;
default:
throw new ArgumentException($"{quality}");
}
}
public static void ProcessWaveBuffers(IVirtualMemoryManager memoryManager, Span<float> outputBuffer, WaveBufferInformation info, uint targetSampleRate, int sampleCount)
{
const int tempBufferSize = 0x3F00;
ref VoiceUpdateState state = ref info.State.Span[0];
short[] tempBuffer = ArrayPool<short>.Shared.Rent(tempBufferSize);
float sampleRateRatio = ((float)info.SourceSampleRate / targetSampleRate * info.Pitch);
float fraction = state.Fraction;
int waveBufferIndex = (int)state.WaveBufferIndex;
ulong playedSampleCount = state.PlayedSampleCount;
int offset = state.Offset;
uint waveBufferConsumed = state.WaveBufferConsumed;
int pitchMaxLength = GetPitchLimitBySrcQuality(info.SrcQuality);
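// The resampler keeps pitchMaxLength history samples between frames (stored in state.Pitch) so interpolation can stay continuous across calls.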
int totalNeededSize = (int)MathF.Truncate(fraction + sampleRateRatio * sampleCount);
if (totalNeededSize + pitchMaxLength <= tempBufferSize && totalNeededSize >= 0)
{
int sourceSampleCountToProcess = sampleCount;
int maxSampleCountPerIteration = Math.Min((int)MathF.Truncate((tempBufferSize - fraction) / sampleRateRatio), sampleCount);
bool isStarving = false;
int i = 0;
while (i < sourceSampleCountToProcess)
{
int tempBufferIndex = 0;
if (!info.DecodingBehaviour.HasFlag(DecodingBehaviour.SkipPitchAndSampleRateConversion))
{
state.Pitch.ToSpan().Slice(0, pitchMaxLength).CopyTo(tempBuffer.AsSpan());
tempBufferIndex += pitchMaxLength;
}
int sampleCountToProcess = Math.Min(sourceSampleCountToProcess, maxSampleCountPerIteration);
int y = 0;
int sampleCountToDecode = (int)MathF.Truncate(fraction + sampleRateRatio * sampleCountToProcess);
while (y < sampleCountToDecode)
{
if (waveBufferIndex >= Constants.VoiceWaveBufferCount)
{
Logger.Error?.Print(LogClass.AudioRenderer, $"Invalid WaveBuffer index {waveBufferIndex}");
waveBufferIndex = 0;
playedSampleCount = 0;
}
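// If the next wave buffer has not been queued yet, the voice is starving: stop decoding for this frame.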
if (!state.IsWaveBufferValid[waveBufferIndex])
{
isStarving = true;
break;
}
ref WaveBuffer waveBuffer = ref info.WaveBuffers[waveBufferIndex];
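// When starting a new ADPCM wave buffer, reload its loop context from guest memory.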
if (offset == 0 && info.SampleFormat == SampleFormat.Adpcm && waveBuffer.Context != 0)
{
state.LoopContext = memoryManager.Read<AdpcmLoopContext>(waveBuffer.Context);
}
Span<short> tempSpan = tempBuffer.AsSpan().Slice(tempBufferIndex + y);
int decodedSampleCount = -1;
int targetSampleStartOffset;
int targetSampleEndOffset;
if (state.LoopCount > 0 && waveBuffer.LoopStartSampleOffset != 0 && waveBuffer.LoopEndSampleOffset != 0 && waveBuffer.LoopStartSampleOffset <= waveBuffer.LoopEndSampleOffset)
{
targetSampleStartOffset = (int)waveBuffer.LoopStartSampleOffset;
targetSampleEndOffset = (int)waveBuffer.LoopEndSampleOffset;
}
else
{
targetSampleStartOffset = (int)waveBuffer.StartSampleOffset;
targetSampleEndOffset = (int)waveBuffer.EndSampleOffset;
}
int targetWaveBufferSampleCount = targetSampleEndOffset - targetSampleStartOffset;
switch (info.SampleFormat)
{
case SampleFormat.Adpcm:
ReadOnlySpan<byte> waveBufferAdpcm = ReadOnlySpan<byte>.Empty;
if (waveBuffer.Buffer != 0 && waveBuffer.BufferSize != 0)
{
// TODO: we are possibly copying a lot of unneeded data here, we should only take what we need.
waveBufferAdpcm = memoryManager.GetSpan(waveBuffer.Buffer, (int)waveBuffer.BufferSize);
}
ReadOnlySpan<short> coefficients = MemoryMarshal.Cast<byte, short>(memoryManager.GetSpan(info.ExtraParameter, (int)info.ExtraParameterSize));
decodedSampleCount = AdpcmHelper.Decode(tempSpan, waveBufferAdpcm, targetSampleStartOffset, targetSampleEndOffset, offset, sampleCountToDecode - y, coefficients, ref state.LoopContext);
break;
case SampleFormat.PcmInt16:
ReadOnlySpan<short> waveBufferPcm16 = ReadOnlySpan<short>.Empty;
if (waveBuffer.Buffer != 0 && waveBuffer.BufferSize != 0)
{
ulong bufferOffset = waveBuffer.Buffer + PcmHelper.GetBufferOffset<short>(targetSampleStartOffset, offset, info.ChannelCount);
int bufferSize = PcmHelper.GetBufferSize<short>(targetSampleStartOffset, targetSampleEndOffset, offset, sampleCountToDecode - y) * info.ChannelCount;
waveBufferPcm16 = MemoryMarshal.Cast<byte, short>(memoryManager.GetSpan(bufferOffset, bufferSize));
}
decodedSampleCount = PcmHelper.Decode(tempSpan, waveBufferPcm16, targetSampleStartOffset, targetSampleEndOffset, info.ChannelIndex, info.ChannelCount);
break;
case SampleFormat.PcmFloat:
ReadOnlySpan<float> waveBufferPcmFloat = ReadOnlySpan<float>.Empty;
if (waveBuffer.Buffer != 0 && waveBuffer.BufferSize != 0)
{
ulong bufferOffset = waveBuffer.Buffer + PcmHelper.GetBufferOffset<float>(targetSampleStartOffset, offset, info.ChannelCount);
int bufferSize = PcmHelper.GetBufferSize<float>(targetSampleStartOffset, targetSampleEndOffset, offset, sampleCountToDecode - y) * info.ChannelCount;
waveBufferPcmFloat = MemoryMarshal.Cast<byte, float>(memoryManager.GetSpan(bufferOffset, bufferSize));
}
decodedSampleCount = PcmHelper.Decode(tempSpan, waveBufferPcmFloat, targetSampleStartOffset, targetSampleEndOffset, info.ChannelIndex, info.ChannelCount);
break;
default:
Logger.Warning?.Print(LogClass.AudioRenderer, $"Unsupported sample format {info.SampleFormat}");
break;
}
Debug.Assert(decodedSampleCount <= sampleCountToDecode);
if (decodedSampleCount < 0)
{
Logger.Warning?.Print(LogClass.AudioRenderer, $"Decoding failed, skipping WaveBuffer");
state.MarkEndOfBufferWaveBufferProcessing(ref waveBuffer, ref waveBufferIndex, ref waveBufferConsumed, ref playedSampleCount);
decodedSampleCount = 0;
}
y += decodedSampleCount;
offset += decodedSampleCount;
playedSampleCount += (uint)decodedSampleCount;
if (offset >= targetWaveBufferSampleCount || decodedSampleCount == 0)
{
offset = 0;
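// Reached the end of the current wave buffer: either loop it (tracking LoopCount) or move on to the next queued buffer.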
if (waveBuffer.Looping)
{
state.LoopCount++;
if (waveBuffer.LoopCount >= 0)
{
if (decodedSampleCount == 0 || state.LoopCount > waveBuffer.LoopCount)
{
state.MarkEndOfBufferWaveBufferProcessing(ref waveBuffer, ref waveBufferIndex, ref waveBufferConsumed, ref playedSampleCount);
}
}
if (decodedSampleCount == 0)
{
isStarving = true;
break;
}
if (info.DecodingBehaviour.HasFlag(DecodingBehaviour.PlayedSampleCountResetWhenLooping))
{
playedSampleCount = 0;
}
}
else
{
state.MarkEndOfBufferWaveBufferProcessing(ref waveBuffer, ref waveBufferIndex, ref waveBufferConsumed, ref playedSampleCount);
}
}
}
Span<float> outputSpan = outputBuffer.Slice(i);
Span<int> outputSpanInt = MemoryMarshal.Cast<float, int>(outputSpan);
if (info.DecodingBehaviour.HasFlag(DecodingBehaviour.SkipPitchAndSampleRateConversion))
{
for (int j = 0; j < y; j++)
{
outputBuffer[j] = tempBuffer[j];
}
}
else
{
Span<short> tempSpan = tempBuffer.AsSpan().Slice(tempBufferIndex + y);
tempSpan.Slice(0, sampleCountToDecode - y).Fill(0);
ToFloat(outputBuffer, outputSpanInt, sampleCountToProcess);
ResamplerHelper.Resample(outputBuffer, tempBuffer, sampleRateRatio, ref fraction, sampleCountToProcess, info.SrcQuality, y != sourceSampleCountToProcess || info.Pitch != 1.0f);
tempBuffer.AsSpan().Slice(sampleCountToDecode, pitchMaxLength).CopyTo(state.Pitch.ToSpan());
}
i += sampleCountToProcess;
}
Debug.Assert(sourceSampleCountToProcess == i || !isStarving);
state.WaveBufferConsumed = waveBufferConsumed;
state.Offset = offset;
state.PlayedSampleCount = playedSampleCount;
state.WaveBufferIndex = (uint)waveBufferIndex;
state.Fraction = fraction;
}
ArrayPool<short>.Shared.Return(tempBuffer);
}
private static void ToFloatAvx(Span<float> output, ReadOnlySpan<int> input, int sampleCount)
{
ReadOnlySpan<Vector256<int>> inputVec = MemoryMarshal.Cast<int, Vector256<int>>(input);
Span<Vector256<float>> outputVec = MemoryMarshal.Cast<float, Vector256<float>>(output);
int sisdStart = inputVec.Length * 8;
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = Avx.ConvertToVector256Single(inputVec[i]);
}
for (int i = sisdStart; i < sampleCount; i++)
{
output[i] = input[i];
}
}
private static void ToFloatSse2(Span<float> output, ReadOnlySpan<int> input, int sampleCount)
{
ReadOnlySpan<Vector128<int>> inputVec = MemoryMarshal.Cast<int, Vector128<int>>(input);
Span<Vector128<float>> outputVec = MemoryMarshal.Cast<float, Vector128<float>>(output);
int sisdStart = inputVec.Length * 4;
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = Sse2.ConvertToVector128Single(inputVec[i]);
}
for (int i = sisdStart; i < sampleCount; i++)
{
output[i] = input[i];
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void ToFloatSlow(Span<float> output, ReadOnlySpan<int> input, int sampleCount)
{
for (int i = 0; i < sampleCount; i++)
{
output[i] = input[i];
}
}
public static void ToFloat(Span<float> output, ReadOnlySpan<int> input, int sampleCount)
{
if (Avx.IsSupported)
{
ToFloatAvx(output, input, sampleCount);
}
else if (Sse2.IsSupported)
{
ToFloatSse2(output, input, sampleCount);
}
else
{
ToFloatSlow(output, input, sampleCount);
}
}
public static void ToIntAvx(Span<int> output, ReadOnlySpan<float> input, int sampleCount)
{
ReadOnlySpan<Vector256<float>> inputVec = MemoryMarshal.Cast<float, Vector256<float>>(input);
Span<Vector256<int>> outputVec = MemoryMarshal.Cast<int, Vector256<int>>(output);
int sisdStart = inputVec.Length * 8;
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = Avx.ConvertToVector256Int32(inputVec[i]);
}
for (int i = sisdStart; i < sampleCount; i++)
{
output[i] = (int)input[i];
}
}
public static void ToIntSse2(Span<int> output, ReadOnlySpan<float> input, int sampleCount)
{
ReadOnlySpan<Vector128<float>> inputVec = MemoryMarshal.Cast<float, Vector128<float>>(input);
Span<Vector128<int>> outputVec = MemoryMarshal.Cast<int, Vector128<int>>(output);
int sisdStart = inputVec.Length * 4;
for (int i = 0; i < inputVec.Length; i++)
{
outputVec[i] = Sse2.ConvertToVector128Int32(inputVec[i]);
}
for (int i = sisdStart; i < sampleCount; i++)
{
output[i] = (int)input[i];
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void ToIntSlow(Span<int> output, ReadOnlySpan<float> input, int sampleCount)
{
for (int i = 0; i < sampleCount; i++)
{
output[i] = (int)input[i];
}
}
public static void ToInt(Span<int> output, ReadOnlySpan<float> input, int sampleCount)
{
if (Avx.IsSupported)
{
ToIntAvx(output, input, sampleCount);
}
else if (Sse2.IsSupported)
{
ToIntSse2(output, input, sampleCount);
}
else
{
ToIntSlow(output, input, sampleCount);
}
}
}
}
