new StreamingVoice and AudioData system

pull/50/head
cosmonaut 2023-08-02 14:10:48 -07:00
parent a0408a863c
commit eebbaeb6ae
15 changed files with 360 additions and 1122 deletions

src/Audio/AudioData.cs (new file, +36)

@@ -0,0 +1,36 @@
using System;
namespace MoonWorks.Audio
{
public abstract class AudioData
{
public Format Format { get; protected set; }
public abstract uint DecodeBufferSize { get; }
public abstract bool Loaded { get; }
/// <summary>
/// Loads the raw audio data into memory.
/// </summary>
public abstract void Load();
/// <summary>
/// Seeks to the given sample frame.
/// </summary>
public abstract void Seek(uint sampleFrame);
/// <summary>
/// Attempts to decode data of length bufferLengthInBytes into the provided buffer.
/// </summary>
/// <param name="buffer">The buffer that decoded bytes will be placed into.</param>
/// <param name="bufferLengthInBytes">Requested length of decoded audio data.</param>
/// <param name="filledLengthInBytes">How much data was actually filled in by the decode.</param>
/// <param name="reachedEnd">Whether the end of the data was reached on this decode.</param>
public abstract unsafe void Decode(void* buffer, int bufferLengthInBytes, out int filledLengthInBytes, out bool reachedEnd);
/// <summary>
/// Unloads the raw audio data from memory.
/// </summary>
public abstract void Unload();
}
}
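
A minimal sketch of how a consumer is expected to drive this abstract API, assuming only the members declared above (the consumeAudio callback, the DecodeAll helper, and the using directives are illustrative, not part of the commit):

using System;
using System.Runtime.InteropServices;

static unsafe void DecodeAll(AudioData data, Action<IntPtr, int> consumeAudio)
{
    data.Load();                 // pull the raw file into memory
    data.Seek(0);                // start at the first sample frame
    void* buffer = NativeMemory.Alloc(data.DecodeBufferSize);
    var reachedEnd = false;
    while (!reachedEnd)
    {
        // Decode fills at most DecodeBufferSize bytes and reports how much it produced.
        data.Decode(buffer, (int) data.DecodeBufferSize, out var filled, out reachedEnd);
        if (filled > 0)
        {
            consumeAudio((IntPtr) buffer, filled); // e.g. hand the bytes to a source voice
        }
    }
    NativeMemory.Free(buffer);
    data.Unload();
}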

src/Audio/AudioDataOgg.cs (new file, +96)

@@ -0,0 +1,96 @@
using System;
using System.IO;
using System.Runtime.InteropServices;
namespace MoonWorks.Audio
{
public class AudioDataOgg : AudioData
{
private IntPtr FileDataPtr = IntPtr.Zero;
private IntPtr VorbisHandle = IntPtr.Zero;
private string FilePath;
public override bool Loaded => VorbisHandle != IntPtr.Zero;
public override uint DecodeBufferSize => 32768;
public AudioDataOgg(string filePath)
{
FilePath = filePath;
var handle = FAudio.stb_vorbis_open_filename(filePath, out var error, IntPtr.Zero);
if (error != 0)
{
throw new AudioLoadException("Error loading file!");
}
var info = FAudio.stb_vorbis_get_info(handle);
Format = new Format
{
Tag = FormatTag.IEEE_FLOAT,
BitsPerSample = 32,
Channels = (ushort) info.channels,
SampleRate = info.sample_rate
};
FAudio.stb_vorbis_close(handle);
}
public override unsafe void Decode(void* buffer, int bufferLengthInBytes, out int filledLengthInBytes, out bool reachedEnd)
{
var lengthInFloats = bufferLengthInBytes / sizeof(float);
/* NOTE: this function returns samples per channel, not total samples */
var samples = FAudio.stb_vorbis_get_samples_float_interleaved(
VorbisHandle,
Format.Channels,
(IntPtr) buffer,
lengthInFloats
);
var sampleCount = samples * Format.Channels;
reachedEnd = sampleCount < lengthInFloats;
filledLengthInBytes = sampleCount * sizeof(float);
}
public override unsafe void Load()
{
if (!Loaded)
{
var fileStream = new FileStream(FilePath, FileMode.Open, FileAccess.Read);
FileDataPtr = (nint) NativeMemory.Alloc((nuint) fileStream.Length);
var fileDataSpan = new Span<byte>((void*) FileDataPtr, (int) fileStream.Length);
fileStream.ReadExactly(fileDataSpan);
fileStream.Close();
VorbisHandle = FAudio.stb_vorbis_open_memory(FileDataPtr, fileDataSpan.Length, out int error, IntPtr.Zero);
if (error != 0)
{
NativeMemory.Free((void*) FileDataPtr);
Logger.LogError("Error opening OGG file!");
Logger.LogError("Error: " + error);
throw new AudioLoadException("Error opening OGG file!");
}
}
}
public override void Seek(uint sampleFrame)
{
FAudio.stb_vorbis_seek(VorbisHandle, sampleFrame);
}
public override unsafe void Unload()
{
if (Loaded)
{
FAudio.stb_vorbis_close(VorbisHandle);
NativeMemory.Free((void*) FileDataPtr);
VorbisHandle = IntPtr.Zero;
FileDataPtr = IntPtr.Zero;
}
}
}
}
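
Taken together with the StreamingVoice and AudioDevice changes later in this commit, the intended streaming flow looks roughly like the following sketch. The audioDevice variable and the file path are placeholders, and Play() on the voice is assumed from the existing SourceVoice API (not shown in this diff):

var oggData = new AudioDataOgg("Content/Music/title_theme.ogg"); // hypothetical path
var voice = audioDevice.Obtain<StreamingVoice>(oggData.Format);  // pooled, keyed by voice type and format
voice.Loop = true;
voice.Load(oggData);   // loads the file data, allocates decode buffers, queues the first frames
voice.Play();          // assumed SourceVoice method
// ...later, when the track is no longer needed:
voice.Unload();        // stops playback and releases the ogg data
voice.Return();        // marks the voice for return to the pool on the audio thread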

@@ -1,37 +1,28 @@
using System;
using System.IO;
using System.Runtime.InteropServices;
namespace MoonWorks.Audio
{
-public class StreamingSoundQoa : StreamingSoundSeekable
+public class AudioDataQoa : AudioData
{
private IntPtr QoaHandle = IntPtr.Zero;
private IntPtr FileDataPtr = IntPtr.Zero;
-uint Channels;
-uint SamplesPerChannelPerFrame;
-uint TotalSamplesPerChannel;
-public override bool Loaded => QoaHandle != IntPtr.Zero;
private string FilePath;
private const uint QOA_MAGIC = 0x716f6166; /* 'qoaf' */
-private static unsafe UInt64 ReverseEndianness(UInt64 value)
-{
-byte* bytes = (byte*) &value;
-return
-((UInt64)(bytes[0]) << 56) | ((UInt64)(bytes[1]) << 48) |
-((UInt64)(bytes[2]) << 40) | ((UInt64)(bytes[3]) << 32) |
-((UInt64)(bytes[4]) << 24) | ((UInt64)(bytes[5]) << 16) |
-((UInt64)(bytes[6]) << 8) | ((UInt64)(bytes[7]) << 0);
-}
-public unsafe static StreamingSoundQoa Create(AudioDevice device, string filePath)
+public override bool Loaded => QoaHandle != IntPtr.Zero;
+private uint decodeBufferSize;
+public override uint DecodeBufferSize => decodeBufferSize;
+public AudioDataQoa(string filePath)
{
-using var stream = new FileStream(filePath, FileMode.Open, FileAccess.Read);
+FilePath = filePath;
+using var stream = new FileStream(FilePath, FileMode.Open, FileAccess.Read);
using var reader = new BinaryReader(stream);
UInt64 fileHeader = ReverseEndianness(reader.ReadUInt64());
@@ -51,45 +42,32 @@ namespace MoonWorks.Audio
uint samplerate = (uint) ((frameHeader >> 32) & 0xFFFFFF);
uint samplesPerChannelPerFrame = (uint) ((frameHeader >> 16) & 0x00FFFF);
-return new StreamingSoundQoa(
-device,
-filePath,
-channels,
-samplerate,
-samplesPerChannelPerFrame,
-totalSamplesPerChannel
-);
-}
-internal unsafe StreamingSoundQoa(
-AudioDevice device,
-string filePath,
-uint channels,
-uint samplesPerSecond,
-uint samplesPerChannelPerFrame,
-uint totalSamplesPerChannel
-) : base(
-device,
-1,
-16,
-(ushort) (2 * channels),
-(ushort) channels,
-samplesPerSecond,
-samplesPerChannelPerFrame * channels * sizeof(short),
-true
-) {
-Channels = channels;
-SamplesPerChannelPerFrame = samplesPerChannelPerFrame;
-TotalSamplesPerChannel = totalSamplesPerChannel;
-FilePath = filePath;
-}
-public override void Seek(uint sampleFrame)
-{
-FAudio.qoa_seek_frame(QoaHandle, (int) sampleFrame);
-}
+Format = new Format
+{
+Tag = FormatTag.PCM,
+BitsPerSample = 16,
+Channels = (ushort) channels,
+SampleRate = samplerate
+};
+decodeBufferSize = channels * samplesPerChannelPerFrame * sizeof(short);
+}
+public override unsafe void Decode(void* buffer, int bufferLengthInBytes, out int filledLengthInBytes, out bool reachedEnd)
+{
+var lengthInShorts = bufferLengthInBytes / sizeof(short);
+// NOTE: this function returns samples per channel!
+var samples = FAudio.qoa_decode_next_frame(QoaHandle, (short*) buffer);
+var sampleCount = samples * Format.Channels;
+reachedEnd = sampleCount < lengthInShorts;
+filledLengthInBytes = (int) (sampleCount * sizeof(short));
+}
public override unsafe void Load()
+{
+if (!Loaded)
{
var fileStream = new FileStream(FilePath, FileMode.Open, FileAccess.Read);
FileDataPtr = (nint) NativeMemory.Alloc((nuint) fileStream.Length);
@@ -105,6 +83,12 @@ namespace MoonWorks.Audio
throw new AudioLoadException("Error opening QOA file!");
}
}
+}
+public override void Seek(uint sampleFrame)
+{
+FAudio.qoa_seek_frame(QoaHandle, (int) sampleFrame);
+}
public override unsafe void Unload()
{
@@ -118,20 +102,15 @@ namespace MoonWorks.Audio
}
}
-protected override unsafe void FillBuffer(
-void* buffer,
-int bufferLengthInBytes,
-out int filledLengthInBytes,
-out bool reachedEnd
-) {
-var lengthInShorts = bufferLengthInBytes / sizeof(short);
-// NOTE: this function returns samples per channel!
-var samples = FAudio.qoa_decode_next_frame(QoaHandle, (short*) buffer);
-var sampleCount = samples * Channels;
-reachedEnd = sampleCount < lengthInShorts;
-filledLengthInBytes = (int) (sampleCount * sizeof(short));
+private static unsafe UInt64 ReverseEndianness(UInt64 value)
+{
+byte* bytes = (byte*) &value;
+return
+((UInt64)(bytes[0]) << 56) | ((UInt64)(bytes[1]) << 48) |
+((UInt64)(bytes[2]) << 40) | ((UInt64)(bytes[3]) << 32) |
+((UInt64)(bytes[4]) << 24) | ((UInt64)(bytes[5]) << 16) |
+((UInt64)(bytes[6]) << 8) | ((UInt64)(bytes[7]) << 0);
}
}
}
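
Aside: the hand-rolled ReverseEndianness above exists because QOA headers are big-endian. On modern .NET the same reads can be expressed with System.Buffers.Binary.BinaryPrimitives; shown here only for comparison, and the file path is a placeholder:

using System;
using System.Buffers.Binary;
using System.IO;

Span<byte> header = stackalloc byte[8];
using var stream = File.OpenRead("Content/Sound/blip.qoa");  // hypothetical path
stream.ReadExactly(header);
ulong fileHeader = BinaryPrimitives.ReadUInt64BigEndian(header);
uint magic = (uint) (fileHeader >> 32);                      // should equal QOA_MAGIC ('qoaf')
uint totalSamplesPerChannel = (uint) (fileHeader & 0xFFFFFFFF);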

@@ -22,13 +22,12 @@ namespace MoonWorks.Audio
public float SpeedOfSound = 343.5f;
private readonly HashSet<WeakReference> resources = new HashSet<WeakReference>();
-private readonly List<StreamingSound> autoUpdateStreamingSoundReferences = new List<StreamingSound>();
-private readonly List<WeakReference<SoundSequence>> soundSequenceReferences = new List<WeakReference<SoundSequence>>();
-private readonly List<SourceVoice> autoFreeSourceVoices = new List<SourceVoice>();
+private readonly HashSet<SourceVoice> activeSourceVoices = new HashSet<SourceVoice>();
private AudioTweenManager AudioTweenManager;
private SourceVoicePool VoicePool;
+private List<SourceVoice> VoicesToReturn = new List<SourceVoice>();
private const int Step = 200;
private TimeSpan UpdateInterval;
@@ -106,7 +105,7 @@ namespace MoonWorks.Audio
return;
}
-fauxMasteringVoice = new SubmixVoice(this, FAudio.FAUDIO_DEFAULT_CHANNELS, FAudio.FAUDIO_DEFAULT_SAMPLERATE);
+fauxMasteringVoice = new SubmixVoice(this, DeviceDetails.OutputFormat.Format.nChannels, DeviceDetails.OutputFormat.Format.nSamplesPerSec);
/* Init 3D Audio */
@@ -159,45 +158,21 @@ namespace MoonWorks.Audio
previousTickTime = TickStopwatch.Elapsed.Ticks;
float elapsedSeconds = (float) tickDelta / System.TimeSpan.TicksPerSecond;
-// TODO: call an Update on all active voices
-for (var i = autoUpdateStreamingSoundReferences.Count - 1; i >= 0; i -= 1)
-{
-var streamingSound = autoUpdateStreamingSoundReferences[i];
-if (streamingSound.Loaded)
-{
-streamingSound.Update();
-}
-else
-{
-autoUpdateStreamingSoundReferences.RemoveAt(i);
-}
-}
-for (var i = soundSequenceReferences.Count - 1; i >= 0; i -= 1)
-{
-if (soundSequenceReferences[i].TryGetTarget(out var soundSequence))
-{
-soundSequence.OnUpdate();
-}
-else
-{
-soundSequenceReferences.RemoveAt(i);
-}
-}
-for (var i = autoFreeSourceVoices.Count - 1; i >= 0; i -= 1)
-{
-var voice = autoFreeSourceVoices[i];
-if (voice.BuffersQueued == 0)
-{
-Return(voice);
-autoFreeSourceVoices.RemoveAt(i);
-}
-}
AudioTweenManager.Update(elapsedSeconds);
+foreach (var voice in activeSourceVoices)
+{
+voice.Update();
+}
+foreach (var voice in VoicesToReturn)
+{
+voice.Reset();
+activeSourceVoices.Remove(voice);
+VoicePool.Return(voice);
+}
+VoicesToReturn.Clear();
}
/// <summary>
@@ -208,26 +183,18 @@ namespace MoonWorks.Audio
FAudio.FAudio_CommitChanges(Handle, syncGroup);
}
+// TODO: is pooling SourceVoices generically a good idea? there are a lot of different kinds
/// <summary>
/// Obtains an appropriate source voice from the voice pool.
/// </summary>
/// <param name="format">The format that the voice must match.</param>
/// <returns>A source voice with the given format.</returns>
-public SourceVoice Obtain(Format format)
+public T Obtain<T>(Format format) where T : SourceVoice, IPoolable<T>
{
lock (StateLock)
{
-return VoicePool.Obtain(format);
-}
-}
-internal void ReturnWhenIdle(SourceVoice voice)
-{
-lock (StateLock)
-{
-autoFreeSourceVoices.Add(voice);
+var voice = VoicePool.Obtain<T>(format);
+activeSourceVoices.Add(voice);
+return voice;
}
}
@@ -239,8 +206,7 @@ namespace MoonWorks.Audio
{
lock (StateLock)
{
-voice.Reset();
-VoicePool.Return(voice);
+VoicesToReturn.Add(voice);
}
}
@@ -298,16 +264,6 @@ namespace MoonWorks.Audio
}
}
-internal void AddAutoUpdateStreamingSoundInstance(StreamingSound instance)
-{
-autoUpdateStreamingSoundReferences.Add(instance);
-}
-internal void AddSoundSequenceReference(SoundSequence sequence)
-{
-soundSequenceReferences.Add(new WeakReference<SoundSequence>(sequence));
-}
protected virtual void Dispose(bool disposing)
{
if (!IsDisposed)

src/Audio/IPoolable.cs (new file, +7)

@@ -0,0 +1,7 @@
namespace MoonWorks.Audio
{
public interface IPoolable<T>
{
static abstract T Create(AudioDevice device, Format format);
}
}
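
IPoolable<T> relies on static abstract interface members (C# 11 / .NET 7), which lets a pool construct concrete voice types without reflection. A minimal sketch of the pattern, mirroring what SourceVoicePool.Obtain<T> does later in this commit (VoiceFactory is a hypothetical helper, not part of the commit):

public static class VoiceFactory
{
    // Resolved against the concrete T at compile time, e.g. StaticVoice.Create(device, format).
    public static T CreatePooled<T>(AudioDevice device, Format format)
        where T : SourceVoice, IPoolable<T>
    {
        return T.Create(device, format);
    }
}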

@@ -16,7 +16,7 @@ namespace MoonWorks.Audio
var descriptor = new FAudio.FAudioEffectDescriptor();
descriptor.InitialState = 1;
-descriptor.OutputChannels = Device.DeviceDetails.OutputFormat.Format.nChannels;
+descriptor.OutputChannels = 1;
descriptor.pEffect = reverb;
chain.EffectCount = 1;


@@ -1,508 +0,0 @@
using System;
using System.Runtime.InteropServices;
using EasingFunction = System.Func<float, float>;
namespace MoonWorks.Audio
{
public abstract class SoundInstance : AudioResource
{
internal IntPtr Voice;
private FAudio.FAudioWaveFormatEx format;
public FAudio.FAudioWaveFormatEx Format => format;
protected FAudio.F3DAUDIO_DSP_SETTINGS dspSettings;
private ReverbEffect ReverbEffect;
private FAudio.FAudioVoiceSends ReverbSends;
public bool Is3D { get; protected set; }
public virtual SoundState State { get; protected set; }
private float pan = 0;
public float Pan
{
get => pan;
internal set
{
value = Math.MathHelper.Clamp(value, -1f, 1f);
if (pan != value)
{
pan = value;
if (pan < -1f)
{
pan = -1f;
}
if (pan > 1f)
{
pan = 1f;
}
if (Is3D) { return; }
SetPanMatrixCoefficients();
FAudio.FAudioVoice_SetOutputMatrix(
Voice,
Device.MasteringVoice,
dspSettings.SrcChannelCount,
dspSettings.DstChannelCount,
dspSettings.pMatrixCoefficients,
0
);
}
}
}
private float pitch = 0;
public float Pitch
{
get => pitch;
internal set
{
value = Math.MathHelper.Clamp(value, -1f, 1f);
if (pitch != value)
{
pitch = value;
UpdatePitch();
}
}
}
private float volume = 1;
public float Volume
{
get => volume;
internal set
{
value = Math.MathHelper.Max(0, value);
if (volume != value)
{
volume = value;
FAudio.FAudioVoice_SetVolume(Voice, volume, 0);
}
}
}
private const float MAX_FILTER_FREQUENCY = 1f;
private const float MAX_FILTER_ONEOVERQ = 1.5f;
private FAudio.FAudioFilterParameters filterParameters = new FAudio.FAudioFilterParameters
{
Type = FAudio.FAudioFilterType.FAudioLowPassFilter,
Frequency = 1f,
OneOverQ = 1f
};
public float FilterFrequency
{
get => filterParameters.Frequency;
internal set
{
value = System.Math.Clamp(value, 0.01f, MAX_FILTER_FREQUENCY);
if (filterParameters.Frequency != value)
{
filterParameters.Frequency = value;
FAudio.FAudioVoice_SetFilterParameters(
Voice,
ref filterParameters,
0
);
}
}
}
public float FilterOneOverQ
{
get => filterParameters.OneOverQ;
internal set
{
value = System.Math.Clamp(value, 0.01f, MAX_FILTER_ONEOVERQ);
if (filterParameters.OneOverQ != value)
{
filterParameters.OneOverQ = value;
FAudio.FAudioVoice_SetFilterParameters(
Voice,
ref filterParameters,
0
);
}
}
}
private FilterType filterType;
public FilterType FilterType
{
get => filterType;
set
{
if (filterType != value)
{
filterType = value;
switch (filterType)
{
case FilterType.None:
filterParameters = new FAudio.FAudioFilterParameters
{
Type = FAudio.FAudioFilterType.FAudioLowPassFilter,
Frequency = 1f,
OneOverQ = 1f
};
break;
case FilterType.LowPass:
filterParameters.Type = FAudio.FAudioFilterType.FAudioLowPassFilter;
filterParameters.Frequency = 1f;
break;
case FilterType.BandPass:
filterParameters.Type = FAudio.FAudioFilterType.FAudioBandPassFilter;
break;
case FilterType.HighPass:
filterParameters.Type = FAudio.FAudioFilterType.FAudioHighPassFilter;
filterParameters.Frequency = 0f;
break;
}
FAudio.FAudioVoice_SetFilterParameters(
Voice,
ref filterParameters,
0
);
}
}
}
private float reverb;
public unsafe float Reverb
{
get => reverb;
internal set
{
if (ReverbEffect != null)
{
value = MathF.Max(0, value);
if (reverb != value)
{
reverb = value;
float* outputMatrix = (float*) dspSettings.pMatrixCoefficients;
outputMatrix[0] = reverb;
if (dspSettings.SrcChannelCount == 2)
{
outputMatrix[1] = reverb;
}
FAudio.FAudioVoice_SetOutputMatrix(
Voice,
ReverbEffect.Voice,
dspSettings.SrcChannelCount,
1,
dspSettings.pMatrixCoefficients,
0
);
}
}
#if DEBUG
if (ReverbEffect == null)
{
Logger.LogWarn("Tried to set reverb value before applying a reverb effect");
}
#endif
}
}
public unsafe SoundInstance(
AudioDevice device,
ushort formatTag,
ushort bitsPerSample,
ushort blockAlign,
ushort channels,
uint samplesPerSecond
) : base(device)
{
format = new FAudio.FAudioWaveFormatEx
{
wFormatTag = formatTag,
wBitsPerSample = bitsPerSample,
nChannels = channels,
nBlockAlign = blockAlign,
nSamplesPerSec = samplesPerSecond,
nAvgBytesPerSec = blockAlign * samplesPerSecond
};
FAudio.FAudio_CreateSourceVoice(
Device.Handle,
out Voice,
ref format,
FAudio.FAUDIO_VOICE_USEFILTER,
FAudio.FAUDIO_DEFAULT_FREQ_RATIO,
IntPtr.Zero,
IntPtr.Zero,
IntPtr.Zero
);
if (Voice == IntPtr.Zero)
{
Logger.LogError("SoundInstance failed to initialize!");
return;
}
InitDSPSettings(Format.nChannels);
State = SoundState.Stopped;
}
public void Apply3D(AudioListener listener, AudioEmitter emitter)
{
Is3D = true;
emitter.emitterData.CurveDistanceScaler = Device.CurveDistanceScalar;
emitter.emitterData.ChannelCount = dspSettings.SrcChannelCount;
FAudio.F3DAudioCalculate(
Device.Handle3D,
ref listener.listenerData,
ref emitter.emitterData,
FAudio.F3DAUDIO_CALCULATE_MATRIX | FAudio.F3DAUDIO_CALCULATE_DOPPLER,
ref dspSettings
);
UpdatePitch();
FAudio.FAudioVoice_SetOutputMatrix(
Voice,
Device.MasteringVoice,
dspSettings.SrcChannelCount,
dspSettings.DstChannelCount,
dspSettings.pMatrixCoefficients,
0
);
}
public unsafe void ApplyReverb(ReverbEffect reverbEffect)
{
ReverbSends = new FAudio.FAudioVoiceSends();
ReverbSends.SendCount = 2;
ReverbSends.pSends = (nint) NativeMemory.Alloc((nuint) (2 * Marshal.SizeOf<FAudio.FAudioSendDescriptor>()));
FAudio.FAudioSendDescriptor* sendDesc = (FAudio.FAudioSendDescriptor*) ReverbSends.pSends;
sendDesc[0].Flags = 0;
sendDesc[0].pOutputVoice = Device.MasteringVoice;
sendDesc[1].Flags = 0;
sendDesc[1].pOutputVoice = reverbEffect.Voice;
FAudio.FAudioVoice_SetOutputVoices(
Voice,
ref ReverbSends
);
ReverbEffect = reverbEffect;
}
public void SetPan(float targetValue)
{
Pan = targetValue;
Device.ClearTweens(this, AudioTweenProperty.Pan);
}
public void SetPan(float targetValue, float duration, EasingFunction easingFunction)
{
Device.CreateTween(this, AudioTweenProperty.Pan, easingFunction, Pan, targetValue, duration, 0);
}
public void SetPan(float targetValue, float delayTime, float duration, EasingFunction easingFunction)
{
Device.CreateTween(this, AudioTweenProperty.Pan, easingFunction, Pan, targetValue, duration, delayTime);
}
public void SetPitch(float targetValue)
{
Pitch = targetValue;
Device.ClearTweens(this, AudioTweenProperty.Pitch);
}
public void SetPitch(float targetValue, float duration, EasingFunction easingFunction)
{
Device.CreateTween(this, AudioTweenProperty.Pitch, easingFunction, Pan, targetValue, duration, 0);
}
public void SetPitch(float targetValue, float delayTime, float duration, EasingFunction easingFunction)
{
Device.CreateTween(this, AudioTweenProperty.Pitch, easingFunction, Pan, targetValue, duration, delayTime);
}
public void SetVolume(float targetValue)
{
Volume = targetValue;
Device.ClearTweens(this, AudioTweenProperty.Volume);
}
public void SetVolume(float targetValue, float duration, EasingFunction easingFunction)
{
Device.CreateTween(this, AudioTweenProperty.Volume, easingFunction, Volume, targetValue, duration, 0);
}
public void SetVolume(float targetValue, float delayTime, float duration, EasingFunction easingFunction)
{
Device.CreateTween(this, AudioTweenProperty.Volume, easingFunction, Volume, targetValue, duration, delayTime);
}
public void SetFilterFrequency(float targetValue)
{
FilterFrequency = targetValue;
Device.ClearTweens(this, AudioTweenProperty.FilterFrequency);
}
public void SetFilterFrequency(float targetValue, float duration, EasingFunction easingFunction)
{
Device.CreateTween(this, AudioTweenProperty.FilterFrequency, easingFunction, FilterFrequency, targetValue, duration, 0);
}
public void SetFilterFrequency(float targetValue, float delayTime, float duration, EasingFunction easingFunction)
{
Device.CreateTween(this, AudioTweenProperty.FilterFrequency, easingFunction, FilterFrequency, targetValue, duration, delayTime);
}
public void SetFilterOneOverQ(float targetValue)
{
FilterOneOverQ = targetValue;
}
public void SetReverb(float targetValue)
{
Reverb = targetValue;
Device.ClearTweens(this, AudioTweenProperty.Reverb);
}
public void SetReverb(float targetValue, float duration, EasingFunction easingFunction)
{
Device.CreateTween(this, AudioTweenProperty.Reverb, easingFunction, Volume, targetValue, duration, 0);
}
public void SetReverb(float targetValue, float delayTime, float duration, EasingFunction easingFunction)
{
Device.CreateTween(this, AudioTweenProperty.Reverb, easingFunction, Volume, targetValue, duration, delayTime);
}
public abstract void Play();
public abstract void QueueSyncPlay();
public abstract void Pause();
public abstract void Stop();
public abstract void StopImmediate();
private unsafe void InitDSPSettings(uint srcChannels)
{
dspSettings = new FAudio.F3DAUDIO_DSP_SETTINGS();
dspSettings.DopplerFactor = 1f;
dspSettings.SrcChannelCount = srcChannels;
dspSettings.DstChannelCount = Device.DeviceDetails.OutputFormat.Format.nChannels;
nuint memsize = (
4 *
dspSettings.SrcChannelCount *
dspSettings.DstChannelCount
);
dspSettings.pMatrixCoefficients = (nint) NativeMemory.Alloc(memsize);
byte* memPtr = (byte*) dspSettings.pMatrixCoefficients;
for (uint i = 0; i < memsize; i += 1)
{
memPtr[i] = 0;
}
SetPanMatrixCoefficients();
}
private void UpdatePitch()
{
float doppler;
float dopplerScale = Device.DopplerScale;
if (!Is3D || dopplerScale == 0.0f)
{
doppler = 1.0f;
}
else
{
doppler = dspSettings.DopplerFactor * dopplerScale;
}
FAudio.FAudioSourceVoice_SetFrequencyRatio(
Voice,
(float) System.Math.Pow(2.0, pitch) * doppler,
0
);
}
// Taken from https://github.com/FNA-XNA/FNA/blob/master/src/Audio/SoundEffectInstance.cs
private unsafe void SetPanMatrixCoefficients()
{
/* Two major things to notice:
* 1. The spec assumes any speaker count >= 2 has Front Left/Right.
* 2. Stereo panning is WAY more complicated than you think.
* The main thing is that hard panning does NOT eliminate an
* entire channel; the two channels are blended on each side.
* -flibit
*/
float* outputMatrix = (float*) dspSettings.pMatrixCoefficients;
if (dspSettings.SrcChannelCount == 1)
{
if (dspSettings.DstChannelCount == 1)
{
outputMatrix[0] = 1.0f;
}
else
{
outputMatrix[0] = (pan > 0.0f) ? (1.0f - pan) : 1.0f;
outputMatrix[1] = (pan < 0.0f) ? (1.0f + pan) : 1.0f;
}
}
else
{
if (dspSettings.DstChannelCount == 1)
{
outputMatrix[0] = 1.0f;
outputMatrix[1] = 1.0f;
}
else
{
if (pan <= 0.0f)
{
// Left speaker blends left/right channels
outputMatrix[0] = 0.5f * pan + 1.0f;
outputMatrix[1] = 0.5f * -pan;
// Right speaker gets less of the right channel
outputMatrix[2] = 0.0f;
outputMatrix[3] = pan + 1.0f;
}
else
{
// Left speaker gets less of the left channel
outputMatrix[0] = -pan + 1.0f;
outputMatrix[1] = 0.0f;
// Right speaker blends right/left channels
outputMatrix[2] = 0.5f * pan;
outputMatrix[3] = 0.5f * -pan + 1.0f;
}
}
}
}
protected unsafe override void Destroy()
{
StopImmediate();
FAudio.FAudioVoice_DestroyVoice(Voice);
NativeMemory.Free((void*) dspSettings.pMatrixCoefficients);
if (ReverbEffect != null)
{
NativeMemory.Free((void*) ReverbSends.pSends);
}
}
}
}

@@ -1,5 +1,3 @@
-using System;
namespace MoonWorks.Audio
{
// NOTE: all sounds played with a SoundSequence must have the same audio format!
@@ -11,17 +9,15 @@ namespace MoonWorks.Audio
public SoundSequence(AudioDevice device, Format format) : base(device, format)
{
-device.AddSoundSequenceReference(this);
-OnUpdate += Update;
}
public SoundSequence(AudioDevice device, StaticSound templateSound) : base(device, templateSound.Format)
{
-device.AddSoundSequenceReference(this);
-OnUpdate += Update;
}
-private void Update()
+public override void Update()
{
lock (StateLock)
{
@@ -30,14 +26,7 @@ namespace MoonWorks.Audio
if (NeedSoundThreshold > 0)
{
-FAudio.FAudioSourceVoice_GetState(
-Handle,
-out var state,
-FAudio.FAUDIO_VOICE_NOSAMPLESPLAYED
-);
-var queuedBufferCount = state.BuffersQueued;
-for (int i = 0; i < NeedSoundThreshold - queuedBufferCount; i += 1)
+for (int i = 0; i < NeedSoundThreshold - BuffersQueued; i += 1)
{
if (OnSoundNeeded != null)
{
@@ -59,7 +48,7 @@ namespace MoonWorks.Audio
lock (StateLock)
{
-Submit(sound);
+Submit(sound.Buffer);
}
}
}

@@ -45,9 +45,6 @@ namespace MoonWorks.Audio
}
}
-public delegate void OnUpdateFunc();
-public OnUpdateFunc OnUpdate; // called by AudioDevice thread
public SourceVoice(
AudioDevice device,
Format format
@@ -126,32 +123,14 @@ namespace MoonWorks.Audio
}
}
-/// <summary>
-/// Adds a static sound to the voice queue.
-/// The voice processes and plays back the buffers in its queue in the order that they were submitted.
-/// </summary>
-/// <param name="sound">The sound to submit to the voice.</param>
-/// <param name="loop">Designates that the voice will loop the submitted buffer.</param>
-public void Submit(StaticSound sound, bool loop = false)
-{
-if (loop)
-{
-sound.Buffer.LoopCount = FAudio.FAUDIO_LOOP_INFINITE;
-}
-else
-{
-sound.Buffer.LoopCount = 0;
-}
-Submit(sound.Buffer);
-}
/// <summary>
/// Adds an FAudio buffer to the voice queue.
/// The voice processes and plays back the buffers in its queue in the order that they were submitted.
/// </summary>
/// <param name="buffer">The buffer to submit to the voice.</param>
public void Submit(FAudio.FAudioBuffer buffer)
{
+lock (StateLock)
+{
FAudio.FAudioSourceVoice_SubmitSourceBuffer(
Handle,
@@ -159,25 +138,23 @@ namespace MoonWorks.Audio
IntPtr.Zero
);
+}
}
-/// <summary>
-/// Designates that this source voice will return to the voice pool once all its buffers are exhausted.
-/// </summary>
-public void ReturnWhenIdle()
-{
-Device.ReturnWhenIdle(this);
-}
/// <summary>
-/// Returns this source voice to the voice pool.
-/// Holding on to the reference after calling this will cause problems!
+/// Specifies that this source voice can be returned to the voice pool.
/// </summary>
public void Return()
{
Stop();
+Reset();
Device.Return(this);
}
+/// <summary>
+/// Called automatically by AudioDevice in the audio thread.
+/// </summary>
+public virtual void Update() { }
protected override unsafe void Destroy()
{
Stop();

@@ -6,33 +6,33 @@ namespace MoonWorks.Audio
{
private AudioDevice Device;
-Dictionary<Format, Queue<SourceVoice>> VoiceLists = new Dictionary<Format, Queue<SourceVoice>>();
+Dictionary<(System.Type, Format), Queue<SourceVoice>> VoiceLists = new Dictionary<(System.Type, Format), Queue<SourceVoice>>();
public SourceVoicePool(AudioDevice device)
{
Device = device;
}
-public SourceVoice Obtain(Format format)
+public T Obtain<T>(Format format) where T : SourceVoice, IPoolable<T>
{
-if (!VoiceLists.ContainsKey(format))
+if (!VoiceLists.ContainsKey((typeof(T), format)))
{
-VoiceLists.Add(format, new Queue<SourceVoice>());
+VoiceLists.Add((typeof(T), format), new Queue<SourceVoice>());
}
-var list = VoiceLists[format];
+var list = VoiceLists[(typeof(T), format)];
if (list.Count == 0)
{
-list.Enqueue(new SourceVoice(Device, format));
+list.Enqueue(T.Create(Device, format));
}
-return list.Dequeue();
+return (T) list.Dequeue();
}
public void Return(SourceVoice voice)
{
-var list = VoiceLists[voice.Format];
+var list = VoiceLists[(voice.GetType(), voice.Format)];
list.Enqueue(voice);
}
}

src/Audio/StaticVoice.cs (new file, +54)

@@ -0,0 +1,54 @@
namespace MoonWorks.Audio
{
public class StaticVoice : SourceVoice, IPoolable<StaticVoice>
{
/// <summary>
/// Indicates if the voice should return to the voice pool when the voice is idle.
/// If you set this and then hold on to the voice reference there will be problems!
/// </summary>
public bool DeactivateWhenIdle { get; set; }
public static StaticVoice Create(AudioDevice device, Format format)
{
return new StaticVoice(device, format);
}
public StaticVoice(AudioDevice device, Format format) : base(device, format)
{
}
public override void Update()
{
lock (StateLock)
{
if (DeactivateWhenIdle)
{
if (BuffersQueued == 0)
{
Return();
}
}
}
}
/// <summary>
/// Adds a static sound to the voice queue.
/// The voice processes and plays back the buffers in its queue in the order that they were submitted.
/// </summary>
/// <param name="sound">The sound to submit to the voice.</param>
/// <param name="loop">Designates that the voice will loop the submitted buffer.</param>
public void Submit(StaticSound sound, bool loop = false)
{
if (loop)
{
sound.Buffer.LoopCount = FAudio.FAUDIO_LOOP_INFINITE;
}
else
{
sound.Buffer.LoopCount = 0;
}
Submit(sound.Buffer);
}
}
}
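
A sketch of the fire-and-forget usage this enables, assuming an audioDevice and an already-loaded StaticSound named shotSound, and assuming Play() on the base SourceVoice (not shown in this diff):

var voice = audioDevice.Obtain<StaticVoice>(shotSound.Format);
voice.Submit(shotSound);          // queue the static buffer, no looping
voice.DeactivateWhenIdle = true;  // the audio thread's Update() will Return() it once the queue drains
voice.Play();                     // assumed SourceVoice method; no further bookkeeping needed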


@@ -1,239 +0,0 @@
using System;
using System.Runtime.InteropServices;
namespace MoonWorks.Audio
{
/// <summary>
/// For streaming long playback.
/// Must be extended with a decoder routine called by FillBuffer.
/// See StreamingSoundOgg for an example.
/// </summary>
public abstract class StreamingSound : SoundInstance
{
// Are we actively consuming buffers?
protected bool ConsumingBuffers = false;
private const int BUFFER_COUNT = 3;
private nuint BufferSize;
private readonly IntPtr[] buffers;
private int nextBufferIndex = 0;
private uint queuedBufferCount = 0;
private readonly object StateLock = new object();
public bool AutoUpdate { get; }
public abstract bool Loaded { get; }
public unsafe StreamingSound(
AudioDevice device,
ushort formatTag,
ushort bitsPerSample,
ushort blockAlign,
ushort channels,
uint samplesPerSecond,
uint bufferSize,
bool autoUpdate // should the AudioDevice thread automatically update this sound?
) : base(device, formatTag, bitsPerSample, blockAlign, channels, samplesPerSecond)
{
BufferSize = bufferSize;
buffers = new IntPtr[BUFFER_COUNT];
for (int i = 0; i < BUFFER_COUNT; i += 1)
{
buffers[i] = (IntPtr) NativeMemory.Alloc(bufferSize);
}
AutoUpdate = autoUpdate;
}
public override void Play()
{
PlayUsingOperationSet(0);
}
public override void QueueSyncPlay()
{
PlayUsingOperationSet(1);
}
private void PlayUsingOperationSet(uint operationSet)
{
lock (StateLock)
{
if (!Loaded)
{
Logger.LogError("Cannot play StreamingSound before calling Load!");
return;
}
if (State == SoundState.Playing)
{
return;
}
State = SoundState.Playing;
ConsumingBuffers = true;
if (AutoUpdate)
{
Device.AddAutoUpdateStreamingSoundInstance(this);
}
QueueBuffers();
FAudio.FAudioSourceVoice_Start(Voice, 0, operationSet);
}
}
public override void Pause()
{
lock (StateLock)
{
if (State == SoundState.Playing)
{
ConsumingBuffers = false;
FAudio.FAudioSourceVoice_Stop(Voice, 0, 0);
State = SoundState.Paused;
}
}
}
public override void Stop()
{
lock (StateLock)
{
ConsumingBuffers = false;
State = SoundState.Stopped;
}
}
public override void StopImmediate()
{
lock (StateLock)
{
ConsumingBuffers = false;
FAudio.FAudioSourceVoice_Stop(Voice, 0, 0);
FAudio.FAudioSourceVoice_FlushSourceBuffers(Voice);
ClearBuffers();
State = SoundState.Stopped;
}
}
internal unsafe void Update()
{
lock (StateLock)
{
if (!IsDisposed)
{
if (State != SoundState.Playing)
{
return;
}
QueueBuffers();
}
}
}
protected void QueueBuffers()
{
FAudio.FAudioSourceVoice_GetState(
Voice,
out var state,
FAudio.FAUDIO_VOICE_NOSAMPLESPLAYED
);
queuedBufferCount = state.BuffersQueued;
if (ConsumingBuffers)
{
for (int i = 0; i < BUFFER_COUNT - queuedBufferCount; i += 1)
{
AddBuffer();
}
}
else if (queuedBufferCount == 0)
{
Stop();
}
}
protected unsafe void ClearBuffers()
{
nextBufferIndex = 0;
queuedBufferCount = 0;
}
public unsafe void AddBuffer()
{
var buffer = buffers[nextBufferIndex];
nextBufferIndex = (nextBufferIndex + 1) % BUFFER_COUNT;
FillBuffer(
(void*) buffer,
(int) BufferSize,
out int filledLengthInBytes,
out bool reachedEnd
);
if (filledLengthInBytes > 0)
{
FAudio.FAudioBuffer buf = new FAudio.FAudioBuffer
{
AudioBytes = (uint) filledLengthInBytes,
pAudioData = (IntPtr) buffer,
PlayLength = (
(uint) (filledLengthInBytes /
Format.nChannels /
(uint) (Format.wBitsPerSample / 8))
)
};
FAudio.FAudioSourceVoice_SubmitSourceBuffer(
Voice,
ref buf,
IntPtr.Zero
);
queuedBufferCount += 1;
}
if (reachedEnd)
{
/* We have reached the end of the data, what do we do? */
ConsumingBuffers = false;
OnReachedEnd();
}
}
public abstract void Load();
public abstract void Unload();
protected unsafe abstract void FillBuffer(
void* buffer,
int bufferLengthInBytes, /* in bytes */
out int filledLengthInBytes, /* in bytes */
out bool reachedEnd
);
protected abstract void OnReachedEnd();
protected unsafe override void Destroy()
{
lock (StateLock)
{
if (!IsDisposed)
{
StopImmediate();
Unload();
for (int i = 0; i < BUFFER_COUNT; i += 1)
{
NativeMemory.Free((void*) buffers[i]);
}
}
}
}
}
}


@@ -1,113 +0,0 @@
using System;
using System.IO;
using System.Runtime.InteropServices;
namespace MoonWorks.Audio
{
public class StreamingSoundOgg : StreamingSoundSeekable
{
private IntPtr FileDataPtr = IntPtr.Zero;
private IntPtr VorbisHandle = IntPtr.Zero;
private FAudio.stb_vorbis_info Info;
public override bool Loaded => VorbisHandle != IntPtr.Zero;
private string FilePath;
public unsafe static StreamingSoundOgg Create(AudioDevice device, string filePath)
{
var handle = FAudio.stb_vorbis_open_filename(filePath, out int error, IntPtr.Zero);
if (error != 0)
{
Logger.LogError("Error: " + error);
throw new AudioLoadException("Error opening ogg file!");
}
var info = FAudio.stb_vorbis_get_info(handle);
var streamingSound = new StreamingSoundOgg(
device,
filePath,
info
);
FAudio.stb_vorbis_close(handle);
return streamingSound;
}
internal unsafe StreamingSoundOgg(
AudioDevice device,
string filePath,
FAudio.stb_vorbis_info info,
uint bufferSize = 32768
) : base(
device,
3, /* float type */
32, /* size of float */
(ushort) (4 * info.channels),
(ushort) info.channels,
info.sample_rate,
bufferSize,
true
) {
Info = info;
FilePath = filePath;
}
public override void Seek(uint sampleFrame)
{
FAudio.stb_vorbis_seek(VorbisHandle, sampleFrame);
}
public override unsafe void Load()
{
var fileStream = new FileStream(FilePath, FileMode.Open, FileAccess.Read);
FileDataPtr = (nint) NativeMemory.Alloc((nuint) fileStream.Length);
var fileDataSpan = new Span<byte>((void*) FileDataPtr, (int) fileStream.Length);
fileStream.ReadExactly(fileDataSpan);
fileStream.Close();
VorbisHandle = FAudio.stb_vorbis_open_memory(FileDataPtr, fileDataSpan.Length, out int error, IntPtr.Zero);
if (error != 0)
{
NativeMemory.Free((void*) FileDataPtr);
Logger.LogError("Error opening OGG file!");
Logger.LogError("Error: " + error);
throw new AudioLoadException("Error opening OGG file!");
}
}
public override unsafe void Unload()
{
if (Loaded)
{
FAudio.stb_vorbis_close(VorbisHandle);
NativeMemory.Free((void*) FileDataPtr);
VorbisHandle = IntPtr.Zero;
FileDataPtr = IntPtr.Zero;
}
}
protected unsafe override void FillBuffer(
void* buffer,
int bufferLengthInBytes,
out int filledLengthInBytes,
out bool reachedEnd
) {
var lengthInFloats = bufferLengthInBytes / sizeof(float);
/* NOTE: this function returns samples per channel, not total samples */
var samples = FAudio.stb_vorbis_get_samples_float_interleaved(
VorbisHandle,
Info.channels,
(IntPtr) buffer,
lengthInFloats
);
var sampleCount = samples * Info.channels;
reachedEnd = sampleCount < lengthInFloats;
filledLengthInBytes = sampleCount * sizeof(float);
}
}
}


@@ -1,40 +0,0 @@
namespace MoonWorks.Audio
{
public abstract class StreamingSoundSeekable : StreamingSound
{
public bool Loop { get; set; }
protected StreamingSoundSeekable(
AudioDevice device,
ushort formatTag,
ushort bitsPerSample,
ushort blockAlign,
ushort channels,
uint samplesPerSecond,
uint bufferSize,
bool autoUpdate
) : base(
device,
formatTag,
bitsPerSample,
blockAlign,
channels,
samplesPerSecond,
bufferSize,
autoUpdate
) {
}
public abstract void Seek(uint sampleFrame);
protected override void OnReachedEnd()
{
if (Loop)
{
ConsumingBuffers = true;
Seek(0);
}
}
}
}

@@ -1,8 +1,9 @@
using System;
+using System.Runtime.InteropServices;
namespace MoonWorks.Audio
{
-public abstract class StreamingVoice : SourceVoice
+public class StreamingVoice : SourceVoice, IPoolable<StreamingVoice>
{
private const int BUFFER_COUNT = 3;
private readonly IntPtr[] buffers;
@@ -11,18 +12,55 @@ namespace MoonWorks.Audio
public bool Loop { get; set; }
-public StreamingVoice(AudioDevice device, Format format, uint bufferSize) : base(device, format)
-{
-BufferSize = bufferSize;
-}
-internal unsafe void Update()
+public AudioData AudioData { get; protected set; }
+public static StreamingVoice Create(AudioDevice device, Format format)
+{
+return new StreamingVoice(device, format);
+}
+public unsafe StreamingVoice(AudioDevice device, Format format) : base(device, format)
+{
+buffers = new IntPtr[BUFFER_COUNT];
+}
+public void Load(AudioData data)
+{
+lock (StateLock)
+{
+if (AudioData != null)
+{
+AudioData.Unload();
+}
+data.Load();
+AudioData = data;
+InitializeBuffers();
+QueueBuffers();
+}
+}
+public void Unload()
+{
+lock (StateLock)
+{
+if (AudioData != null)
+{
+Stop();
+AudioData.Unload();
+AudioData = null;
+}
+}
+}
+public override void Update()
{
lock (StateLock)
{
if (!IsDisposed)
{
-if (State != SoundState.Playing)
+if (AudioData == null || State != SoundState.Playing)
{
return;
}
@@ -34,8 +72,8 @@ namespace MoonWorks.Audio
protected void QueueBuffers()
{
-var buffersQueued = BuffersQueued;
-for (int i = 0; i < BUFFER_COUNT - buffersQueued; i += 1)
+int buffersNeeded = BUFFER_COUNT - (int) BuffersQueued; // don't get got by uint underflow!
+for (int i = 0; i < buffersNeeded; i += 1)
{
AddBuffer();
}
@@ -46,7 +84,7 @@ namespace MoonWorks.Audio
var buffer = buffers[nextBufferIndex];
nextBufferIndex = (nextBufferIndex + 1) % BUFFER_COUNT;
-FillBuffer(
+AudioData.Decode(
(void*) buffer,
(int) BufferSize,
out int filledLengthInBytes,
@@ -74,19 +112,25 @@ namespace MoonWorks.Audio
/* We have reached the end of the data, what do we do? */
if (Loop)
{
-SeekStart();
+AudioData.Seek(0);
AddBuffer();
}
}
}
-protected unsafe abstract void FillBuffer(
-void* buffer,
-int bufferLengthInBytes, /* in bytes */
-out int filledLengthInBytes, /* in bytes */
-out bool reachedEnd
-);
-protected abstract void SeekStart();
+private unsafe void InitializeBuffers()
+{
+BufferSize = AudioData.DecodeBufferSize;
+for (int i = 0; i < BUFFER_COUNT; i += 1)
+{
+if (buffers[i] != IntPtr.Zero)
+{
+NativeMemory.Free((void*) buffers[i]);
+}
+buffers[i] = (IntPtr) NativeMemory.Alloc(BufferSize);
+}
+}
}
}