Theora video support + audio improvements (#20)

- `SoundInstance.Play` no longer takes a loop parameter
- `SoundInstance.Stop` is split into `Stop` and `StopImmediate` instead of taking an immediate parameter
- Added `StreamingSoundSeekable`; seeking now lives there, so streaming audio that cannot seek can derive from `StreamingSound` directly
- `StreamingSound` no longer has a `Loop` property, but `StreamingSoundSeekable` does
- Abstract `StreamingSound.AddBuffer` renamed to `FillBuffer`
- `FillBuffer` is now provided with a native buffer to avoid an extra data copy
- `StreamingSound` buffer implementation optimized to avoid repeated allocations and frees

- Added `Video` class, which can load and play Theora (.ogv) streaming video/audio (usage sketch below)
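
A minimal usage sketch of the reworked audio API and the new `Video` class, based on the signatures in this diff. The `graphicsDevice`/`audioDevice` instances and the content file paths are assumptions for illustration and are not part of this commit:

// Sketch assumes it runs somewhere a GraphicsDevice and AudioDevice are already available
// (e.g. inside a MoonWorks.Game subclass).

// Streaming audio: Play() takes no loop flag, looping moved to StreamingSoundSeekable,
// and Stop(bool immediate) is now Stop() / StopImmediate().
var music = StreamingSoundOgg.Load(audioDevice, "Content/music.ogg");
music.Loop = true;        // Loop now lives on StreamingSoundSeekable
music.Play();
// ... later ...
music.Stop();             // replaces Stop(immediate: false)
// or, to cut playback off right away:
music.StopImmediate();    // replaces Stop(immediate: true): stops and flushes queued buffers

// Theora video: load an .ogv, play it, and sample the decoded frame each draw.
var video = new Video(graphicsDevice, audioDevice, "Content/intro.ogv");
video.PlaybackSpeed = 1.0f;  // not initialized by the constructor shown in this diff, so set it explicitly
video.Play(loop: false);
// per frame:
Texture frame = video.GetTexture();  // decodes pending frames and returns the RGBA render texture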

Reviewed-on: #20
pull/21/head
cosmonaut 2022-08-02 21:04:12 +00:00
parent 5a5fbc0c77
commit efb9893aef
19 changed files with 667 additions and 179 deletions

.gitmodules

@@ -10,3 +10,6 @@
[submodule "lib/WellspringCS"]
path = lib/WellspringCS
url = https://gitea.moonside.games/MoonsideGames/WellspringCS.git
[submodule "lib/Theorafile"]
path = lib/Theorafile
url = https://github.com/FNA-XNA/Theorafile.git


@@ -15,6 +15,7 @@
<ProjectReference Include=".\lib\RefreshCS\RefreshCS.csproj" />
<ProjectReference Include=".\lib\FAudio\csharp\FAudio-CS.Core.csproj" />
<ProjectReference Include=".\lib\WellspringCS\WellspringCS.csproj" />
<ProjectReference Include=".\lib\Theorafile\csharp\Theorafile-CS.Core.csproj" />
</ItemGroup>
<ItemGroup>
@@ -22,4 +23,13 @@
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
</ItemGroup>
<ItemGroup>
<EmbeddedResource Include="src\Video\Shaders\Compiled\FullscreenVert.spv">
<LogicalName>MoonWorks.Shaders.FullscreenVert.spv</LogicalName>
</EmbeddedResource>
<EmbeddedResource Include="src\Video\Shaders\Compiled\YUV2RGBAFrag.spv">
<LogicalName>MoonWorks.Shaders.YUV2RGBAFrag.spv</LogicalName>
</EmbeddedResource>
</ItemGroup>
</Project>


@@ -15,4 +15,8 @@
<dllmap dll="Wellspring" os="windows" target="Wellspring.dll"/>
<dllmap dll="Wellspring" os="osx" target="libWellspring.0.dylib"/>
<dllmap dll="Wellspring" os="linux,freebsd,netbsd" target="libWellspring.so.0"/>
<dllmap dll="Theorafile" os="windows" target="libtheorafile.dll"/>
<dllmap dll="Theorafile" os="osx" target="libtheorafile.dylib"/>
<dllmap dll="Theorafile" os="linux,freebsd,netbsd" target="libtheorafile.so"/>
</configuration>

lib/Theorafile Submodule

@@ -0,0 +1 @@
Subproject commit dd8c7fa69e678b6182cdaa71458ad08dd31c65da


@@ -1,6 +1,5 @@
using System;
using System.Runtime.InteropServices;
using MoonWorks.Math.Float;
namespace MoonWorks.Audio
{
@@ -8,13 +7,12 @@ namespace MoonWorks.Audio
{
internal IntPtr Handle;
internal FAudio.FAudioWaveFormatEx Format;
public bool Loop { get; protected set; } = false;
protected FAudio.F3DAUDIO_DSP_SETTINGS dspSettings;
public bool Is3D { get; protected set; }
public abstract SoundState State { get; protected set; }
public virtual SoundState State { get; protected set; }
private float _pan = 0;
public float Pan
@@ -238,11 +236,10 @@ namespace MoonWorks.Audio
);
}
public abstract void Play(bool loop);
public abstract void Play();
public abstract void Pause();
public abstract void Stop(bool immediate);
public abstract void Seek(float seconds);
public abstract void Seek(uint sampleFrame);
public abstract void Stop();
public abstract void StopImmediate();
private void InitDSPSettings(uint srcChannels)
{
@@ -345,8 +342,7 @@ namespace MoonWorks.Audio
protected override void Destroy()
{
Stop(true);
StopImmediate();
FAudio.FAudioVoice_DestroyVoice(Handle);
Marshal.FreeHGlobal(dspSettings.pMatrixCoefficients);
}


@@ -6,6 +6,8 @@ namespace MoonWorks.Audio
{
public StaticSound Parent { get; }
public bool Loop { get; set; }
private SoundState _state = SoundState.Stopped;
public override SoundState State
{
@@ -18,7 +20,7 @@ namespace MoonWorks.Audio
);
if (state.BuffersQueued == 0)
{
Stop(true);
StopImmediate();
}
return _state;
@@ -38,15 +40,13 @@ namespace MoonWorks.Audio
Parent = parent;
}
public override void Play(bool loop = false)
public override void Play()
{
if (State == SoundState.Playing)
{
return;
}
Loop = loop;
if (Loop)
{
Parent.Handle.LoopCount = 255;
@@ -79,21 +79,20 @@ namespace MoonWorks.Audio
}
}
public override void Stop(bool immediate = true)
public override void Stop()
{
if (immediate)
FAudio.FAudioSourceVoice_ExitLoop(Handle, 0);
State = SoundState.Stopped;
}
public override void StopImmediate()
{
FAudio.FAudioSourceVoice_Stop(Handle, 0, 0);
FAudio.FAudioSourceVoice_FlushSourceBuffers(Handle);
State = SoundState.Stopped;
}
else
{
FAudio.FAudioSourceVoice_ExitLoop(Handle, 0);
}
}
private void PerformSeek(uint sampleFrame)
public void Seek(uint sampleFrame)
{
if (State == SoundState.Playing)
{
@@ -102,20 +101,6 @@ namespace MoonWorks.Audio
}
Parent.Handle.PlayBegin = sampleFrame;
Play();
}
public override void Seek(float seconds)
{
uint sampleFrame =
(uint) (Parent.SamplesPerSecond * seconds);
PerformSeek(sampleFrame);
}
public override void Seek(uint sampleFrame)
{
PerformSeek(sampleFrame);
}
public void Free()


@@ -1,38 +1,46 @@
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
namespace MoonWorks.Audio
{
/// <summary>
/// For streaming long playback.
/// Can be extended to support custom decoders.
/// Must be extended with a decoder routine called by FillBuffer.
/// See StreamingSoundOgg for an example.
/// </summary>
public abstract class StreamingSound : SoundInstance
{
private readonly List<IntPtr> queuedBuffers = new List<IntPtr>();
private readonly List<uint> queuedSizes = new List<uint>();
private const int MINIMUM_BUFFER_CHECK = 3;
private const int BUFFER_COUNT = 3;
private readonly IntPtr[] buffers;
private int nextBufferIndex = 0;
private uint queuedBufferCount = 0;
protected abstract int BUFFER_SIZE { get; }
public int PendingBufferCount => queuedBuffers.Count;
public StreamingSound(
public unsafe StreamingSound(
AudioDevice device,
ushort formatTag,
ushort bitsPerSample,
ushort blockAlign,
ushort channels,
uint samplesPerSecond
) : base(device, formatTag, bitsPerSample, blockAlign, channels, samplesPerSecond) { }
) : base(device, formatTag, bitsPerSample, blockAlign, channels, samplesPerSecond)
{
device.AddDynamicSoundInstance(this);
public override void Play(bool loop = false)
buffers = new IntPtr[BUFFER_COUNT];
for (int i = 0; i < BUFFER_COUNT; i += 1)
{
buffers[i] = (IntPtr) NativeMemory.Alloc((nuint) BUFFER_SIZE);
}
}
public override void Play()
{
if (State == SoundState.Playing)
{
return;
}
Loop = loop;
State = SoundState.Playing;
Update();
@@ -48,19 +56,21 @@ namespace MoonWorks.Audio
}
}
public override void Stop(bool immediate = true)
public override void Stop()
{
if (immediate)
State = SoundState.Stopped;
}
public override void StopImmediate()
{
FAudio.FAudioSourceVoice_Stop(Handle, 0, 0);
FAudio.FAudioSourceVoice_FlushSourceBuffers(Handle);
ClearBuffers();
}
State = SoundState.Stopped;
}
internal void Update()
internal unsafe void Update()
{
if (State != SoundState.Playing)
{
@@ -73,68 +83,45 @@ namespace MoonWorks.Audio
FAudio.FAUDIO_VOICE_NOSAMPLESPLAYED
);
while (PendingBufferCount > state.BuffersQueued)
lock (queuedBuffers)
{
Marshal.FreeHGlobal(queuedBuffers[0]);
queuedBuffers.RemoveAt(0);
}
queuedBufferCount = state.BuffersQueued;
QueueBuffers();
}
protected void QueueBuffers()
{
for (
int i = MINIMUM_BUFFER_CHECK - PendingBufferCount;
i > 0;
i -= 1
)
for (int i = 0; i < BUFFER_COUNT - queuedBufferCount; i += 1)
{
AddBuffer();
}
}
protected void ClearBuffers()
protected unsafe void ClearBuffers()
{
lock (queuedBuffers)
{
foreach (IntPtr buf in queuedBuffers)
{
Marshal.FreeHGlobal(buf);
}
queuedBuffers.Clear();
queuedSizes.Clear();
}
nextBufferIndex = 0;
queuedBufferCount = 0;
}
protected void AddBuffer()
protected unsafe void AddBuffer()
{
AddBuffer(
out var buffer,
out var bufferOffset,
out var bufferLength,
out var reachedEnd
var buffer = buffers[nextBufferIndex];
nextBufferIndex = (nextBufferIndex + 1) % BUFFER_COUNT;
FillBuffer(
(void*) buffer,
BUFFER_SIZE,
out int filledLengthInBytes,
out bool reachedEnd
);
var lengthInBytes = bufferLength * sizeof(float);
IntPtr next = Marshal.AllocHGlobal((int) lengthInBytes);
Marshal.Copy(buffer, (int) bufferOffset, next, (int) bufferLength);
lock (queuedBuffers)
{
queuedBuffers.Add(next);
if (State != SoundState.Stopped)
{
FAudio.FAudioBuffer buf = new FAudio.FAudioBuffer
{
AudioBytes = lengthInBytes,
pAudioData = next,
AudioBytes = (uint) filledLengthInBytes,
pAudioData = (IntPtr) buffer,
PlayLength = (
lengthInBytes /
(uint) (filledLengthInBytes /
Format.nChannels /
(uint) (Format.wBitsPerSample / 8)
(uint) (Format.wBitsPerSample / 8))
)
};
@@ -143,39 +130,36 @@ namespace MoonWorks.Audio
ref buf,
IntPtr.Zero
);
}
else
{
queuedSizes.Add(lengthInBytes);
}
}
queuedBufferCount += 1;
/* We have reached the end of the file, what do we do? */
if (reachedEnd)
{
if (Loop)
{
SeekStart();
}
else
{
Stop(false);
}
OnReachedEnd();
}
}
protected abstract void AddBuffer(
out float[] buffer,
out uint bufferOffset, /* in floats */
out uint bufferLength, /* in floats */
protected virtual void OnReachedEnd()
{
Stop();
}
protected unsafe abstract void FillBuffer(
void* buffer,
int bufferLengthInBytes, /* in bytes */
out int filledLengthInBytes, /* in bytes */
out bool reachedEnd
);
protected abstract void SeekStart();
protected override void Destroy()
protected unsafe override void Destroy()
{
Stop(true);
StopImmediate();
for (int i = 0; i < BUFFER_COUNT; i += 1)
{
NativeMemory.Free((void*) buffers[i]);
}
}
}
}


@@ -4,28 +4,23 @@ using System.Runtime.InteropServices;
namespace MoonWorks.Audio
{
public class StreamingSoundOgg : StreamingSound
public class StreamingSoundOgg : StreamingSoundSeekable
{
// FIXME: what should this value be?
public const int BUFFER_SIZE = 1024 * 128;
private IntPtr VorbisHandle;
private IntPtr FileDataPtr;
private FAudio.stb_vorbis_info Info;
private readonly float[] buffer; // currently decoded bytes
protected override int BUFFER_SIZE => 32768;
public override SoundState State { get; protected set; }
public static StreamingSoundOgg Load(AudioDevice device, string filePath)
public unsafe static StreamingSoundOgg Load(AudioDevice device, string filePath)
{
var fileData = File.ReadAllBytes(filePath);
var fileDataPtr = Marshal.AllocHGlobal(fileData.Length);
Marshal.Copy(fileData, 0, fileDataPtr, fileData.Length);
var vorbisHandle = FAudio.stb_vorbis_open_memory(fileDataPtr, fileData.Length, out int error, IntPtr.Zero);
var fileDataPtr = NativeMemory.Alloc((nuint) fileData.Length);
Marshal.Copy(fileData, 0, (IntPtr) fileDataPtr, fileData.Length);
var vorbisHandle = FAudio.stb_vorbis_open_memory((IntPtr) fileDataPtr, fileData.Length, out int error, IntPtr.Zero);
if (error != 0)
{
((GCHandle) fileDataPtr).Free();
NativeMemory.Free(fileDataPtr);
Logger.LogError("Error opening OGG file!");
Logger.LogError("Error: " + error);
throw new AudioLoadException("Error opening OGG file!");
@@ -34,7 +29,7 @@ namespace MoonWorks.Audio
return new StreamingSoundOgg(
device,
fileDataPtr,
(IntPtr) fileDataPtr,
vorbisHandle,
info
);
@@ -42,7 +37,7 @@ namespace MoonWorks.Audio
internal StreamingSoundOgg(
AudioDevice device,
IntPtr fileDataPtr, // MUST BE AN ALLOCHGLOBAL HANDLE!!
IntPtr fileDataPtr, // MUST BE A NATIVE MEMORY HANDLE!!
IntPtr vorbisHandle,
FAudio.stb_vorbis_info info
) : base(
@@ -57,12 +52,9 @@ namespace MoonWorks.Audio
FileDataPtr = fileDataPtr;
VorbisHandle = vorbisHandle;
Info = info;
buffer = new float[BUFFER_SIZE];
device.AddDynamicSoundInstance(this);
}
private void PerformSeek(uint sampleFrame)
public override void Seek(uint sampleFrame)
{
if (State == SoundState.Playing)
{
@@ -80,49 +72,32 @@ namespace MoonWorks.Audio
}
}
public override void Seek(float seconds)
{
uint sampleFrame = (uint) (Info.sample_rate * seconds);
PerformSeek(sampleFrame);
}
public override void Seek(uint sampleFrame)
{
PerformSeek(sampleFrame);
}
protected override void AddBuffer(
out float[] buffer,
out uint bufferOffset,
out uint bufferLength,
protected unsafe override void FillBuffer(
void* buffer,
int bufferLengthInBytes,
out int filledLengthInBytes,
out bool reachedEnd
)
{
buffer = this.buffer;
var lengthInFloats = bufferLengthInBytes / sizeof(float);
/* NOTE: this function returns samples per channel, not total samples */
var samples = FAudio.stb_vorbis_get_samples_float_interleaved(
VorbisHandle,
Info.channels,
buffer,
buffer.Length
(IntPtr) buffer,
lengthInFloats
);
var sampleCount = samples * Info.channels;
bufferOffset = 0;
bufferLength = (uint) sampleCount;
reachedEnd = sampleCount < buffer.Length;
reachedEnd = sampleCount < lengthInFloats;
filledLengthInBytes = sampleCount * sizeof(float);
}
protected override void SeekStart()
{
FAudio.stb_vorbis_seek_start(VorbisHandle);
}
protected override void Destroy()
protected unsafe override void Destroy()
{
FAudio.stb_vorbis_close(VorbisHandle);
Marshal.FreeHGlobal(FileDataPtr);
NativeMemory.Free((void*) FileDataPtr);
}
}
}


@@ -0,0 +1,25 @@
namespace MoonWorks.Audio
{
public abstract class StreamingSoundSeekable : StreamingSound
{
public bool Loop { get; set; }
protected StreamingSoundSeekable(AudioDevice device, ushort formatTag, ushort bitsPerSample, ushort blockAlign, ushort channels, uint samplesPerSecond) : base(device, formatTag, bitsPerSample, blockAlign, channels, samplesPerSecond)
{
}
public abstract void Seek(uint sampleFrame);
protected override void OnReachedEnd()
{
if (Loop)
{
Seek(0);
}
else
{
Stop();
}
}
}
}


@@ -1,6 +1,4 @@
using System;
using System.Runtime.InteropServices;
using MoonWorks.Math;
using RefreshCS;
namespace MoonWorks.Graphics
@@ -835,6 +833,26 @@ namespace MoonWorks.Graphics
SetTextureData(new TextureSlice(texture), dataPtr, dataLengthInBytes);
}
/// <summary>
/// Asynchronously copies YUV data into three textures. Use with compressed video.
/// </summary>
public void SetTextureDataYUV(Texture yTexture, Texture uTexture, Texture vTexture, IntPtr dataPtr, uint dataLengthInBytes)
{
Refresh.Refresh_SetTextureDataYUV(
Device.Handle,
Handle,
yTexture.Handle,
uTexture.Handle,
vTexture.Handle,
yTexture.Width,
yTexture.Height,
uTexture.Width,
uTexture.Height,
dataPtr,
dataLengthInBytes
);
}
/// <summary>
/// Performs an asynchronous texture-to-texture copy on the GPU.
/// </summary>


@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using RefreshCS;
namespace MoonWorks.Graphics
@@ -8,6 +9,11 @@
{
public IntPtr Handle { get; }
// Built-in video pipeline
private ShaderModule VideoVertexShader { get; }
private ShaderModule VideoFragmentShader { get; }
internal GraphicsPipeline VideoPipeline { get; }
public bool IsDisposed { get; private set; }
private readonly List<WeakReference<GraphicsResource>> resources = new List<WeakReference<GraphicsResource>>();
@@ -28,6 +34,26 @@
presentationParameters,
Conversions.BoolToByte(debugMode)
);
VideoVertexShader = new ShaderModule(this, GetEmbeddedResource("MoonWorks.Shaders.FullscreenVert.spv"));
VideoFragmentShader = new ShaderModule(this, GetEmbeddedResource("MoonWorks.Shaders.YUV2RGBAFrag.spv"));
VideoPipeline = new GraphicsPipeline(
this,
new GraphicsPipelineCreateInfo
{
AttachmentInfo = new GraphicsPipelineAttachmentInfo(
new ColorAttachmentDescription(TextureFormat.R8G8B8A8, ColorAttachmentBlendState.None)
),
DepthStencilState = DepthStencilState.Disable,
VertexShaderInfo = GraphicsShaderInfo.Create(VideoVertexShader, "main", 0),
FragmentShaderInfo = GraphicsShaderInfo.Create(VideoFragmentShader, "main", 3),
VertexInputState = VertexInputState.Empty,
RasterizerState = RasterizerState.CCW_CullNone,
PrimitiveType = PrimitiveType.TriangleList,
MultisampleState = MultisampleState.None
}
);
}
public CommandBuffer AcquireCommandBuffer()
@@ -77,6 +103,11 @@
}
}
private static Stream GetEmbeddedResource(string name)
{
return typeof(GraphicsDevice).Assembly.GetManifestResourceStream(name);
}
protected virtual void Dispose(bool disposing)
{
if (!IsDisposed)


@@ -159,6 +159,12 @@ namespace MoonWorks.Input
{ AxisButtonCode.RightY_Down, RightYDown }
};
TriggerCodeToTriggerButton = new Dictionary<TriggerCode, TriggerButton>
{
{ TriggerCode.Left, TriggerLeftButton },
{ TriggerCode.Right, TriggerRightButton }
};
VirtualButtons = new VirtualButton[]
{
A,


@@ -196,6 +196,7 @@ namespace MoonWorks
NativeLibrary.SetDllImportResolver(typeof(RefreshCS.Refresh).Assembly, MapAndLoad);
NativeLibrary.SetDllImportResolver(typeof(FAudio).Assembly, MapAndLoad);
NativeLibrary.SetDllImportResolver(typeof(WellspringCS.Wellspring).Assembly, MapAndLoad);
NativeLibrary.SetDllImportResolver(typeof(Theorafile).Assembly, MapAndLoad);
}
#endregion

Binary file not shown.

Binary file not shown.


@@ -0,0 +1,9 @@
#version 450
layout(location = 0) out vec2 outTexCoord;
void main()
{
outTexCoord = vec2((gl_VertexIndex << 1) & 2, gl_VertexIndex & 2);
gl_Position = vec4(outTexCoord * 2.0 - 1.0, 0.0, 1.0);
}


@@ -0,0 +1,38 @@
/*
* This effect is based on the YUV-to-RGBA GLSL shader found in SDL.
* Thus, it also released under the zlib license:
* http://libsdl.org/license.php
*/
#version 450
layout(location = 0) in vec2 TexCoord;
layout(location = 0) out vec4 FragColor;
layout(binding = 0, set = 1) uniform sampler2D YSampler;
layout(binding = 1, set = 1) uniform sampler2D USampler;
layout(binding = 2, set = 1) uniform sampler2D VSampler;
/* More info about colorspace conversion:
* http://www.equasys.de/colorconversion.html
* http://www.equasys.de/colorformat.html
*/
const vec3 offset = vec3(-0.0625, -0.5, -0.5);
const vec3 Rcoeff = vec3(1.164, 0.000, 1.793);
const vec3 Gcoeff = vec3(1.164, -0.213, -0.533);
const vec3 Bcoeff = vec3(1.164, 2.112, 0.000);
void main()
{
vec3 yuv;
yuv.x = texture(YSampler, TexCoord).r;
yuv.y = texture(USampler, TexCoord).r;
yuv.z = texture(VSampler, TexCoord).r;
yuv += offset;
FragColor.r = dot(yuv, Rcoeff);
FragColor.g = dot(yuv, Gcoeff);
FragColor.b = dot(yuv, Bcoeff);
FragColor.a = 1.0;
}


@@ -0,0 +1,45 @@
using System;
using MoonWorks.Audio;
namespace MoonWorks.Video
{
public unsafe class StreamingSoundTheora : StreamingSound
{
private IntPtr VideoHandle;
protected override int BUFFER_SIZE => 8192;
internal StreamingSoundTheora(
AudioDevice device,
IntPtr videoHandle,
int channels,
uint sampleRate
) : base(
device,
3, /* float type */
32, /* size of float */
(ushort) (4 * channels),
(ushort) channels,
sampleRate
) {
VideoHandle = videoHandle;
}
protected override unsafe void FillBuffer(
void* buffer,
int bufferLengthInBytes,
out int filledLengthInBytes,
out bool reachedEnd
) {
var lengthInFloats = bufferLengthInBytes / sizeof(float);
int samples = Theorafile.tf_readaudio(
VideoHandle,
(IntPtr) buffer,
lengthInFloats
);
filledLengthInBytes = samples * sizeof(float);
reachedEnd = Theorafile.tf_eos(VideoHandle) == 1;
}
}
}

src/Video/Video.cs

@@ -0,0 +1,357 @@
/* Heavily based on https://github.com/FNA-XNA/FNA/blob/master/src/Media/Xiph/VideoPlayer.cs */
using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
using MoonWorks.Audio;
using MoonWorks.Graphics;
namespace MoonWorks.Video
{
public enum VideoState
{
Playing,
Paused,
Stopped
}
public unsafe class Video : IDisposable
{
internal IntPtr Handle;
public bool Loop { get; private set; }
public float Volume {
get => volume;
set
{
volume = value;
if (audioStream != null)
{
audioStream.Volume = value;
}
}
}
public float PlaybackSpeed { get; set; }
public double FramesPerSecond => fps;
private VideoState State = VideoState.Stopped;
private double fps;
private int yWidth;
private int yHeight;
private int uvWidth;
private int uvHeight;
private void* yuvData = null;
private int yuvDataLength;
private int currentFrame;
private GraphicsDevice GraphicsDevice;
private Texture RenderTexture = null;
private Texture yTexture = null;
private Texture uTexture = null;
private Texture vTexture = null;
private Sampler LinearSampler;
private AudioDevice AudioDevice = null;
private StreamingSoundTheora audioStream = null;
private float volume = 1.0f;
private Stopwatch timer;
private double lastTimestamp;
private double timeElapsed;
private bool disposed;
/* TODO: is there some way for us to load the data into memory? */
public Video(GraphicsDevice graphicsDevice, AudioDevice audioDevice, string filename)
{
GraphicsDevice = graphicsDevice;
AudioDevice = audioDevice;
if (!System.IO.File.Exists(filename))
{
throw new ArgumentException("Video file not found!");
}
if (Theorafile.tf_fopen(filename, out Handle) < 0)
{
throw new ArgumentException("Invalid video file!");
}
Theorafile.th_pixel_fmt format;
Theorafile.tf_videoinfo(
Handle,
out yWidth,
out yHeight,
out fps,
out format
);
if (format == Theorafile.th_pixel_fmt.TH_PF_420)
{
uvWidth = yWidth / 2;
uvHeight = yHeight / 2;
}
else if (format == Theorafile.th_pixel_fmt.TH_PF_422)
{
uvWidth = yWidth / 2;
uvHeight = yHeight;
}
else if (format == Theorafile.th_pixel_fmt.TH_PF_444)
{
uvWidth = yWidth;
uvHeight = yHeight;
}
else
{
throw new NotSupportedException("Unrecognized YUV format!");
}
yuvDataLength = (
(yWidth * yHeight) +
(uvWidth * uvHeight * 2)
);
yuvData = NativeMemory.Alloc((nuint) yuvDataLength);
InitializeTheoraStream();
if (Theorafile.tf_hasvideo(Handle) == 1)
{
RenderTexture = Texture.CreateTexture2D(
GraphicsDevice,
(uint) yWidth,
(uint) yHeight,
TextureFormat.R8G8B8A8,
TextureUsageFlags.ColorTarget | TextureUsageFlags.Sampler
);
yTexture = Texture.CreateTexture2D(
GraphicsDevice,
(uint) yWidth,
(uint) yHeight,
TextureFormat.R8,
TextureUsageFlags.Sampler
);
uTexture = Texture.CreateTexture2D(
GraphicsDevice,
(uint) uvWidth,
(uint) uvHeight,
TextureFormat.R8,
TextureUsageFlags.Sampler
);
vTexture = Texture.CreateTexture2D(
GraphicsDevice,
(uint) uvWidth,
(uint) uvHeight,
TextureFormat.R8,
TextureUsageFlags.Sampler
);
LinearSampler = new Sampler(GraphicsDevice, SamplerCreateInfo.LinearClamp);
}
timer = new Stopwatch();
}
public void Play(bool loop = false)
{
if (State == VideoState.Playing)
{
return;
}
Loop = loop;
timer.Start();
if (audioStream != null)
{
audioStream.Play();
}
State = VideoState.Playing;
}
public void Pause()
{
if (State != VideoState.Playing)
{
return;
}
timer.Stop();
if (audioStream != null)
{
audioStream.Pause();
}
State = VideoState.Paused;
}
public void Stop()
{
if (State == VideoState.Stopped)
{
return;
}
timer.Stop();
timer.Reset();
Theorafile.tf_reset(Handle);
lastTimestamp = 0;
timeElapsed = 0;
if (audioStream != null)
{
audioStream.StopImmediate();
audioStream.Dispose();
audioStream = null;
}
State = VideoState.Stopped;
}
public Texture GetTexture()
{
if (RenderTexture == null)
{
throw new InvalidOperationException();
}
if (State == VideoState.Stopped)
{
return RenderTexture;
}
timeElapsed += (timer.Elapsed.TotalMilliseconds - lastTimestamp) * PlaybackSpeed;
lastTimestamp = timer.Elapsed.TotalMilliseconds;
int thisFrame = ((int) (timeElapsed / (1000.0 / FramesPerSecond)));
if (thisFrame > currentFrame)
{
if (Theorafile.tf_readvideo(
Handle,
(IntPtr) yuvData,
thisFrame - currentFrame
) == 1 || currentFrame == -1) {
UpdateTexture();
}
currentFrame = thisFrame;
}
bool ended = Theorafile.tf_eos(Handle) == 1;
if (ended)
{
timer.Stop();
timer.Reset();
if (audioStream != null)
{
audioStream.Stop();
audioStream.Dispose();
audioStream = null;
}
Theorafile.tf_reset(Handle);
if (Loop)
{
// Start over!
InitializeTheoraStream();
timer.Start();
}
else
{
State = VideoState.Stopped;
}
}
return RenderTexture;
}
private void UpdateTexture()
{
var commandBuffer = GraphicsDevice.AcquireCommandBuffer();
commandBuffer.SetTextureDataYUV(
yTexture,
uTexture,
vTexture,
(IntPtr) yuvData,
(uint) yuvDataLength
);
commandBuffer.BeginRenderPass(
new ColorAttachmentInfo(RenderTexture, Color.Black)
);
commandBuffer.BindGraphicsPipeline(GraphicsDevice.VideoPipeline);
commandBuffer.BindFragmentSamplers(
new TextureSamplerBinding(yTexture, LinearSampler),
new TextureSamplerBinding(uTexture, LinearSampler),
new TextureSamplerBinding(vTexture, LinearSampler)
);
commandBuffer.DrawPrimitives(0, 1, 0, 0);
commandBuffer.EndRenderPass();
GraphicsDevice.Submit(commandBuffer);
}
private void InitializeTheoraStream()
{
// Grab the first video frame ASAP.
while (Theorafile.tf_readvideo(Handle, (IntPtr) yuvData, 1) == 0);
// Grab the first bit of audio. We're trying to start the decoding ASAP.
if (AudioDevice != null && Theorafile.tf_hasaudio(Handle) == 1)
{
int channels, sampleRate;
Theorafile.tf_audioinfo(Handle, out channels, out sampleRate);
audioStream = new StreamingSoundTheora(AudioDevice, Handle, channels, (uint) sampleRate);
}
currentFrame = -1;
}
protected virtual void Dispose(bool disposing)
{
if (!disposed)
{
if (disposing)
{
// dispose managed state (managed objects)
RenderTexture.Dispose();
yTexture.Dispose();
uTexture.Dispose();
vTexture.Dispose();
}
// free unmanaged resources (unmanaged objects)
Theorafile.tf_close(ref Handle);
NativeMemory.Free(yuvData);
disposed = true;
}
}
~Video()
{
// Do not change this code. Put cleanup code in 'Dispose(bool disposing)' method
Dispose(disposing: false);
}
public void Dispose()
{
// Do not change this code. Put cleanup code in 'Dispose(bool disposing)' method
Dispose(disposing: true);
GC.SuppressFinalize(this);
}
}
}