AV1 Video instead of Theora (#49)

VideoPlayer now plays AV1 video (via the dav1dfile submodule) instead of Ogg Theora, which brings a significant decode speed improvement. Decoding also now runs on worker threads, so fetching video frames should no longer stall the main thread at runtime.
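For callers, the switch mostly changes what gets loaded. A rough usage sketch of the new path, using the constructor and method names from the diff below (the game-loop wiring itself is illustrative):

// Open an AV1 stream in .obu format; the frame rate is supplied by the caller.
var video = new VideoAV1("Content/intro.obu", 30);

// VideoPlayer no longer takes an AudioDevice, only the GraphicsDevice.
var player = new VideoPlayer(graphicsDevice);
player.Load(video);
player.Play();

// Once per frame: Render() uploads any newly decoded planes and draws them
// into player.RenderTexture, which can then be drawn like any other texture.
player.Render();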

Reviewed-on: #49
cosmonaut 2023-06-07 21:18:44 +00:00
parent 00f4bfdeae
commit 496eb670ab
13 changed files with 245 additions and 320 deletions

.gitmodules (vendored, 6 lines changed)

@@ -10,6 +10,6 @@
[submodule "lib/WellspringCS"]
path = lib/WellspringCS
url = https://gitea.moonside.games/MoonsideGames/WellspringCS.git
[submodule "lib/Theorafile"]
path = lib/Theorafile
url = https://github.com/FNA-XNA/Theorafile.git
[submodule "lib/dav1dfile"]
path = lib/dav1dfile
url = git@github.com:MoonsideGames/dav1dfile.git


@@ -15,8 +15,8 @@
<Compile Include="lib\FAudio\csharp\FAudio.cs" />
<Compile Include="lib\RefreshCS\src\Refresh.cs" />
<Compile Include="lib\SDL2-CS\src\SDL2.cs" />
<Compile Include="lib\Theorafile\csharp\Theorafile.cs" />
<Compile Include="lib\WellspringCS\WellspringCS.cs" />
<Compile Include="lib\dav1dfile\csharp\dav1dfile.cs" />
</ItemGroup>
<ItemGroup>


@@ -19,4 +19,8 @@
<dllmap dll="Theorafile" os="windows" target="libtheorafile.dll"/>
<dllmap dll="Theorafile" os="osx" target="libtheorafile.dylib"/>
<dllmap dll="Theorafile" os="linux,freebsd,netbsd" target="libtheorafile.so"/>
<dllmap dll="dav1dfile" os="windows" target="dav1dfile.dll"/>
<dllmap dll="dav1dfile" os="osx" target="libdav1dfile.0.dylib"/>
<dllmap dll="dav1dfile" os="linux,freebsd,netbsd,openbsd" target="libdav1dfile.so.0"/>
</configuration>
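The new entries follow the same pattern as the Theorafile ones they replace: managed code references the native library by a single logical name, and the dllmap rewrites that name to the platform-specific file at load time. A minimal sketch of the binding side, assuming the logical name "dav1dfile" from the entries above (the export shown is hypothetical, purely to illustrate the mapping):

using System.Runtime.InteropServices;

internal static class Dav1dfileBindingSketch
{
	// "dav1dfile" is the logical name; the dllmap entries above resolve it to
	// dav1dfile.dll, libdav1dfile.0.dylib, or libdav1dfile.so.0 at runtime.
	private const string nativeLibName = "dav1dfile";

	// Hypothetical export, for illustration only.
	[DllImport(nativeLibName, CallingConvention = CallingConvention.Cdecl)]
	public static extern uint df_example_version();
}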

@@ -1 +1 @@
Subproject commit ebf511133aa6f567c004d687acac474e1649bbde
Subproject commit 60a7523fac254d5e2d89185392e8c1afd8581aa9

@@ -1 +0,0 @@
Subproject commit 8f9419ea856480e08294698e1d6be8752df3710b

lib/dav1dfile (new submodule)

@@ -0,0 +1 @@
Subproject commit 859f47f6fa0dfa0f7f941dcced6664fa83736202


@@ -1946,7 +1946,17 @@ namespace MoonWorks.Graphics
/// <summary>
/// Asynchronously copies YUV data into three textures. Use with compressed video.
/// </summary>
public void SetTextureDataYUV(Texture yTexture, Texture uTexture, Texture vTexture, IntPtr dataPtr, uint dataLengthInBytes)
public void SetTextureDataYUV(
Texture yTexture,
Texture uTexture,
Texture vTexture,
IntPtr yDataPtr,
IntPtr uDataPtr,
IntPtr vDataPtr,
uint yDataLengthInBytes,
uint uvDataLengthInBytes,
uint yStride,
uint uvStride)
{
#if DEBUG
AssertRenderPassInactive("Cannot copy during render pass!");
@@ -1962,8 +1972,13 @@ namespace MoonWorks.Graphics
yTexture.Height,
uTexture.Width,
uTexture.Height,
dataPtr,
dataLengthInBytes
yDataPtr,
uDataPtr,
vDataPtr,
yDataLengthInBytes,
uvDataLengthInBytes,
yStride,
uvStride
);
}
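
The expanded signature reflects how dav1d hands frames back: one pointer per plane, each with its own row stride that can be wider than the visible width, so the copy has to walk rows rather than assume tightly packed data. A minimal CPU-side illustration of that addressing (hypothetical helper, not the actual GPU upload path):

using System;

internal static unsafe class PlaneCopyExample
{
	// Copy a stride-padded plane into a tightly packed buffer, row by row.
	public static void CopyPlane(byte* src, uint srcStride, byte* dst, uint width, uint height)
	{
		for (uint y = 0; y < height; y += 1)
		{
			Buffer.MemoryCopy(src + y * srcStride, dst + y * width, width, width);
		}
	}
}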


@@ -1,67 +0,0 @@
using System;
using MoonWorks.Audio;
namespace MoonWorks.Video
{
// TODO: should we just not handle theora sound? it sucks!
internal unsafe class StreamingSoundTheora : StreamingSound
{
private IntPtr VideoHandle;
public override bool Loaded => true;
internal StreamingSoundTheora(
AudioDevice device,
IntPtr videoHandle,
int channels,
uint sampleRate,
uint bufferSize = 8192
) : base(
device,
3, /* float type */
32, /* size of float */
(ushort) (4 * channels),
(ushort) channels,
sampleRate,
bufferSize,
false // Theorafile is not thread safe, so let's update on the main thread
) {
VideoHandle = videoHandle;
}
public override unsafe void Load()
{
// no-op
}
public override unsafe void Unload()
{
// no-op
}
protected override unsafe void FillBuffer(
void* buffer,
int bufferLengthInBytes,
out int filledLengthInBytes,
out bool reachedEnd
) {
var lengthInFloats = bufferLengthInBytes / sizeof(float);
// FIXME: this gets gnarly with theorafile being not thread safe
// is there some way we could just manually update in VideoPlayer
// instead of going through AudioDevice?
lock (Device.StateLock)
{
int samples = Theorafile.tf_readaudio(
VideoHandle,
(IntPtr) buffer,
lengthInFloats
);
filledLengthInBytes = samples * sizeof(float);
reachedEnd = Theorafile.tf_eos(VideoHandle) == 1;
}
}
protected override void OnReachedEnd() { }
}
}


@@ -1,127 +0,0 @@
/* Heavily based on https://github.com/FNA-XNA/FNA/blob/master/src/Media/Xiph/VideoPlayer.cs */
using System;
using System.IO;
using System.Runtime.InteropServices;
using SDL2;
namespace MoonWorks.Video
{
public enum VideoState
{
Playing,
Paused,
Stopped
}
public unsafe class Video : IDisposable
{
internal IntPtr Handle;
private IntPtr rwData;
private void* videoData;
private int videoDataLength;
public double FramesPerSecond => fps;
public int Width => yWidth;
public int Height => yHeight;
public int UVWidth { get; }
public int UVHeight { get; }
private double fps;
private int yWidth;
private int yHeight;
private bool IsDisposed;
public Video(string filename)
{
if (!File.Exists(filename))
{
throw new ArgumentException("Video file not found!");
}
var fileStream = new FileStream(filename, FileMode.Open, FileAccess.Read);
videoDataLength = (int) fileStream.Length;
videoData = NativeMemory.Alloc((nuint) videoDataLength);
var fileBufferSpan = new Span<byte>(videoData, videoDataLength);
fileStream.ReadExactly(fileBufferSpan);
fileStream.Close();
rwData = SDL.SDL_RWFromMem((IntPtr) videoData, videoDataLength);
if (Theorafile.tf_open_callbacks(rwData, out Handle, callbacks) < 0)
{
throw new ArgumentException("Invalid video file!");
}
Theorafile.th_pixel_fmt format;
Theorafile.tf_videoinfo(
Handle,
out yWidth,
out yHeight,
out fps,
out format
);
if (format == Theorafile.th_pixel_fmt.TH_PF_420)
{
UVWidth = Width / 2;
UVHeight = Height / 2;
}
else if (format == Theorafile.th_pixel_fmt.TH_PF_422)
{
UVWidth = Width / 2;
UVHeight = Height;
}
else if (format == Theorafile.th_pixel_fmt.TH_PF_444)
{
UVWidth = Width;
UVHeight = Height;
}
else
{
throw new NotSupportedException("Unrecognized YUV format!");
}
}
private static IntPtr Read(IntPtr ptr, IntPtr size, IntPtr nmemb, IntPtr datasource) => (IntPtr) SDL2.SDL.SDL_RWread(datasource, ptr, size, nmemb);
private static int Seek(IntPtr datasource, long offset, Theorafile.SeekWhence whence) => (int) SDL2.SDL.SDL_RWseek(datasource, offset, (int) whence);
private static int Close(IntPtr datasource) => (int) SDL2.SDL.SDL_RWclose(datasource);
private static Theorafile.tf_callbacks callbacks = new Theorafile.tf_callbacks
{
read_func = Read,
seek_func = Seek,
close_func = Close
};
protected virtual void Dispose(bool disposing)
{
if (!IsDisposed)
{
if (disposing)
{
// dispose managed state (managed objects)
}
// free unmanaged resources (unmanaged objects)
Theorafile.tf_close(ref Handle);
SDL.SDL_RWclose(rwData);
NativeMemory.Free(videoData);
IsDisposed = true;
}
}
~Video()
{
// Do not change this code. Put cleanup code in 'Dispose(bool disposing)' method
Dispose(disposing: false);
}
public void Dispose()
{
// Do not change this code. Put cleanup code in 'Dispose(bool disposing)' method
Dispose(disposing: true);
GC.SuppressFinalize(this);
}
}
}

src/Video/VideoAV1.cs (new file, 71 lines)

@@ -0,0 +1,71 @@
using System;
using System.IO;
namespace MoonWorks.Video
{
/// <summary>
/// This class takes in a filename for AV1 data in .obu (open bitstream unit) format
/// </summary>
public unsafe class VideoAV1
{
public string Filename { get; }
// "double buffering" so we can loop without a stutter
internal VideoAV1Stream StreamA { get; }
internal VideoAV1Stream StreamB { get; }
public int Width => width;
public int Height => height;
public double FramesPerSecond { get; set; }
public Dav1dfile.PixelLayout PixelLayout => pixelLayout;
public int UVWidth { get; }
public int UVHeight { get; }
private int width;
private int height;
private Dav1dfile.PixelLayout pixelLayout;
public VideoAV1(string filename, double framesPerSecond)
{
if (!File.Exists(filename))
{
throw new ArgumentException("Video file not found!");
}
if (Dav1dfile.df_fopen(filename, out var handle) == 0)
{
throw new Exception("Failed to open video file!");
}
Dav1dfile.df_videoinfo(handle, out width, out height, out pixelLayout);
Dav1dfile.df_close(handle);
if (pixelLayout == Dav1dfile.PixelLayout.I420)
{
UVWidth = Width / 2;
UVHeight = Height / 2;
}
else if (pixelLayout == Dav1dfile.PixelLayout.I422)
{
UVWidth = Width / 2;
UVHeight = Height;
}
else if (pixelLayout == Dav1dfile.PixelLayout.I444)
{
UVWidth = width;
UVHeight = height;
}
else
{
throw new NotSupportedException("Unrecognized YUV format!");
}
FramesPerSecond = framesPerSecond;
Filename = filename;
StreamA = new VideoAV1Stream(this);
StreamB = new VideoAV1Stream(this);
}
}
}


@@ -0,0 +1,91 @@
using System;
namespace MoonWorks.Video
{
internal class VideoAV1Stream
{
public IntPtr Handle => handle;
IntPtr handle;
public bool Ended => Dav1dfile.df_eos(Handle) == 1;
public IntPtr yDataHandle;
public IntPtr uDataHandle;
public IntPtr vDataHandle;
public uint yDataLength;
public uint uvDataLength;
public uint yStride;
public uint uvStride;
public bool FrameDataUpdated { get; private set; }
bool IsDisposed;
public VideoAV1Stream(VideoAV1 video)
{
if (Dav1dfile.df_fopen(video.Filename, out handle) == 0)
{
throw new Exception("Failed to open video file!");
}
Reset();
}
public void Reset()
{
lock (this)
{
Dav1dfile.df_reset(Handle);
ReadNextFrame();
}
}
public void ReadNextFrame()
{
lock (this)
{
if (Dav1dfile.df_readvideo(
Handle,
1,
out yDataHandle,
out uDataHandle,
out vDataHandle,
out yDataLength,
out uvDataLength,
out yStride,
out uvStride) == 1
) {
FrameDataUpdated = true;
}
}
}
protected virtual void Dispose(bool disposing)
{
if (!IsDisposed)
{
if (disposing)
{
// dispose managed state (managed objects)
}
// free unmanaged resources (unmanaged objects)
Dav1dfile.df_close(Handle);
IsDisposed = true;
}
}
~VideoAV1Stream()
{
Dispose(disposing: false);
}
public void Dispose()
{
// Do not change this code. Put cleanup code in 'Dispose(bool disposing)' method
Dispose(disposing: true);
GC.SuppressFinalize(this);
}
}
}
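
VideoAV1Stream doubles as its own lock: the player schedules ReadNextFrame and Reset on the thread pool via Task.Run and takes a lock on the stream while uploading the previously decoded planes, so the decoder never overwrites pointers that are still being read. A stripped-down sketch of that hand-off, with illustrative names rather than the engine API:

using System.Threading.Tasks;

internal class FrameSource
{
	public bool FrameReady { get; private set; }

	public void DecodeNext()
	{
		lock (this)
		{
			// decode the next frame into the plane buffers here
			FrameReady = true;
		}
	}
}

internal class PlayerSketch
{
	private readonly FrameSource source = new FrameSource();

	public void PresentFrame()
	{
		lock (source)
		{
			// upload the plane buffers to the GPU while the decoder is idle
		}

		// kick off the next decode on a worker thread, overlapping with rendering
		Task.Run(source.DecodeNext);
	}
}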


@@ -1,5 +1,6 @@
using System;
using System.Diagnostics;
using System.Threading.Tasks;
using System.Runtime.InteropServices;
using MoonWorks.Audio;
using MoonWorks.Graphics;
@@ -11,20 +12,10 @@ namespace MoonWorks.Video
public Texture RenderTexture { get; private set; } = null;
public VideoState State { get; private set; } = VideoState.Stopped;
public bool Loop { get; set; }
public float Volume {
get => volume;
set
{
volume = value;
if (audioStream != null)
{
audioStream.Volume = value;
}
}
}
public float PlaybackSpeed { get; set; } = 1;
private Video Video = null;
private VideoAV1 Video = null;
private VideoAV1Stream CurrentStream = null;
private GraphicsDevice GraphicsDevice;
private Texture yTexture = null;
@@ -32,22 +23,15 @@ namespace MoonWorks.Video
private Texture vTexture = null;
private Sampler LinearSampler;
private void* yuvData = null;
private int yuvDataLength = 0;
private int currentFrame;
private AudioDevice AudioDevice;
private StreamingSoundTheora audioStream = null;
private float volume = 1.0f;
private Stopwatch timer;
private double lastTimestamp;
private double timeElapsed;
private bool disposed;
public VideoPlayer(GraphicsDevice graphicsDevice, AudioDevice audioDevice)
public VideoPlayer(GraphicsDevice graphicsDevice)
{
GraphicsDevice = graphicsDevice;
if (GraphicsDevice.VideoPipeline == null)
@@ -55,13 +39,12 @@ namespace MoonWorks.Video
throw new InvalidOperationException("Missing video shaders!");
}
AudioDevice = audioDevice;
LinearSampler = new Sampler(graphicsDevice, SamplerCreateInfo.LinearClamp);
timer = new Stopwatch();
}
public void Load(Video video)
public void Load(VideoAV1 video)
{
if (Video != video)
{
@@ -111,20 +94,9 @@ namespace MoonWorks.Video
vTexture = CreateSubTexture(GraphicsDevice, video.UVWidth, video.UVHeight);
}
var newDataLength = (
(video.Width * video.Height) +
(video.UVWidth * video.UVHeight * 2)
);
if (newDataLength != yuvDataLength)
{
yuvData = NativeMemory.Realloc(yuvData, (nuint) newDataLength);
yuvDataLength = newDataLength;
}
Video = video;
InitializeTheoraStream();
InitializeDav1dStream();
}
}
@@ -139,11 +111,6 @@ namespace MoonWorks.Video
timer.Start();
if (audioStream != null)
{
audioStream.Play();
}
State = VideoState.Playing;
}
@@ -158,11 +125,6 @@ namespace MoonWorks.Video
timer.Stop();
if (audioStream != null)
{
audioStream.Pause();
}
State = VideoState.Paused;
}
@@ -181,9 +143,7 @@ namespace MoonWorks.Video
lastTimestamp = 0;
timeElapsed = 0;
DestroyAudioStream();
Theorafile.tf_reset(Video.Handle);
InitializeDav1dStream();
State = VideoState.Stopped;
}
@@ -194,16 +154,6 @@ namespace MoonWorks.Video
Video = null;
}
public void Update()
{
if (Video == null) { return; }
if (audioStream != null)
{
audioStream.Update();
}
}
public void Render()
{
if (Video == null || State == VideoState.Stopped)
@@ -217,33 +167,27 @@ namespace MoonWorks.Video
int thisFrame = ((int) (timeElapsed / (1000.0 / Video.FramesPerSecond)));
if (thisFrame > currentFrame)
{
if (Theorafile.tf_readvideo(
Video.Handle,
(IntPtr) yuvData,
thisFrame - currentFrame
) == 1 || currentFrame == -1)
if (CurrentStream.FrameDataUpdated)
{
UpdateRenderTexture();
}
currentFrame = thisFrame;
Task.Run(CurrentStream.ReadNextFrame);
}
bool ended = Theorafile.tf_eos(Video.Handle) == 1;
if (ended)
if (CurrentStream.Ended)
{
timer.Stop();
timer.Reset();
DestroyAudioStream();
Theorafile.tf_reset(Video.Handle);
Task.Run(CurrentStream.Reset);
if (Loop)
{
// Start over!
InitializeTheoraStream();
// Start over on the next stream!
CurrentStream = (CurrentStream == Video.StreamA) ? Video.StreamB : Video.StreamA;
currentFrame = -1;
timer.Start();
}
else
@@ -255,32 +199,40 @@ namespace MoonWorks.Video
private void UpdateRenderTexture()
{
var commandBuffer = GraphicsDevice.AcquireCommandBuffer();
lock (CurrentStream)
{
var commandBuffer = GraphicsDevice.AcquireCommandBuffer();
commandBuffer.SetTextureDataYUV(
yTexture,
uTexture,
vTexture,
(IntPtr) yuvData,
(uint) yuvDataLength
);
commandBuffer.SetTextureDataYUV(
yTexture,
uTexture,
vTexture,
CurrentStream.yDataHandle,
CurrentStream.uDataHandle,
CurrentStream.vDataHandle,
CurrentStream.yDataLength,
CurrentStream.uvDataLength,
CurrentStream.yStride,
CurrentStream.uvStride
);
commandBuffer.BeginRenderPass(
new ColorAttachmentInfo(RenderTexture, Color.Black)
);
commandBuffer.BeginRenderPass(
new ColorAttachmentInfo(RenderTexture, Color.Black)
);
commandBuffer.BindGraphicsPipeline(GraphicsDevice.VideoPipeline);
commandBuffer.BindFragmentSamplers(
new TextureSamplerBinding(yTexture, LinearSampler),
new TextureSamplerBinding(uTexture, LinearSampler),
new TextureSamplerBinding(vTexture, LinearSampler)
);
commandBuffer.BindGraphicsPipeline(GraphicsDevice.VideoPipeline);
commandBuffer.BindFragmentSamplers(
new TextureSamplerBinding(yTexture, LinearSampler),
new TextureSamplerBinding(uTexture, LinearSampler),
new TextureSamplerBinding(vTexture, LinearSampler)
);
commandBuffer.DrawPrimitives(0, 1, 0, 0);
commandBuffer.DrawPrimitives(0, 1, 0, 0);
commandBuffer.EndRenderPass();
commandBuffer.EndRenderPass();
GraphicsDevice.Submit(commandBuffer);
GraphicsDevice.Submit(commandBuffer);
}
}
private static Texture CreateRenderTexture(GraphicsDevice graphicsDevice, int width, int height)
@@ -305,35 +257,15 @@ namespace MoonWorks.Video
);
}
private void InitializeTheoraStream()
private void InitializeDav1dStream()
{
// Grab the first video frame ASAP.
while (Theorafile.tf_readvideo(Video.Handle, (IntPtr) yuvData, 1) == 0);
// Grab the first bit of audio. We're trying to start the decoding ASAP.
if (AudioDevice != null && Theorafile.tf_hasaudio(Video.Handle) == 1)
{
DestroyAudioStream();
int channels, sampleRate;
Theorafile.tf_audioinfo(Video.Handle, out channels, out sampleRate);
audioStream = new StreamingSoundTheora(AudioDevice, Video.Handle, channels, (uint) sampleRate);
}
Task.Run(Video.StreamA.Reset);
Task.Run(Video.StreamB.Reset);
CurrentStream = Video.StreamA;
currentFrame = -1;
}
private void DestroyAudioStream()
{
if (audioStream != null)
{
audioStream.StopImmediate();
audioStream.Dispose();
audioStream = null;
}
}
protected virtual void Dispose(bool disposing)
{
if (!disposed)
@@ -347,9 +279,6 @@ namespace MoonWorks.Video
vTexture.Dispose();
}
// free unmanaged resources (unmanaged objects) and override finalizer
NativeMemory.Free(yuvData);
disposed = true;
}
}
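
The render pass above binds the Y, U, and V planes as three separate textures and lets the video pipeline's fragment shader recombine them into RGB in RenderTexture. The conversion itself is the usual YCbCr-to-RGB transform; expressed in C# for reference, with illustrative BT.601 full-range coefficients rather than the shader's exact constants:

// Convert normalized YUV samples (0..1) to RGB.
static (float r, float g, float b) YuvToRgb(float y, float u, float v)
{
	u -= 0.5f;
	v -= 0.5f;
	return (
		y + 1.402f * v,
		y - 0.344136f * u - 0.714136f * v,
		y + 1.772f * u
	);
}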

src/Video/VideoState.cs (new file, 9 lines)

@@ -0,0 +1,9 @@
namespace MoonWorks.Video
{
public enum VideoState
{
Playing,
Paused,
Stopped
}
}