move VideoPlayer functions to Video

pull/20/head
cosmonaut 2022-07-29 18:40:35 -07:00
parent 845881533b
commit 3966a0c4ea
2 changed files with 234 additions and 301 deletions

View File

@@ -1,42 +1,66 @@
-/* Heavily based on https://github.com/FNA-XNA/FNA/blob/master/src/Media/Xiph/Video.cs */
/* Heavily based on https://github.com/FNA-XNA/FNA/blob/master/src/Media/Xiph/VideoPlayer.cs */
using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
using MoonWorks.Graphics;
namespace MoonWorks.Video
{
-public class Video : IDisposable
-{
-public IntPtr Handle => handle;
-public int YWidth => yWidth;
-public int YHeight => yHeight;
-public int UVWidth => uvWidth;
-public int UVHeight => uvHeight;
-public double FramesPerSecond => fps;
-private IntPtr handle;
public enum VideoState
{
Playing,
Paused,
Stopped
}
public unsafe class Video : IDisposable
{
private IntPtr Handle;
public bool Loop { get; set; }
public bool Mute { get; set; }
public float Volume { get; set; }
public double FramesPerSecond => fps;
private VideoState State = VideoState.Stopped;
private double fps;
private int yWidth;
private int yHeight;
private int uvWidth;
private int uvHeight;
-private double fps;
private void* yuvData = null;
private int yuvDataLength;
private int currentFrame;
private GraphicsDevice GraphicsDevice;
private Texture RenderTexture = null;
private Texture yTexture = null;
private Texture uTexture = null;
private Texture vTexture = null;
private Sampler LinearSampler;
private Stopwatch timer;
private bool disposed;
-public Video(string filename)
public Video(GraphicsDevice graphicsDevice, string filename)
{
-Theorafile.th_pixel_fmt format;
GraphicsDevice = graphicsDevice;
if (!System.IO.File.Exists(filename))
{
throw new ArgumentException("Video file not found!");
}
-if (Theorafile.tf_fopen(filename, out handle) < 0)
if (Theorafile.tf_fopen(filename, out Handle) < 0)
{
throw new ArgumentException("Invalid video file!");
}
Theorafile.th_pixel_fmt format;
Theorafile.tf_videoinfo(
-handle,
Handle,
out yWidth,
out yHeight,
out fps,
@@ -62,6 +86,193 @@ namespace MoonWorks.Video
{
throw new NotSupportedException("Unrecognized YUV format!");
}
yuvDataLength = (
(yWidth * yHeight) +
(uvWidth * uvHeight * 2)
);
yuvData = NativeMemory.Alloc((nuint) yuvDataLength);
InitializeTheoraStream();
if (Theorafile.tf_hasvideo(Handle) == 1)
{
RenderTexture = Texture.CreateTexture2D(
GraphicsDevice,
(uint) yWidth,
(uint) yHeight,
TextureFormat.R8G8B8A8,
TextureUsageFlags.ColorTarget | TextureUsageFlags.Sampler
);
yTexture = Texture.CreateTexture2D(
GraphicsDevice,
(uint) yWidth,
(uint) yHeight,
TextureFormat.R8,
TextureUsageFlags.Sampler
);
uTexture = Texture.CreateTexture2D(
GraphicsDevice,
(uint) uvWidth,
(uint) uvHeight,
TextureFormat.R8,
TextureUsageFlags.Sampler
);
vTexture = Texture.CreateTexture2D(
GraphicsDevice,
(uint) uvWidth,
(uint) uvHeight,
TextureFormat.R8,
TextureUsageFlags.Sampler
);
LinearSampler = new Sampler(GraphicsDevice, SamplerCreateInfo.LinearClamp);
}
timer = new Stopwatch();
}
public void Play(bool loop = false)
{
if (State == VideoState.Playing)
{
return;
}
Loop = loop;
timer.Start();
State = VideoState.Playing;
}
public void Pause()
{
if (State != VideoState.Playing)
{
return;
}
timer.Stop();
State = VideoState.Paused;
}
public void Stop()
{
if (State == VideoState.Stopped)
{
return;
}
timer.Stop();
timer.Reset();
Theorafile.tf_reset(Handle);
State = VideoState.Stopped;
}
public Texture GetTexture()
{
if (RenderTexture == null)
{
throw new InvalidOperationException();
}
if (State == VideoState.Stopped)
{
return RenderTexture;
}
int thisFrame = (int) (timer.Elapsed.TotalMilliseconds / (1000.0 / FramesPerSecond));
if (thisFrame > currentFrame)
{
if (Theorafile.tf_readvideo(
Handle,
(IntPtr) yuvData,
thisFrame - currentFrame
) == 1 || currentFrame == -1) {
UpdateTexture();
}
currentFrame = thisFrame;
}
bool ended = Theorafile.tf_eos(Handle) == 1;
if (ended)
{
timer.Stop();
timer.Reset();
Theorafile.tf_reset(Handle);
if (Loop)
{
// Start over!
InitializeTheoraStream();
timer.Start();
}
else
{
State = VideoState.Stopped;
}
}
return RenderTexture;
}
private void UpdateTexture()
{
var commandBuffer = GraphicsDevice.AcquireCommandBuffer();
commandBuffer.SetTextureDataYUV(
yTexture,
uTexture,
vTexture,
(IntPtr) yuvData,
(uint) yuvDataLength
);
commandBuffer.BeginRenderPass(
new ColorAttachmentInfo(RenderTexture, Color.Black)
);
commandBuffer.BindGraphicsPipeline(GraphicsDevice.VideoPipeline);
commandBuffer.BindFragmentSamplers(
new TextureSamplerBinding(yTexture, LinearSampler),
new TextureSamplerBinding(uTexture, LinearSampler),
new TextureSamplerBinding(vTexture, LinearSampler)
);
commandBuffer.DrawPrimitives(0, 1, 0, 0);
commandBuffer.EndRenderPass();
GraphicsDevice.Submit(commandBuffer);
}
private void InitializeTheoraStream()
{
// Grab the first video frame ASAP.
while (Theorafile.tf_readvideo(Handle, (IntPtr) yuvData, 1) == 0);
// Grab the first bit of audio. We're trying to start the decoding ASAP.
if (Theorafile.tf_hasaudio(Handle) == 1)
{
int channels, samplerate;
Theorafile.tf_audioinfo(Handle, out channels, out samplerate);
// TODO: audio stream
}
currentFrame = -1;
} }
protected virtual void Dispose(bool disposing) protected virtual void Dispose(bool disposing)
@@ -70,10 +281,17 @@ namespace MoonWorks.Video
{
if (disposing)
{
-// TODO: dispose managed state (managed objects)
// dispose managed state (managed objects)
RenderTexture.Dispose();
yTexture.Dispose();
uTexture.Dispose();
vTexture.Dispose();
}
-Theorafile.tf_close(ref handle);
// free unmanaged resources (unmanaged objects)
Theorafile.tf_close(ref Handle);
NativeMemory.Free(yuvData);
disposed = true;
}
}
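
With the playback logic folded in, a single Video object now owns the Theorafile handle, the playback state, the YUV staging buffer, and the render texture. A minimal usage sketch of the merged API as it appears in this diff; the scene class, method names, and file path around it are illustrative assumptions, not part of the commit:

// Hypothetical usage sketch (not part of this commit).
// Assumes the host game already has a MoonWorks GraphicsDevice.
using MoonWorks.Graphics;
using MoonWorks.Video;
class IntroScene
{
	private readonly Video video;
	public IntroScene(GraphicsDevice graphicsDevice)
	{
		// The constructor now takes the GraphicsDevice directly and allocates
		// the YUV buffer plus the render/Y/U/V textures up front.
		video = new Video(graphicsDevice, "intro.ogv"); // path is a placeholder
		video.Play(loop: true);
	}
	public Texture CurrentFrame()
	{
		// GetTexture() advances decoding against the internal Stopwatch
		// and returns the RGBA render target, ready for sampling.
		return video.GetTexture();
	}
	public void Unload() => video.Dispose();
}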

View File

@@ -1,285 +0,0 @@
/* Heavily based on https://github.com/FNA-XNA/FNA/blob/master/src/Media/Xiph/VideoPlayer.cs */
using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
using MoonWorks.Graphics;
namespace MoonWorks.Video
{
public enum VideoState
{
Playing,
Paused,
Stopped
}
public unsafe class VideoPlayer : IDisposable
{
public bool Loop { get; set; }
public bool Mute { get; set; }
public float Volume { get; set; }
private Video Video = null;
private VideoState State = VideoState.Stopped;
private void* yuvData = null;
private int yuvDataLength;
private int currentFrame;
private GraphicsDevice GraphicsDevice;
private Texture RenderTexture = null;
private Texture[] YUVTextures = new Texture[3];
private Sampler LinearSampler;
private Stopwatch timer;
private bool disposed;
public VideoPlayer(GraphicsDevice graphicsDevice)
{
GraphicsDevice = graphicsDevice;
timer = new Stopwatch();
LinearSampler = new Sampler(GraphicsDevice, SamplerCreateInfo.LinearClamp);
}
public void Load(Video video)
{
Video = video;
State = VideoState.Stopped;
if (yuvData != null)
{
NativeMemory.Free(yuvData);
}
yuvDataLength = (
(Video.YWidth * Video.YHeight) +
(Video.UVWidth * video.UVHeight * 2)
);
yuvData = NativeMemory.Alloc((nuint) yuvDataLength);
InitializeTheoraStream();
if (Theorafile.tf_hasvideo(Video.Handle) == 1)
{
if (RenderTexture != null)
{
RenderTexture.Dispose();
}
RenderTexture = Texture.CreateTexture2D(
GraphicsDevice,
(uint) Video.YWidth,
(uint) Video.YHeight,
TextureFormat.R8G8B8A8,
TextureUsageFlags.ColorTarget | TextureUsageFlags.Sampler
);
for (int i = 0; i < 3; i += 1)
{
if (YUVTextures[i] != null)
{
YUVTextures[i].Dispose();
}
}
YUVTextures[0] = Texture.CreateTexture2D(
GraphicsDevice,
(uint) Video.YWidth,
(uint) Video.YHeight,
TextureFormat.R8,
TextureUsageFlags.Sampler
);
YUVTextures[1] = Texture.CreateTexture2D(
GraphicsDevice,
(uint) Video.UVWidth,
(uint) Video.UVHeight,
TextureFormat.R8,
TextureUsageFlags.Sampler
);
YUVTextures[2] = Texture.CreateTexture2D(
GraphicsDevice,
(uint) Video.UVWidth,
(uint) Video.UVHeight,
TextureFormat.R8,
TextureUsageFlags.Sampler
);
}
}
public void Play(bool loop = false)
{
if (State == VideoState.Playing)
{
return;
}
Loop = loop;
timer.Start();
State = VideoState.Playing;
}
public void Pause()
{
if (State == VideoState.Paused)
{
return;
}
timer.Stop();
State = VideoState.Paused;
}
public void Stop()
{
if (State == VideoState.Stopped)
{
return;
}
timer.Stop();
timer.Reset();
Theorafile.tf_reset(Video.Handle);
State = VideoState.Stopped;
}
public Texture GetTexture()
{
if (Video == null)
{
throw new InvalidOperationException();
}
if (State == VideoState.Stopped || Video.Handle == IntPtr.Zero || Theorafile.tf_hasvideo(Video.Handle) == 0)
{
return RenderTexture;
}
int thisFrame = (int) (timer.Elapsed.TotalMilliseconds / (1000.0 / Video.FramesPerSecond));
if (thisFrame > currentFrame)
{
if (Theorafile.tf_readvideo(
Video.Handle,
(IntPtr) yuvData,
thisFrame - currentFrame
) == 1 || currentFrame == -1) {
UpdateTexture();
}
currentFrame = thisFrame;
}
bool ended = Theorafile.tf_eos(Video.Handle) == 1;
if (ended)
{
timer.Stop();
timer.Reset();
Theorafile.tf_reset(Video.Handle);
if (Loop)
{
// Start over!
InitializeTheoraStream();
timer.Start();
}
else
{
State = VideoState.Stopped;
}
}
return RenderTexture;
}
private void InitializeTheoraStream()
{
// Grab the first video frame ASAP.
while (Theorafile.tf_readvideo(Video.Handle, (IntPtr) yuvData, 1) == 0);
// Grab the first bit of audio. We're trying to start the decoding ASAP.
if (Theorafile.tf_hasaudio(Video.Handle) == 1)
{
int channels, samplerate;
Theorafile.tf_audioinfo(Video.Handle, out channels, out samplerate);
// TODO: audio stream
}
currentFrame = -1;
}
private void UpdateTexture()
{
var commandBuffer = GraphicsDevice.AcquireCommandBuffer();
commandBuffer.SetTextureDataYUV(
YUVTextures[0],
YUVTextures[1],
YUVTextures[2],
(IntPtr) yuvData,
(uint) yuvDataLength
);
commandBuffer.BeginRenderPass(
new ColorAttachmentInfo(RenderTexture, Color.Black)
);
commandBuffer.BindGraphicsPipeline(GraphicsDevice.VideoPipeline);
commandBuffer.BindFragmentSamplers(
new TextureSamplerBinding(YUVTextures[0], LinearSampler),
new TextureSamplerBinding(YUVTextures[1], LinearSampler),
new TextureSamplerBinding(YUVTextures[2], LinearSampler)
);
commandBuffer.DrawPrimitives(0, 1, 0, 0);
commandBuffer.EndRenderPass();
GraphicsDevice.Submit(commandBuffer);
}
protected virtual void Dispose(bool disposing)
{
if (!disposed)
{
if (disposing)
{
// dispose managed state (managed objects)
RenderTexture.Dispose();
YUVTextures[0].Dispose();
YUVTextures[1].Dispose();
YUVTextures[2].Dispose();
}
// free unmanaged resources (unmanaged objects) and override finalizer
NativeMemory.Free(yuvData);
disposed = true;
}
}
~VideoPlayer()
{
// Do not change this code. Put cleanup code in 'Dispose(bool disposing)' method
Dispose(disposing: false);
}
public void Dispose()
{
// Do not change this code. Put cleanup code in 'Dispose(bool disposing)' method
Dispose(disposing: true);
GC.SuppressFinalize(this);
}
}
}
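
For reference, the frame selection in GetTexture is unchanged by the move: the target frame index is derived from the Stopwatch elapsed time and the stream's frame rate, and tf_readvideo is then asked to decode thisFrame - currentFrame frames so the decoder catches up if rendering falls behind. A small sketch of that arithmetic, with names invented here for illustration:

// Sketch of the frame-pacing arithmetic used by GetTexture (names are illustrative).
// Example: at 29.97 fps a frame lasts 1000.0 / 29.97 ≈ 33.37 ms,
// so 500 ms of elapsed time maps to frame index 14.
static int FrameForElapsed(double elapsedMilliseconds, double framesPerSecond)
{
	return (int) (elapsedMilliseconds / (1000.0 / framesPerSecond));
}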