146 changes: 75 additions & 71 deletions src/BizHawk.Client.EmuHawk/MainForm.cs
@@ -3210,14 +3210,9 @@ private void RecordAvBase(string videoWriterName, string filename, bool unattend

aw = Config.VideoWriterAudioSyncEffective ? new VideoStretcher(aw) : new AudioStretcher(aw);
aw.SetMovieParameters(Emulator.VsyncNumerator(), Emulator.VsyncDenominator());
if (Config.AVWriterResizeWidth > 0 && Config.AVWriterResizeHeight > 0)
{
aw.SetVideoParameters(Config.AVWriterResizeWidth, Config.AVWriterResizeHeight);
}
else
{
aw.SetVideoParameters(_currentVideoProvider.BufferWidth, _currentVideoProvider.BufferHeight);
}
(IVideoProvider output, Action dispose) = GetCaptureProvider();
aw.SetVideoParameters(output.BufferWidth, output.BufferHeight);
if (dispose != null) dispose();

aw.SetAudioParameters(44100, 2, 16);

@@ -3322,6 +3317,70 @@ private void RecordAvBase(string videoWriterName, string filename, bool unattend
RewireSound();
}

private (IVideoProvider Output, Action/*?*/ Dispose) GetCaptureProvider()
{
// TODO ZERO - this code is pretty jacked. we'll want to frugalize buffers better for speedier dumping, and we might want to rely on the GL layer for padding
if (Config.AVWriterResizeWidth > 0 && Config.AVWriterResizeHeight > 0)
{
BitmapBuffer bbIn = null;
Bitmap bmpIn = null;
try
{
bbIn = Config.AviCaptureOsd
? CaptureOSD()
: new BitmapBuffer(_currentVideoProvider.BufferWidth, _currentVideoProvider.BufferHeight, _currentVideoProvider.GetVideoBuffer());

bbIn.DiscardAlpha();

Bitmap bmpOut = new(width: Config.AVWriterResizeWidth, height: Config.AVWriterResizeHeight, PixelFormat.Format32bppArgb);
bmpIn = bbIn.ToSysdrawingBitmap();
using (var g = Graphics.FromImage(bmpOut))
{
if (Config.AVWriterPad)
{
g.Clear(Color.FromArgb(_currentVideoProvider.BackgroundColor));
g.DrawImageUnscaled(bmpIn, (bmpOut.Width - bmpIn.Width) / 2, (bmpOut.Height - bmpIn.Height) / 2);
}
else
{
g.InterpolationMode = System.Drawing.Drawing2D.InterpolationMode.NearestNeighbor;
g.PixelOffsetMode = System.Drawing.Drawing2D.PixelOffsetMode.Half;
g.DrawImage(bmpIn, new Rectangle(0, 0, bmpOut.Width, bmpOut.Height));
}
}

IVideoProvider output = new BmpVideoProvider(bmpOut, _currentVideoProvider.VsyncNumerator, _currentVideoProvider.VsyncDenominator);
return (output, bmpOut.Dispose);
}
finally
{
bbIn?.Dispose();
bmpIn?.Dispose();
}
}
else
{
BitmapBuffer source = null;
if (Config.AviCaptureOsd)
{
source = CaptureOSD();
}
else if (Config.AviCaptureLua)
{
source = CaptureLua();
}

if (source != null)
{
return (new BitmapBufferVideoProvider(source), source.Dispose);
}
else
{
return (_currentVideoProvider, null);
}
}
}

private void AbortAv()
{
if (_currAviWriter == null)
@@ -3365,74 +3424,17 @@ private void StopAv()

private void AvFrameAdvance()
{
if (_currAviWriter == null) return;

// is this the best time to handle this? or deeper inside?
if (_argParser._currAviWriterFrameList?.Contains(Emulator.Frame) != false)
{
// TODO ZERO - this code is pretty jacked. we'll want to frugalize buffers better for speedier dumping, and we might want to rely on the GL layer for padding
if (_currAviWriter == null) return;
Action dispose = null;
try
{
IVideoProvider output;
IDisposable disposableOutput = null;
if (Config.AVWriterResizeWidth > 0 && Config.AVWriterResizeHeight > 0)
{
BitmapBuffer bbIn = null;
Bitmap bmpIn = null;
try
{
bbIn = Config.AviCaptureOsd
? CaptureOSD()
: new BitmapBuffer(_currentVideoProvider.BufferWidth, _currentVideoProvider.BufferHeight, _currentVideoProvider.GetVideoBuffer());

bbIn.DiscardAlpha();

Bitmap bmpOut = new(width: Config.AVWriterResizeWidth, height: Config.AVWriterResizeHeight, PixelFormat.Format32bppArgb);
bmpIn = bbIn.ToSysdrawingBitmap();
using (var g = Graphics.FromImage(bmpOut))
{
if (Config.AVWriterPad)
{
g.Clear(Color.FromArgb(_currentVideoProvider.BackgroundColor));
g.DrawImageUnscaled(bmpIn, (bmpOut.Width - bmpIn.Width) / 2, (bmpOut.Height - bmpIn.Height) / 2);
}
else
{
g.InterpolationMode = System.Drawing.Drawing2D.InterpolationMode.NearestNeighbor;
g.PixelOffsetMode = System.Drawing.Drawing2D.PixelOffsetMode.Half;
g.DrawImage(bmpIn, new Rectangle(0, 0, bmpOut.Width, bmpOut.Height));
}
}

output = new BmpVideoProvider(bmpOut, _currentVideoProvider.VsyncNumerator, _currentVideoProvider.VsyncDenominator);
disposableOutput = (IDisposable) output;
}
finally
{
bbIn?.Dispose();
bmpIn?.Dispose();
}
}
else
{
if (Config.AviCaptureOsd)
{
output = new BitmapBufferVideoProvider(CaptureOSD());
disposableOutput = (IDisposable) output;
}
else if (Config.AviCaptureLua)
{
output = new BitmapBufferVideoProvider(CaptureLua());
disposableOutput = (IDisposable) output;
}
else
{
output = _currentVideoProvider;
}
}

_currAviWriter.SetFrame(Emulator.Frame);

(IVideoProvider output, dispose) = GetCaptureProvider();

short[] samp;
int nsamp;
if (Config.VideoWriterAudioSyncEffective)
@@ -3444,15 +3446,17 @@ private void AvFrameAdvance()
((AudioStretcher) _currAviWriter).DumpAV(output, _aviSoundInputAsync, out samp, out nsamp);
}

disposableOutput?.Dispose();

_dumpProxy.PutSamples(samp, nsamp);
}
catch (Exception e)
{
ShowMessageBox(owner: null, $"Video dumping died:\n\n{e}");
AbortAv();
}
finally
{
if (dispose != null) dispose();
}
}

if (_autoDumpLength > 0) //TODO this is probably not necessary because of the call to StopAv --yoshi
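
For review context, here is a minimal, self-contained sketch (not BizHawk code) of the ownership contract the new GetCaptureProvider() helper establishes: the returned tuple pairs the video provider to dump with an optional cleanup delegate, and the caller invokes that delegate in a finally block once the frame has been handed to the writer. The DemoProvider type, the resize flag, and the Main driver below are invented for illustration only.

using System;

// Hypothetical stand-in for BizHawk's IVideoProvider, reduced to the members this sketch needs.
public interface IVideoProvider
{
	int BufferWidth { get; }
	int BufferHeight { get; }
}

public sealed class DemoProvider : IVideoProvider
{
	public int BufferWidth { get; init; }
	public int BufferHeight { get; init; }
	public void Release() => Console.WriteLine("temporary capture buffer released");
}

public static class CaptureDemo
{
	private static readonly DemoProvider Shared = new() { BufferWidth = 256, BufferHeight = 224 };

	// Mirrors the shape of MainForm.GetCaptureProvider(): a provider plus an optional
	// cleanup delegate. A null Dispose means the provider is shared state the caller
	// must leave alone; a non-null one means the caller owns a temporary buffer.
	public static (IVideoProvider Output, Action/*?*/ Dispose) GetCaptureProvider(bool resize)
	{
		if (!resize) return (Shared, null);
		var temp = new DemoProvider { BufferWidth = 640, BufferHeight = 480 };
		return (temp, temp.Release);
	}

	public static void Main()
	{
		(IVideoProvider output, Action dispose) = GetCaptureProvider(resize: true);
		try
		{
			Console.WriteLine($"dumping {output.BufferWidth}x{output.BufferHeight}");
		}
		finally
		{
			// Same contract as the try/finally added to AvFrameAdvance in this PR.
			dispose?.Invoke();
		}
	}
}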