diff --git a/crates/frame-converter/src/d3d11.rs b/crates/frame-converter/src/d3d11.rs
index dffc31055d..e3617939d3 100644
--- a/crates/frame-converter/src/d3d11.rs
+++ b/crates/frame-converter/src/d3d11.rs
@@ -2,26 +2,31 @@ use crate::{ConversionConfig, ConvertError, ConverterBackend, FrameConverter};
 use ffmpeg::{format::Pixel, frame};
 use parking_lot::Mutex;
 use std::{
+    mem::ManuallyDrop,
     ptr,
     sync::atomic::{AtomicBool, AtomicU64, Ordering},
 };
 use windows::{
-    Win32::Graphics::{
-        Direct3D::D3D_DRIVER_TYPE_HARDWARE,
-        Direct3D11::{
-            D3D11_BIND_RENDER_TARGET, D3D11_CPU_ACCESS_READ, D3D11_CPU_ACCESS_WRITE,
-            D3D11_CREATE_DEVICE_VIDEO_SUPPORT, D3D11_MAP_READ, D3D11_MAP_WRITE, D3D11_SDK_VERSION,
-            D3D11_TEXTURE2D_DESC, D3D11_USAGE_DEFAULT, D3D11_USAGE_STAGING,
-            D3D11_VIDEO_PROCESSOR_CONTENT_DESC, D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC,
-            D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC, D3D11_VIDEO_PROCESSOR_STREAM,
-            D3D11_VPIV_DIMENSION_TEXTURE2D, D3D11_VPOV_DIMENSION_TEXTURE2D, D3D11CreateDevice,
-            ID3D11Device, ID3D11DeviceContext, ID3D11Texture2D, ID3D11VideoContext,
-            ID3D11VideoDevice, ID3D11VideoProcessor, ID3D11VideoProcessorEnumerator,
-            ID3D11VideoProcessorInputView, ID3D11VideoProcessorOutputView,
-        },
-        Dxgi::{
-            Common::{DXGI_FORMAT, DXGI_FORMAT_NV12, DXGI_FORMAT_YUY2},
-            IDXGIAdapter, IDXGIDevice,
+    Win32::{
+        Foundation::HMODULE,
+        Graphics::{
+            Direct3D::D3D_DRIVER_TYPE_HARDWARE,
+            Direct3D11::{
+                D3D11_BIND_RENDER_TARGET, D3D11_CPU_ACCESS_READ, D3D11_CPU_ACCESS_WRITE,
+                D3D11_CREATE_DEVICE_VIDEO_SUPPORT, D3D11_MAP_READ, D3D11_MAP_WRITE,
+                D3D11_MAPPED_SUBRESOURCE, D3D11_SDK_VERSION, D3D11_TEXTURE2D_DESC,
+                D3D11_USAGE_DEFAULT, D3D11_USAGE_STAGING, D3D11_VIDEO_PROCESSOR_CONTENT_DESC,
+                D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC, D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC,
+                D3D11_VIDEO_PROCESSOR_STREAM, D3D11_VPIV_DIMENSION_TEXTURE2D,
+                D3D11_VPOV_DIMENSION_TEXTURE2D, D3D11CreateDevice, ID3D11Device,
+                ID3D11DeviceContext, ID3D11Texture2D, ID3D11VideoContext, ID3D11VideoDevice,
+                ID3D11VideoProcessor, ID3D11VideoProcessorEnumerator,
+                ID3D11VideoProcessorInputView, ID3D11VideoProcessorOutputView,
+            },
+            Dxgi::{
+                Common::{DXGI_FORMAT, DXGI_FORMAT_NV12, DXGI_FORMAT_YUY2},
+                IDXGIAdapter, IDXGIDevice,
+            },
         },
     },
     core::Interface,
@@ -150,7 +155,7 @@ impl D3D11Converter {
             D3D11CreateDevice(
                 None,
                 D3D_DRIVER_TYPE_HARDWARE,
-                None,
+                HMODULE::default(),
                 D3D11_CREATE_DEVICE_VIDEO_SUPPORT,
                 None,
                 D3D11_SDK_VERSION,
@@ -243,7 +248,7 @@ impl D3D11Converter {
             config.input_height,
             input_dxgi,
             D3D11_USAGE_DEFAULT,
-            D3D11_BIND_RENDER_TARGET.0,
+            D3D11_BIND_RENDER_TARGET.0 as u32,
            0,
        )?;

@@ -253,7 +258,7 @@ impl D3D11Converter {
             config.output_height,
             output_dxgi,
             D3D11_USAGE_DEFAULT,
-            D3D11_BIND_RENDER_TARGET.0,
+            D3D11_BIND_RENDER_TARGET.0 as u32,
             0,
         )?;

@@ -264,7 +269,7 @@ impl D3D11Converter {
             input_dxgi,
             D3D11_USAGE_STAGING,
             0,
-            D3D11_CPU_ACCESS_WRITE.0,
+            D3D11_CPU_ACCESS_WRITE.0 as u32,
         )?;

         let staging_output = create_texture(
@@ -274,7 +279,7 @@ impl D3D11Converter {
             output_dxgi,
             D3D11_USAGE_STAGING,
             0,
-            D3D11_CPU_ACCESS_READ.0,
+            D3D11_CPU_ACCESS_READ.0 as u32,
         )?;

         let resources = D3D11Resources {
@@ -333,12 +338,19 @@ impl FrameConverter for D3D11Converter {
         }

         let pts = input.pts();
-        let mut resources = self.resources.lock();
+        let resources = self.resources.lock();

         unsafe {
-            let mapped = resources
+            let mut mapped = D3D11_MAPPED_SUBRESOURCE::default();
+            resources
                 .context
-                .Map(&resources.staging_input, 0, D3D11_MAP_WRITE, 0)
+                .Map(
+                    &resources.staging_input,
+                    0,
+                    D3D11_MAP_WRITE,
+                    0,
+                    Some(&mut mapped),
+                )
                 .map_err(|e| {
                     ConvertError::ConversionFailed(format!("Map input failed: {:?}", e))
                 })?;
@@ -363,16 +375,21 @@ impl FrameConverter for D3D11Converter {
                 },
             };

-            let input_view: ID3D11VideoProcessorInputView = resources
+            let mut input_view: Option<ID3D11VideoProcessorInputView> = None;
+            resources
                 .video_device
                 .CreateVideoProcessorInputView(
                     &resources.input_texture,
                     &resources.enumerator,
                     &input_view_desc,
+                    Some(&mut input_view),
                 )
                 .map_err(|e| {
                     ConvertError::ConversionFailed(format!("CreateInputView failed: {:?}", e))
                 })?;
+            let input_view = input_view.ok_or_else(|| {
+                ConvertError::ConversionFailed("CreateInputView returned null".to_string())
+            })?;

             let output_view_desc = D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC {
                 ViewDimension: D3D11_VPOV_DIMENSION_TEXTURE2D,
@@ -384,16 +401,21 @@ impl FrameConverter for D3D11Converter {
                 },
             };

-            let output_view: ID3D11VideoProcessorOutputView = resources
+            let mut output_view: Option<ID3D11VideoProcessorOutputView> = None;
+            resources
                 .video_device
                 .CreateVideoProcessorOutputView(
                     &resources.output_texture,
                     &resources.enumerator,
                     &output_view_desc,
+                    Some(&mut output_view),
                 )
                 .map_err(|e| {
                     ConvertError::ConversionFailed(format!("CreateOutputView failed: {:?}", e))
                 })?;
+            let output_view = output_view.ok_or_else(|| {
+                ConvertError::ConversionFailed("CreateOutputView returned null".to_string())
+            })?;

             let stream = D3D11_VIDEO_PROCESSOR_STREAM {
                 Enable: true.into(),
@@ -405,7 +427,7 @@ impl FrameConverter for D3D11Converter {
                 pInputSurface: std::mem::transmute_copy(&input_view),
                 ppFutureSurfaces: ptr::null_mut(),
                 ppPastSurfacesRight: ptr::null_mut(),
-                pInputSurfaceRight: None,
+                pInputSurfaceRight: ManuallyDrop::new(None),
                 ppFutureSurfacesRight: ptr::null_mut(),
             };

@@ -427,9 +449,16 @@ impl FrameConverter for D3D11Converter {
                 .context
                 .CopyResource(&resources.staging_output, &resources.output_texture);

-            let mapped = resources
+            let mut mapped = D3D11_MAPPED_SUBRESOURCE::default();
+            resources
                 .context
-                .Map(&resources.staging_output, 0, D3D11_MAP_READ, 0)
+                .Map(
+                    &resources.staging_output,
+                    0,
+                    D3D11_MAP_READ,
+                    0,
+                    Some(&mut mapped),
+                )
                 .map_err(|e| {
                     ConvertError::ConversionFailed(format!("Map output failed: {:?}", e))
                 })?;
@@ -494,16 +523,20 @@ fn create_texture(
             Quality: 0,
         },
         Usage: usage,
-        BindFlags: windows::Win32::Graphics::Direct3D11::D3D11_BIND_FLAG(bind_flags as i32),
-        CPUAccessFlags: windows::Win32::Graphics::Direct3D11::D3D11_CPU_ACCESS_FLAG(
-            cpu_access as i32,
-        ),
-        MiscFlags: windows::Win32::Graphics::Direct3D11::D3D11_RESOURCE_MISC_FLAG(0),
+        BindFlags: bind_flags,
+        CPUAccessFlags: cpu_access,
+        MiscFlags: 0,
     };

     unsafe {
-        device.CreateTexture2D(&desc, None).map_err(|e| {
-            ConvertError::HardwareUnavailable(format!("CreateTexture2D failed: {:?}", e))
+        let mut texture: Option<ID3D11Texture2D> = None;
+        device
+            .CreateTexture2D(&desc, None, Some(&mut texture))
+            .map_err(|e| {
+                ConvertError::HardwareUnavailable(format!("CreateTexture2D failed: {:?}", e))
+            })?;
+        texture.ok_or_else(|| {
+            ConvertError::HardwareUnavailable("CreateTexture2D returned null".to_string())
         })
     }
 }
@@ -515,29 +548,35 @@ unsafe fn copy_frame_to_mapped(frame: &frame::Video, dst: *mut u8, dst_stride: u
     match format {
         Pixel::NV12 => {
             for y in 0..height {
-                ptr::copy_nonoverlapping(
-                    frame.data(0).as_ptr().add(y * frame.stride(0)),
-                    dst.add(y * dst_stride),
-                    frame.width() as usize,
-                );
+                unsafe {
+                    ptr::copy_nonoverlapping(
+                        frame.data(0).as_ptr().add(y * frame.stride(0)),
+                        dst.add(y * dst_stride),
+                        frame.width() as usize,
+                    );
+                }
             }
             let uv_offset = height * dst_stride;
             for y in 0..height / 2 {
-                ptr::copy_nonoverlapping(
-                    frame.data(1).as_ptr().add(y * frame.stride(1)),
-                    dst.add(uv_offset + y * dst_stride),
-                    frame.width() as usize,
-                );
+                unsafe {
+                    ptr::copy_nonoverlapping(
+                        frame.data(1).as_ptr().add(y * frame.stride(1)),
+                        dst.add(uv_offset + y * dst_stride),
+                        frame.width() as usize,
+                    );
+                }
             }
         }
         Pixel::YUYV422 | Pixel::UYVY422 => {
             let row_bytes = frame.width() as usize * 2;
             for y in 0..height {
-                ptr::copy_nonoverlapping(
-                    frame.data(0).as_ptr().add(y * frame.stride(0)),
-                    dst.add(y * dst_stride),
-                    row_bytes,
-                );
+                unsafe {
+                    ptr::copy_nonoverlapping(
+                        frame.data(0).as_ptr().add(y * frame.stride(0)),
+                        dst.add(y * dst_stride),
+                        row_bytes,
+                    );
+                }
             }
         }
         _ => {}
@@ -551,30 +590,36 @@ unsafe fn copy_mapped_to_frame(src: *const u8, src_stride: usize, frame: &mut fr
     match format {
         Pixel::NV12 => {
             for y in 0..height {
-                ptr::copy_nonoverlapping(
-                    src.add(y * src_stride),
-                    frame.data_mut(0).as_mut_ptr().add(y * frame.stride(0)),
-                    frame.width() as usize,
-                );
+                unsafe {
+                    ptr::copy_nonoverlapping(
+                        src.add(y * src_stride),
+                        frame.data_mut(0).as_mut_ptr().add(y * frame.stride(0)),
+                        frame.width() as usize,
+                    );
+                }
             }
             let uv_offset = height * src_stride;
             for y in 0..height / 2 {
-                ptr::copy_nonoverlapping(
-                    src.add(uv_offset + y * src_stride),
-                    frame.data_mut(1).as_mut_ptr().add(y * frame.stride(1)),
-                    frame.width() as usize,
-                );
+                unsafe {
+                    ptr::copy_nonoverlapping(
+                        src.add(uv_offset + y * src_stride),
+                        frame.data_mut(1).as_mut_ptr().add(y * frame.stride(1)),
+                        frame.width() as usize,
+                    );
+                }
             }
         }
         Pixel::YUYV422 => {
             let bytes_per_pixel = 2;
             let row_bytes = frame.width() as usize * bytes_per_pixel;
             for y in 0..height {
-                ptr::copy_nonoverlapping(
-                    src.add(y * src_stride),
-                    frame.data_mut(0).as_mut_ptr().add(y * frame.stride(0)),
-                    row_bytes,
-                );
+                unsafe {
+                    ptr::copy_nonoverlapping(
+                        src.add(y * src_stride),
+                        frame.data_mut(0).as_mut_ptr().add(y * frame.stride(0)),
+                        row_bytes,
+                    );
+                }
             }
         }
         _ => {}
diff --git a/crates/recording/Cargo.toml b/crates/recording/Cargo.toml
index 43e712cd34..36aceb0a2e 100644
--- a/crates/recording/Cargo.toml
+++ b/crates/recording/Cargo.toml
@@ -74,6 +74,7 @@ windows = { workspace = true, features = [
     "Win32_Graphics_Gdi",
     "Win32_UI_WindowsAndMessaging",
     "Win32_System_Performance",
+    "Win32_Storage_Xps",
 ] }

 cap-enc-mediafoundation = { path = "../enc-mediafoundation" }
diff --git a/crates/recording/src/feeds/camera.rs b/crates/recording/src/feeds/camera.rs
index 2c2ffe0f1a..62d4044430 100644
--- a/crates/recording/src/feeds/camera.rs
+++ b/crates/recording/src/feeds/camera.rs
@@ -562,10 +562,17 @@ async fn setup_camera(
                 use windows::Win32::Media::MediaFoundation::MFCreateMemoryBuffer;

                 let data_len = bytes.len();
-                if let Ok(buffer) = (unsafe { MFCreateMemoryBuffer(data_len as u32) }) {
-                    if let Ok(mut lock) = buffer.lock() {
-                        lock.copy_from_slice(&*bytes);
-                        drop(lock);
+                if let Ok(buffer) = unsafe { MFCreateMemoryBuffer(data_len as u32) } {
+                    let buffer_ready = {
+                        if let Ok(mut lock) = buffer.lock() {
+                            lock.copy_from_slice(&*bytes);
+                            true
+                        } else {
+                            false
+                        }
+                    };
+
+                    if buffer_ready {
                         let _ = unsafe { buffer.SetCurrentLength(data_len as u32) };

                         let _ = native_recipient
diff --git a/crates/recording/src/output_pipeline/win.rs b/crates/recording/src/output_pipeline/win.rs
index b1c2cae30d..3dc0dbf184 100644
--- a/crates/recording/src/output_pipeline/win.rs
+++ b/crates/recording/src/output_pipeline/win.rs
@@ -19,7 +19,7 @@ use windows::{
     Graphics::SizeInt32,
     Win32::Graphics::{
         Direct3D11::ID3D11Device,
-        Dxgi::Common::{DXGI_FORMAT, DXGI_FORMAT_NV12, DXGI_FORMAT_UYVY, DXGI_FORMAT_YUY2},
+        Dxgi::Common::{DXGI_FORMAT, DXGI_FORMAT_NV12, DXGI_FORMAT_YUY2},
     },
 };

@@ -394,6 +394,9 @@ pub struct NativeCameraFrame {
     pub timestamp: Timestamp,
 }

+unsafe impl Send for NativeCameraFrame {}
+unsafe impl Sync for NativeCameraFrame {}
+
 impl VideoFrame for NativeCameraFrame {
     fn timestamp(&self) -> Timestamp {
         self.timestamp
@@ -404,8 +407,9 @@ impl NativeCameraFrame {
     pub fn dxgi_format(&self) -> DXGI_FORMAT {
         match self.pixel_format {
             cap_camera_windows::PixelFormat::NV12 => DXGI_FORMAT_NV12,
-            cap_camera_windows::PixelFormat::YUYV422 => DXGI_FORMAT_YUY2,
-            cap_camera_windows::PixelFormat::UYVY422 => DXGI_FORMAT_UYVY,
+            cap_camera_windows::PixelFormat::YUYV422 | cap_camera_windows::PixelFormat::UYVY422 => {
+                DXGI_FORMAT_YUY2
+            }
             _ => DXGI_FORMAT_NV12,
         }
     }
@@ -511,9 +515,9 @@ impl Muxer for WindowsCameraMuxer {
             let mut output_guard = match output.lock() {
                 Ok(guard) => guard,
                 Err(poisoned) => {
-                    let err = anyhow!("Failed to lock output mutex: {}", poisoned);
-                    let _ = ready_tx.send(Err(err.clone().into()));
-                    return Err(err);
+                    let msg = format!("Failed to lock output mutex: {}", poisoned);
+                    let _ = ready_tx.send(Err(anyhow!("{}", msg)));
+                    return Err(anyhow!("{}", msg));
                 }
             };

@@ -531,16 +535,16 @@ impl Muxer for WindowsCameraMuxer {
                     match muxer {
                         Ok(muxer) => (encoder, muxer),
                         Err(err) => {
-                            let err = anyhow!("Failed to create muxer: {err}");
-                            let _ = ready_tx.send(Err(err.clone().into()));
-                            return Err(err);
+                            let msg = format!("Failed to create muxer: {err}");
+                            let _ = ready_tx.send(Err(anyhow!("{}", msg)));
+                            return Err(anyhow!("{}", msg));
                         }
                     }
                 }
                 Err(err) => {
-                    let err = anyhow!("Failed to create H264 encoder: {err}");
-                    let _ = ready_tx.send(Err(err.clone().into()));
-                    return Err(err);
+                    let msg = format!("Failed to create H264 encoder: {err}");
+                    let _ = ready_tx.send(Err(anyhow!("{}", msg)));
+                    return Err(anyhow!("{}", msg));
                 }
             };

@@ -562,8 +566,8 @@ impl Muxer for WindowsCameraMuxer {

         let mut first_timestamp: Option<Duration> = None;
         let mut frame_count = 0u64;

-        let process_frame = |frame: NativeCameraFrame,
-                             timestamp: Duration|
+        let mut process_frame = |frame: NativeCameraFrame,
+                                 timestamp: Duration|
          -> windows::core::Result<
             Option<(
                 windows::Win32::Graphics::Direct3D11::ID3D11Texture2D,
@@ -667,7 +671,9 @@ impl VideoMuxer for WindowsCameraMuxer {
         timestamp: Duration,
     ) -> anyhow::Result<()> {
         if let Some(timestamp) = self.pause.adjust(timestamp)? {
-            self.video_tx.send(Some((frame, timestamp)))?;
+            self.video_tx
+                .send(Some((frame, timestamp)))
+                .map_err(|_| anyhow!("Video channel closed"))?;
         }

         Ok(())
diff --git a/crates/recording/src/screenshot.rs b/crates/recording/src/screenshot.rs
index f3d1dfee05..7fa33ba8a3 100644
--- a/crates/recording/src/screenshot.rs
+++ b/crates/recording/src/screenshot.rs
@@ -1,5 +1,7 @@
 use crate::sources::screen_capture::ScreenCaptureTarget;
-use anyhow::{Context, anyhow};
+#[cfg(target_os = "macos")]
+use anyhow::Context;
+use anyhow::anyhow;
 use image::RgbImage;
 #[cfg(target_os = "macos")]
 use scap_ffmpeg::AsFFmpeg;
@@ -29,11 +31,11 @@ use windows::Win32::Graphics::Direct3D11::{
 };
 #[cfg(target_os = "windows")]
 use windows::Win32::Graphics::Gdi::{
-    BI_RGB, BITMAPINFO, BITMAPINFOHEADER, BitBlt, CAPTUREBLT, CreateCompatibleDC, CreateDIBSection,
-    DIB_RGB_COLORS, DeleteDC, DeleteObject, GetDC, HBITMAP, HDC, ReleaseDC, SRCCOPY, SelectObject,
+    BITMAPINFO, BITMAPINFOHEADER, BitBlt, CAPTUREBLT, CreateCompatibleDC, CreateDIBSection,
+    DIB_RGB_COLORS, DeleteDC, DeleteObject, GetDC, HDC, ReleaseDC, SRCCOPY, SelectObject,
 };
 #[cfg(target_os = "windows")]
-use windows::Win32::UI::WindowsAndMessaging::{PW_RENDERFULLCONTENT, PrintWindow};
+use windows::Win32::Storage::Xps::{PRINT_WINDOW_FLAGS, PrintWindow};

 #[cfg(target_os = "windows")]
 const WINDOWS_CAPTURE_UNSUPPORTED: &str =
@@ -210,11 +212,11 @@ fn try_fast_capture(target: &ScreenCaptureTarget) -> Option<RgbImage> {

 #[cfg(target_os = "windows")]
 fn shared_d3d_device() -> anyhow::Result<&'static ID3D11Device> {
-    static DEVICE: OnceLock<ID3D11Device> = OnceLock::new();
+    static DEVICE: OnceLock<Option<ID3D11Device>> = OnceLock::new();

-    DEVICE.get_or_try_init(|| {
+    let device = DEVICE.get_or_init(|| {
         let mut device = None;
-        unsafe {
+        let result = unsafe {
             D3D11CreateDevice(
                 None,
                 D3D_DRIVER_TYPE_HARDWARE,
@@ -226,11 +228,18 @@ fn shared_d3d_device() -> anyhow::Result<&'static ID3D11Device> {
                 None,
                 None,
             )
+        };
+
+        if result.is_err() {
+            return None;
         }
-        .map_err(|e| anyhow!("Failed to create D3D11 device: {e:?}"))?;
-        device.ok_or_else(|| anyhow!("D3D11 device unavailable"))
-    })
+
+        device
+    });
+
+    device
+        .as_ref()
+        .ok_or_else(|| anyhow!("D3D11 device unavailable"))
 }

 #[cfg(target_os = "windows")]
@@ -332,12 +341,12 @@ fn capture_bitmap_with(
         return Err(unsupported_error());
     }

-    if base_dc.0 == 0 {
+    if base_dc.0.is_null() {
         return Err(unsupported_error());
     }

     let mem_dc = unsafe { CreateCompatibleDC(Some(base_dc)) };
-    if mem_dc.0 == 0 {
+    if mem_dc.0.is_null() {
         return Err(unsupported_error());
     }
@@ -348,7 +357,7 @@ fn capture_bitmap_with(
             biHeight: -height,
             biPlanes: 1,
             biBitCount: 32,
-            biCompression: BI_RGB as u32,
+            biCompression: 0,
             biSizeImage: 0,
             biXPelsPerMeter: 0,
             biYPelsPerMeter: 0,
@@ -359,13 +368,17 @@ fn capture_bitmap_with(
     };

     let mut data = std::ptr::null_mut();
-    let bitmap = unsafe { CreateDIBSection(mem_dc, &mut info, DIB_RGB_COLORS, &mut data, None, 0) };
-    if bitmap.0 == 0 || data.is_null() {
-        unsafe {
-            DeleteDC(mem_dc);
+    let bitmap =
+        unsafe { CreateDIBSection(Some(mem_dc), &mut info, DIB_RGB_COLORS, &mut data, None, 0) };
+    let bitmap = match bitmap {
+        Ok(b) if !b.0.is_null() && !data.is_null() => b,
+        _ => {
+            unsafe {
+                let _ = DeleteDC(mem_dc);
+            }
+            return Err(unsupported_error());
         }
-        return Err(unsupported_error());
-    }
+    };

     let old_obj = unsafe { SelectObject(mem_dc, bitmap.into()) };
@@ -387,8 +400,8 @@ fn capture_bitmap_with(

     unsafe {
         SelectObject(mem_dc, old_obj);
-        DeleteObject(bitmap.into());
-        DeleteDC(mem_dc);
+        let _ = DeleteObject(bitmap.into());
+        let _ = DeleteDC(mem_dc);
     }

     result
@@ -415,21 +428,20 @@ fn capture_display_bounds(
     let screen_dc = unsafe { GetDC(None) };

     let result = capture_bitmap_with(screen_dc, width, height, |mem_dc| {
-        let res = unsafe {
+        unsafe {
             BitBlt(
                 mem_dc,
                 0,
                 0,
                 width,
                 height,
-                screen_dc,
+                Some(screen_dc),
                 src_x,
                 src_y,
                 SRCCOPY | CAPTUREBLT,
             )
-        };
-
-        res.as_bool().then_some(()).ok_or_else(unsupported_error)
+        }
+        .map_err(|_| unsupported_error())
     });

     unsafe {
         ReleaseDC(None, screen_dc);
     }
@@ -443,40 +455,43 @@
 fn capture_window_bitmap(hwnd: HWND, width: i32, height: i32) -> anyhow::Result<RgbImage> {
-    let window_dc = unsafe { GetDC(hwnd) };
+    let window_dc = unsafe { GetDC(Some(hwnd)) };

     let result = capture_bitmap_with(window_dc, width, height, |mem_dc| {
-        let res = unsafe {
+        unsafe {
             BitBlt(
                 mem_dc,
                 0,
                 0,
                 width,
                 height,
-                window_dc,
+                Some(window_dc),
                 0,
                 0,
                 SRCCOPY | CAPTUREBLT,
             )
-        };
-
-        res.as_bool().then_some(()).ok_or_else(unsupported_error)
+        }
+        .map_err(|_| unsupported_error())
     });

     unsafe {
-        ReleaseDC(hwnd, window_dc);
+        ReleaseDC(Some(hwnd), window_dc);
     }

     result
 }

 #[cfg(target_os = "windows")]
 fn capture_window_print(hwnd: HWND, width: i32, height: i32) -> anyhow::Result<RgbImage> {
-    let window_dc = unsafe { GetDC(hwnd) };
+    let window_dc = unsafe { GetDC(Some(hwnd)) };

     let result = capture_bitmap_with(window_dc, width, height, |mem_dc| {
-        let res = unsafe { PrintWindow(hwnd, mem_dc, PW_RENDERFULLCONTENT.0 as u32) };
+        let res = unsafe { PrintWindow(hwnd, mem_dc, PRINT_WINDOW_FLAGS(2)) };

-        res.as_bool().then_some(()).ok_or_else(unsupported_error)
+        if res.as_bool() {
+            Ok(())
+        } else {
+            Err(unsupported_error())
+        }
     });

     unsafe {
-        ReleaseDC(hwnd, window_dc);
+        ReleaseDC(Some(hwnd), window_dc);
     }

     result
 }
@@ -596,7 +611,7 @@ fn try_fast_capture(target: &ScreenCaptureTarget) -> Option<RgbImage> {

     let res = rx.recv_timeout(Duration::from_millis(500));
     let _ = capturer.stop();
-    let image = res.ok()??;
+    let image = res.ok()?.ok()?;
     debug!("Windows fast capture completed in {:?}", start.elapsed());
     Some(image)
 }