Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions cts_runner/test.lst
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
// ```
unittests:*
webgpu:api,operation,buffers,createBindGroup:buffer_binding_resource:*
webgpu:api,operation,buffers,map:mapAsync,read:*
webgpu:api,operation,command_buffer,basic:*
webgpu:api,operation,command_buffer,copyBufferToBuffer:*
fails-if(vulkan) webgpu:api,operation,command_buffer,copyTextureToTexture:copy_depth_stencil:format="depth24plus"
Expand Down
3 changes: 0 additions & 3 deletions tests/tests/wgpu-gpu/cloneable_types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -28,9 +28,6 @@ fn cloneable_buffers(ctx: TestingContext) {

buffer.unmap();

// This is actually a bug, we should not need to call submit to make the buffer contents visible.
ctx.queue.submit([]);

let cloned_buffer = buffer.clone();
let cloned_buffer_contents = buffer_contents.clone();

Expand Down
5 changes: 3 additions & 2 deletions wgpu-core/src/conv.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,15 +26,16 @@ pub fn is_valid_external_image_copy_dst_texture_format(format: wgt::TextureForma
}
}

pub fn map_buffer_usage(usage: wgt::BufferUsages) -> wgt::BufferUses {
pub fn map_buffer_usage(usage: wgt::BufferUsages, mapped_at_creation: bool) -> wgt::BufferUses {
let mut u = wgt::BufferUses::empty();
u.set(
wgt::BufferUses::MAP_READ,
usage.contains(wgt::BufferUsages::MAP_READ),
);
u.set(
wgt::BufferUses::MAP_WRITE,
usage.contains(wgt::BufferUsages::MAP_WRITE),
usage.contains(wgt::BufferUsages::MAP_WRITE)
|| (usage.contains(wgt::BufferUsages::MAP_READ) && mapped_at_creation),
);
u.set(
wgt::BufferUses::COPY_SRC,
Expand Down
7 changes: 5 additions & 2 deletions wgpu-core/src/device/resource.rs
Original file line number Diff line number Diff line change
Expand Up @@ -900,7 +900,7 @@ impl Device {
}
}

let mut usage = conv::map_buffer_usage(desc.usage);
let mut usage = conv::map_buffer_usage(desc.usage, desc.mapped_at_creation);

if desc.usage.contains(wgt::BufferUsages::INDIRECT) {
self.require_downlevel_flags(wgt::DownlevelFlags::INDIRECT_EXECUTION)?;
Expand Down Expand Up @@ -990,7 +990,10 @@ impl Device {

let buffer_use = if !desc.mapped_at_creation {
wgt::BufferUses::empty()
} else if desc.usage.contains(wgt::BufferUsages::MAP_WRITE) {
} else if desc
.usage
.intersects(wgt::BufferUsages::MAP_WRITE | wgt::BufferUsages::MAP_READ)
{
// The buffer is host-mappable (MAP_READ or MAP_WRITE), so perform the
// initial mapping right away at creation time.
let map_size = buffer.size;
let mapping = if map_size == 0 {
Expand Down
40 changes: 31 additions & 9 deletions wgpu-hal/src/gles/command.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ use core::{mem, ops::Range};

use arrayvec::ArrayVec;

use super::{conv, Command as C};
use super::{conv, BufferBacking, Command as C};

#[derive(Clone, Copy, Debug, Default)]
struct TextureSlotDesc {
Expand Down Expand Up @@ -291,9 +291,13 @@ impl crate::CommandEncoder for super::CommandEncoder {
if !bar.usage.from.contains(wgt::BufferUses::STORAGE_READ_WRITE) {
continue;
}
let buffer = match &bar.buffer.backing {
&BufferBacking::Gl { raw } | &BufferBacking::GlCachedOnHost { raw, .. } => raw,
BufferBacking::Host { .. } => unreachable!(),
};
self.cmd_buffer
.commands
.push(C::BufferBarrier(bar.buffer.raw.unwrap(), bar.usage.to));
.push(C::BufferBarrier(buffer, bar.usage.to));
}
}

Expand Down Expand Up @@ -1001,9 +1005,11 @@ impl crate::CommandEncoder for super::CommandEncoder {
) {
self.state.index_offset = binding.offset;
self.state.index_format = format;
self.cmd_buffer
.commands
.push(C::SetIndexBuffer(binding.buffer.raw.unwrap()));
let buffer = match &binding.buffer.backing {
&BufferBacking::Gl { raw } | &BufferBacking::GlCachedOnHost { raw, .. } => raw,
BufferBacking::Host { .. } => unreachable!(),
};
self.cmd_buffer.commands.push(C::SetIndexBuffer(buffer));
}
unsafe fn set_vertex_buffer<'a>(
&mut self,
Expand All @@ -1012,8 +1018,12 @@ impl crate::CommandEncoder for super::CommandEncoder {
) {
self.state.dirty_vbuf_mask |= 1 << index;
let (_, ref mut vb) = self.state.vertex_buffers[index as usize];
let raw = match &binding.buffer.backing {
&BufferBacking::Gl { raw } | &BufferBacking::GlCachedOnHost { raw, .. } => raw,
BufferBacking::Host { .. } => unreachable!(),
};
*vb = Some(super::BufferBinding {
raw: binding.buffer.raw.unwrap(),
raw,
offset: binding.offset,
});
}
Expand Down Expand Up @@ -1107,10 +1117,14 @@ impl crate::CommandEncoder for super::CommandEncoder {
for draw in 0..draw_count as wgt::BufferAddress {
let indirect_offset =
offset + draw * size_of::<wgt::DrawIndirectArgs>() as wgt::BufferAddress;
let indirect_buf = match &buffer.backing {
&BufferBacking::Gl { raw } | &BufferBacking::GlCachedOnHost { raw, .. } => raw,
BufferBacking::Host { .. } => unreachable!(),
};
#[allow(clippy::clone_on_copy)] // False positive when cloning glow::UniformLocation
self.cmd_buffer.commands.push(C::DrawIndirect {
topology: self.state.topology,
indirect_buf: buffer.raw.unwrap(),
indirect_buf,
indirect_offset,
first_instance_location: self.state.first_instance_location.clone(),
});
Expand All @@ -1130,11 +1144,15 @@ impl crate::CommandEncoder for super::CommandEncoder {
for draw in 0..draw_count as wgt::BufferAddress {
let indirect_offset =
offset + draw * size_of::<wgt::DrawIndexedIndirectArgs>() as wgt::BufferAddress;
let indirect_buf = match &buffer.backing {
&BufferBacking::Gl { raw } | &BufferBacking::GlCachedOnHost { raw, .. } => raw,
BufferBacking::Host { .. } => unreachable!(),
};
#[allow(clippy::clone_on_copy)] // False positive when cloning glow::UniformLocation
self.cmd_buffer.commands.push(C::DrawIndexedIndirect {
topology: self.state.topology,
index_type,
indirect_buf: buffer.raw.unwrap(),
indirect_buf,
indirect_offset,
first_instance_location: self.state.first_instance_location.clone(),
});
Expand Down Expand Up @@ -1221,8 +1239,12 @@ impl crate::CommandEncoder for super::CommandEncoder {
self.cmd_buffer.commands.push(C::Dispatch(count));
}
unsafe fn dispatch_indirect(&mut self, buffer: &super::Buffer, offset: wgt::BufferAddress) {
let indirect_buf = match &buffer.backing {
&BufferBacking::Gl { raw } | &BufferBacking::GlCachedOnHost { raw, .. } => raw,
BufferBacking::Host { .. } => unreachable!(),
};
self.cmd_buffer.commands.push(C::DispatchIndirect {
indirect_buf: buffer.raw.unwrap(),
indirect_buf,
indirect_offset: offset,
});
}
Expand Down
121 changes: 79 additions & 42 deletions wgpu-hal/src/gles/device.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ use arrayvec::ArrayVec;
use glow::HasContext;
use naga::FastHashMap;

use super::{conv, lock, MaybeMutex, PrivateCapabilities};
use super::{conv, lock, BufferBacking, MaybeMutex, PrivateCapabilities};
use crate::auxil::map_naga_stage;
use crate::TlasInstance;

Expand Down Expand Up @@ -526,13 +526,19 @@ impl crate::Device for super::Device {
.private_caps
.contains(PrivateCapabilities::BUFFER_ALLOCATION);

if emulate_map && desc.usage.intersects(wgt::BufferUses::MAP_WRITE) {
let host_backed_bytes = || Arc::new(MaybeMutex::new(vec![0; desc.size as usize]));

if emulate_map
&& (desc.usage.intersects(wgt::BufferUses::MAP_WRITE)
&& !desc.usage.intersects(wgt::BufferUses::MAP_READ))
{
return Ok(super::Buffer {
raw: None,
backing: BufferBacking::Host {
data: host_backed_bytes(),
},
target,
size: desc.size,
map_flags: 0,
data: Some(Arc::new(MaybeMutex::new(vec![0; desc.size as usize]))),
offset_of_current_mapping: Arc::new(MaybeMutex::new(0)),
});
}
Expand Down Expand Up @@ -560,8 +566,8 @@ impl crate::Device for super::Device {
map_flags |= glow::MAP_WRITE_BIT;
}

let raw = Some(unsafe { gl.create_buffer() }.map_err(|_| crate::DeviceError::OutOfMemory)?);
unsafe { gl.bind_buffer(target, raw) };
let raw = unsafe { gl.create_buffer() }.map_err(|_| crate::DeviceError::OutOfMemory)?;
unsafe { gl.bind_buffer(target, Some(raw)) };
let raw_size = desc
.size
.try_into()
Expand Down Expand Up @@ -614,33 +620,44 @@ impl crate::Device for super::Device {
.private_caps
.contains(PrivateCapabilities::DEBUG_FNS)
{
let name = raw.map_or(0, |buf| buf.0.get());
let name = raw.0.get();
unsafe { gl.object_label(glow::BUFFER, name, Some(label)) };
}
}

let data = if emulate_map && desc.usage.contains(wgt::BufferUses::MAP_READ) {
Some(Arc::new(MaybeMutex::new(vec![0; desc.size as usize])))
let backing = if emulate_map && desc.usage.contains(wgt::BufferUses::MAP_READ) {
BufferBacking::GlCachedOnHost {
cache: host_backed_bytes(),
raw,
writeable_while_mapped: desc.usage.contains(wgt::BufferUses::MAP_WRITE),
}
} else {
None
BufferBacking::Gl { raw }
};

self.counters.buffers.add(1);

Ok(super::Buffer {
raw,
backing,
target,
size: desc.size,
map_flags,
data,
offset_of_current_mapping: Arc::new(MaybeMutex::new(0)),
})
}

unsafe fn destroy_buffer(&self, buffer: super::Buffer) {
if let Some(raw) = buffer.raw {
let gl = &self.shared.context.lock();
unsafe { gl.delete_buffer(raw) };
match buffer.backing {
BufferBacking::Gl { raw }
| BufferBacking::GlCachedOnHost {
raw,
cache: _,
writeable_while_mapped: _,
} => {
let gl = &self.shared.context.lock();
unsafe { gl.delete_buffer(raw) };
}
BufferBacking::Host { data: _ } => {}
}

self.counters.buffers.sub(1);
Expand All @@ -656,33 +673,36 @@ impl crate::Device for super::Device {
range: crate::MemoryRange,
) -> Result<crate::BufferMapping, crate::DeviceError> {
let is_coherent = buffer.map_flags & glow::MAP_COHERENT_BIT != 0;
let ptr = match buffer.raw {
None => {
let mut vec = lock(buffer.data.as_ref().unwrap());
let ptr = match &buffer.backing {
BufferBacking::Host { data } => {
let mut vec = lock(data);
let slice = &mut vec.as_mut_slice()[range.start as usize..range.end as usize];
slice.as_mut_ptr()
}
Some(raw) => {
&BufferBacking::Gl { raw } => {
let gl = &self.shared.context.lock();
unsafe { gl.bind_buffer(buffer.target, Some(raw)) };
let ptr = if let Some(ref map_read_allocation) = buffer.data {
let mut guard = lock(map_read_allocation);
let slice = guard.as_mut_slice();
unsafe { self.shared.get_buffer_sub_data(gl, buffer.target, 0, slice) };
slice.as_mut_ptr()
} else {
*lock(&buffer.offset_of_current_mapping) = range.start;
unsafe {
gl.map_buffer_range(
buffer.target,
range.start as i32,
(range.end - range.start) as i32,
buffer.map_flags,
)
}
};
unsafe { gl.bind_buffer(buffer.target, None) };
ptr
*lock(&buffer.offset_of_current_mapping) = range.start;
unsafe {
gl.map_buffer_range(
buffer.target,
range.start as i32,
(range.end - range.start) as i32,
buffer.map_flags,
)
}
}
&BufferBacking::GlCachedOnHost {
raw,
ref cache,
writeable_while_mapped: _,
} => {
let gl = &self.shared.context.lock();
unsafe { gl.bind_buffer(buffer.target, Some(raw)) };
let mut guard = lock(cache);
let slice = guard.as_mut_slice();
unsafe { self.shared.get_buffer_sub_data(gl, buffer.target, 0, slice) };
slice.as_mut_ptr()
}
};
Ok(crate::BufferMapping {
Expand All @@ -691,22 +711,34 @@ impl crate::Device for super::Device {
})
}
unsafe fn unmap_buffer(&self, buffer: &super::Buffer) {
if let Some(raw) = buffer.raw {
if buffer.data.is_none() {
match &buffer.backing {
&BufferBacking::Gl { raw } => {
let gl = &self.shared.context.lock();
unsafe { gl.bind_buffer(buffer.target, Some(raw)) };
unsafe { gl.unmap_buffer(buffer.target) };
unsafe { gl.bind_buffer(buffer.target, None) };
*lock(&buffer.offset_of_current_mapping) = 0;
}
&BufferBacking::GlCachedOnHost {
raw,
ref cache,
writeable_while_mapped,
} if writeable_while_mapped => {
let gl = &self.shared.context.lock();
let data = lock(cache);
unsafe { gl.bind_buffer(buffer.target, Some(raw)) };
unsafe { gl.buffer_sub_data_u8_slice(buffer.target, 0, &data) };
unsafe { gl.bind_buffer(buffer.target, None) };
}
&BufferBacking::Host { .. } | &BufferBacking::GlCachedOnHost { .. } => {}
}
}
unsafe fn flush_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I)
where
I: Iterator<Item = crate::MemoryRange>,
{
if let Some(raw) = buffer.raw {
if buffer.data.is_none() {
match &buffer.backing {
&BufferBacking::Gl { raw } => {
let gl = &self.shared.context.lock();
unsafe { gl.bind_buffer(buffer.target, Some(raw)) };
for range in ranges {
Expand All @@ -720,6 +752,7 @@ impl crate::Device for super::Device {
};
}
}
&BufferBacking::Host { .. } | &BufferBacking::GlCachedOnHost { .. } => {}
}
}
unsafe fn invalidate_mapped_ranges<I>(&self, _buffer: &super::Buffer, _ranges: I) {
Expand Down Expand Up @@ -1261,7 +1294,11 @@ impl crate::Device for super::Device {
wgt::BindingType::Buffer { .. } => {
let bb = &desc.buffers[entry.resource_index as usize];
super::RawBinding::Buffer {
raw: bb.buffer.raw.unwrap(),
raw: match &bb.buffer.backing {
&BufferBacking::Gl { raw }
| &BufferBacking::GlCachedOnHost { raw, .. } => raw,
&BufferBacking::Host { .. } => unreachable!(),
},
offset: bb.offset as i32,
size: match bb.size {
Some(s) => s.get() as i32,
Expand Down
Loading
Loading