diff --git a/Cargo.lock b/Cargo.lock index 3b63ac9f7c..90c4ab1840 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1807,7 +1807,6 @@ dependencies = [ "loom", "naga", "parking_lot", - "range-alloc", "raw-window-handle", "ron", "serde", diff --git a/wgpu-core/Cargo.toml b/wgpu-core/Cargo.toml index 8bdb81c342..453d779c68 100644 --- a/wgpu-core/Cargo.toml +++ b/wgpu-core/Cargo.toml @@ -38,7 +38,6 @@ gpu-descriptor = { version = "0.1", features = ["tracing"] } hal = { package = "gfx-hal", git = "https://github.com/gfx-rs/gfx", rev = "2fd74dbe1562a3eef05b11dcd300c1c9c9bc12a8" } gfx-backend-empty = { git = "https://github.com/gfx-rs/gfx", rev = "2fd74dbe1562a3eef05b11dcd300c1c9c9bc12a8" } -range-alloc = {git = "https://github.com/gfx-rs/gfx", rev = "2fd74dbe1562a3eef05b11dcd300c1c9c9bc12a8"} [target.'cfg(all(not(target_arch = "wasm32"), all(unix, not(target_os = "ios"), not(target_os = "macos"))))'.dependencies] gfx-backend-vulkan = { git = "https://github.com/gfx-rs/gfx", rev = "2fd74dbe1562a3eef05b11dcd300c1c9c9bc12a8", features = ["naga"] } diff --git a/wgpu-core/src/device/mod.rs b/wgpu-core/src/device/mod.rs index 58067f953e..c2cf35b0b3 100644 --- a/wgpu-core/src/device/mod.rs +++ b/wgpu-core/src/device/mod.rs @@ -210,7 +210,7 @@ fn map_buffer( let zero_init_needs_flush_now = !block.is_coherent() && buffer.sync_mapped_writes.is_none(); // No need to flush if it is flushed later anyways. 
for uninitialized_range in buffer .initialization_status - .drain_uninitialized_ranges(&(offset..(size + offset))) + .drain_uninitialized_ranges(offset..(size + offset)) { let num_bytes = uninitialized_range.end - uninitialized_range.start; unsafe { @@ -2608,11 +2608,11 @@ impl Global { unsafe { ptr::write_bytes(ptr.as_ptr(), 0, buffer.size as usize) }; buffer .initialization_status - .drain_uninitialized_ranges(&(0..buffer.size)) + .drain_uninitialized_ranges(0..buffer.size) .for_each(drop); stage .initialization_status - .drain_uninitialized_ranges(&(0..buffer.size)) + .drain_uninitialized_ranges(0..buffer.size) .for_each(drop); buffer.map_state = resource::BufferMapState::Init { diff --git a/wgpu-core/src/device/queue.rs b/wgpu-core/src/device/queue.rs index 4690099407..87e7c7f672 100644 --- a/wgpu-core/src/device/queue.rs +++ b/wgpu-core/src/device/queue.rs @@ -276,7 +276,7 @@ impl Global { { let dst = buffer_guard.get_mut(buffer_id).unwrap(); dst.initialization_status - .drain_uninitialized_ranges(&(buffer_offset..(buffer_offset + data_size))) + .drain_uninitialized_ranges(buffer_offset..(buffer_offset + data_size)) .for_each(drop); } @@ -502,7 +502,7 @@ impl Global { let uninitialized_ranges = buffer .initialization_status - .drain_uninitialized_ranges(&buffer_use.range); + .drain_uninitialized_ranges(buffer_use.range.clone()); match buffer_use.kind { MemoryInitKind::ImplicitlyInitialized => { uninitialized_ranges.for_each(drop); diff --git a/wgpu-core/src/memory_init_tracker.rs b/wgpu-core/src/memory_init_tracker.rs index ebbf0d7f80..2e9780877d 100644 --- a/wgpu-core/src/memory_init_tracker.rs +++ b/wgpu-core/src/memory_init_tracker.rs @@ -18,69 +18,102 @@ pub(crate) struct MemoryInitTrackerAction { /// Tracks initialization status of a linear range from 0..size #[derive(Debug)] pub(crate) struct MemoryInitTracker { - // TODO: Use a more fitting data structure! - // An allocated range in this allocator means that the range in question is NOT yet initialized. 
- uninitialized_ranges: range_alloc::RangeAllocator<wgt::BufferAddress>, + uninitialized_ranges: Vec<Range<wgt::BufferAddress>>, +} + +pub(crate) struct MemoryInitTrackerDrain<'a> { + uninitialized_ranges: &'a mut Vec<Range<wgt::BufferAddress>>, + drain_range: Range<wgt::BufferAddress>, + next_index: usize, +} + +impl<'a> Iterator for MemoryInitTrackerDrain<'a> { + type Item = Range<wgt::BufferAddress>; + + fn next(&mut self) -> Option<Self::Item> { + let uninitialized_range = match self.uninitialized_ranges.get_mut(self.next_index) { + Some(range) => range, + None => return None, + }; + if uninitialized_range.start >= self.drain_range.end { + // No more cuts possible (we're going left to right!) + None + } else if uninitialized_range.end > self.drain_range.end { + // cut-out / split + if uninitialized_range.start < self.drain_range.start { + let old_start = uninitialized_range.start; + uninitialized_range.start = self.drain_range.end; + self.uninitialized_ranges + .insert(self.next_index, old_start..self.drain_range.start); + self.next_index = std::usize::MAX; + Some(self.drain_range.clone()) + } + // right cut + else { + let result = uninitialized_range.start..self.drain_range.end; + self.next_index = std::usize::MAX; + uninitialized_range.start = self.drain_range.end; + Some(result) + } + } else { + // left cut + if uninitialized_range.start < self.drain_range.start { + let result = self.drain_range.start..uninitialized_range.end; + uninitialized_range.end = self.drain_range.start; + self.next_index = self.next_index + 1; + Some(result) + } + // fully contained.
+ else { + let result = uninitialized_range.clone(); + self.uninitialized_ranges.remove(self.next_index); + Some(result) + } + } + } } impl MemoryInitTracker { pub(crate) fn new(size: wgt::BufferAddress) -> Self { - let mut uninitialized_ranges = - range_alloc::RangeAllocator::<wgt::BufferAddress>::new(0..size); - let _ = uninitialized_ranges.allocate_range(size); - Self { - uninitialized_ranges, + uninitialized_ranges: vec![0..size], } } - pub(crate) fn is_initialized(&self, range: &Range<wgt::BufferAddress>) -> bool { - self.uninitialized_ranges - .allocated_ranges() - .all(|r: Range<wgt::BufferAddress>| r.start >= range.end || r.end <= range.start) + pub(crate) fn is_initialized(&self, query_range: &Range<wgt::BufferAddress>) -> bool { + match self + .uninitialized_ranges + .iter() + .find(|r| r.end > query_range.start) + { + Some(r) => r.start >= query_range.end, + None => true, + } } #[must_use] pub(crate) fn drain_uninitialized_ranges<'a>( &'a mut self, - range: &Range<wgt::BufferAddress>, - ) -> impl Iterator<Item = Range<wgt::BufferAddress>> + 'a { - let mut uninitialized_ranges: Vec<Range<wgt::BufferAddress>> = self + drain_range: Range<wgt::BufferAddress>, + ) -> MemoryInitTrackerDrain<'a> { + let next_index = self .uninitialized_ranges - .allocated_ranges() - .filter_map(|r: Range<wgt::BufferAddress>| { - if r.end > range.start && r.start < range.end { - Some(Range { - start: range.start.max(r.start), - end: range.end.min(r.end), - }) - } else { - None - } - }) - .collect(); + .iter() + .position(|r| r.end > drain_range.start) + .unwrap_or(std::usize::MAX); - std::iter::from_fn(move || { - let range: Option<Range<wgt::BufferAddress>> = - uninitialized_ranges.last().map(|r| r.clone()); - match range { - Some(range) => { - uninitialized_ranges.pop(); - let result = range.clone(); - self.uninitialized_ranges.free_range(range); - Some(result) - } - None => None, - } - }) + MemoryInitTrackerDrain { + next_index, + drain_range, + uninitialized_ranges: &mut self.uninitialized_ranges, + } } } #[cfg(test)] mod test { - use std::ops::Range; - use super::MemoryInitTracker; + use std::ops::Range; #[test] fn is_initialized_for_empty_tracker() { @@ -94,7 +127,7 @@ mod test { #[test] fn
is_initialized_for_filled_tracker() { let mut tracker = MemoryInitTracker::new(10); - tracker.drain_uninitialized_ranges(&(0..10)).for_each(drop); + tracker.drain_uninitialized_ranges(0..10).for_each(drop); assert!(tracker.is_initialized(&(0..10))); assert!(tracker.is_initialized(&(0..3))); assert!(tracker.is_initialized(&(3..4))); @@ -104,7 +137,7 @@ mod test { #[test] fn is_initialized_for_partially_filled_tracker() { let mut tracker = MemoryInitTracker::new(10); - tracker.drain_uninitialized_ranges(&(4..6)).for_each(drop); + tracker.drain_uninitialized_ranges(4..6).for_each(drop); assert!(!tracker.is_initialized(&(0..10))); // entire range assert!(!tracker.is_initialized(&(0..4))); // left non-overlapping assert!(!tracker.is_initialized(&(3..5))); // left overlapping @@ -118,16 +151,16 @@ mod test { #[test] fn drain_uninitialized_ranges_never_returns_ranges_twice_for_same_range() { let mut tracker = MemoryInitTracker::new(19); - assert_eq!(tracker.drain_uninitialized_ranges(&(0..19)).count(), 1); - assert_eq!(tracker.drain_uninitialized_ranges(&(0..19)).count(), 0); + assert_eq!(tracker.drain_uninitialized_ranges(0..19).count(), 1); + assert_eq!(tracker.drain_uninitialized_ranges(0..19).count(), 0); let mut tracker = MemoryInitTracker::new(17); - assert_eq!(tracker.drain_uninitialized_ranges(&(5..8)).count(), 1); - assert_eq!(tracker.drain_uninitialized_ranges(&(5..8)).count(), 0); - assert_eq!(tracker.drain_uninitialized_ranges(&(1..3)).count(), 1); - assert_eq!(tracker.drain_uninitialized_ranges(&(1..3)).count(), 0); - assert_eq!(tracker.drain_uninitialized_ranges(&(7..13)).count(), 1); - assert_eq!(tracker.drain_uninitialized_ranges(&(7..13)).count(), 0); + assert_eq!(tracker.drain_uninitialized_ranges(5..8).count(), 1); + assert_eq!(tracker.drain_uninitialized_ranges(5..8).count(), 0); + assert_eq!(tracker.drain_uninitialized_ranges(1..3).count(), 1); + assert_eq!(tracker.drain_uninitialized_ranges(1..3).count(), 0); + 
assert_eq!(tracker.drain_uninitialized_ranges(7..13).count(), 1); + assert_eq!(tracker.drain_uninitialized_ranges(7..13).count(), 0); } #[test] @@ -135,13 +168,13 @@ let mut tracker = MemoryInitTracker::new(1337); assert_eq!( tracker - .drain_uninitialized_ranges(&(21..42)) + .drain_uninitialized_ranges(21..42) .collect::<Vec<Range<wgt::BufferAddress>>>(), vec![21..42] ); assert_eq!( tracker - .drain_uninitialized_ranges(&(900..1000)) + .drain_uninitialized_ranges(900..1000) .collect::<Vec<Range<wgt::BufferAddress>>>(), vec![900..1000] ); @@ -149,15 +182,15 @@ // Splitted ranges. assert_eq!( tracker - .drain_uninitialized_ranges(&(5..1003)) + .drain_uninitialized_ranges(5..1003) .collect::<Vec<Range<wgt::BufferAddress>>>(), - vec![1000..1003, 42..900, 5..21] + vec![5..21, 42..900, 1000..1003] ); assert_eq!( tracker - .drain_uninitialized_ranges(&(0..1337)) + .drain_uninitialized_ranges(0..1337) .collect::<Vec<Range<wgt::BufferAddress>>>(), - vec![1003..1337, 0..5] + vec![0..5, 1003..1337] ); } }