Extract command encoding to free functions (#8210)
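The same shape is applied throughout this diff: each `Global` method keeps only its lock/recording boilerplate and delegates validation and encoding to a free function taking `&mut CommandBufferMutable`, so the same logic is reusable by trace playback and internal callers. A minimal, self-contained sketch of that shape follows — the types and the names `do_thing`/`command_encoder_do_thing` are invented stand-ins for illustration, not wgpu-core's real API:

use std::sync::{Arc, Mutex};

struct Device { valid: bool }
struct CommandEncoder { device: Device }
struct CommandBufferMutable { commands: Vec<&'static str> }

struct Global {
    encoder: Arc<CommandEncoder>,
    data: Mutex<CommandBufferMutable>,
}

impl Global {
    // Before the refactor, validation and encoding lived inline here.
    // After it, the method only locks the recording state and delegates.
    fn command_encoder_do_thing(&self) -> Result<(), &'static str> {
        let cmd_enc = self.encoder.clone();
        let mut cmd_buf_data = self.data.lock().unwrap();
        do_thing(&mut cmd_buf_data, &cmd_enc)
    }
}

// The extracted free function owns validation and encoding, so callers
// that already hold the recording state can invoke it directly.
fn do_thing(
    cmd_buf_data: &mut CommandBufferMutable,
    cmd_enc: &Arc<CommandEncoder>,
) -> Result<(), &'static str> {
    if !cmd_enc.device.valid {
        return Err("device invalid");
    }
    cmd_buf_data.commands.push("do_thing");
    Ok(())
}

fn main() {
    let global = Global {
        encoder: Arc::new(CommandEncoder { device: Device { valid: true } }),
        data: Mutex::new(CommandBufferMutable { commands: Vec::new() }),
    };
    assert!(global.command_encoder_do_thing().is_ok());
}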
@@ -5,7 +5,7 @@ use core::ops::Range;
use crate::command::Command as TraceCommand;
use crate::{
    api_log,
    command::EncoderStateError,
    command::{CommandBufferMutable, CommandEncoder, EncoderStateError},
    device::{DeviceError, MissingFeatures},
    get_lowest_common_denom,
    global::Global,
@@ -119,74 +119,7 @@ impl Global {
        let cmd_enc = hub.command_encoders.get(command_encoder_id);
        let mut cmd_buf_data = cmd_enc.data.lock();
        cmd_buf_data.record_with(|cmd_buf_data| -> Result<(), ClearError> {
            #[cfg(feature = "trace")]
            if let Some(ref mut list) = cmd_buf_data.trace_commands {
                list.push(TraceCommand::ClearBuffer { dst, offset, size });
            }

            cmd_enc.device.check_is_valid()?;

            let dst_buffer = hub.buffers.get(dst).get()?;

            dst_buffer.same_device_as(cmd_enc.as_ref())?;

            let dst_pending = cmd_buf_data
                .trackers
                .buffers
                .set_single(&dst_buffer, wgt::BufferUses::COPY_DST);

            let snatch_guard = dst_buffer.device.snatchable_lock.read();
            let dst_raw = dst_buffer.try_raw(&snatch_guard)?;
            dst_buffer.check_usage(BufferUsages::COPY_DST)?;

            // Check if offset & size are valid.
            if offset % wgt::COPY_BUFFER_ALIGNMENT != 0 {
                return Err(ClearError::UnalignedBufferOffset(offset));
            }

            let size = size.unwrap_or(dst_buffer.size.saturating_sub(offset));
            if size % wgt::COPY_BUFFER_ALIGNMENT != 0 {
                return Err(ClearError::UnalignedFillSize(size));
            }
            let end_offset =
                offset
                    .checked_add(size)
                    .ok_or(ClearError::OffsetPlusSizeExceeds64BitBounds {
                        start_offset: offset,
                        requested_size: size,
                    })?;
            if end_offset > dst_buffer.size {
                return Err(ClearError::BufferOverrun {
                    start_offset: offset,
                    end_offset,
                    buffer_size: dst_buffer.size,
                });
            }

            if offset == end_offset {
                log::trace!("Ignoring fill_buffer of size 0");
                return Ok(());
            }

            // Mark dest as initialized.
            cmd_buf_data.buffer_memory_init_actions.extend(
                dst_buffer.initialization_status.read().create_action(
                    &dst_buffer,
                    offset..end_offset,
                    MemoryInitKind::ImplicitlyInitialized,
                ),
            );

            // actual hal barrier & operation
            let dst_barrier =
                dst_pending.map(|pending| pending.into_hal(&dst_buffer, &snatch_guard));
            let cmd_buf_raw = cmd_buf_data.encoder.open()?;
            unsafe {
                cmd_buf_raw.transition_buffers(dst_barrier.as_slice());
                cmd_buf_raw.clear_buffer(dst_raw, offset..end_offset);
            }

            Ok(())
            clear_buffer(cmd_buf_data, hub, &cmd_enc, dst, offset, size)
        })
    }

||||
@@ -204,83 +137,181 @@ impl Global {
        let cmd_enc = hub.command_encoders.get(command_encoder_id);
        let mut cmd_buf_data = cmd_enc.data.lock();
        cmd_buf_data.record_with(|cmd_buf_data| -> Result<(), ClearError> {
            #[cfg(feature = "trace")]
            if let Some(ref mut list) = cmd_buf_data.trace_commands {
                list.push(TraceCommand::ClearTexture {
                    dst,
                    subresource_range: *subresource_range,
                });
            }

            cmd_enc.device.check_is_valid()?;

            cmd_enc
                .device
                .require_features(wgt::Features::CLEAR_TEXTURE)?;

            let dst_texture = hub.textures.get(dst).get()?;

            dst_texture.same_device_as(cmd_enc.as_ref())?;

            // Check if subresource aspects are valid.
            let clear_aspects =
                hal::FormatAspects::new(dst_texture.desc.format, subresource_range.aspect);
            if clear_aspects.is_empty() {
                return Err(ClearError::MissingTextureAspect {
                    texture_format: dst_texture.desc.format,
                    subresource_range_aspects: subresource_range.aspect,
                });
            };

            // Check if subresource level range is valid
            let subresource_mip_range =
                subresource_range.mip_range(dst_texture.full_range.mips.end);
            if dst_texture.full_range.mips.start > subresource_mip_range.start
                || dst_texture.full_range.mips.end < subresource_mip_range.end
            {
                return Err(ClearError::InvalidTextureLevelRange {
                    texture_level_range: dst_texture.full_range.mips.clone(),
                    subresource_base_mip_level: subresource_range.base_mip_level,
                    subresource_mip_level_count: subresource_range.mip_level_count,
                });
            }
            // Check if subresource layer range is valid
            let subresource_layer_range =
                subresource_range.layer_range(dst_texture.full_range.layers.end);
            if dst_texture.full_range.layers.start > subresource_layer_range.start
                || dst_texture.full_range.layers.end < subresource_layer_range.end
            {
                return Err(ClearError::InvalidTextureLayerRange {
                    texture_layer_range: dst_texture.full_range.layers.clone(),
                    subresource_base_array_layer: subresource_range.base_array_layer,
                    subresource_array_layer_count: subresource_range.array_layer_count,
                });
            }

            let device = &cmd_enc.device;
            device.check_is_valid()?;
            let (encoder, tracker) = cmd_buf_data.open_encoder_and_tracker()?;

            let snatch_guard = device.snatchable_lock.read();
            clear_texture(
                &dst_texture,
                TextureInitRange {
                    mip_range: subresource_mip_range,
                    layer_range: subresource_layer_range,
                },
                encoder,
                &mut tracker.textures,
                &device.alignments,
                device.zero_buffer.as_ref(),
                &snatch_guard,
                device.instance_flags,
            )?;

            Ok(())
            clear_texture_cmd(cmd_buf_data, hub, &cmd_enc, dst, subresource_range)
        })
    }
}

pub(super) fn clear_buffer(
    cmd_buf_data: &mut CommandBufferMutable,
    hub: &crate::hub::Hub,
    cmd_enc: &Arc<CommandEncoder>,
    dst: BufferId,
    offset: BufferAddress,
    size: Option<BufferAddress>,
) -> Result<(), ClearError> {
    #[cfg(feature = "trace")]
    if let Some(ref mut list) = cmd_buf_data.trace_commands {
        list.push(TraceCommand::ClearBuffer { dst, offset, size });
    }

    cmd_enc.device.check_is_valid()?;

    let dst_buffer = hub.buffers.get(dst).get()?;

    dst_buffer.same_device_as(cmd_enc.as_ref())?;

    let dst_pending = cmd_buf_data
        .trackers
        .buffers
        .set_single(&dst_buffer, wgt::BufferUses::COPY_DST);

    let snatch_guard = dst_buffer.device.snatchable_lock.read();
    let dst_raw = dst_buffer.try_raw(&snatch_guard)?;
    dst_buffer.check_usage(BufferUsages::COPY_DST)?;

    // Check if offset & size are valid.
    if offset % wgt::COPY_BUFFER_ALIGNMENT != 0 {
        return Err(ClearError::UnalignedBufferOffset(offset));
    }

    let size = size.unwrap_or(dst_buffer.size.saturating_sub(offset));
    if size % wgt::COPY_BUFFER_ALIGNMENT != 0 {
        return Err(ClearError::UnalignedFillSize(size));
    }
    let end_offset =
        offset
            .checked_add(size)
            .ok_or(ClearError::OffsetPlusSizeExceeds64BitBounds {
                start_offset: offset,
                requested_size: size,
            })?;
    if end_offset > dst_buffer.size {
        return Err(ClearError::BufferOverrun {
            start_offset: offset,
            end_offset,
            buffer_size: dst_buffer.size,
        });
    }

    if offset == end_offset {
        log::trace!("Ignoring fill_buffer of size 0");
        return Ok(());
    }

    // Mark dest as initialized.
    cmd_buf_data.buffer_memory_init_actions.extend(
        dst_buffer.initialization_status.read().create_action(
            &dst_buffer,
            offset..end_offset,
            MemoryInitKind::ImplicitlyInitialized,
        ),
    );

    // actual hal barrier & operation
    let dst_barrier = dst_pending.map(|pending| pending.into_hal(&dst_buffer, &snatch_guard));
    let cmd_buf_raw = cmd_buf_data.encoder.open()?;
    unsafe {
        cmd_buf_raw.transition_buffers(dst_barrier.as_slice());
        cmd_buf_raw.clear_buffer(dst_raw, offset..end_offset);
    }

    Ok(())
}

/// Validate and encode a "Clear Texture" command.
///
/// This function implements `CommandEncoder::clear_texture` when invoked via
/// the command encoder APIs or trace playback. It has the suffix `_cmd` to
/// distinguish it from [`clear_texture`]. [`clear_texture`], used internally by
/// this function, is a lower-level function that encodes a texture clear
/// operation without validating it.
pub(super) fn clear_texture_cmd(
    cmd_buf_data: &mut CommandBufferMutable,
    hub: &crate::hub::Hub,
    cmd_enc: &Arc<CommandEncoder>,
    dst: TextureId,
    subresource_range: &ImageSubresourceRange,
) -> Result<(), ClearError> {
    #[cfg(feature = "trace")]
    if let Some(ref mut list) = cmd_buf_data.trace_commands {
        list.push(TraceCommand::ClearTexture {
            dst,
            subresource_range: *subresource_range,
        });
    }

    cmd_enc.device.check_is_valid()?;

    cmd_enc
        .device
        .require_features(wgt::Features::CLEAR_TEXTURE)?;

    let dst_texture = hub.textures.get(dst).get()?;

    dst_texture.same_device_as(cmd_enc.as_ref())?;

    // Check if subresource aspects are valid.
    let clear_aspects = hal::FormatAspects::new(dst_texture.desc.format, subresource_range.aspect);
    if clear_aspects.is_empty() {
        return Err(ClearError::MissingTextureAspect {
            texture_format: dst_texture.desc.format,
            subresource_range_aspects: subresource_range.aspect,
        });
    };

    // Check if subresource level range is valid
    let subresource_mip_range = subresource_range.mip_range(dst_texture.full_range.mips.end);
    if dst_texture.full_range.mips.start > subresource_mip_range.start
        || dst_texture.full_range.mips.end < subresource_mip_range.end
    {
        return Err(ClearError::InvalidTextureLevelRange {
            texture_level_range: dst_texture.full_range.mips.clone(),
            subresource_base_mip_level: subresource_range.base_mip_level,
            subresource_mip_level_count: subresource_range.mip_level_count,
        });
    }
    // Check if subresource layer range is valid
    let subresource_layer_range = subresource_range.layer_range(dst_texture.full_range.layers.end);
    if dst_texture.full_range.layers.start > subresource_layer_range.start
        || dst_texture.full_range.layers.end < subresource_layer_range.end
    {
        return Err(ClearError::InvalidTextureLayerRange {
            texture_layer_range: dst_texture.full_range.layers.clone(),
            subresource_base_array_layer: subresource_range.base_array_layer,
            subresource_array_layer_count: subresource_range.array_layer_count,
        });
    }

    let device = &cmd_enc.device;
    device.check_is_valid()?;
    let (encoder, tracker) = cmd_buf_data.open_encoder_and_tracker()?;

    let snatch_guard = device.snatchable_lock.read();
    clear_texture(
        &dst_texture,
        TextureInitRange {
            mip_range: subresource_mip_range,
            layer_range: subresource_layer_range,
        },
        encoder,
        &mut tracker.textures,
        &device.alignments,
        device.zero_buffer.as_ref(),
        &snatch_guard,
        device.instance_flags,
    )?;

    Ok(())
}

/// Encode a texture clear operation.
///
/// This function encodes a texture clear operation without validating it.
/// Texture clears requested via the API call this function via
/// [`clear_texture_cmd`], which does the validation. This function is also
/// called directly from various places within wgpu that need to clear a
/// texture.
pub(crate) fn clear_texture<T: TextureTrackerSetSingle>(
    dst_texture: &Arc<Texture>,
    range: TextureInitRange,

@@ -8,8 +8,8 @@ use alloc::{borrow::Cow, boxed::Box, sync::Arc, vec::Vec};
use core::{fmt, str};

use crate::command::{
    encoder::EncodingState, pass, CommandEncoder, DebugGroupError, EncoderStateError,
    PassStateError, TimestampWritesError,
    encoder::EncodingState, pass, CommandBufferMutable, CommandEncoder, DebugGroupError,
    EncoderStateError, PassStateError, TimestampWritesError,
};
use crate::resource::DestroyedResourceError;
use crate::{binding_model::BindError, resource::RawResourceAccess};
@@ -486,7 +486,6 @@ impl Global {
    }

    pub fn compute_pass_end(&self, pass: &mut ComputePass) -> Result<(), EncoderStateError> {
        let pass_scope = PassErrorScope::Pass;
        profiling::scope!(
            "CommandEncoder::run_compute_pass {}",
            pass.base.label.as_deref().unwrap_or("")
@@ -518,286 +517,291 @@ impl Global {
        }

        cmd_buf_data.unlock_and_record(|cmd_buf_data| -> Result<(), ComputePassError> {
            let device = &cmd_enc.device;
            device.check_is_valid().map_pass_err(pass_scope)?;
            encode_compute_pass(cmd_buf_data, &cmd_enc, pass)
        })
    }
}

    let base = &mut pass.base;
fn encode_compute_pass(
    cmd_buf_data: &mut CommandBufferMutable,
    cmd_enc: &Arc<CommandEncoder>,
    pass: &mut ComputePass,
) -> Result<(), ComputePassError> {
    let pass_scope = PassErrorScope::Pass;

    let encoder = &mut cmd_buf_data.encoder;
    let device = &cmd_enc.device;
    device.check_is_valid().map_pass_err(pass_scope)?;

    // We automatically keep extending command buffers over time, and because
    // we want to insert a command buffer _before_ what we're about to record,
    // we need to make sure to close the previous one.
    encoder.close_if_open().map_pass_err(pass_scope)?;
    let raw_encoder = encoder
        .open_pass(base.label.as_deref())
    let base = &mut pass.base;

    let encoder = &mut cmd_buf_data.encoder;

    // We automatically keep extending command buffers over time, and because
    // we want to insert a command buffer _before_ what we're about to record,
    // we need to make sure to close the previous one.
    encoder.close_if_open().map_pass_err(pass_scope)?;
    let raw_encoder = encoder
        .open_pass(base.label.as_deref())
        .map_pass_err(pass_scope)?;

    let snatch_guard = device.snatchable_lock.read();
    let mut debug_scope_depth = 0;

    let mut state = State {
        pipeline: None,

        pass: pass::PassState {
            base: EncodingState {
                device,
                raw_encoder,
                tracker: &mut cmd_buf_data.trackers,
                buffer_memory_init_actions: &mut cmd_buf_data.buffer_memory_init_actions,
                texture_memory_actions: &mut cmd_buf_data.texture_memory_actions,
                as_actions: &mut cmd_buf_data.as_actions,
                indirect_draw_validation_resources: &mut cmd_buf_data
                    .indirect_draw_validation_resources,
                snatch_guard: &snatch_guard,
                debug_scope_depth: &mut debug_scope_depth,
            },
            binder: Binder::new(),
            temp_offsets: Vec::new(),
            dynamic_offset_count: 0,

            pending_discard_init_fixups: SurfacesInDiscardState::new(),

            scope: device.new_usage_scope(),

            string_offset: 0,
        },
        active_query: None,

        push_constants: Vec::new(),

        intermediate_trackers: Tracker::new(),
    };

    let indices = &state.pass.base.device.tracker_indices;
    state
        .pass
        .base
        .tracker
        .buffers
        .set_size(indices.buffers.size());
    state
        .pass
        .base
        .tracker
        .textures
        .set_size(indices.textures.size());

    let timestamp_writes: Option<hal::PassTimestampWrites<'_, dyn hal::DynQuerySet>> =
        if let Some(tw) = pass.timestamp_writes.take() {
            tw.query_set
                .same_device_as(cmd_enc.as_ref())
                .map_pass_err(pass_scope)?;

            let snatch_guard = device.snatchable_lock.read();
            let mut debug_scope_depth = 0;
            let query_set = state
                .pass
                .base
                .tracker
                .query_sets
                .insert_single(tw.query_set);

            let mut state = State {
                pipeline: None,

                pass: pass::PassState {
                    base: EncodingState {
                        device,
                        raw_encoder,
                        tracker: &mut cmd_buf_data.trackers,
                        buffer_memory_init_actions: &mut cmd_buf_data.buffer_memory_init_actions,
                        texture_memory_actions: &mut cmd_buf_data.texture_memory_actions,
                        as_actions: &mut cmd_buf_data.as_actions,
                        indirect_draw_validation_resources: &mut cmd_buf_data
                            .indirect_draw_validation_resources,
                        snatch_guard: &snatch_guard,
                        debug_scope_depth: &mut debug_scope_depth,
                    },
                    binder: Binder::new(),
                    temp_offsets: Vec::new(),
                    dynamic_offset_count: 0,

                    pending_discard_init_fixups: SurfacesInDiscardState::new(),

                    scope: device.new_usage_scope(),

                    string_offset: 0,
                },
                active_query: None,

                push_constants: Vec::new(),

                intermediate_trackers: Tracker::new(),
            // Unlike in render passes we can't delay resetting the query sets since
            // there is no auxiliary pass.
            let range = if let (Some(index_a), Some(index_b)) =
                (tw.beginning_of_pass_write_index, tw.end_of_pass_write_index)
            {
                Some(index_a.min(index_b)..index_a.max(index_b) + 1)
            } else {
                tw.beginning_of_pass_write_index
                    .or(tw.end_of_pass_write_index)
                    .map(|i| i..i + 1)
            };

            let indices = &state.pass.base.device.tracker_indices;
            state
                .pass
                .base
                .tracker
                .buffers
                .set_size(indices.buffers.size());
            state
                .pass
                .base
                .tracker
                .textures
                .set_size(indices.textures.size());

            let timestamp_writes: Option<hal::PassTimestampWrites<'_, dyn hal::DynQuerySet>> =
                if let Some(tw) = pass.timestamp_writes.take() {
                    tw.query_set
                        .same_device_as(cmd_enc.as_ref())
                        .map_pass_err(pass_scope)?;

            let query_set = state
            // Range should always be Some, both values being None should lead to a validation error.
            // But no point in erroring over that nuance here!
            if let Some(range) = range {
                unsafe {
                    state
                        .pass
                        .base
                        .tracker
                        .query_sets
                        .insert_single(tw.query_set);

                    // Unlike in render passes we can't delay resetting the query sets since
                    // there is no auxiliary pass.
                    let range = if let (Some(index_a), Some(index_b)) =
                        (tw.beginning_of_pass_write_index, tw.end_of_pass_write_index)
                    {
                        Some(index_a.min(index_b)..index_a.max(index_b) + 1)
                    } else {
                        tw.beginning_of_pass_write_index
                            .or(tw.end_of_pass_write_index)
                            .map(|i| i..i + 1)
                    };
                    // Range should always be Some, both values being None should lead to a validation error.
                    // But no point in erroring over that nuance here!
                    if let Some(range) = range {
                        unsafe {
                            state
                                .pass
                                .base
                                .raw_encoder
                                .reset_queries(query_set.raw(), range);
                        }
                    }

            Some(hal::PassTimestampWrites {
                query_set: query_set.raw(),
                beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
                end_of_pass_write_index: tw.end_of_pass_write_index,
            })
        } else {
            None
        };

    let hal_desc = hal::ComputePassDescriptor {
        label: hal_label(base.label.as_deref(), device.instance_flags),
        timestamp_writes,
    };

    unsafe {
        state.pass.base.raw_encoder.begin_compute_pass(&hal_desc);
    }

    for command in base.commands.drain(..) {
        match command {
            ArcComputeCommand::SetBindGroup {
                index,
                num_dynamic_offsets,
                bind_group,
            } => {
                let scope = PassErrorScope::SetBindGroup;
                pass::set_bind_group::<ComputePassErrorInner>(
                    &mut state.pass,
                    cmd_enc.as_ref(),
                    &base.dynamic_offsets,
                    index,
                    num_dynamic_offsets,
                    bind_group,
                    false,
                )
                .map_pass_err(scope)?;
            }
            ArcComputeCommand::SetPipeline(pipeline) => {
                let scope = PassErrorScope::SetPipelineCompute;
                set_pipeline(&mut state, cmd_enc.as_ref(), pipeline).map_pass_err(scope)?;
            }
            ArcComputeCommand::SetPushConstant {
                offset,
                size_bytes,
                values_offset,
            } => {
                let scope = PassErrorScope::SetPushConstant;
                pass::set_push_constant::<ComputePassErrorInner, _>(
                    &mut state.pass,
                    &base.push_constant_data,
                    wgt::ShaderStages::COMPUTE,
                    offset,
                    size_bytes,
                    Some(values_offset),
                    |data_slice| {
                        let offset_in_elements =
                            (offset / wgt::PUSH_CONSTANT_ALIGNMENT) as usize;
                        let size_in_elements =
                            (size_bytes / wgt::PUSH_CONSTANT_ALIGNMENT) as usize;
                        state.push_constants[offset_in_elements..][..size_in_elements]
                            .copy_from_slice(data_slice);
                    },
                )
                .map_pass_err(scope)?;
            }
            ArcComputeCommand::Dispatch(groups) => {
                let scope = PassErrorScope::Dispatch { indirect: false };
                dispatch(&mut state, groups).map_pass_err(scope)?;
            }
            ArcComputeCommand::DispatchIndirect { buffer, offset } => {
                let scope = PassErrorScope::Dispatch { indirect: true };
                dispatch_indirect(&mut state, cmd_enc.as_ref(), buffer, offset)
                    .map_pass_err(scope)?;
            }
            ArcComputeCommand::PushDebugGroup { color: _, len } => {
                pass::push_debug_group(&mut state.pass, &base.string_data, len);
            }
            ArcComputeCommand::PopDebugGroup => {
                let scope = PassErrorScope::PopDebugGroup;
                pass::pop_debug_group::<ComputePassErrorInner>(&mut state.pass)
                    .map_pass_err(scope)?;
            }
            ArcComputeCommand::InsertDebugMarker { color: _, len } => {
                pass::insert_debug_marker(&mut state.pass, &base.string_data, len);
            }
            ArcComputeCommand::WriteTimestamp {
                query_set,
                query_index,
            } => {
                let scope = PassErrorScope::WriteTimestamp;
                pass::write_timestamp::<ComputePassErrorInner>(
                    &mut state.pass,
                    cmd_enc.as_ref(),
                    None,
                    query_set,
                    query_index,
                )
                .map_pass_err(scope)?;
            }
            ArcComputeCommand::BeginPipelineStatisticsQuery {
                query_set,
                query_index,
            } => {
                let scope = PassErrorScope::BeginPipelineStatisticsQuery;
                validate_and_begin_pipeline_statistics_query(
                    query_set,
                    state.pass.base.raw_encoder,
                    &mut state.pass.base.tracker.query_sets,
                    cmd_enc.as_ref(),
                    query_index,
                    None,
                    &mut state.active_query,
                )
                .map_pass_err(scope)?;
            }
            ArcComputeCommand::EndPipelineStatisticsQuery => {
                let scope = PassErrorScope::EndPipelineStatisticsQuery;
                end_pipeline_statistics_query(
                    state.pass.base.raw_encoder,
                    &mut state.active_query,
                )
                .map_pass_err(scope)?;
            }
                .raw_encoder
                .reset_queries(query_set.raw(), range);
        }
    }

    if *state.pass.base.debug_scope_depth > 0 {
        Err(
            ComputePassErrorInner::DebugGroupError(DebugGroupError::MissingPop)
                .map_pass_err(pass_scope),
        )?;
    }
            Some(hal::PassTimestampWrites {
                query_set: query_set.raw(),
                beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
                end_of_pass_write_index: tw.end_of_pass_write_index,
            })
        } else {
            None
        };

    unsafe {
        state.pass.base.raw_encoder.end_compute_pass();
    }
    let hal_desc = hal::ComputePassDescriptor {
        label: hal_label(base.label.as_deref(), device.instance_flags),
        timestamp_writes,
    };

    let State {
        pass:
            pass::PassState {
                base: EncodingState { tracker, .. },
                pending_discard_init_fixups,
                ..
            },
        intermediate_trackers,
        ..
    } = state;

    // Stop the current command encoder.
    encoder.close().map_pass_err(pass_scope)?;

    // Create a new command encoder, which we will insert _before_ the body of the compute pass.
    //
    // Use that buffer to insert barriers and clear discarded images.
    let transit = encoder
        .open_pass(hal_label(
            Some("(wgpu internal) Pre Pass"),
            self.instance.flags,
        ))
        .map_pass_err(pass_scope)?;
    fixup_discarded_surfaces(
        pending_discard_init_fixups.into_iter(),
        transit,
        &mut tracker.textures,
        device,
        &snatch_guard,
    );
    CommandEncoder::insert_barriers_from_tracker(
        transit,
        tracker,
        &intermediate_trackers,
        &snatch_guard,
    );
    // Close the command encoder, and swap it with the previous.
    encoder.close_and_swap().map_pass_err(pass_scope)?;

    Ok(())
    })
    unsafe {
        state.pass.base.raw_encoder.begin_compute_pass(&hal_desc);
    }

    for command in base.commands.drain(..) {
        match command {
            ArcComputeCommand::SetBindGroup {
                index,
                num_dynamic_offsets,
                bind_group,
            } => {
                let scope = PassErrorScope::SetBindGroup;
                pass::set_bind_group::<ComputePassErrorInner>(
                    &mut state.pass,
                    cmd_enc.as_ref(),
                    &base.dynamic_offsets,
                    index,
                    num_dynamic_offsets,
                    bind_group,
                    false,
                )
                .map_pass_err(scope)?;
            }
            ArcComputeCommand::SetPipeline(pipeline) => {
                let scope = PassErrorScope::SetPipelineCompute;
                set_pipeline(&mut state, cmd_enc.as_ref(), pipeline).map_pass_err(scope)?;
            }
            ArcComputeCommand::SetPushConstant {
                offset,
                size_bytes,
                values_offset,
            } => {
                let scope = PassErrorScope::SetPushConstant;
                pass::set_push_constant::<ComputePassErrorInner, _>(
                    &mut state.pass,
                    &base.push_constant_data,
                    wgt::ShaderStages::COMPUTE,
                    offset,
                    size_bytes,
                    Some(values_offset),
                    |data_slice| {
                        let offset_in_elements = (offset / wgt::PUSH_CONSTANT_ALIGNMENT) as usize;
                        let size_in_elements = (size_bytes / wgt::PUSH_CONSTANT_ALIGNMENT) as usize;
                        state.push_constants[offset_in_elements..][..size_in_elements]
                            .copy_from_slice(data_slice);
                    },
                )
                .map_pass_err(scope)?;
            }
            ArcComputeCommand::Dispatch(groups) => {
                let scope = PassErrorScope::Dispatch { indirect: false };
                dispatch(&mut state, groups).map_pass_err(scope)?;
            }
            ArcComputeCommand::DispatchIndirect { buffer, offset } => {
                let scope = PassErrorScope::Dispatch { indirect: true };
                dispatch_indirect(&mut state, cmd_enc.as_ref(), buffer, offset)
                    .map_pass_err(scope)?;
            }
            ArcComputeCommand::PushDebugGroup { color: _, len } => {
                pass::push_debug_group(&mut state.pass, &base.string_data, len);
            }
            ArcComputeCommand::PopDebugGroup => {
                let scope = PassErrorScope::PopDebugGroup;
                pass::pop_debug_group::<ComputePassErrorInner>(&mut state.pass)
                    .map_pass_err(scope)?;
            }
            ArcComputeCommand::InsertDebugMarker { color: _, len } => {
                pass::insert_debug_marker(&mut state.pass, &base.string_data, len);
            }
            ArcComputeCommand::WriteTimestamp {
                query_set,
                query_index,
            } => {
                let scope = PassErrorScope::WriteTimestamp;
                pass::write_timestamp::<ComputePassErrorInner>(
                    &mut state.pass,
                    cmd_enc.as_ref(),
                    None,
                    query_set,
                    query_index,
                )
                .map_pass_err(scope)?;
            }
            ArcComputeCommand::BeginPipelineStatisticsQuery {
                query_set,
                query_index,
            } => {
                let scope = PassErrorScope::BeginPipelineStatisticsQuery;
                validate_and_begin_pipeline_statistics_query(
                    query_set,
                    state.pass.base.raw_encoder,
                    &mut state.pass.base.tracker.query_sets,
                    cmd_enc.as_ref(),
                    query_index,
                    None,
                    &mut state.active_query,
                )
                .map_pass_err(scope)?;
            }
            ArcComputeCommand::EndPipelineStatisticsQuery => {
                let scope = PassErrorScope::EndPipelineStatisticsQuery;
                end_pipeline_statistics_query(state.pass.base.raw_encoder, &mut state.active_query)
                    .map_pass_err(scope)?;
            }
        }
    }

    if *state.pass.base.debug_scope_depth > 0 {
        Err(
            ComputePassErrorInner::DebugGroupError(DebugGroupError::MissingPop)
                .map_pass_err(pass_scope),
        )?;
    }

    unsafe {
        state.pass.base.raw_encoder.end_compute_pass();
    }

    let State {
        pass:
            pass::PassState {
                base: EncodingState { tracker, .. },
                pending_discard_init_fixups,
                ..
            },
        intermediate_trackers,
        ..
    } = state;

    // Stop the current command encoder.
    encoder.close().map_pass_err(pass_scope)?;

    // Create a new command encoder, which we will insert _before_ the body of the compute pass.
    //
    // Use that buffer to insert barriers and clear discarded images.
    let transit = encoder
        .open_pass(hal_label(
            Some("(wgpu internal) Pre Pass"),
            device.instance_flags,
        ))
        .map_pass_err(pass_scope)?;
    fixup_discarded_surfaces(
        pending_discard_init_fixups.into_iter(),
        transit,
        &mut tracker.textures,
        device,
        &snatch_guard,
    );
    CommandEncoder::insert_barriers_from_tracker(
        transit,
        tracker,
        &intermediate_trackers,
        &snatch_guard,
    );
    // Close the command encoder, and swap it with the previous.
    encoder.close_and_swap().map_pass_err(pass_scope)?;

    Ok(())
}

fn set_pipeline(

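As an aside, the timestamp-query reset range computed in the pass above reduces to a small pure function. A standalone, runnable sketch (function name `reset_range` is hypothetical; the real code inlines this logic):

fn reset_range(begin: Option<u32>, end: Option<u32>) -> Option<core::ops::Range<u32>> {
    match (begin, end) {
        // Both write indices present: reset everything between them, inclusive.
        (Some(a), Some(b)) => Some(a.min(b)..a.max(b) + 1),
        // At most one present: reset just that single query, or nothing.
        (one, other) => one.or(other).map(|i| i..i + 1),
    }
}

fn main() {
    assert_eq!(reset_range(Some(3), Some(1)), Some(1..4));
    assert_eq!(reset_range(None, Some(5)), Some(5..6));
    assert_eq!(reset_range(None, None), None);
}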
@@ -15,7 +15,7 @@ use crate::{
    FastHashMap,
};

use super::{clear::clear_texture, BakedCommands, ClearError};
use super::{clear_texture, BakedCommands, ClearError};

/// Surface that was discarded by `StoreOp::Discard` of a preceding renderpass.
/// Any read access to this surface needs to be preceded by a texture initialization.

@@ -1230,27 +1230,7 @@ impl Global {
        let cmd_enc = hub.command_encoders.get(encoder_id);
        let mut cmd_buf_data = cmd_enc.data.lock();
        cmd_buf_data.record_with(|cmd_buf_data| -> Result<(), CommandEncoderError> {
            cmd_buf_data.debug_scope_depth += 1;

            #[cfg(feature = "trace")]
            if let Some(ref mut list) = cmd_buf_data.trace_commands {
                list.push(TraceCommand::PushDebugGroup(label.to_owned()));
            }

            cmd_enc.device.check_is_valid()?;

            let cmd_buf_raw = cmd_buf_data.encoder.open()?;
            if !cmd_enc
                .device
                .instance_flags
                .contains(wgt::InstanceFlags::DISCARD_HAL_LABELS)
            {
                unsafe {
                    cmd_buf_raw.begin_debug_marker(label);
                }
            }

            Ok(())
            push_debug_group(cmd_buf_data, &cmd_enc, label)
        })
    }

@@ -1267,25 +1247,7 @@ impl Global {
        let cmd_enc = hub.command_encoders.get(encoder_id);
        let mut cmd_buf_data = cmd_enc.data.lock();
        cmd_buf_data.record_with(|cmd_buf_data| -> Result<(), CommandEncoderError> {
            #[cfg(feature = "trace")]
            if let Some(ref mut list) = cmd_buf_data.trace_commands {
                list.push(TraceCommand::InsertDebugMarker(label.to_owned()));
            }

            cmd_enc.device.check_is_valid()?;

            if !cmd_enc
                .device
                .instance_flags
                .contains(wgt::InstanceFlags::DISCARD_HAL_LABELS)
            {
                let cmd_buf_raw = cmd_buf_data.encoder.open()?;
                unsafe {
                    cmd_buf_raw.insert_debug_marker(label);
                }
            }

            Ok(())
            insert_debug_marker(cmd_buf_data, &cmd_enc, label)
        })
    }

@@ -1301,30 +1263,7 @@ impl Global {
        let cmd_enc = hub.command_encoders.get(encoder_id);
        let mut cmd_buf_data = cmd_enc.data.lock();
        cmd_buf_data.record_with(|cmd_buf_data| -> Result<(), CommandEncoderError> {
            if cmd_buf_data.debug_scope_depth == 0 {
                return Err(DebugGroupError::InvalidPop.into());
            }
            cmd_buf_data.debug_scope_depth -= 1;

            #[cfg(feature = "trace")]
            if let Some(ref mut list) = cmd_buf_data.trace_commands {
                list.push(TraceCommand::PopDebugGroup);
            }

            cmd_enc.device.check_is_valid()?;

            let cmd_buf_raw = cmd_buf_data.encoder.open()?;
            if !cmd_enc
                .device
                .instance_flags
                .contains(wgt::InstanceFlags::DISCARD_HAL_LABELS)
            {
                unsafe {
                    cmd_buf_raw.end_debug_marker();
                }
            }

            Ok(())
            pop_debug_group(cmd_buf_data, &cmd_enc)
        })
    }

@@ -1380,6 +1319,90 @@ impl Global {
    }
}

pub(crate) fn push_debug_group(
    cmd_buf_data: &mut CommandBufferMutable,
    cmd_enc: &Arc<CommandEncoder>,
    label: &str,
) -> Result<(), CommandEncoderError> {
    cmd_buf_data.debug_scope_depth += 1;

    #[cfg(feature = "trace")]
    if let Some(ref mut list) = cmd_buf_data.trace_commands {
        list.push(TraceCommand::PushDebugGroup(label.to_owned()));
    }

    cmd_enc.device.check_is_valid()?;

    let cmd_buf_raw = cmd_buf_data.encoder.open()?;
    if !cmd_enc
        .device
        .instance_flags
        .contains(wgt::InstanceFlags::DISCARD_HAL_LABELS)
    {
        unsafe {
            cmd_buf_raw.begin_debug_marker(label);
        }
    }

    Ok(())
}

pub(crate) fn insert_debug_marker(
    cmd_buf_data: &mut CommandBufferMutable,
    cmd_enc: &Arc<CommandEncoder>,
    label: &str,
) -> Result<(), CommandEncoderError> {
    #[cfg(feature = "trace")]
    if let Some(ref mut list) = cmd_buf_data.trace_commands {
        list.push(TraceCommand::InsertDebugMarker(label.to_owned()));
    }

    cmd_enc.device.check_is_valid()?;

    if !cmd_enc
        .device
        .instance_flags
        .contains(wgt::InstanceFlags::DISCARD_HAL_LABELS)
    {
        let cmd_buf_raw = cmd_buf_data.encoder.open()?;
        unsafe {
            cmd_buf_raw.insert_debug_marker(label);
        }
    }

    Ok(())
}

pub(crate) fn pop_debug_group(
    cmd_buf_data: &mut CommandBufferMutable,
    cmd_enc: &Arc<CommandEncoder>,
) -> Result<(), CommandEncoderError> {
    if cmd_buf_data.debug_scope_depth == 0 {
        return Err(DebugGroupError::InvalidPop.into());
    }
    cmd_buf_data.debug_scope_depth -= 1;

    #[cfg(feature = "trace")]
    if let Some(ref mut list) = cmd_buf_data.trace_commands {
        list.push(TraceCommand::PopDebugGroup);
    }

    cmd_enc.device.check_is_valid()?;

    let cmd_buf_raw = cmd_buf_data.encoder.open()?;
    if !cmd_enc
        .device
        .instance_flags
        .contains(wgt::InstanceFlags::DISCARD_HAL_LABELS)
    {
        unsafe {
            cmd_buf_raw.end_debug_marker();
        }
    }

    Ok(())
}

fn push_constant_clear<PushFn>(offset: u32, size_bytes: u32, mut push_fn: PushFn)
where
    PushFn: FnMut(u32, &[u32]),

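The debug-group bookkeeping above is just a saturating counter with a validated pop. A standalone, runnable sketch of the invariant (hypothetical names; the real counter lives in CommandBufferMutable):

// Minimal model: pushes increment, pops decrement, and popping with no
// open scope is a validation error rather than a panic.
struct DebugScopes {
    depth: u32,
}

impl DebugScopes {
    fn push(&mut self) {
        self.depth += 1;
    }

    fn pop(&mut self) -> Result<(), &'static str> {
        if self.depth == 0 {
            return Err("InvalidPop: no debug group to pop");
        }
        self.depth -= 1;
        Ok(())
    }
}

fn main() {
    let mut scopes = DebugScopes { depth: 0 };
    scopes.push();
    assert!(scopes.pop().is_ok());
    assert!(scopes.pop().is_err()); // unbalanced pop is rejected
}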
@@ -4,7 +4,7 @@ use core::{iter, mem};
#[cfg(feature = "trace")]
use crate::command::Command as TraceCommand;
use crate::{
    command::{CommandEncoder, EncoderStateError},
    command::{CommandBufferMutable, CommandEncoder, EncoderStateError},
    device::{DeviceError, MissingFeatures},
    global::Global,
    id,
@@ -366,30 +366,7 @@ impl Global {
        let cmd_enc = hub.command_encoders.get(command_encoder_id);
        let mut cmd_buf_data = cmd_enc.data.lock();
        cmd_buf_data.record_with(|cmd_buf_data| -> Result<(), QueryError> {
            #[cfg(feature = "trace")]
            if let Some(ref mut list) = cmd_buf_data.trace_commands {
                list.push(TraceCommand::WriteTimestamp {
                    query_set_id,
                    query_index,
                });
            }

            cmd_enc.device.check_is_valid()?;

            cmd_enc
                .device
                .require_features(wgt::Features::TIMESTAMP_QUERY_INSIDE_ENCODERS)?;

            let raw_encoder = cmd_buf_data.encoder.open()?;

            let query_set = hub.query_sets.get(query_set_id).get()?;
            query_set.same_device_as(cmd_enc.as_ref())?;

            query_set.validate_and_write_timestamp(raw_encoder, query_index, None)?;

            cmd_buf_data.trackers.query_sets.insert_single(query_set);

            Ok(())
            write_timestamp(cmd_buf_data, hub, &cmd_enc, query_set_id, query_index)
        })
    }

@@ -407,129 +384,182 @@ impl Global {
        let cmd_enc = hub.command_encoders.get(command_encoder_id);
        let mut cmd_buf_data = cmd_enc.data.lock();
        cmd_buf_data.record_with(|cmd_buf_data| -> Result<(), QueryError> {
            #[cfg(feature = "trace")]
            if let Some(ref mut list) = cmd_buf_data.trace_commands {
                list.push(TraceCommand::ResolveQuerySet {
                    query_set_id,
                    start_query,
                    query_count,
                    destination,
                    destination_offset,
                });
            }

            cmd_enc.device.check_is_valid()?;

            if destination_offset % wgt::QUERY_RESOLVE_BUFFER_ALIGNMENT != 0 {
                return Err(QueryError::Resolve(ResolveError::BufferOffsetAlignment));
            }

            let query_set = hub.query_sets.get(query_set_id).get()?;

            query_set.same_device_as(cmd_enc.as_ref())?;

            let dst_buffer = hub.buffers.get(destination).get()?;

            dst_buffer.same_device_as(cmd_enc.as_ref())?;

            let snatch_guard = dst_buffer.device.snatchable_lock.read();
            dst_buffer.check_destroyed(&snatch_guard)?;

            let dst_pending = cmd_buf_data
                .trackers
                .buffers
                .set_single(&dst_buffer, wgt::BufferUses::COPY_DST);

            let dst_barrier =
                dst_pending.map(|pending| pending.into_hal(&dst_buffer, &snatch_guard));

            dst_buffer
                .check_usage(wgt::BufferUsages::QUERY_RESOLVE)
                .map_err(ResolveError::MissingBufferUsage)?;

            let end_query = u64::from(start_query)
                .checked_add(u64::from(query_count))
                .expect("`u64` overflow from adding two `u32`s, should be unreachable");
            if end_query > u64::from(query_set.desc.count) {
                return Err(ResolveError::QueryOverrun {
                    start_query,
                    end_query,
                    query_set_size: query_set.desc.count,
                }
                .into());
            }
            let end_query = u32::try_from(end_query)
                .expect("`u32` overflow for `end_query`, which should be `u32`");

            let elements_per_query = match query_set.desc.ty {
                wgt::QueryType::Occlusion => 1,
                wgt::QueryType::PipelineStatistics(ps) => ps.bits().count_ones(),
                wgt::QueryType::Timestamp => 1,
            };
            let stride = elements_per_query * wgt::QUERY_SIZE;
            let bytes_used: BufferAddress = u64::from(stride)
                .checked_mul(u64::from(query_count))
                .expect("`stride` * `query_count` overflowed `u32`, should be unreachable");

            let buffer_start_offset = destination_offset;
            let buffer_end_offset = buffer_start_offset
                .checked_add(bytes_used)
                .filter(|buffer_end_offset| *buffer_end_offset <= dst_buffer.size)
                .ok_or(ResolveError::BufferOverrun {
                    start_query,
                    end_query,
                    stride,
                    buffer_size: dst_buffer.size,
                    buffer_start_offset,
                    bytes_used,
                })?;

            // TODO(https://github.com/gfx-rs/wgpu/issues/3993): Need to track initialization state.
            cmd_buf_data.buffer_memory_init_actions.extend(
                dst_buffer.initialization_status.read().create_action(
                    &dst_buffer,
                    buffer_start_offset..buffer_end_offset,
                    MemoryInitKind::ImplicitlyInitialized,
                ),
            );

            let raw_dst_buffer = dst_buffer.try_raw(&snatch_guard)?;
            let raw_encoder = cmd_buf_data.encoder.open()?;
            unsafe {
                raw_encoder.transition_buffers(dst_barrier.as_slice());
                raw_encoder.copy_query_results(
                    query_set.raw(),
                    start_query..end_query,
                    raw_dst_buffer,
                    destination_offset,
                    wgt::BufferSize::new_unchecked(stride as u64),
                );
            }

            if matches!(query_set.desc.ty, wgt::QueryType::Timestamp) {
                // Timestamp normalization is only needed for timestamps.
                cmd_enc
                    .device
                    .timestamp_normalizer
                    .get()
                    .unwrap()
                    .normalize(
                        &snatch_guard,
                        raw_encoder,
                        &mut cmd_buf_data.trackers.buffers,
                        dst_buffer
                            .timestamp_normalization_bind_group
                            .get(&snatch_guard)
                            .unwrap(),
                        &dst_buffer,
                        destination_offset,
                        query_count,
                    );
            }

            cmd_buf_data.trackers.query_sets.insert_single(query_set);

            Ok(())
            resolve_query_set(
                cmd_buf_data,
                hub,
                &cmd_enc,
                query_set_id,
                start_query,
                query_count,
                destination,
                destination_offset,
            )
        })
    }
}

pub(super) fn write_timestamp(
    cmd_buf_data: &mut CommandBufferMutable,
    hub: &crate::hub::Hub,
    cmd_enc: &Arc<CommandEncoder>,
    query_set_id: id::QuerySetId,
    query_index: u32,
) -> Result<(), QueryError> {
    #[cfg(feature = "trace")]
    if let Some(ref mut list) = cmd_buf_data.trace_commands {
        list.push(TraceCommand::WriteTimestamp {
            query_set_id,
            query_index,
        });
    }

    cmd_enc.device.check_is_valid()?;

    cmd_enc
        .device
        .require_features(wgt::Features::TIMESTAMP_QUERY_INSIDE_ENCODERS)?;

    let raw_encoder = cmd_buf_data.encoder.open()?;

    let query_set = hub.query_sets.get(query_set_id).get()?;
    query_set.same_device_as(cmd_enc.as_ref())?;

    query_set.validate_and_write_timestamp(raw_encoder, query_index, None)?;

    cmd_buf_data.trackers.query_sets.insert_single(query_set);

    Ok(())
}

pub(super) fn resolve_query_set(
    cmd_buf_data: &mut CommandBufferMutable,
    hub: &crate::hub::Hub,
    cmd_enc: &Arc<CommandEncoder>,
    query_set_id: id::QuerySetId,
    start_query: u32,
    query_count: u32,
    destination: id::BufferId,
    destination_offset: BufferAddress,
) -> Result<(), QueryError> {
    #[cfg(feature = "trace")]
    if let Some(ref mut list) = cmd_buf_data.trace_commands {
        list.push(TraceCommand::ResolveQuerySet {
            query_set_id,
            start_query,
            query_count,
            destination,
            destination_offset,
        });
    }

    cmd_enc.device.check_is_valid()?;

    if destination_offset % wgt::QUERY_RESOLVE_BUFFER_ALIGNMENT != 0 {
        return Err(QueryError::Resolve(ResolveError::BufferOffsetAlignment));
    }

    let query_set = hub.query_sets.get(query_set_id).get()?;

    query_set.same_device_as(cmd_enc.as_ref())?;

    let dst_buffer = hub.buffers.get(destination).get()?;

    dst_buffer.same_device_as(cmd_enc.as_ref())?;

    let snatch_guard = dst_buffer.device.snatchable_lock.read();
    dst_buffer.check_destroyed(&snatch_guard)?;

    let dst_pending = cmd_buf_data
        .trackers
        .buffers
        .set_single(&dst_buffer, wgt::BufferUses::COPY_DST);
    let dst_barrier = dst_pending.map(|pending| pending.into_hal(&dst_buffer, &snatch_guard));

    dst_buffer
        .check_usage(wgt::BufferUsages::QUERY_RESOLVE)
        .map_err(ResolveError::MissingBufferUsage)?;

    let end_query = u64::from(start_query)
        .checked_add(u64::from(query_count))
        .expect("`u64` overflow from adding two `u32`s, should be unreachable");
    if end_query > u64::from(query_set.desc.count) {
        return Err(ResolveError::QueryOverrun {
            start_query,
            end_query,
            query_set_size: query_set.desc.count,
        }
        .into());
    }
    let end_query =
        u32::try_from(end_query).expect("`u32` overflow for `end_query`, which should be `u32`");

    let elements_per_query = match query_set.desc.ty {
        wgt::QueryType::Occlusion => 1,
        wgt::QueryType::PipelineStatistics(ps) => ps.bits().count_ones(),
        wgt::QueryType::Timestamp => 1,
    };
    let stride = elements_per_query * wgt::QUERY_SIZE;
    let bytes_used: BufferAddress = u64::from(stride)
        .checked_mul(u64::from(query_count))
        .expect("`stride` * `query_count` overflowed `u32`, should be unreachable");

    let buffer_start_offset = destination_offset;
    let buffer_end_offset = buffer_start_offset
        .checked_add(bytes_used)
        .filter(|buffer_end_offset| *buffer_end_offset <= dst_buffer.size)
        .ok_or(ResolveError::BufferOverrun {
            start_query,
            end_query,
            stride,
            buffer_size: dst_buffer.size,
            buffer_start_offset,
            bytes_used,
        })?;

    // TODO(https://github.com/gfx-rs/wgpu/issues/3993): Need to track initialization state.
    cmd_buf_data.buffer_memory_init_actions.extend(
        dst_buffer.initialization_status.read().create_action(
            &dst_buffer,
            buffer_start_offset..buffer_end_offset,
            MemoryInitKind::ImplicitlyInitialized,
        ),
    );

    let raw_dst_buffer = dst_buffer.try_raw(&snatch_guard)?;
    let raw_encoder = cmd_buf_data.encoder.open()?;
    unsafe {
        raw_encoder.transition_buffers(dst_barrier.as_slice());
        raw_encoder.copy_query_results(
            query_set.raw(),
            start_query..end_query,
            raw_dst_buffer,
            destination_offset,
            wgt::BufferSize::new_unchecked(stride as u64),
        );
    }

    if matches!(query_set.desc.ty, wgt::QueryType::Timestamp) {
        // Timestamp normalization is only needed for timestamps.
        cmd_enc
            .device
            .timestamp_normalizer
            .get()
            .unwrap()
            .normalize(
                &snatch_guard,
                raw_encoder,
                &mut cmd_buf_data.trackers.buffers,
                dst_buffer
                    .timestamp_normalization_bind_group
                    .get(&snatch_guard)
                    .unwrap(),
                &dst_buffer,
                destination_offset,
                query_count,
            );
    }

    cmd_buf_data.trackers.query_sets.insert_single(query_set);

    Ok(())
}

@@ -7,7 +7,6 @@ use core::{

use wgt::{math::align_to, BufferUsages, BufferUses, Features};

use crate::ray_tracing::{AsAction, AsBuild, TlasBuild, ValidateAsActionsError};
use crate::{
    command::CommandBufferMutable,
    device::queue::TempResource,
@@ -25,6 +24,10 @@ use crate::{
    snatch::SnatchGuard,
    track::PendingTransition,
};
use crate::{
    command::CommandEncoder,
    ray_tracing::{AsAction, AsBuild, TlasBuild, ValidateAsActionsError},
};
use crate::{command::EncoderStateError, device::resource::CommandIndices};
use crate::{lock::RwLockWriteGuard, resource::RawResourceAccess};

@@ -109,8 +112,6 @@ impl Global {

        let cmd_enc = hub.command_encoders.get(command_encoder_id);

        let mut build_command = AsBuild::default();

        let trace_blas: Vec<TraceBlasBuildEntry> = blas_iter
            .map(|blas_entry| {
                let geometries = match blas_entry.geometries {
@@ -159,347 +160,369 @@ impl Global {
            })
            .collect();

        let blas_iter = trace_blas.iter().map(|blas_entry| {
            let geometries = match &blas_entry.geometries {
                TraceBlasGeometries::TriangleGeometries(triangle_geometries) => {
                    let iter = triangle_geometries.iter().map(|tg| BlasTriangleGeometry {
                        size: &tg.size,
                        vertex_buffer: tg.vertex_buffer,
                        index_buffer: tg.index_buffer,
                        transform_buffer: tg.transform_buffer,
                        first_vertex: tg.first_vertex,
                        vertex_stride: tg.vertex_stride,
                        first_index: tg.first_index,
                        transform_buffer_offset: tg.transform_buffer_offset,
                    });
                    BlasGeometries::TriangleGeometries(Box::new(iter))
                }
            };
            BlasBuildEntry {
                blas_id: blas_entry.blas_id,
                geometries,
            }
        });

        let tlas_iter = trace_tlas.iter().map(|tlas_package| {
            let instances = tlas_package.instances.iter().map(|instance| {
                instance.as_ref().map(|instance| TlasInstance {
                    blas_id: instance.blas_id,
                    transform: &instance.transform,
                    custom_data: instance.custom_data,
                    mask: instance.mask,
                })
            });
            TlasPackage {
                tlas_id: tlas_package.tlas_id,
                instances: Box::new(instances),
                lowest_unmodified: tlas_package.lowest_unmodified,
            }
        });

        let mut cmd_buf_data = cmd_enc.data.lock();
        cmd_buf_data.record_with(|cmd_buf_data| {
            #[cfg(feature = "trace")]
            if let Some(ref mut list) = cmd_buf_data.trace_commands {
                list.push(crate::command::Command::BuildAccelerationStructures {
                    blas: trace_blas.clone(),
                    tlas: trace_tlas.clone(),
                });
            }

            let device = &cmd_enc.device;
            device.check_is_valid()?;
            device.require_features(Features::EXPERIMENTAL_RAY_QUERY)?;

            let mut buf_storage = Vec::new();
            iter_blas(
                blas_iter,
                cmd_buf_data,
                &mut build_command,
                &mut buf_storage,
                hub,
            )?;

            let snatch_guard = device.snatchable_lock.read();
            let mut input_barriers = Vec::<hal::BufferBarrier<dyn hal::DynBuffer>>::new();
            let mut scratch_buffer_blas_size = 0;
            let mut blas_storage = Vec::new();
            iter_buffers(
                &mut buf_storage,
                &snatch_guard,
                &mut input_barriers,
                cmd_buf_data,
                &mut scratch_buffer_blas_size,
                &mut blas_storage,
                hub,
                device.alignments.ray_tracing_scratch_buffer_alignment,
            )?;
            let mut tlas_lock_store = Vec::<(Option<TlasPackage>, Arc<Tlas>)>::new();

            for package in tlas_iter {
                let tlas = hub.tlas_s.get(package.tlas_id).get()?;

                cmd_buf_data.trackers.tlas_s.insert_single(tlas.clone());

                tlas_lock_store.push((Some(package), tlas))
            }

            let mut scratch_buffer_tlas_size = 0;
            let mut tlas_storage = Vec::<TlasStore>::new();
            let mut instance_buffer_staging_source = Vec::<u8>::new();

            for (package, tlas) in &mut tlas_lock_store {
                let package = package.take().unwrap();

                let scratch_buffer_offset = scratch_buffer_tlas_size;
                scratch_buffer_tlas_size += align_to(
                    tlas.size_info.build_scratch_size as u32,
                    device.alignments.ray_tracing_scratch_buffer_alignment,
                ) as u64;

                let first_byte_index = instance_buffer_staging_source.len();

                let mut dependencies = Vec::new();

                let mut instance_count = 0;
                for instance in package.instances.flatten() {
                    if instance.custom_data >= (1u32 << 24u32) {
                        return Err(BuildAccelerationStructureError::TlasInvalidCustomIndex(
                            tlas.error_ident(),
                        ));
                    let blas_iter = trace_blas.iter().map(|blas_entry| {
                        let geometries = match &blas_entry.geometries {
                            TraceBlasGeometries::TriangleGeometries(triangle_geometries) => {
                                let iter = triangle_geometries.iter().map(|tg| BlasTriangleGeometry {
                                    size: &tg.size,
                                    vertex_buffer: tg.vertex_buffer,
                                    index_buffer: tg.index_buffer,
                                    transform_buffer: tg.transform_buffer,
                                    first_vertex: tg.first_vertex,
                                    vertex_stride: tg.vertex_stride,
                                    first_index: tg.first_index,
                                    transform_buffer_offset: tg.transform_buffer_offset,
                                });
                                BlasGeometries::TriangleGeometries(Box::new(iter))
                            }
                    let blas = hub.blas_s.get(instance.blas_id).get()?;

                    cmd_buf_data.trackers.blas_s.insert_single(blas.clone());

                    instance_buffer_staging_source.extend(device.raw().tlas_instance_to_bytes(
                        hal::TlasInstance {
                            transform: *instance.transform,
                            custom_data: instance.custom_data,
                            mask: instance.mask,
                            blas_address: blas.handle,
                        },
                    ));

                    if tlas.flags.contains(
                        wgpu_types::AccelerationStructureFlags::ALLOW_RAY_HIT_VERTEX_RETURN,
                    ) && !blas.flags.contains(
                        wgpu_types::AccelerationStructureFlags::ALLOW_RAY_HIT_VERTEX_RETURN,
                    ) {
                        return Err(
                            BuildAccelerationStructureError::TlasDependentMissingVertexReturn(
                                tlas.error_ident(),
                                blas.error_ident(),
                            ),
                        );
                    }

                    instance_count += 1;

                    dependencies.push(blas.clone());
                }

                build_command.tlas_s_built.push(TlasBuild {
                    tlas: tlas.clone(),
                    dependencies,
                });

                if instance_count > tlas.max_instance_count {
                    return Err(BuildAccelerationStructureError::TlasInstanceCountExceeded(
                        tlas.error_ident(),
                        instance_count,
                        tlas.max_instance_count,
                    ));
                }

                tlas_storage.push(TlasStore {
                    internal: UnsafeTlasStore {
                        tlas: tlas.clone(),
                        entries: hal::AccelerationStructureEntries::Instances(
                            hal::AccelerationStructureInstances {
                                buffer: Some(tlas.instance_buffer.as_ref()),
                                offset: 0,
                                count: instance_count,
                            },
                        ),
                        scratch_buffer_offset,
                    },
                    range: first_byte_index..instance_buffer_staging_source.len(),
                });
            }

            let Some(scratch_size) =
                wgt::BufferSize::new(max(scratch_buffer_blas_size, scratch_buffer_tlas_size))
            else {
                // if the size is zero there is nothing to build
                return Ok(());
            };

            let scratch_buffer = ScratchBuffer::new(device, scratch_size)?;

            let scratch_buffer_barrier = hal::BufferBarrier::<dyn hal::DynBuffer> {
                buffer: scratch_buffer.raw(),
                usage: hal::StateTransition {
                    from: BufferUses::ACCELERATION_STRUCTURE_SCRATCH,
                    to: BufferUses::ACCELERATION_STRUCTURE_SCRATCH,
                },
            };

            let mut tlas_descriptors = Vec::with_capacity(tlas_storage.len());

            for &TlasStore {
                internal:
                    UnsafeTlasStore {
                        ref tlas,
                        ref entries,
                        ref scratch_buffer_offset,
                    },
                ..
            } in &tlas_storage
            {
                if tlas.update_mode == wgt::AccelerationStructureUpdateMode::PreferUpdate {
                    log::info!("only rebuild implemented")
                }
                tlas_descriptors.push(hal::BuildAccelerationStructureDescriptor {
                    entries,
                    mode: hal::AccelerationStructureBuildMode::Build,
                    flags: tlas.flags,
                    source_acceleration_structure: None,
                    destination_acceleration_structure: tlas.try_raw(&snatch_guard)?,
                    scratch_buffer: scratch_buffer.raw(),
                    scratch_buffer_offset: *scratch_buffer_offset,
                })
            }

            let blas_present = !blas_storage.is_empty();
            let tlas_present = !tlas_storage.is_empty();

            let cmd_buf_raw = cmd_buf_data.encoder.open()?;

            let mut blas_s_compactable = Vec::new();
            let mut descriptors = Vec::new();

            for storage in &blas_storage {
                descriptors.push(map_blas(
                    storage,
                    scratch_buffer.raw(),
                    &snatch_guard,
                    &mut blas_s_compactable,
                )?);
            }

            build_blas(
                cmd_buf_raw,
                blas_present,
                tlas_present,
                input_barriers,
                &descriptors,
                scratch_buffer_barrier,
                blas_s_compactable,
            );

            if tlas_present {
                let staging_buffer = if !instance_buffer_staging_source.is_empty() {
                    let mut staging_buffer = StagingBuffer::new(
                        device,
                        wgt::BufferSize::new(instance_buffer_staging_source.len() as u64).unwrap(),
                    )?;
                    staging_buffer.write(&instance_buffer_staging_source);
                    let flushed = staging_buffer.flush();
                    Some(flushed)
                } else {
                    None
                };

                unsafe {
                    if let Some(ref staging_buffer) = staging_buffer {
                        cmd_buf_raw.transition_buffers(&[
                            hal::BufferBarrier::<dyn hal::DynBuffer> {
                                buffer: staging_buffer.raw(),
                                usage: hal::StateTransition {
                                    from: BufferUses::MAP_WRITE,
                                    to: BufferUses::COPY_SRC,
                                },
                            },
                        ]);
                    }
                    BlasBuildEntry {
                        blas_id: blas_entry.blas_id,
                        geometries,
                    }
                });

                    let mut instance_buffer_barriers = Vec::new();
                    for &TlasStore {
                        internal: UnsafeTlasStore { ref tlas, .. },
                        ref range,
                    } in &tlas_storage
                    {
                        let size = match wgt::BufferSize::new((range.end - range.start) as u64) {
                            None => continue,
                            Some(size) => size,
                        };
                        instance_buffer_barriers.push(hal::BufferBarrier::<dyn hal::DynBuffer> {
                            buffer: tlas.instance_buffer.as_ref(),
                            usage: hal::StateTransition {
                                from: BufferUses::COPY_DST,
                                to: BufferUses::TOP_LEVEL_ACCELERATION_STRUCTURE_INPUT,
                            },
                        });
                        unsafe {
                            cmd_buf_raw.transition_buffers(&[
                                hal::BufferBarrier::<dyn hal::DynBuffer> {
                                    buffer: tlas.instance_buffer.as_ref(),
                                    usage: hal::StateTransition {
                                        from: BufferUses::TOP_LEVEL_ACCELERATION_STRUCTURE_INPUT,
                                        to: BufferUses::COPY_DST,
                                    },
                                },
                            ]);
                            let temp = hal::BufferCopy {
                                src_offset: range.start as u64,
                                dst_offset: 0,
                                size,
                            };
                            cmd_buf_raw.copy_buffer_to_buffer(
                                // the end of the range whose size we just checked is (at that point in time) instance_buffer_staging_source.len(),
                                // and since instance_buffer_staging_source doesn't shrink we can unwrap this without a panic
                                staging_buffer.as_ref().unwrap().raw(),
                                tlas.instance_buffer.as_ref(),
                                &[temp],
                            );
                        }
                let tlas_iter = trace_tlas.iter().map(|tlas_package| {
                    let instances = tlas_package.instances.iter().map(|instance| {
                        instance.as_ref().map(|instance| TlasInstance {
                            blas_id: instance.blas_id,
                            transform: &instance.transform,
                            custom_data: instance.custom_data,
                            mask: instance.mask,
                        })
                    });
                    TlasPackage {
                        tlas_id: tlas_package.tlas_id,
                        instances: Box::new(instances),
                        lowest_unmodified: tlas_package.lowest_unmodified,
                    }
                });

                unsafe {
                    cmd_buf_raw.transition_buffers(&instance_buffer_barriers);

                    cmd_buf_raw.build_acceleration_structures(&tlas_descriptors);

                    cmd_buf_raw.place_acceleration_structure_barrier(
                        hal::AccelerationStructureBarrier {
                            usage: hal::StateTransition {
                                from: hal::AccelerationStructureUses::BUILD_OUTPUT,
                                to: hal::AccelerationStructureUses::SHADER_INPUT,
                            },
                        },
                    );
}
|
||||
|
||||
if let Some(staging_buffer) = staging_buffer {
|
||||
cmd_buf_data
|
||||
.temp_resources
|
||||
.push(TempResource::StagingBuffer(staging_buffer));
|
||||
}
|
||||
}
|
||||
|
||||
cmd_buf_data
|
||||
.temp_resources
|
||||
.push(TempResource::ScratchBuffer(scratch_buffer));
|
||||
|
||||
cmd_buf_data.as_actions.push(AsAction::Build(build_command));
|
||||
|
||||
Ok(())
|
||||
build_acceleration_structures(
cmd_buf_data,
hub,
&cmd_enc,
trace_blas.clone(),
trace_tlas.clone(),
blas_iter,
tlas_iter,
)
})
}
}

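/// Free function that encodes acceleration-structure builds onto `cmd_enc`,
/// extracted from the corresponding `Global` method above: it validates the
/// device and the `EXPERIMENTAL_RAY_QUERY` feature, gathers BLAS inputs and
/// TLAS instance data, allocates a shared scratch buffer, and records the
/// BLAS and TLAS build commands together with the required barriers.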
pub(crate) fn build_acceleration_structures<'a>(
cmd_buf_data: &'a mut CommandBufferMutable,
hub: &'a Hub,
cmd_enc: &'a Arc<CommandEncoder>,
trace_blas: Vec<TraceBlasBuildEntry>,
trace_tlas: Vec<TraceTlasPackage>,
blas_iter: impl Iterator<Item = BlasBuildEntry<'a>>,
tlas_iter: impl Iterator<Item = TlasPackage<'a>>,
) -> Result<(), BuildAccelerationStructureError> {
#[cfg(feature = "trace")]
if let Some(ref mut list) = cmd_buf_data.trace_commands {
list.push(crate::command::Command::BuildAccelerationStructures {
blas: trace_blas,
tlas: trace_tlas,
});
}
#[cfg(not(feature = "trace"))]
{
let _ = trace_blas;
let _ = trace_tlas;
}

let device = &cmd_enc.device;
device.check_is_valid()?;
device.require_features(Features::EXPERIMENTAL_RAY_QUERY)?;

let mut build_command = AsBuild::default();
let mut buf_storage = Vec::new();
iter_blas(
blas_iter,
cmd_buf_data,
&mut build_command,
&mut buf_storage,
hub,
)?;

let snatch_guard = device.snatchable_lock.read();
let mut input_barriers = Vec::<hal::BufferBarrier<dyn hal::DynBuffer>>::new();
let mut scratch_buffer_blas_size = 0;
let mut blas_storage = Vec::new();
iter_buffers(
&mut buf_storage,
&snatch_guard,
&mut input_barriers,
cmd_buf_data,
&mut scratch_buffer_blas_size,
&mut blas_storage,
hub,
device.alignments.ray_tracing_scratch_buffer_alignment,
)?;
let mut tlas_lock_store = Vec::<(Option<TlasPackage>, Arc<Tlas>)>::new();

for package in tlas_iter {
let tlas = hub.tlas_s.get(package.tlas_id).get()?;

cmd_buf_data.trackers.tlas_s.insert_single(tlas.clone());

tlas_lock_store.push((Some(package), tlas))
}

let mut scratch_buffer_tlas_size = 0;
let mut tlas_storage = Vec::<TlasStore>::new();
let mut instance_buffer_staging_source = Vec::<u8>::new();

for (package, tlas) in &mut tlas_lock_store {
let package = package.take().unwrap();

let scratch_buffer_offset = scratch_buffer_tlas_size;
scratch_buffer_tlas_size += align_to(
tlas.size_info.build_scratch_size as u32,
device.alignments.ray_tracing_scratch_buffer_alignment,
) as u64;

let first_byte_index = instance_buffer_staging_source.len();

let mut dependencies = Vec::new();

let mut instance_count = 0;
for instance in package.instances.flatten() {
if instance.custom_data >= (1u32 << 24u32) {
return Err(BuildAccelerationStructureError::TlasInvalidCustomIndex(
tlas.error_ident(),
));
}
let blas = hub.blas_s.get(instance.blas_id).get()?;

cmd_buf_data.trackers.blas_s.insert_single(blas.clone());

instance_buffer_staging_source.extend(device.raw().tlas_instance_to_bytes(
hal::TlasInstance {
transform: *instance.transform,
custom_data: instance.custom_data,
mask: instance.mask,
blas_address: blas.handle,
},
));

if tlas
.flags
.contains(wgpu_types::AccelerationStructureFlags::ALLOW_RAY_HIT_VERTEX_RETURN)
&& !blas
.flags
.contains(wgpu_types::AccelerationStructureFlags::ALLOW_RAY_HIT_VERTEX_RETURN)
{
return Err(
BuildAccelerationStructureError::TlasDependentMissingVertexReturn(
tlas.error_ident(),
blas.error_ident(),
),
);
}

instance_count += 1;

dependencies.push(blas.clone());
}

build_command.tlas_s_built.push(TlasBuild {
tlas: tlas.clone(),
dependencies,
});

if instance_count > tlas.max_instance_count {
return Err(BuildAccelerationStructureError::TlasInstanceCountExceeded(
tlas.error_ident(),
instance_count,
tlas.max_instance_count,
));
}

tlas_storage.push(TlasStore {
internal: UnsafeTlasStore {
tlas: tlas.clone(),
entries: hal::AccelerationStructureEntries::Instances(
hal::AccelerationStructureInstances {
buffer: Some(tlas.instance_buffer.as_ref()),
offset: 0,
count: instance_count,
},
),
scratch_buffer_offset,
},
range: first_byte_index..instance_buffer_staging_source.len(),
});
}

let Some(scratch_size) =
wgt::BufferSize::new(max(scratch_buffer_blas_size, scratch_buffer_tlas_size))
else {
// If the size is zero there is nothing to build.
return Ok(());
};

let scratch_buffer = ScratchBuffer::new(device, scratch_size)?;

let scratch_buffer_barrier = hal::BufferBarrier::<dyn hal::DynBuffer> {
buffer: scratch_buffer.raw(),
usage: hal::StateTransition {
from: BufferUses::ACCELERATION_STRUCTURE_SCRATCH,
to: BufferUses::ACCELERATION_STRUCTURE_SCRATCH,
},
};

let mut tlas_descriptors = Vec::with_capacity(tlas_storage.len());

for &TlasStore {
internal:
UnsafeTlasStore {
ref tlas,
ref entries,
ref scratch_buffer_offset,
},
..
} in &tlas_storage
{
if tlas.update_mode == wgt::AccelerationStructureUpdateMode::PreferUpdate {
log::info!("only rebuild implemented")
}
tlas_descriptors.push(hal::BuildAccelerationStructureDescriptor {
entries,
mode: hal::AccelerationStructureBuildMode::Build,
flags: tlas.flags,
source_acceleration_structure: None,
destination_acceleration_structure: tlas.try_raw(&snatch_guard)?,
scratch_buffer: scratch_buffer.raw(),
scratch_buffer_offset: *scratch_buffer_offset,
})
}

let blas_present = !blas_storage.is_empty();
let tlas_present = !tlas_storage.is_empty();

let cmd_buf_raw = cmd_buf_data.encoder.open()?;

let mut blas_s_compactable = Vec::new();
let mut descriptors = Vec::new();

for storage in &blas_storage {
descriptors.push(map_blas(
storage,
scratch_buffer.raw(),
&snatch_guard,
&mut blas_s_compactable,
)?);
}

build_blas(
cmd_buf_raw,
blas_present,
tlas_present,
input_barriers,
&descriptors,
scratch_buffer_barrier,
blas_s_compactable,
);

if tlas_present {
let staging_buffer = if !instance_buffer_staging_source.is_empty() {
let mut staging_buffer = StagingBuffer::new(
device,
wgt::BufferSize::new(instance_buffer_staging_source.len() as u64).unwrap(),
)?;
staging_buffer.write(&instance_buffer_staging_source);
let flushed = staging_buffer.flush();
Some(flushed)
} else {
None
};

unsafe {
if let Some(ref staging_buffer) = staging_buffer {
cmd_buf_raw.transition_buffers(&[hal::BufferBarrier::<dyn hal::DynBuffer> {
buffer: staging_buffer.raw(),
usage: hal::StateTransition {
from: BufferUses::MAP_WRITE,
to: BufferUses::COPY_SRC,
},
}]);
}
}

let mut instance_buffer_barriers = Vec::new();
for &TlasStore {
internal: UnsafeTlasStore { ref tlas, .. },
ref range,
} in &tlas_storage
{
let size = match wgt::BufferSize::new((range.end - range.start) as u64) {
None => continue,
Some(size) => size,
};
instance_buffer_barriers.push(hal::BufferBarrier::<dyn hal::DynBuffer> {
buffer: tlas.instance_buffer.as_ref(),
usage: hal::StateTransition {
from: BufferUses::COPY_DST,
to: BufferUses::TOP_LEVEL_ACCELERATION_STRUCTURE_INPUT,
},
});
unsafe {
cmd_buf_raw.transition_buffers(&[hal::BufferBarrier::<dyn hal::DynBuffer> {
buffer: tlas.instance_buffer.as_ref(),
usage: hal::StateTransition {
from: BufferUses::TOP_LEVEL_ACCELERATION_STRUCTURE_INPUT,
to: BufferUses::COPY_DST,
},
}]);
let temp = hal::BufferCopy {
src_offset: range.start as u64,
dst_offset: 0,
size,
};
cmd_buf_raw.copy_buffer_to_buffer(
// The end of the range whose size we just checked was (at the time it was pushed)
// instance_buffer_staging_source.len(), and since instance_buffer_staging_source
// never shrinks, the staging buffer exists and this unwrap cannot panic.
staging_buffer.as_ref().unwrap().raw(),
tlas.instance_buffer.as_ref(),
&[temp],
);
}
}

unsafe {
cmd_buf_raw.transition_buffers(&instance_buffer_barriers);

cmd_buf_raw.build_acceleration_structures(&tlas_descriptors);

cmd_buf_raw.place_acceleration_structure_barrier(hal::AccelerationStructureBarrier {
usage: hal::StateTransition {
from: hal::AccelerationStructureUses::BUILD_OUTPUT,
to: hal::AccelerationStructureUses::SHADER_INPUT,
},
});
}

if let Some(staging_buffer) = staging_buffer {
cmd_buf_data
.temp_resources
.push(TempResource::StagingBuffer(staging_buffer));
}
}

cmd_buf_data
.temp_resources
.push(TempResource::ScratchBuffer(scratch_buffer));

cmd_buf_data.as_actions.push(AsAction::Build(build_command));

Ok(())
}

impl CommandBufferMutable {
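/// Validates the acceleration structure actions recorded on this command buffer.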
pub(crate) fn validate_acceleration_structure_actions(
&self,

@@ -11,8 +11,8 @@ use wgt::{

use crate::command::{
encoder::EncodingState, pass, pass_base, pass_try, validate_and_begin_occlusion_query,
validate_and_begin_pipeline_statistics_query, DebugGroupError, EncoderStateError,
InnerCommandEncoder, PassStateError, TimestampWritesError,
validate_and_begin_pipeline_statistics_query, CommandBufferMutable, DebugGroupError,
EncoderStateError, InnerCommandEncoder, PassStateError, TimestampWritesError,
};
use crate::pipeline::{RenderPipeline, VertexStep};
use crate::resource::RawResourceAccess;
@@ -1807,7 +1807,6 @@ impl Global {
}

pub fn render_pass_end(&self, pass: &mut RenderPass) -> Result<(), EncoderStateError> {
let pass_scope = PassErrorScope::Pass;
profiling::scope!(
"CommandEncoder::run_render_pass {}",
pass.base.label.as_deref().unwrap_or("")
@@ -1839,459 +1838,455 @@ impl Global {
}

cmd_buf_data.unlock_and_record(|cmd_buf_data| -> Result<(), RenderPassError> {
let device = &cmd_enc.device;
device.check_is_valid().map_pass_err(pass_scope)?;
let snatch_guard = &device.snatchable_lock.read();

let base = &mut pass.base;

let mut indirect_draw_validation_batcher =
crate::indirect_validation::DrawBatcher::new();

let (scope, pending_discard_init_fixups, mut pending_query_resets) = {
let encoder = &mut cmd_buf_data.encoder;
let tracker = &mut cmd_buf_data.trackers;
let buffer_memory_init_actions = &mut cmd_buf_data.buffer_memory_init_actions;
let texture_memory_actions = &mut cmd_buf_data.texture_memory_actions;

// We automatically keep extending command buffers over time, and because
// we want to insert a command buffer _before_ what we're about to record,
// we need to make sure to close the previous one.
encoder.close_if_open().map_pass_err(pass_scope)?;
encoder
.open_pass(base.label.as_deref())
.map_pass_err(pass_scope)?;

let mut pending_query_resets = QueryResetMap::new();
let mut pending_discard_init_fixups = SurfacesInDiscardState::new();

let info = RenderPassInfo::start(
device,
hal_label(base.label.as_deref(), device.instance_flags),
pass.color_attachments.take(),
pass.depth_stencil_attachment.take(),
pass.timestamp_writes.take(),
// Still needed down the line.
// TODO(wumpf): by restructuring the code, we could get rid of some of this Arc clone.
pass.occlusion_query_set.clone(),
encoder,
tracker,
texture_memory_actions,
&mut pending_query_resets,
&mut pending_discard_init_fixups,
snatch_guard,
)
.map_pass_err(pass_scope)?;

let indices = &device.tracker_indices;
tracker.buffers.set_size(indices.buffers.size());
tracker.textures.set_size(indices.textures.size());

let mut debug_scope_depth = 0;

let mut state = State {
pipeline_flags: PipelineFlags::empty(),
blend_constant: OptionalState::Unused,
stencil_reference: 0,
pipeline: None,
index: IndexState::default(),
vertex: VertexState::default(),

info,

pass: pass::PassState {
base: EncodingState {
device,
raw_encoder: encoder.raw.as_mut(),
tracker,
buffer_memory_init_actions,
texture_memory_actions,
as_actions: &mut cmd_buf_data.as_actions,
indirect_draw_validation_resources: &mut cmd_buf_data
.indirect_draw_validation_resources,
snatch_guard,
debug_scope_depth: &mut debug_scope_depth,
},
pending_discard_init_fixups,
scope: device.new_usage_scope(),
binder: Binder::new(),

temp_offsets: Vec::new(),
dynamic_offset_count: 0,

string_offset: 0,
},

active_occlusion_query: None,
active_pipeline_statistics_query: None,
};

for command in base.commands.drain(..) {
match command {
ArcRenderCommand::SetBindGroup {
index,
num_dynamic_offsets,
bind_group,
} => {
let scope = PassErrorScope::SetBindGroup;
pass::set_bind_group::<RenderPassErrorInner>(
&mut state.pass,
cmd_enc.as_ref(),
&base.dynamic_offsets,
index,
num_dynamic_offsets,
bind_group,
true,
)
.map_pass_err(scope)?;
}
ArcRenderCommand::SetPipeline(pipeline) => {
let scope = PassErrorScope::SetPipelineRender;
set_pipeline(&mut state, &cmd_enc, pipeline).map_pass_err(scope)?;
}
ArcRenderCommand::SetIndexBuffer {
buffer,
index_format,
offset,
size,
} => {
let scope = PassErrorScope::SetIndexBuffer;
set_index_buffer(
&mut state,
&cmd_enc,
buffer,
index_format,
offset,
size,
)
.map_pass_err(scope)?;
}
ArcRenderCommand::SetVertexBuffer {
slot,
buffer,
offset,
size,
} => {
let scope = PassErrorScope::SetVertexBuffer;
set_vertex_buffer(&mut state, &cmd_enc, slot, buffer, offset, size)
.map_pass_err(scope)?;
}
ArcRenderCommand::SetBlendConstant(ref color) => {
set_blend_constant(&mut state, color);
}
ArcRenderCommand::SetStencilReference(value) => {
set_stencil_reference(&mut state, value);
}
ArcRenderCommand::SetViewport {
rect,
depth_min,
depth_max,
} => {
let scope = PassErrorScope::SetViewport;
set_viewport(&mut state, rect, depth_min, depth_max)
.map_pass_err(scope)?;
}
ArcRenderCommand::SetPushConstant {
stages,
offset,
size_bytes,
values_offset,
} => {
let scope = PassErrorScope::SetPushConstant;
pass::set_push_constant::<RenderPassErrorInner, _>(
&mut state.pass,
&base.push_constant_data,
stages,
offset,
size_bytes,
values_offset,
|_| {},
)
.map_pass_err(scope)?;
}
ArcRenderCommand::SetScissor(rect) => {
let scope = PassErrorScope::SetScissorRect;
set_scissor(&mut state, rect).map_pass_err(scope)?;
}
ArcRenderCommand::Draw {
vertex_count,
instance_count,
first_vertex,
first_instance,
} => {
let scope = PassErrorScope::Draw {
kind: DrawKind::Draw,
family: DrawCommandFamily::Draw,
};
draw(
&mut state,
vertex_count,
instance_count,
first_vertex,
first_instance,
)
.map_pass_err(scope)?;
}
ArcRenderCommand::DrawIndexed {
index_count,
instance_count,
first_index,
base_vertex,
first_instance,
} => {
let scope = PassErrorScope::Draw {
kind: DrawKind::Draw,
family: DrawCommandFamily::DrawIndexed,
};
draw_indexed(
&mut state,
index_count,
instance_count,
first_index,
base_vertex,
first_instance,
)
.map_pass_err(scope)?;
}
ArcRenderCommand::DrawMeshTasks {
group_count_x,
group_count_y,
group_count_z,
} => {
let scope = PassErrorScope::Draw {
kind: DrawKind::Draw,
family: DrawCommandFamily::DrawMeshTasks,
};
draw_mesh_tasks(
&mut state,
group_count_x,
group_count_y,
group_count_z,
)
.map_pass_err(scope)?;
}
ArcRenderCommand::DrawIndirect {
buffer,
offset,
count,
family,

vertex_or_index_limit: _,
instance_limit: _,
} => {
let scope = PassErrorScope::Draw {
kind: if count != 1 {
DrawKind::MultiDrawIndirect
} else {
DrawKind::DrawIndirect
},
family,
};
multi_draw_indirect(
&mut state,
&mut indirect_draw_validation_batcher,
&cmd_enc,
buffer,
offset,
count,
family,
)
.map_pass_err(scope)?;
}
ArcRenderCommand::MultiDrawIndirectCount {
buffer,
offset,
count_buffer,
count_buffer_offset,
max_count,
family,
} => {
let scope = PassErrorScope::Draw {
kind: DrawKind::MultiDrawIndirectCount,
family,
};
multi_draw_indirect_count(
&mut state,
&cmd_enc,
buffer,
offset,
count_buffer,
count_buffer_offset,
max_count,
family,
)
.map_pass_err(scope)?;
}
ArcRenderCommand::PushDebugGroup { color: _, len } => {
pass::push_debug_group(&mut state.pass, &base.string_data, len);
}
ArcRenderCommand::PopDebugGroup => {
let scope = PassErrorScope::PopDebugGroup;
pass::pop_debug_group::<RenderPassErrorInner>(&mut state.pass)
.map_pass_err(scope)?;
}
ArcRenderCommand::InsertDebugMarker { color: _, len } => {
pass::insert_debug_marker(&mut state.pass, &base.string_data, len);
}
ArcRenderCommand::WriteTimestamp {
query_set,
query_index,
} => {
let scope = PassErrorScope::WriteTimestamp;
pass::write_timestamp::<RenderPassErrorInner>(
&mut state.pass,
cmd_enc.as_ref(),
Some(&mut pending_query_resets),
query_set,
query_index,
)
.map_pass_err(scope)?;
}
ArcRenderCommand::BeginOcclusionQuery { query_index } => {
api_log!("RenderPass::begin_occlusion_query {query_index}");
let scope = PassErrorScope::BeginOcclusionQuery;

let query_set = pass
.occlusion_query_set
.clone()
.ok_or(RenderPassErrorInner::MissingOcclusionQuerySet)
.map_pass_err(scope)?;

validate_and_begin_occlusion_query(
query_set,
state.pass.base.raw_encoder,
&mut state.pass.base.tracker.query_sets,
query_index,
Some(&mut pending_query_resets),
&mut state.active_occlusion_query,
)
.map_pass_err(scope)?;
}
ArcRenderCommand::EndOcclusionQuery => {
api_log!("RenderPass::end_occlusion_query");
let scope = PassErrorScope::EndOcclusionQuery;

end_occlusion_query(
state.pass.base.raw_encoder,
&mut state.active_occlusion_query,
)
.map_pass_err(scope)?;
}
ArcRenderCommand::BeginPipelineStatisticsQuery {
query_set,
query_index,
} => {
api_log!(
"RenderPass::begin_pipeline_statistics_query {query_index} {}",
query_set.error_ident()
);
let scope = PassErrorScope::BeginPipelineStatisticsQuery;

validate_and_begin_pipeline_statistics_query(
query_set,
state.pass.base.raw_encoder,
&mut state.pass.base.tracker.query_sets,
cmd_enc.as_ref(),
query_index,
Some(&mut pending_query_resets),
&mut state.active_pipeline_statistics_query,
)
.map_pass_err(scope)?;
}
ArcRenderCommand::EndPipelineStatisticsQuery => {
api_log!("RenderPass::end_pipeline_statistics_query");
let scope = PassErrorScope::EndPipelineStatisticsQuery;

end_pipeline_statistics_query(
state.pass.base.raw_encoder,
&mut state.active_pipeline_statistics_query,
)
.map_pass_err(scope)?;
}
ArcRenderCommand::ExecuteBundle(bundle) => {
let scope = PassErrorScope::ExecuteBundle;
execute_bundle(
&mut state,
&mut indirect_draw_validation_batcher,
&cmd_enc,
bundle,
)
.map_pass_err(scope)?;
}
}
}

if *state.pass.base.debug_scope_depth > 0 {
Err(
RenderPassErrorInner::DebugGroupError(DebugGroupError::MissingPop)
.map_pass_err(pass_scope),
)?;
}

state
.info
.finish(
device,
state.pass.base.raw_encoder,
state.pass.base.snatch_guard,
&mut state.pass.scope,
self.instance.flags,
)
.map_pass_err(pass_scope)?;

let trackers = state.pass.scope;

let pending_discard_init_fixups = state.pass.pending_discard_init_fixups;

encoder.close().map_pass_err(pass_scope)?;
(trackers, pending_discard_init_fixups, pending_query_resets)
};

let encoder = &mut cmd_buf_data.encoder;
let tracker = &mut cmd_buf_data.trackers;

{
let transit = encoder
.open_pass(hal_label(
Some("(wgpu internal) Pre Pass"),
self.instance.flags,
))
.map_pass_err(pass_scope)?;

fixup_discarded_surfaces(
pending_discard_init_fixups.into_iter(),
transit,
&mut tracker.textures,
&cmd_enc.device,
snatch_guard,
);

pending_query_resets.reset_queries(transit);

CommandEncoder::insert_barriers_from_scope(transit, tracker, &scope, snatch_guard);

if let Some(ref indirect_validation) = device.indirect_validation {
indirect_validation
.draw
.inject_validation_pass(
device,
snatch_guard,
&mut cmd_buf_data.indirect_draw_validation_resources,
&mut cmd_buf_data.temp_resources,
transit,
indirect_draw_validation_batcher,
)
.map_pass_err(pass_scope)?;
}
}

encoder.close_and_swap().map_pass_err(pass_scope)?;

Ok(())
encode_render_pass(cmd_buf_data, &cmd_enc, pass)
})
}
}

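/// Free function that replays a finished `RenderPass` onto the command
/// encoder, extracted from `Global::render_pass_end` above: it opens a
/// dedicated pass buffer, dispatches each recorded `ArcRenderCommand`, then
/// records an "(wgpu internal) Pre Pass" buffer holding discard fixups, query
/// resets, and barriers, and swaps it in front of the pass commands.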
fn encode_render_pass(
cmd_buf_data: &mut CommandBufferMutable,
cmd_enc: &Arc<CommandEncoder>,
pass: &mut RenderPass,
) -> Result<(), RenderPassError> {
let pass_scope = PassErrorScope::Pass;

let device = &cmd_enc.device;
device.check_is_valid().map_pass_err(pass_scope)?;
let snatch_guard = &device.snatchable_lock.read();

let base = &mut pass.base;

let mut indirect_draw_validation_batcher = crate::indirect_validation::DrawBatcher::new();

let (scope, pending_discard_init_fixups, mut pending_query_resets) = {
let encoder = &mut cmd_buf_data.encoder;
let tracker = &mut cmd_buf_data.trackers;
let buffer_memory_init_actions = &mut cmd_buf_data.buffer_memory_init_actions;
let texture_memory_actions = &mut cmd_buf_data.texture_memory_actions;

// We automatically keep extending command buffers over time, and because
// we want to insert a command buffer _before_ what we're about to record,
// we need to make sure to close the previous one.
encoder.close_if_open().map_pass_err(pass_scope)?;
encoder
.open_pass(base.label.as_deref())
.map_pass_err(pass_scope)?;

let mut pending_query_resets = QueryResetMap::new();
let mut pending_discard_init_fixups = SurfacesInDiscardState::new();

let info = RenderPassInfo::start(
device,
hal_label(base.label.as_deref(), device.instance_flags),
pass.color_attachments.take(),
pass.depth_stencil_attachment.take(),
pass.timestamp_writes.take(),
// Still needed down the line.
// TODO(wumpf): by restructuring the code, we could get rid of some of this Arc clone.
pass.occlusion_query_set.clone(),
encoder,
tracker,
texture_memory_actions,
&mut pending_query_resets,
&mut pending_discard_init_fixups,
snatch_guard,
)
.map_pass_err(pass_scope)?;

let indices = &device.tracker_indices;
tracker.buffers.set_size(indices.buffers.size());
tracker.textures.set_size(indices.textures.size());

let mut debug_scope_depth = 0;

let mut state = State {
pipeline_flags: PipelineFlags::empty(),
blend_constant: OptionalState::Unused,
stencil_reference: 0,
pipeline: None,
index: IndexState::default(),
vertex: VertexState::default(),

info,

pass: pass::PassState {
base: EncodingState {
device,
raw_encoder: encoder.raw.as_mut(),
tracker,
buffer_memory_init_actions,
texture_memory_actions,
as_actions: &mut cmd_buf_data.as_actions,
indirect_draw_validation_resources: &mut cmd_buf_data
.indirect_draw_validation_resources,
snatch_guard,
debug_scope_depth: &mut debug_scope_depth,
},
pending_discard_init_fixups,
scope: device.new_usage_scope(),
binder: Binder::new(),

temp_offsets: Vec::new(),
dynamic_offset_count: 0,

string_offset: 0,
},

active_occlusion_query: None,
active_pipeline_statistics_query: None,
};

for command in base.commands.drain(..) {
match command {
ArcRenderCommand::SetBindGroup {
index,
num_dynamic_offsets,
bind_group,
} => {
let scope = PassErrorScope::SetBindGroup;
pass::set_bind_group::<RenderPassErrorInner>(
&mut state.pass,
cmd_enc.as_ref(),
&base.dynamic_offsets,
index,
num_dynamic_offsets,
bind_group,
true,
)
.map_pass_err(scope)?;
}
ArcRenderCommand::SetPipeline(pipeline) => {
let scope = PassErrorScope::SetPipelineRender;
set_pipeline(&mut state, cmd_enc, pipeline).map_pass_err(scope)?;
}
ArcRenderCommand::SetIndexBuffer {
buffer,
index_format,
offset,
size,
} => {
let scope = PassErrorScope::SetIndexBuffer;
set_index_buffer(&mut state, cmd_enc, buffer, index_format, offset, size)
.map_pass_err(scope)?;
}
ArcRenderCommand::SetVertexBuffer {
slot,
buffer,
offset,
size,
} => {
let scope = PassErrorScope::SetVertexBuffer;
set_vertex_buffer(&mut state, cmd_enc, slot, buffer, offset, size)
.map_pass_err(scope)?;
}
ArcRenderCommand::SetBlendConstant(ref color) => {
set_blend_constant(&mut state, color);
}
ArcRenderCommand::SetStencilReference(value) => {
set_stencil_reference(&mut state, value);
}
ArcRenderCommand::SetViewport {
rect,
depth_min,
depth_max,
} => {
let scope = PassErrorScope::SetViewport;
set_viewport(&mut state, rect, depth_min, depth_max).map_pass_err(scope)?;
}
ArcRenderCommand::SetPushConstant {
stages,
offset,
size_bytes,
values_offset,
} => {
let scope = PassErrorScope::SetPushConstant;
pass::set_push_constant::<RenderPassErrorInner, _>(
&mut state.pass,
&base.push_constant_data,
stages,
offset,
size_bytes,
values_offset,
|_| {},
)
.map_pass_err(scope)?;
}
ArcRenderCommand::SetScissor(rect) => {
let scope = PassErrorScope::SetScissorRect;
set_scissor(&mut state, rect).map_pass_err(scope)?;
}
ArcRenderCommand::Draw {
vertex_count,
instance_count,
first_vertex,
first_instance,
} => {
let scope = PassErrorScope::Draw {
kind: DrawKind::Draw,
family: DrawCommandFamily::Draw,
};
draw(
&mut state,
vertex_count,
instance_count,
first_vertex,
first_instance,
)
.map_pass_err(scope)?;
}
ArcRenderCommand::DrawIndexed {
index_count,
instance_count,
first_index,
base_vertex,
first_instance,
} => {
let scope = PassErrorScope::Draw {
kind: DrawKind::Draw,
family: DrawCommandFamily::DrawIndexed,
};
draw_indexed(
&mut state,
index_count,
instance_count,
first_index,
base_vertex,
first_instance,
)
.map_pass_err(scope)?;
}
ArcRenderCommand::DrawMeshTasks {
group_count_x,
group_count_y,
group_count_z,
} => {
let scope = PassErrorScope::Draw {
kind: DrawKind::Draw,
family: DrawCommandFamily::DrawMeshTasks,
};
draw_mesh_tasks(&mut state, group_count_x, group_count_y, group_count_z)
.map_pass_err(scope)?;
}
ArcRenderCommand::DrawIndirect {
buffer,
offset,
count,
family,

vertex_or_index_limit: _,
instance_limit: _,
} => {
let scope = PassErrorScope::Draw {
kind: if count != 1 {
DrawKind::MultiDrawIndirect
} else {
DrawKind::DrawIndirect
},
family,
};
multi_draw_indirect(
&mut state,
&mut indirect_draw_validation_batcher,
cmd_enc,
buffer,
offset,
count,
family,
)
.map_pass_err(scope)?;
}
ArcRenderCommand::MultiDrawIndirectCount {
buffer,
offset,
count_buffer,
count_buffer_offset,
max_count,
family,
} => {
let scope = PassErrorScope::Draw {
kind: DrawKind::MultiDrawIndirectCount,
family,
};
multi_draw_indirect_count(
&mut state,
cmd_enc,
buffer,
offset,
count_buffer,
count_buffer_offset,
max_count,
family,
)
.map_pass_err(scope)?;
}
ArcRenderCommand::PushDebugGroup { color: _, len } => {
pass::push_debug_group(&mut state.pass, &base.string_data, len);
}
ArcRenderCommand::PopDebugGroup => {
let scope = PassErrorScope::PopDebugGroup;
pass::pop_debug_group::<RenderPassErrorInner>(&mut state.pass)
.map_pass_err(scope)?;
}
ArcRenderCommand::InsertDebugMarker { color: _, len } => {
pass::insert_debug_marker(&mut state.pass, &base.string_data, len);
}
ArcRenderCommand::WriteTimestamp {
query_set,
query_index,
} => {
let scope = PassErrorScope::WriteTimestamp;
pass::write_timestamp::<RenderPassErrorInner>(
&mut state.pass,
cmd_enc.as_ref(),
Some(&mut pending_query_resets),
query_set,
query_index,
)
.map_pass_err(scope)?;
}
ArcRenderCommand::BeginOcclusionQuery { query_index } => {
api_log!("RenderPass::begin_occlusion_query {query_index}");
let scope = PassErrorScope::BeginOcclusionQuery;

let query_set = pass
.occlusion_query_set
.clone()
.ok_or(RenderPassErrorInner::MissingOcclusionQuerySet)
.map_pass_err(scope)?;

validate_and_begin_occlusion_query(
query_set,
state.pass.base.raw_encoder,
&mut state.pass.base.tracker.query_sets,
query_index,
Some(&mut pending_query_resets),
&mut state.active_occlusion_query,
)
.map_pass_err(scope)?;
}
ArcRenderCommand::EndOcclusionQuery => {
api_log!("RenderPass::end_occlusion_query");
let scope = PassErrorScope::EndOcclusionQuery;

end_occlusion_query(
state.pass.base.raw_encoder,
&mut state.active_occlusion_query,
)
.map_pass_err(scope)?;
}
ArcRenderCommand::BeginPipelineStatisticsQuery {
query_set,
query_index,
} => {
api_log!(
"RenderPass::begin_pipeline_statistics_query {query_index} {}",
query_set.error_ident()
);
let scope = PassErrorScope::BeginPipelineStatisticsQuery;

validate_and_begin_pipeline_statistics_query(
query_set,
state.pass.base.raw_encoder,
&mut state.pass.base.tracker.query_sets,
cmd_enc.as_ref(),
query_index,
Some(&mut pending_query_resets),
&mut state.active_pipeline_statistics_query,
)
.map_pass_err(scope)?;
}
ArcRenderCommand::EndPipelineStatisticsQuery => {
api_log!("RenderPass::end_pipeline_statistics_query");
let scope = PassErrorScope::EndPipelineStatisticsQuery;

end_pipeline_statistics_query(
state.pass.base.raw_encoder,
&mut state.active_pipeline_statistics_query,
)
.map_pass_err(scope)?;
}
ArcRenderCommand::ExecuteBundle(bundle) => {
let scope = PassErrorScope::ExecuteBundle;
execute_bundle(
&mut state,
&mut indirect_draw_validation_batcher,
cmd_enc,
bundle,
)
.map_pass_err(scope)?;
}
}
}

if *state.pass.base.debug_scope_depth > 0 {
Err(
RenderPassErrorInner::DebugGroupError(DebugGroupError::MissingPop)
.map_pass_err(pass_scope),
)?;
}

state
.info
.finish(
device,
state.pass.base.raw_encoder,
state.pass.base.snatch_guard,
&mut state.pass.scope,
device.instance_flags,
)
.map_pass_err(pass_scope)?;

let trackers = state.pass.scope;

let pending_discard_init_fixups = state.pass.pending_discard_init_fixups;

encoder.close().map_pass_err(pass_scope)?;
(trackers, pending_discard_init_fixups, pending_query_resets)
};

let encoder = &mut cmd_buf_data.encoder;
let tracker = &mut cmd_buf_data.trackers;

{
let transit = encoder
.open_pass(hal_label(
Some("(wgpu internal) Pre Pass"),
device.instance_flags,
))
.map_pass_err(pass_scope)?;

fixup_discarded_surfaces(
pending_discard_init_fixups.into_iter(),
transit,
&mut tracker.textures,
&cmd_enc.device,
snatch_guard,
);

pending_query_resets.reset_queries(transit);

CommandEncoder::insert_barriers_from_scope(transit, tracker, &scope, snatch_guard);

if let Some(ref indirect_validation) = device.indirect_validation {
indirect_validation
.draw
.inject_validation_pass(
device,
snatch_guard,
&mut cmd_buf_data.indirect_draw_validation_resources,
&mut cmd_buf_data.temp_resources,
transit,
indirect_draw_validation_batcher,
)
.map_pass_err(pass_scope)?;
}
}

encoder.close_and_swap().map_pass_err(pass_scope)?;

Ok(())
}

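/// Handles `ArcRenderCommand::SetPipeline` for the active render pass.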
fn set_pipeline(
state: &mut State,
cmd_enc: &Arc<CommandEncoder>,

File diff suppressed because it is too large