Allow Tests to Expect Certain Panic or Validation Messages (#4748)

This commit is contained in:
Connor Fitzgerald
2023-11-23 03:49:01 -05:00
committed by GitHub
parent 1a7db86917
commit 3c261f3652
19 changed files with 705 additions and 319 deletions

View File

@@ -395,7 +395,7 @@ static TEST: wgpu_example::framework::ExampleTestParams =
wgpu_test::ComparisonType::Mean(0.05),
wgpu_test::ComparisonType::Percentile {
percentile: 0.99,
threshold: 0.19,
threshold: 0.37,
},
],
_phantom: std::marker::PhantomData::<Example>,

562
tests/src/expectations.rs Normal file
View File

@@ -0,0 +1,562 @@
/// Conditions under which a test should fail or be skipped.
///
/// By passing a `FailureCase` to [`TestParameters::expect_fail`][expect_fail], you can
/// mark a test as expected to fail under the indicated conditions. By
/// passing it to [`TestParameters::skip`][skip], you can request that the
/// test be skipped altogether.
///
/// If a field is `None`, then that field does not restrict matches. For
/// example:
///
/// ```
/// # use wgpu_test::*;
/// FailureCase {
///     backends: Some(wgpu::Backends::DX11 | wgpu::Backends::DX12),
///     vendor: None,
///     adapter: Some("RTX"),
///     driver: None,
///     reasons: vec![FailureReason::ValidationError(Some("Some error substring"))],
///     behavior: FailureBehavior::AssertFailure,
/// }
/// # ;
/// ```
///
/// This applies to all cards with `"RTX"` in their name on either
/// Direct3D backend, no matter the vendor ID or driver name.
///
/// The strings given here need only appear as a substring in the
/// corresponding [`AdapterInfo`] fields. The comparison is
/// case-insensitive.
///
/// The default value of `FailureCase` applies to any test case. That
/// is, there are no criteria to constrain the match.
///
/// [skip]: super::TestParameters::skip
/// [expect_fail]: super::TestParameters::expect_fail
/// [`AdapterInfo`]: wgt::AdapterInfo
#[derive(Default, Clone)]
pub struct FailureCase {
    /// Backends expected to fail, or `None` for any backend.
    ///
    /// If this is `None`, or if the test is using one of the backends
    /// in `backends`, then this `FailureCase` applies.
    pub backends: Option<wgpu::Backends>,
    /// Vendor expected to fail, or `None` for any vendor.
    ///
    /// If `Some`, this must match [`AdapterInfo::vendor`], which is
    /// usually the PCI vendor id. Otherwise, this `FailureCase`
    /// applies regardless of vendor.
    ///
    /// [`AdapterInfo::vendor`]: wgt::AdapterInfo::vendor
    pub vendor: Option<u32>,
    /// Name of adapter expected to fail, or `None` for any adapter name.
    ///
    /// If this is `Some(s)` and `s` is a substring of
    /// [`AdapterInfo::name`], then this `FailureCase` applies. If
    /// this is `None`, the adapter name isn't considered.
    ///
    /// [`AdapterInfo::name`]: wgt::AdapterInfo::name
    pub adapter: Option<&'static str>,
    /// Name of driver expected to fail, or `None` for any driver name.
    ///
    /// If this is `Some(s)` and `s` is a substring of
    /// [`AdapterInfo::driver`], then this `FailureCase` applies. If
    /// this is `None`, the driver name isn't considered.
    ///
    /// [`AdapterInfo::driver`]: wgt::AdapterInfo::driver
    pub driver: Option<&'static str>,
    /// Reason why the test is expected to fail.
    ///
    /// If this does not match, the failure will not match this case.
    ///
    /// If no reasons are pushed, will match any failure.
    pub reasons: Vec<FailureReason>,
    /// Behavior after this case matches a failure.
    pub behavior: FailureBehavior,
}
impl FailureCase {
    /// Create a new failure case.
    pub fn new() -> Self {
        Self::default()
    }

    /// This case applies to all tests.
    pub fn always() -> Self {
        FailureCase::default()
    }

    /// This case applies to no tests.
    pub fn never() -> Self {
        FailureCase {
            // No backend is ever contained in the empty set.
            backends: Some(wgpu::Backends::empty()),
            ..FailureCase::default()
        }
    }

    /// Tests running on any of the given backends.
    pub fn backend(backends: wgpu::Backends) -> Self {
        FailureCase {
            backends: Some(backends),
            ..FailureCase::default()
        }
    }

    /// Tests running on `adapter`.
    ///
    /// For this case to apply, the `adapter` string must appear as a substring
    /// of the adapter's [`AdapterInfo::name`]. The comparison is
    /// case-insensitive.
    ///
    /// [`AdapterInfo::name`]: wgt::AdapterInfo::name
    pub fn adapter(adapter: &'static str) -> Self {
        FailureCase {
            adapter: Some(adapter),
            ..FailureCase::default()
        }
    }

    /// Tests running on `backend` and `adapter`.
    ///
    /// For this case to apply, the test must be using an adapter for one of the
    /// given `backend` bits, and `adapter` string must appear as a substring of
    /// the adapter's [`AdapterInfo::name`]. The string comparison is
    /// case-insensitive.
    ///
    /// [`AdapterInfo::name`]: wgt::AdapterInfo::name
    pub fn backend_adapter(backends: wgpu::Backends, adapter: &'static str) -> Self {
        FailureCase {
            backends: Some(backends),
            adapter: Some(adapter),
            ..FailureCase::default()
        }
    }

    /// Tests running under WebGL.
    ///
    /// On non-wasm targets this matches no tests, since the GL backend
    /// there is not WebGL.
    pub fn webgl2() -> Self {
        #[cfg(target_arch = "wasm32")]
        let case = FailureCase::backend(wgpu::Backends::GL);
        #[cfg(not(target_arch = "wasm32"))]
        let case = FailureCase::never();
        case
    }

    /// Tests running on the MoltenVK Vulkan driver on macOS.
    pub fn molten_vk() -> Self {
        FailureCase {
            backends: Some(wgpu::Backends::VULKAN),
            driver: Some("MoltenVK"),
            ..FailureCase::default()
        }
    }

    /// Return the reasons why this case should fail.
    ///
    /// If no explicit reasons were pushed, returns a single wildcard
    /// [`FailureReason::Any`], which matches any failure.
    pub fn reasons(&self) -> &[FailureReason] {
        if self.reasons.is_empty() {
            // `&FailureReason::Any` is a constant expression, so the
            // reference is promoted to `'static` and may be returned.
            std::array::from_ref(&FailureReason::Any)
        } else {
            &self.reasons
        }
    }

    /// Matches this failure case against the given validation error substring.
    ///
    /// Substrings are matched case-insensitively.
    ///
    /// If multiple reasons are pushed, will match any of them.
    pub fn validation_error(mut self, msg: &'static str) -> Self {
        self.reasons.push(FailureReason::ValidationError(Some(msg)));
        self
    }

    /// Matches this failure case against the given panic substring.
    ///
    /// Substrings are matched case-insensitively.
    ///
    /// If multiple reasons are pushed, will match any of them.
    pub fn panic(mut self, msg: &'static str) -> Self {
        self.reasons.push(FailureReason::Panic(Some(msg)));
        self
    }

    /// Test is flaky with the given configuration. Do not assert failure.
    ///
    /// Use this _very_ sparingly, and match as tightly as you can,
    /// including giving a specific failure message.
    pub fn flaky(self) -> Self {
        FailureCase {
            behavior: FailureBehavior::Ignore,
            ..self
        }
    }

    /// Test whether `self` applies to `info`.
    ///
    /// If it does, return a `FailureReasons` whose set bits indicate
    /// why. If it doesn't, return `None`.
    ///
    /// The caller is responsible for converting the string-valued
    /// fields of `info` to lower case, to ensure case-insensitive
    /// matching.
    pub(crate) fn applies_to_adapter(
        &self,
        info: &wgt::AdapterInfo,
    ) -> Option<FailureApplicationReasons> {
        let mut reasons = FailureApplicationReasons::empty();

        // Every populated criterion must match; any miss disqualifies the case.
        if let Some(backends) = self.backends {
            if !backends.contains(wgpu::Backends::from(info.backend)) {
                return None;
            }
            reasons.set(FailureApplicationReasons::BACKEND, true);
        }
        if let Some(vendor) = self.vendor {
            if vendor != info.vendor {
                return None;
            }
            reasons.set(FailureApplicationReasons::VENDOR, true);
        }
        if let Some(adapter) = self.adapter {
            // Only the pattern is lowercased here; `info.name` is expected
            // to be lowercased already by the caller (see doc above).
            let adapter = adapter.to_lowercase();
            if !info.name.contains(&adapter) {
                return None;
            }
            reasons.set(FailureApplicationReasons::ADAPTER, true);
        }
        if let Some(driver) = self.driver {
            // Same as above: `info.driver` is expected to be pre-lowercased.
            let driver = driver.to_lowercase();
            if !info.driver.contains(&driver) {
                return None;
            }
            reasons.set(FailureApplicationReasons::DRIVER, true);
        }

        // If we got this far but no specific reasons were triggered, then this
        // must be a wildcard.
        if reasons.is_empty() {
            Some(FailureApplicationReasons::ALWAYS)
        } else {
            Some(reasons)
        }
    }

    /// Returns true if the given failure "satisfies" this failure case.
    ///
    /// Each matching reason is reported via `log::error!` so that the test
    /// output explains why a failure was considered expected.
    pub(crate) fn matches_failure(&self, failure: &FailureResult) -> bool {
        for reason in self.reasons() {
            let result = match (reason, failure) {
                // Wildcard reason matches every kind of failure.
                (FailureReason::Any, _) => {
                    log::error!("Matched failure case: Wildcard");
                    true
                }
                // Reason with no message matches any validation error.
                (FailureReason::ValidationError(None), FailureResult::ValidationError(_)) => {
                    log::error!("Matched failure case: Any Validation Error");
                    true
                }
                // Reason with a message requires a case-insensitive
                // substring match against the actual error message.
                (
                    FailureReason::ValidationError(Some(expected)),
                    FailureResult::ValidationError(Some(actual)),
                ) => {
                    let result = actual.to_lowercase().contains(&expected.to_lowercase());
                    if result {
                        log::error!(
                            "Matched failure case: Validation Error containing \"{}\"",
                            expected
                        );
                    }
                    result
                }
                // Reason with no message matches any panic.
                (FailureReason::Panic(None), FailureResult::Panic(_)) => {
                    log::error!("Matched failure case: Any Panic");
                    true
                }
                // Reason with a message requires a case-insensitive
                // substring match against the panic message.
                (FailureReason::Panic(Some(expected)), FailureResult::Panic(Some(actual))) => {
                    let result = actual.to_lowercase().contains(&expected.to_lowercase());
                    if result {
                        log::error!("Matched failure case: Panic containing \"{}\"", expected);
                    }
                    result
                }
                // Mismatched kinds (e.g. expected a panic, got a validation
                // error) never match.
                _ => false,
            };
            if result {
                return true;
            }
        }
        false
    }
}
bitflags::bitflags! {
    /// Reason why a test matches a given failure case.
    ///
    /// Multiple bits may be set when the case constrained several fields.
    #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
    pub struct FailureApplicationReasons: u8 {
        /// The case's backend restriction matched the running backend.
        const BACKEND = 1 << 0;
        /// The case's vendor id restriction matched the adapter.
        const VENDOR = 1 << 1;
        /// The case's adapter-name substring matched the adapter name.
        const ADAPTER = 1 << 2;
        /// The case's driver-name substring matched the driver name.
        const DRIVER = 1 << 3;
        /// The case had no restrictions at all, so it applies unconditionally.
        const ALWAYS = 1 << 4;
    }
}
/// Reason why a test is expected to fail.
///
/// If the test fails for a different reason, the given FailureCase will be ignored.
#[derive(Default, Debug, Clone, PartialEq)]
pub enum FailureReason {
    /// Matches any failure.
    #[default]
    Any,
    /// Matches validation errors raised from the backend validation.
    ///
    /// If a string is provided, matches only validation errors that contain
    /// the string (compared case-insensitively).
    ValidationError(Option<&'static str>),
    /// A panic was raised.
    ///
    /// If a string is provided, matches only panics that contain the string
    /// (compared case-insensitively).
    Panic(Option<&'static str>),
}
/// What the test harness should do once a failure case has matched.
#[derive(Default, Clone)]
pub enum FailureBehavior {
    /// Assert that the test fails for the given reason.
    ///
    /// If the test passes, the test harness will panic.
    #[default]
    AssertFailure,
    /// Ignore the matching failure.
    ///
    /// This is useful for tests that flake in a very specific way,
    /// but sometimes succeed, so we can't assert that they always fail.
    Ignore,
}
/// A failure that actually occurred while running a test.
#[derive(Debug)]
pub(crate) enum FailureResult {
    /// A validation error was reported, with its message when available.
    #[allow(dead_code)] // Not constructed on wasm
    ValidationError(Option<String>),
    /// The test panicked, with the panic message when it could be extracted.
    Panic(Option<String>),
}
/// Outcome of comparing expected failures against actual failures.
#[derive(PartialEq, Clone, Copy, Debug)]
pub(crate) enum ExpectationMatchResult {
    /// Expectations and actual failures disagreed; the harness should panic.
    Panic,
    /// All expectations were satisfied; the test run completed as expected.
    Complete,
}
/// Compares the actual failures against the expected failure cases.
///
/// Returns [`ExpectationMatchResult::Panic`] when either an expectation
/// with [`FailureBehavior::AssertFailure`] matched nothing, or an actual
/// failure matched no expectation; otherwise returns
/// [`ExpectationMatchResult::Complete`].
pub(crate) fn expectations_match_failures(
    expectations: &[FailureCase],
    mut actual: Vec<FailureResult>,
) -> ExpectationMatchResult {
    // Assume success until something disagrees.
    let mut verdict = ExpectationMatchResult::Complete;

    for expectation in expectations {
        // Remove every actual failure this expectation accounts for; the
        // length difference tells us whether it matched anything at all.
        let count_before = actual.len();
        actual.retain(|failure| !expectation.matches_failure(failure));
        let matched_something = actual.len() != count_before;

        // An expectation that matched nothing is an unexpected success,
        // unless the case is flaky (FailureBehavior::Ignore).
        if !matched_something
            && matches!(expectation.behavior, FailureBehavior::AssertFailure)
        {
            verdict = ExpectationMatchResult::Panic;
            log::error!(
                "Expected to fail due to {:?}, but did not fail",
                expectation.reasons()
            );
        }
    }

    // Whatever is still left over is a failure nobody expected.
    if !actual.is_empty() {
        verdict = ExpectationMatchResult::Panic;
        for failure in actual {
            log::error!("Unexpected failure due to: {:?}", failure);
        }
    }

    verdict
}
#[cfg(test)]
mod test {
    //! Unit tests for matching expected failure cases against actual
    //! failures via [`super::expectations_match_failures`].

    use crate::{
        expectations::{ExpectationMatchResult, FailureResult},
        init::init_logger,
        FailureCase,
    };

    /// Shorthand: a validation-error failure carrying `msg`.
    fn validation_err(msg: &'static str) -> FailureResult {
        FailureResult::ValidationError(Some(String::from(msg)))
    }

    /// Shorthand: a panic failure carrying `msg`.
    fn panic(msg: &'static str) -> FailureResult {
        FailureResult::Panic(Some(String::from(msg)))
    }

    /// Wildcard expectations against the presence/absence of any failure.
    #[test]
    fn simple_match() {
        init_logger();

        // -- Unexpected failure --

        let expectation = vec![];
        let actual = vec![FailureResult::ValidationError(None)];

        assert_eq!(
            super::expectations_match_failures(&expectation, actual),
            ExpectationMatchResult::Panic
        );

        // -- Missing expected failure --

        let expectation = vec![FailureCase::always()];
        let actual = vec![];

        assert_eq!(
            super::expectations_match_failures(&expectation, actual),
            ExpectationMatchResult::Panic
        );

        // -- Expected failure (validation) --

        let expectation = vec![FailureCase::always()];
        let actual = vec![FailureResult::ValidationError(None)];

        assert_eq!(
            super::expectations_match_failures(&expectation, actual),
            ExpectationMatchResult::Complete
        );

        // -- Expected failure (panic) --

        let expectation = vec![FailureCase::always()];
        let actual = vec![FailureResult::Panic(None)];

        assert_eq!(
            super::expectations_match_failures(&expectation, actual),
            ExpectationMatchResult::Complete
        );
    }

    /// Message substrings must match case-insensitively.
    #[test]
    fn substring_match() {
        init_logger();

        // -- Matching Substring --

        let expectation: Vec<FailureCase> =
            vec![FailureCase::always().validation_error("Some StrIng")];
        let actual = vec![FailureResult::ValidationError(Some(String::from(
            "a very long string that contains sOmE sTrInG of different capitalization",
        )))];

        assert_eq!(
            super::expectations_match_failures(&expectation, actual),
            ExpectationMatchResult::Complete
        );

        // -- Non-Matching Substring --

        let expectation = vec![FailureCase::always().validation_error("Some String")];
        let actual = vec![validation_err("a very long string that doesn't contain it")];

        assert_eq!(
            super::expectations_match_failures(&expectation, actual),
            ExpectationMatchResult::Panic
        );
    }

    /// A flaky case is satisfied whether or not its failure happens.
    #[test]
    fn ignore_flaky() {
        init_logger();

        let expectation = vec![FailureCase::always().validation_error("blah").flaky()];
        let actual = vec![validation_err("some blah")];

        assert_eq!(
            super::expectations_match_failures(&expectation, actual),
            ExpectationMatchResult::Complete
        );

        let expectation = vec![FailureCase::always().validation_error("blah").flaky()];
        let actual = vec![];

        assert_eq!(
            super::expectations_match_failures(&expectation, actual),
            ExpectationMatchResult::Complete
        );
    }

    /// One expectation may absorb several matching failures, but any
    /// leftover unmatched failure still causes a panic verdict.
    #[test]
    fn matches_multiple_errors() {
        init_logger();

        // -- matches all matching errors --

        let expectation = vec![FailureCase::always().validation_error("blah")];
        let actual = vec![
            validation_err("some blah"),
            validation_err("some other blah"),
        ];

        assert_eq!(
            super::expectations_match_failures(&expectation, actual),
            ExpectationMatchResult::Complete
        );

        // -- but not all errors --

        let expectation = vec![FailureCase::always().validation_error("blah")];
        let actual = vec![
            validation_err("some blah"),
            validation_err("some other blah"),
            validation_err("something else"),
        ];

        assert_eq!(
            super::expectations_match_failures(&expectation, actual),
            ExpectationMatchResult::Panic
        );
    }

    /// A single case with multiple reasons matches failures of either kind.
    #[test]
    fn multi_reason_error() {
        init_logger();

        let expectation = vec![FailureCase::default()
            .validation_error("blah")
            .panic("panik")];
        let actual = vec![
            validation_err("my blah blah validation error"),
            panic("my panik"),
        ];

        assert_eq!(
            super::expectations_match_failures(&expectation, actual),
            ExpectationMatchResult::Complete
        );
    }
}

View File

@@ -1,6 +1,15 @@
use wgpu::{Adapter, Device, Instance, Queue};
use wgt::{Backends, Features, Limits};
/// Initialize the logger for the test runner.
pub fn init_logger() {
    // We don't actually care if it fails — e.g. because a logger was
    // already installed by an earlier call; logging is best-effort here.
    #[cfg(not(target_arch = "wasm32"))]
    let _ = env_logger::try_init();
    #[cfg(target_arch = "wasm32")]
    let _ = console_log::init_with_level(log::Level::Info);
}
/// Initialize a wgpu instance with the options from the environment.
pub fn initialize_instance() -> Instance {
// We ignore `WGPU_BACKEND` for now, merely using test filtering to only run a single backend's tests.

View File

@@ -1,6 +1,7 @@
//! Test utilities for the wgpu repository.
mod config;
mod expectations;
pub mod image;
mod init;
mod isolation;
@@ -16,8 +17,9 @@ pub use self::image::ComparisonType;
pub use config::GpuTestConfiguration;
#[doc(hidden)]
pub use ctor::ctor;
pub use expectations::{FailureApplicationReasons, FailureBehavior, FailureCase, FailureReason};
pub use init::{initialize_adapter, initialize_device, initialize_instance};
pub use params::{FailureCase, FailureReasons, TestParameters};
pub use params::TestParameters;
pub use run::{execute_test, TestingContext};
pub use wgpu_macros::gpu_test;

View File

@@ -1,198 +1,10 @@
use arrayvec::ArrayVec;
use wgt::{DownlevelCapabilities, DownlevelFlags, Features, Limits};
use crate::{report::AdapterReport, GpuTestConfiguration};
/// Conditions under which a test should fail or be skipped.
///
/// By passing a `FailureCase` to [`TestParameters::expect_fail`], you can
/// mark a test as expected to fail under the indicated conditions. By
/// passing it to [`TestParameters::skip`], you can request that the
/// test be skipped altogether.
///
/// If a field is `None`, then that field does not restrict matches. For
/// example:
///
/// ```
/// # use wgpu_test::FailureCase;
/// FailureCase {
/// backends: Some(wgpu::Backends::DX11 | wgpu::Backends::DX12),
/// vendor: None,
/// adapter: Some("RTX"),
/// driver: None,
/// }
/// # ;
/// ```
///
/// This applies to all cards with `"RTX'` in their name on either
/// Direct3D backend, no matter the vendor ID or driver name.
///
/// The strings given here need only appear as a substring in the
/// corresponding [`AdapterInfo`] fields. The comparison is
/// case-insensitive.
///
/// The default value of `FailureCase` applies to any test case. That
/// is, there are no criteria to constrain the match.
///
/// [`AdapterInfo`]: wgt::AdapterInfo
#[derive(Default, Clone)]
pub struct FailureCase {
/// Backends expected to fail, or `None` for any backend.
///
/// If this is `None`, or if the test is using one of the backends
/// in `backends`, then this `FailureCase` applies.
pub backends: Option<wgpu::Backends>,
/// Vendor expected to fail, or `None` for any vendor.
///
/// If `Some`, this must match [`AdapterInfo::device`], which is
/// usually the PCI device id. Otherwise, this `FailureCase`
/// applies regardless of vendor.
///
/// [`AdapterInfo::device`]: wgt::AdapterInfo::device
pub vendor: Option<u32>,
/// Name of adaper expected to fail, or `None` for any adapter name.
///
/// If this is `Some(s)` and `s` is a substring of
/// [`AdapterInfo::name`], then this `FailureCase` applies. If
/// this is `None`, the adapter name isn't considered.
///
/// [`AdapterInfo::name`]: wgt::AdapterInfo::name
pub adapter: Option<&'static str>,
/// Name of driver expected to fail, or `None` for any driver name.
///
/// If this is `Some(s)` and `s` is a substring of
/// [`AdapterInfo::driver`], then this `FailureCase` applies. If
/// this is `None`, the driver name isn't considered.
///
/// [`AdapterInfo::driver`]: wgt::AdapterInfo::driver
pub driver: Option<&'static str>,
}
impl FailureCase {
/// This case applies to all tests.
pub fn always() -> Self {
FailureCase::default()
}
/// This case applies to no tests.
pub fn never() -> Self {
FailureCase {
backends: Some(wgpu::Backends::empty()),
..FailureCase::default()
}
}
/// Tests running on any of the given backends.
pub fn backend(backends: wgpu::Backends) -> Self {
FailureCase {
backends: Some(backends),
..FailureCase::default()
}
}
/// Tests running on `adapter`.
///
/// For this case to apply, the `adapter` string must appear as a substring
/// of the adapter's [`AdapterInfo::name`]. The comparison is
/// case-insensitive.
///
/// [`AdapterInfo::name`]: wgt::AdapterInfo::name
pub fn adapter(adapter: &'static str) -> Self {
FailureCase {
adapter: Some(adapter),
..FailureCase::default()
}
}
/// Tests running on `backend` and `adapter`.
///
/// For this case to apply, the test must be using an adapter for one of the
/// given `backend` bits, and `adapter` string must appear as a substring of
/// the adapter's [`AdapterInfo::name`]. The string comparison is
/// case-insensitive.
///
/// [`AdapterInfo::name`]: wgt::AdapterInfo::name
pub fn backend_adapter(backends: wgpu::Backends, adapter: &'static str) -> Self {
FailureCase {
backends: Some(backends),
adapter: Some(adapter),
..FailureCase::default()
}
}
/// Tests running under WebGL.
///
/// Because of wasm's limited ability to recover from errors, we
/// usually need to skip the test altogether if it's not
/// supported, so this should be usually used with
/// [`TestParameters::skip`].
pub fn webgl2() -> Self {
#[cfg(target_arch = "wasm32")]
let case = FailureCase::backend(wgpu::Backends::GL);
#[cfg(not(target_arch = "wasm32"))]
let case = FailureCase::never();
case
}
/// Tests running on the MoltenVK Vulkan driver on macOS.
pub fn molten_vk() -> Self {
FailureCase {
backends: Some(wgpu::Backends::VULKAN),
driver: Some("MoltenVK"),
..FailureCase::default()
}
}
/// Test whether `self` applies to `info`.
///
/// If it does, return a `FailureReasons` whose set bits indicate
/// why. If it doesn't, return `None`.
///
/// The caller is responsible for converting the string-valued
/// fields of `info` to lower case, to ensure case-insensitive
/// matching.
pub(crate) fn applies_to(&self, info: &wgt::AdapterInfo) -> Option<FailureReasons> {
let mut reasons = FailureReasons::empty();
if let Some(backends) = self.backends {
if !backends.contains(wgpu::Backends::from(info.backend)) {
return None;
}
reasons.set(FailureReasons::BACKEND, true);
}
if let Some(vendor) = self.vendor {
if vendor != info.vendor {
return None;
}
reasons.set(FailureReasons::VENDOR, true);
}
if let Some(adapter) = self.adapter {
let adapter = adapter.to_lowercase();
if !info.name.contains(&adapter) {
return None;
}
reasons.set(FailureReasons::ADAPTER, true);
}
if let Some(driver) = self.driver {
let driver = driver.to_lowercase();
if !info.driver.contains(&driver) {
return None;
}
reasons.set(FailureReasons::DRIVER, true);
}
// If we got this far but no specific reasons were triggered, then this
// must be a wildcard.
if reasons.is_empty() {
Some(FailureReasons::ALWAYS)
} else {
Some(reasons)
}
}
}
use crate::{
report::AdapterReport, FailureApplicationReasons, FailureBehavior, FailureCase,
GpuTestConfiguration,
};
const LOWEST_DOWNLEVEL_PROPERTIES: wgpu::DownlevelCapabilities = DownlevelCapabilities {
flags: wgt::DownlevelFlags::empty(),
@@ -226,18 +38,6 @@ impl Default for TestParameters {
}
}
bitflags::bitflags! {
/// Ways that a given test can be expected to fail.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct FailureReasons: u8 {
const BACKEND = 1 << 0;
const VENDOR = 1 << 1;
const ADAPTER = 1 << 2;
const DRIVER = 1 << 3;
const ALWAYS = 1 << 4;
}
}
// Builder pattern to make it easier
impl TestParameters {
/// Set of common features that most internal tests require for compute and readback.
@@ -279,31 +79,32 @@ impl TestParameters {
/// Information about a test, including if it should be skipped.
pub struct TestInfo {
pub skip: bool,
pub expected_failure_reason: Option<FailureReasons>,
pub failure_application_reasons: FailureApplicationReasons,
pub failures: Vec<FailureCase>,
pub running_msg: String,
}
impl TestInfo {
pub(crate) fn from_configuration(test: &GpuTestConfiguration, adapter: &AdapterReport) -> Self {
// Figure out if we should skip the test and if so, why.
let mut skipped_reasons: ArrayVec<_, 4> = ArrayVec::new();
// Figure out if a test is unsupported, and why.
let mut unsupported_reasons: ArrayVec<_, 4> = ArrayVec::new();
let missing_features = test.params.required_features - adapter.features;
if !missing_features.is_empty() {
skipped_reasons.push("Features");
unsupported_reasons.push("Features");
}
if !test.params.required_limits.check_limits(&adapter.limits) {
skipped_reasons.push("Limits");
unsupported_reasons.push("Limits");
}
let missing_downlevel_flags =
test.params.required_downlevel_caps.flags - adapter.downlevel_caps.flags;
if !missing_downlevel_flags.is_empty() {
skipped_reasons.push("Downlevel Flags");
unsupported_reasons.push("Downlevel Flags");
}
if test.params.required_downlevel_caps.shader_model > adapter.downlevel_caps.shader_model {
skipped_reasons.push("Shader Model");
unsupported_reasons.push("Shader Model");
}
// Produce a lower-case version of the adapter info, for comparison against
@@ -315,48 +116,55 @@ impl TestInfo {
};
// Check if we should skip the test altogether.
let skip_reason = test
let skip_application_reason = test
.params
.skips
.iter()
.find_map(|case| case.applies_to(&adapter_lowercase_info));
.find_map(|case| case.applies_to_adapter(&adapter_lowercase_info));
let expected_failure_reason = test
.params
.failures
.iter()
.find_map(|case| case.applies_to(&adapter_lowercase_info));
let mut applicable_cases = Vec::with_capacity(test.params.failures.len());
let mut failure_application_reasons = FailureApplicationReasons::empty();
let mut flaky = false;
for failure in &test.params.failures {
if let Some(reasons) = failure.applies_to_adapter(&adapter_lowercase_info) {
failure_application_reasons.insert(reasons);
applicable_cases.push(failure.clone());
flaky |= matches!(failure.behavior, FailureBehavior::Ignore);
}
}
let mut skip = false;
let running_msg = if let Some(reasons) = skip_reason {
let running_msg = if let Some(reasons) = skip_application_reason {
skip = true;
let names: ArrayVec<_, 4> = reasons.iter_names().map(|(name, _)| name).collect();
let names_text = names.join(" | ");
format!("Skipped Failure: {}", names_text)
} else if !skipped_reasons.is_empty() {
} else if !unsupported_reasons.is_empty() {
skip = true;
format!("Skipped: {}", skipped_reasons.join(" | "))
} else if let Some(failure_resasons) = expected_failure_reason {
format!("Unsupported: {}", unsupported_reasons.join(" | "))
} else if !failure_application_reasons.is_empty() {
if cfg!(target_arch = "wasm32") {
skip = true;
}
let names: ArrayVec<_, 4> = failure_resasons
let names: ArrayVec<_, 4> = failure_application_reasons
.iter_names()
.map(|(name, _)| name)
.collect();
let names_text = names.join(" | ");
let names_text = names.join(" & ");
let flaky_text = if flaky { " Flaky " } else { " " };
format!("Executed Failure: {}", names_text)
format!("Executed{flaky_text}Failure: {names_text}")
} else {
String::from("Executed")
};
Self {
skip,
expected_failure_reason,
failure_application_reasons,
failures: applicable_cases,
running_msg,
}
}

View File

@@ -4,7 +4,8 @@ use futures_lite::FutureExt;
use wgpu::{Adapter, Device, Instance, Queue};
use crate::{
init::{initialize_adapter, initialize_device},
expectations::{expectations_match_failures, ExpectationMatchResult, FailureResult},
init::{init_logger, initialize_adapter, initialize_device},
isolation,
params::TestInfo,
report::AdapterReport,
@@ -37,11 +38,7 @@ pub async fn execute_test(
return;
}
// We don't actually care if it fails
#[cfg(not(target_arch = "wasm32"))]
let _ = env_logger::try_init();
#[cfg(target_arch = "wasm32")]
let _ = console_log::init_with_level(log::Level::Info);
init_logger();
let _test_guard = isolation::OneTestPerProcessGuard::new();
@@ -78,51 +75,38 @@ pub async fn execute_test(
queue,
};
let mut failures = Vec::new();
// Run the test, and catch panics (possibly due to failed assertions).
let panicked = AssertUnwindSafe((config.test.as_ref().unwrap())(context))
let panic_res = AssertUnwindSafe((config.test.as_ref().unwrap())(context))
.catch_unwind()
.await
.is_err();
.await;
if let Err(panic) = panic_res {
let panic_str = panic.downcast_ref::<&'static str>();
let panic_string = if let Some(&panic_str) = panic_str {
Some(panic_str.to_string())
} else {
panic.downcast_ref::<String>().cloned()
};
failures.push(FailureResult::Panic(panic_string))
}
// Check whether any validation errors were reported during the test run.
cfg_if::cfg_if!(
if #[cfg(any(not(target_arch = "wasm32"), target_os = "emscripten"))] {
let canary_set = wgpu::hal::VALIDATION_CANARY.get_and_reset();
failures.extend(wgpu::hal::VALIDATION_CANARY.get_and_reset().into_iter().map(|msg| FailureResult::ValidationError(Some(msg))));
} else if #[cfg(all(target_arch = "wasm32", feature = "webgl"))] {
let canary_set = _surface_guard.unwrap().check_for_unreported_errors();
if _surface_guard.unwrap().check_for_unreported_errors() {
failures.push(FailureResult::ValidationError(None));
}
} else {
// TODO: WebGPU
let canary_set = false;
}
);
// Summarize reasons for actual failure, if any.
let failure_cause = match (panicked, canary_set) {
(true, true) => Some("PANIC AND VALIDATION ERROR"),
(true, false) => Some("PANIC"),
(false, true) => Some("VALIDATION ERROR"),
(false, false) => None,
};
// Compare actual results against expectations.
match (failure_cause, test_info.expected_failure_reason) {
// The test passed, as expected.
(None, None) => log::info!("TEST RESULT: PASSED"),
// The test failed unexpectedly.
(Some(cause), None) => {
panic!("UNEXPECTED TEST FAILURE DUE TO {cause}")
}
// The test passed unexpectedly.
(None, Some(reason)) => {
panic!("UNEXPECTED TEST PASS: {reason:?}");
}
// The test failed, as expected.
(Some(cause), Some(reason_expected)) => {
log::info!(
"TEST RESULT: EXPECTED FAILURE DUE TO {} (expected because of {:?})",
cause,
reason_expected
);
}
// The call to matches_failure will log.
if expectations_match_failures(&test_info.failures, failures) == ExpectationMatchResult::Panic {
panic!();
}
}

View File

@@ -341,7 +341,13 @@ static CLEAR_TEXTURE_UNCOMPRESSED_GLES: GpuTestConfiguration = GpuTestConfigurat
static CLEAR_TEXTURE_UNCOMPRESSED: GpuTestConfiguration = GpuTestConfiguration::new()
.parameters(
TestParameters::default()
.expect_fail(FailureCase::backend(wgpu::Backends::GL))
.expect_fail(
FailureCase::backend(wgpu::Backends::GL)
.panic("texture with format Rg8Snorm was not fully cleared")
.panic("texture with format Rgb9e5Ufloat was not fully cleared")
.validation_error("GL_INVALID_FRAMEBUFFER_OPERATION")
.validation_error("GL_INVALID_OPERATION"),
)
.features(wgpu::Features::CLEAR_TEXTURE),
)
.run_sync(|ctx| {

View File

@@ -8,7 +8,7 @@ static PIPELINE_DEFAULT_LAYOUT_BAD_MODULE: GpuTestConfiguration = GpuTestConfigu
.parameters(
TestParameters::default()
// https://github.com/gfx-rs/wgpu/issues/4167
.expect_fail(FailureCase::always()),
.expect_fail(FailureCase::always().panic("Pipeline is invalid")),
)
.run_sync(|ctx| {
ctx.device.push_error_scope(wgpu::ErrorFilter::Validation);

View File

@@ -224,8 +224,11 @@ static UNIFORM_INPUT: GpuTestConfiguration = GpuTestConfiguration::new()
.parameters(
TestParameters::default()
.downlevel_flags(DownlevelFlags::COMPUTE_SHADERS)
// Validation errors thrown by the SPIR-V validator https://github.com/gfx-rs/naga/issues/2034
.expect_fail(FailureCase::backend(wgpu::Backends::VULKAN))
// Validation errors thrown by the SPIR-V validator https://github.com/gfx-rs/wgpu/issues/4371
.expect_fail(
FailureCase::backend(wgpu::Backends::VULKAN)
.validation_error("a matrix with stride 8 not satisfying alignment to 16"),
)
.limits(Limits::downlevel_defaults()),
)
.run_sync(|ctx| {

View File

@@ -22,8 +22,7 @@ static ZERO_INIT_WORKGROUP_MEMORY: GpuTestConfiguration = GpuTestConfiguration::
vendor: Some(5140),
adapter: Some("Microsoft Basic Render Driver"),
..FailureCase::default()
})
.skip(FailureCase::backend_adapter(Backends::VULKAN, "llvmpipe")),
}),
)
.run_sync(|ctx| {
let bgl = ctx

View File

@@ -4,7 +4,13 @@ use wgpu_test::{gpu_test, FailureCase, GpuTestConfiguration, TestParameters};
#[gpu_test]
static WRITE_TEXTURE_SUBSET_2D: GpuTestConfiguration = GpuTestConfiguration::new()
.parameters(TestParameters::default().expect_fail(FailureCase::backend(wgpu::Backends::DX12)))
.parameters(
TestParameters::default()
// This just totally removes the device due to invalid api call.
//
// https://github.com/gfx-rs/wgpu/issues/3072
.expect_fail(FailureCase::backend(wgpu::Backends::DX12)),
)
.run_sync(|ctx| {
let size = 256;

View File

@@ -72,7 +72,11 @@ static DISCARDING_EITHER_DEPTH_OR_STENCIL_ASPECT_TEST: GpuTestConfiguration =
)
.limits(Limits::downlevel_defaults())
// https://github.com/gfx-rs/wgpu/issues/4740
.skip(FailureCase::backend_adapter(Backends::VULKAN, "llvmpipe")),
.expect_fail(
FailureCase::backend_adapter(Backends::VULKAN, "llvmpipe")
.panic("texture was not fully cleared")
.flaky(),
),
)
.run_sync(|mut ctx| {
for format in [

View File

@@ -98,7 +98,7 @@ unsafe extern "system" fn output_debug_string_handler(
if cfg!(debug_assertions) && level == log::Level::Error {
// Set canary and continue
crate::VALIDATION_CANARY.set();
crate::VALIDATION_CANARY.add(message.to_string());
}
excpt::EXCEPTION_CONTINUE_EXECUTION

View File

@@ -603,6 +603,7 @@ impl super::Adapter {
},
);
private_caps.set(super::PrivateCapabilities::QUERY_BUFFERS, query_buffers);
private_caps.set(super::PrivateCapabilities::QUERY_64BIT, full_ver.is_some());
private_caps.set(
super::PrivateCapabilities::TEXTURE_STORAGE,
supported((3, 0), (4, 2)),

View File

@@ -1357,31 +1357,18 @@ impl crate::Device<super::Api> for super::Device {
desc: &wgt::QuerySetDescriptor<crate::Label>,
) -> Result<super::QuerySet, crate::DeviceError> {
let gl = &self.shared.context.lock();
let mut temp_string = String::new();
let mut queries = Vec::with_capacity(desc.count as usize);
for i in 0..desc.count {
for _ in 0..desc.count {
let query =
unsafe { gl.create_query() }.map_err(|_| crate::DeviceError::OutOfMemory)?;
#[cfg(not(target_arch = "wasm32"))]
if gl.supports_debug() {
use std::fmt::Write;
// Initialize the query so we can label it
match desc.ty {
wgt::QueryType::Timestamp => unsafe {
gl.query_counter(query, glow::TIMESTAMP)
},
_ => (),
}
// We aren't really able to, in general, label queries.
//
// We could take a timestamp here to "initialize" the query,
// but that's a bit of a hack, and we don't want to insert
// random timestamps into the command stream of we don't have to.
if let Some(label) = desc.label {
temp_string.clear();
let _ = write!(temp_string, "{label}[{i}]");
let name = unsafe { mem::transmute(query) };
unsafe { gl.object_label(glow::QUERY, name, Some(&temp_string)) };
}
}
queries.push(query);
}

View File

@@ -190,16 +190,18 @@ bitflags::bitflags! {
const TEXTURE_FLOAT_LINEAR = 1 << 10;
/// Supports query buffer objects.
const QUERY_BUFFERS = 1 << 11;
/// Supports 64 bit queries via `glGetQueryObjectui64v`
const QUERY_64BIT = 1 << 12;
/// Supports `glTexStorage2D`, etc.
const TEXTURE_STORAGE = 1 << 12;
const TEXTURE_STORAGE = 1 << 13;
/// Supports `push_debug_group`, `pop_debug_group` and `debug_message_insert`.
const DEBUG_FNS = 1 << 13;
const DEBUG_FNS = 1 << 14;
/// Supports framebuffer invalidation.
const INVALIDATE_FRAMEBUFFER = 1 << 14;
const INVALIDATE_FRAMEBUFFER = 1 << 15;
/// Indicates support for `glDrawElementsInstancedBaseVertexBaseInstance` and `ARB_shader_draw_parameters`
///
/// When this is true, instance offset emulation via vertex buffer rebinding and a shader uniform will be disabled.
const FULLY_FEATURED_INSTANCING = 1 << 15;
const FULLY_FEATURED_INSTANCING = 1 << 16;
}
}
@@ -1040,6 +1042,6 @@ fn gl_debug_message_callback(source: u32, gltype: u32, id: u32, severity: u32, m
if cfg!(debug_assertions) && log_severity == log::Level::Error {
// Set canary and continue
crate::VALIDATION_CANARY.set();
crate::VALIDATION_CANARY.add(message.to_string());
}
}

View File

@@ -940,12 +940,21 @@ impl super::Queue {
{
let mut result: u64 = 0;
unsafe {
let result: *mut u64 = &mut result;
gl.get_query_parameter_u64_with_offset(
query,
glow::QUERY_RESULT,
result as usize,
)
if self
.shared
.private_caps
.contains(PrivateCapabilities::QUERY_64BIT)
{
let result: *mut u64 = &mut result;
gl.get_query_parameter_u64_with_offset(
query,
glow::QUERY_RESULT,
result as usize,
)
} else {
result =
gl.get_query_parameter_u32(query, glow::QUERY_RESULT) as u64;
}
};
temp_query_results.push(result);
}

View File

@@ -90,10 +90,11 @@ use std::{
num::NonZeroU32,
ops::{Range, RangeInclusive},
ptr::NonNull,
sync::{atomic::AtomicBool, Arc},
sync::Arc,
};
use bitflags::bitflags;
use parking_lot::Mutex;
use thiserror::Error;
use wgt::WasmNotSendSync;
@@ -1384,8 +1385,11 @@ pub struct ComputePassDescriptor<'a, A: Api> {
pub timestamp_writes: Option<ComputePassTimestampWrites<'a, A>>,
}
/// Stores if any API validation error has occurred in this process
/// since it was last reset.
/// Stores the text of any validation errors that have occurred since
/// the last call to `get_and_reset`.
///
/// Each value is a validation error and a message associated with it,
/// or `None` if the error has no message from the api.
///
/// This is used for internal wgpu testing only and _must not_ be used
/// as a way to check for errors.
@@ -1396,24 +1400,24 @@ pub struct ComputePassDescriptor<'a, A: Api> {
/// This prevents the issue of one validation error terminating the
/// entire process.
pub static VALIDATION_CANARY: ValidationCanary = ValidationCanary {
inner: AtomicBool::new(false),
inner: Mutex::new(Vec::new()),
};
/// Flag for internal testing.
pub struct ValidationCanary {
inner: AtomicBool,
inner: Mutex<Vec<String>>,
}
impl ValidationCanary {
#[allow(dead_code)] // in some configurations this function is dead
fn set(&self) {
self.inner.store(true, std::sync::atomic::Ordering::SeqCst);
fn add(&self, msg: String) {
self.inner.lock().push(msg);
}
/// Returns true if any API validation error has occurred in this process
/// Returns any API validation errors that hav occurred in this process
/// since the last call to this function.
pub fn get_and_reset(&self) -> bool {
self.inner.swap(false, std::sync::atomic::Ordering::SeqCst)
pub fn get_and_reset(&self) -> Vec<String> {
self.inner.lock().drain(..).collect()
}
}

View File

@@ -146,7 +146,7 @@ unsafe extern "system" fn debug_utils_messenger_callback(
if cfg!(debug_assertions) && level == log::Level::Error {
// Set canary and continue
crate::VALIDATION_CANARY.set();
crate::VALIDATION_CANARY.add(message.to_string());
}
vk::FALSE