//! This module contains common test-only code that needs to be shared between the examples and the tests.
#![allow(dead_code)] // This module is used in a lot of contexts and only parts of it will be used

use std::panic::{catch_unwind, AssertUnwindSafe};

use wgpu::{Adapter, Device, DownlevelFlags, Instance, Queue, Surface};
use wgt::{Backends, DeviceDescriptor, DownlevelCapabilities, Features, Limits};

pub mod image;
mod isolation;

pub use self::image::ComparisonType;
const CANVAS_ID: &str = "test-canvas";

async fn initialize_device(
    adapter: &Adapter,
    features: Features,
    limits: Limits,
) -> (Device, Queue) {
    let bundle = adapter
        .request_device(
            &DeviceDescriptor {
                label: None,
                features,
                limits,
            },
            None,
        )
        .await;

    match bundle {
        Ok(b) => b,
        Err(e) => panic!("Failed to initialize device: {e}"),
    }
}
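/// Resources for a single test invocation: the adapter and device under test,
/// their reported information and capabilities, and the queue.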
pub struct TestingContext {
    pub adapter: Adapter,
    pub adapter_info: wgt::AdapterInfo,
    pub adapter_downlevel_capabilities: wgt::DownlevelCapabilities,
    pub device: Device,
    pub device_features: wgt::Features,
    pub device_limits: wgt::Limits,
    pub queue: Queue,
}

fn lowest_downlevel_properties() -> DownlevelCapabilities {
    DownlevelCapabilities {
        flags: wgt::DownlevelFlags::empty(),
        limits: wgt::DownlevelLimits {},
        shader_model: wgt::ShaderModel::Sm2,
    }
}

/// Conditions under which a test should fail or be skipped.
///
/// By passing a `FailureCase` to [`TestParameters::expect_fail`], you can
/// mark a test as expected to fail under the indicated conditions. By
/// passing it to [`TestParameters::skip`], you can request that the
/// test be skipped altogether.
///
/// If a field is `None`, then that field does not restrict matches. For
/// example:
///
/// ```
/// # use wgpu_test::FailureCase;
/// FailureCase {
///     backends: Some(wgpu::Backends::DX11 | wgpu::Backends::DX12),
///     vendor: None,
///     adapter: Some("RTX"),
///     driver: None,
/// }
/// # ;
/// ```
///
/// This applies to all cards with `"RTX"` in their name on either
/// Direct3D backend, no matter the vendor ID or driver name.
///
/// The strings given here need only appear as a substring in the
/// corresponding [`AdapterInfo`] fields. The comparison is
/// case-insensitive.
///
/// The default value of `FailureCase` applies to any test case. That
/// is, there are no criteria to constrain the match.
///
/// [`AdapterInfo`]: wgt::AdapterInfo
#[derive(Default)]
pub struct FailureCase {
    /// Backends expected to fail, or `None` for any backend.
    ///
    /// If this is `None`, or if the test is using one of the backends
    /// in `backends`, then this `FailureCase` applies.
    pub backends: Option<wgpu::Backends>,

    /// Vendor expected to fail, or `None` for any vendor.
    ///
    /// If `Some`, this must match [`AdapterInfo::vendor`], which is
    /// usually the PCI vendor id. Otherwise, this `FailureCase`
    /// applies regardless of vendor.
    ///
    /// [`AdapterInfo::vendor`]: wgt::AdapterInfo::vendor
    pub vendor: Option<u32>,

    /// Name of adapter expected to fail, or `None` for any adapter name.
    ///
    /// If this is `Some(s)` and `s` is a substring of
    /// [`AdapterInfo::name`], then this `FailureCase` applies. If
    /// this is `None`, the adapter name isn't considered.
    ///
    /// [`AdapterInfo::name`]: wgt::AdapterInfo::name
    pub adapter: Option<&'static str>,

    /// Name of driver expected to fail, or `None` for any driver name.
    ///
    /// If this is `Some(s)` and `s` is a substring of
    /// [`AdapterInfo::driver`], then this `FailureCase` applies. If
    /// this is `None`, the driver name isn't considered.
    ///
    /// [`AdapterInfo::driver`]: wgt::AdapterInfo::driver
    pub driver: Option<&'static str>,
}

impl FailureCase {
    /// This case applies to all tests.
    pub fn always() -> Self {
        FailureCase::default()
    }

    /// This case applies to no tests.
    pub fn never() -> Self {
        FailureCase {
            backends: Some(wgpu::Backends::empty()),
            ..FailureCase::default()
        }
    }

    /// Tests running on any of the given backends.
    pub fn backend(backends: wgpu::Backends) -> Self {
        FailureCase {
            backends: Some(backends),
            ..FailureCase::default()
        }
    }

    /// Tests running on `adapter`.
    ///
    /// For this case to apply, the `adapter` string must appear as a substring
    /// of the adapter's [`AdapterInfo::name`]. The comparison is
    /// case-insensitive.
    ///
    /// [`AdapterInfo::name`]: wgt::AdapterInfo::name
    pub fn adapter(adapter: &'static str) -> Self {
        FailureCase {
            adapter: Some(adapter),
            ..FailureCase::default()
        }
    }

    /// Tests running on `backend` and `adapter`.
    ///
    /// For this case to apply, the test must be using an adapter for one of the
    /// given `backend` bits, and the `adapter` string must appear as a substring of
    /// the adapter's [`AdapterInfo::name`]. The string comparison is
    /// case-insensitive.
    ///
    /// [`AdapterInfo::name`]: wgt::AdapterInfo::name
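    ///
    /// For example, a hypothetical case matching ANGLE-based GL adapters
    /// (the adapter name here is purely illustrative):
    ///
    /// ```
    /// # use wgpu_test::FailureCase;
    /// let case = FailureCase::backend_adapter(wgpu::Backends::GL, "ANGLE");
    /// assert_eq!(case.adapter, Some("ANGLE"));
    /// ```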
    pub fn backend_adapter(backends: wgpu::Backends, adapter: &'static str) -> Self {
        FailureCase {
            backends: Some(backends),
            adapter: Some(adapter),
            ..FailureCase::default()
        }
    }

    /// Tests running under WebGL.
    ///
    /// Because of wasm's limited ability to recover from errors, we
    /// usually need to skip the test altogether if it's not
    /// supported, so this should usually be used with
    /// [`TestParameters::skip`].
    pub fn webgl2() -> Self {
        #[cfg(target_arch = "wasm32")]
        let case = FailureCase::backend(wgpu::Backends::GL);
        #[cfg(not(target_arch = "wasm32"))]
        let case = FailureCase::never();
        case
    }

    /// Tests running on the MoltenVK Vulkan driver on macOS.
    pub fn molten_vk() -> Self {
        FailureCase {
            backends: Some(wgpu::Backends::VULKAN),
            driver: Some("MoltenVK"),
            ..FailureCase::default()
        }
    }

    /// Test whether `self` applies to `info`.
    ///
    /// If it does, return a `FailureReasons` whose set bits indicate
    /// why. If it doesn't, return `None`.
    ///
    /// The caller is responsible for converting the string-valued
    /// fields of `info` to lower case, to ensure case-insensitive
    /// matching.
    fn applies_to(&self, info: &wgt::AdapterInfo) -> Option<FailureReasons> {
        let mut reasons = FailureReasons::empty();

        if let Some(backends) = self.backends {
            if !backends.contains(wgpu::Backends::from(info.backend)) {
                return None;
            }
            reasons.set(FailureReasons::BACKEND, true);
        }
        if let Some(vendor) = self.vendor {
            if vendor != info.vendor {
                return None;
            }
            reasons.set(FailureReasons::VENDOR, true);
        }
        if let Some(adapter) = self.adapter {
            let adapter = adapter.to_lowercase();
            if !info.name.contains(&adapter) {
                return None;
            }
            reasons.set(FailureReasons::ADAPTER, true);
        }
        if let Some(driver) = self.driver {
            let driver = driver.to_lowercase();
            if !info.driver.contains(&driver) {
                return None;
            }
            reasons.set(FailureReasons::DRIVER, true);
        }

        // If we got this far but no specific reasons were triggered, then this
        // must be a wildcard.
        if reasons.is_empty() {
            Some(FailureReasons::ALWAYS)
        } else {
            Some(reasons)
        }
    }
}

/// This information determines if a test should run, and how its outcome
/// should be interpreted.
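///
/// A hypothetical configuration built with the builder methods below (the
/// feature, backend, and skip choices are illustrative only):
///
/// ```
/// # use wgpu_test::{FailureCase, TestParameters};
/// let parameters = TestParameters::default()
///     .test_features_limits()
///     .features(wgpu::Features::TIMESTAMP_QUERY)
///     .expect_fail(FailureCase::backend(wgpu::Backends::GL))
///     .skip(FailureCase::webgl2());
/// ```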
pub struct TestParameters {
    pub required_features: Features,
    pub required_downlevel_properties: DownlevelCapabilities,
    pub required_limits: Limits,

    /// Conditions under which this test should be skipped.
    pub skips: Vec<FailureCase>,

    /// Conditions under which this test should be run, but is expected to fail.
    pub failures: Vec<FailureCase>,
}

impl Default for TestParameters {
    fn default() -> Self {
        Self {
            required_features: Features::empty(),
            required_downlevel_properties: lowest_downlevel_properties(),
            required_limits: Limits::downlevel_webgl2_defaults(),
            skips: Vec::new(),
            failures: Vec::new(),
        }
    }
}

bitflags::bitflags! {
    #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
    pub struct FailureReasons: u8 {
        const BACKEND = 1 << 0;
        const VENDOR = 1 << 1;
        const ADAPTER = 1 << 2;
        const DRIVER = 1 << 3;
        const ALWAYS = 1 << 4;
    }
}

// Builder pattern to make it easier to construct a `TestParameters`.
impl TestParameters {
    /// Set of common features that most internal tests require for readback.
    pub fn test_features_limits(self) -> Self {
        self.features(Features::MAPPABLE_PRIMARY_BUFFERS | Features::VERTEX_WRITABLE_STORAGE)
            .limits(wgpu::Limits::downlevel_defaults())
    }

    /// Set the list of features this test requires.
    pub fn features(mut self, features: Features) -> Self {
        self.required_features |= features;
        self
    }

    pub fn downlevel_flags(mut self, downlevel_flags: DownlevelFlags) -> Self {
        self.required_downlevel_properties.flags |= downlevel_flags;
        self
    }

    /// Set the limits needed for the test.
    pub fn limits(mut self, limits: Limits) -> Self {
        self.required_limits = limits;
        self
    }

    /// Mark the test as expected to fail under the conditions in `when`, but
    /// still run it.
    pub fn expect_fail(mut self, when: FailureCase) -> Self {
        self.failures.push(when);
        self
    }

    /// Mark the test as skipped entirely under the conditions in `when`.
    pub fn skip(mut self, when: FailureCase) -> Self {
        self.skips.push(when);
        self
    }
}
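/// Initialize the test environment and run `test_function` with a ready
/// [`TestingContext`].
///
/// The test is skipped (by returning early) if the adapter does not satisfy
/// `parameters`, and the outcome is compared against the expected failures
/// recorded in `parameters`.
///
/// A minimal sketch of a caller (the requested limits and the test body are
/// illustrative assumptions, not a prescribed pattern):
///
/// ```no_run
/// use wgpu_test::{initialize_test, TestParameters};
///
/// initialize_test(TestParameters::default().test_features_limits(), |ctx| {
///     // The device, queue, and adapter info are all available on `ctx` here.
///     let _encoder = ctx
///         .device
///         .create_command_encoder(&wgpu::CommandEncoderDescriptor::default());
/// });
/// ```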
pub fn initialize_test(parameters: TestParameters, test_function: impl FnOnce(TestingContext)) {
    // We don't actually care if logger initialization fails, so ignore the result.
    #[cfg(not(target_arch = "wasm32"))]
    let _ = env_logger::try_init();
    #[cfg(target_arch = "wasm32")]
    let _ = console_log::init_with_level(log::Level::Info);

    let _test_guard = isolation::OneTestPerProcessGuard::new();

    let (adapter, _surface_guard) = initialize_adapter();

    let adapter_info = adapter.get_info();

    // Produce a lower-case version of the adapter info, for comparison against
    // `parameters.skips` and `parameters.failures`.
    let adapter_lowercase_info = wgt::AdapterInfo {
        name: adapter_info.name.to_lowercase(),
        driver: adapter_info.driver.to_lowercase(),
        ..adapter_info.clone()
    };

    let adapter_features = adapter.features();
    let adapter_limits = adapter.limits();
    let adapter_downlevel_capabilities = adapter.get_downlevel_capabilities();

    let missing_features = parameters.required_features - adapter_features;
    if !missing_features.is_empty() {
        log::info!("TEST SKIPPED: MISSING FEATURES {:?}", missing_features);
        return;
    }

    if !parameters.required_limits.check_limits(&adapter_limits) {
        log::info!("TEST SKIPPED: LIMIT TOO LOW");
        return;
    }

    let missing_downlevel_flags =
        parameters.required_downlevel_properties.flags - adapter_downlevel_capabilities.flags;
    if !missing_downlevel_flags.is_empty() {
        log::info!(
            "TEST SKIPPED: MISSING DOWNLEVEL FLAGS {:?}",
            missing_downlevel_flags
        );
        return;
    }

    if adapter_downlevel_capabilities.shader_model
        < parameters.required_downlevel_properties.shader_model
    {
        log::info!(
            "TEST SKIPPED: LOW SHADER MODEL {:?}",
            adapter_downlevel_capabilities.shader_model
        );
        return;
    }

    let (device, queue) = pollster::block_on(initialize_device(
        &adapter,
        parameters.required_features,
        parameters.required_limits.clone(),
    ));

    let context = TestingContext {
        adapter,
        adapter_info,
        adapter_downlevel_capabilities,
        device,
        device_features: parameters.required_features,
        device_limits: parameters.required_limits,
        queue,
    };

    // Check if we should skip the test altogether.
    if let Some(skip_reason) = parameters
        .skips
        .iter()
        .find_map(|case| case.applies_to(&adapter_lowercase_info))
    {
        log::info!("EXPECTED TEST FAILURE SKIPPED: {:?}", skip_reason);
        return;
    }

    // Determine if we expect this test to fail, and if so, why.
    let expected_failure_reason = parameters
        .failures
        .iter()
        .find_map(|case| case.applies_to(&adapter_lowercase_info));

    // Run the test, and catch panics (possibly due to failed assertions).
    let panicked = catch_unwind(AssertUnwindSafe(|| test_function(context))).is_err();

    // Check whether any validation errors were reported during the test run.
    cfg_if::cfg_if!(
        if #[cfg(any(not(target_arch = "wasm32"), target_os = "emscripten"))] {
            let canary_set = wgpu::hal::VALIDATION_CANARY.get_and_reset();
        } else {
            let canary_set = _surface_guard.unwrap().check_for_unreported_errors();
        }
    );

    // Summarize reasons for actual failure, if any.
    let failure_cause = match (panicked, canary_set) {
        (true, true) => Some("PANIC AND VALIDATION ERROR"),
        (true, false) => Some("PANIC"),
        (false, true) => Some("VALIDATION ERROR"),
        (false, false) => None,
    };

    // Compare actual results against expectations.
    match (failure_cause, expected_failure_reason) {
        // The test passed, as expected.
        (None, None) => {}
        // The test failed unexpectedly.
        (Some(cause), None) => {
            panic!("UNEXPECTED TEST FAILURE DUE TO {cause}")
        }
        // The test passed unexpectedly.
        (None, Some(reason)) => {
            panic!("UNEXPECTED TEST PASS: {reason:?}");
        }
        // The test failed, as expected.
        (Some(cause), Some(reason_expected)) => {
            log::info!(
                "EXPECTED FAILURE DUE TO {} (expected because of {:?})",
                cause,
                reason_expected
            );
        }
    }
}

pub fn initialize_adapter() -> (Adapter, Option<SurfaceGuard>) {
    let instance = initialize_instance();
    let surface_guard: Option<SurfaceGuard>;
    let compatible_surface;

    // Create a canvas iff we need a WebGL2RenderingContext to have a working device.
    #[cfg(not(all(
        target_arch = "wasm32",
        any(target_os = "emscripten", feature = "webgl")
    )))]
    {
        surface_guard = None;
        compatible_surface = None;
    }
    #[cfg(all(
        target_arch = "wasm32",
        any(target_os = "emscripten", feature = "webgl")
    ))]
    {
        // On wasm, append a canvas to the document body for initializing the adapter
        let canvas = create_html_canvas();

        // We use raw_window_handle here, as create_surface_from_canvas is not implemented on emscripten.
        struct WindowHandle;
        unsafe impl raw_window_handle::HasRawWindowHandle for WindowHandle {
            fn raw_window_handle(&self) -> raw_window_handle::RawWindowHandle {
                raw_window_handle::RawWindowHandle::Web({
                    let mut handle = raw_window_handle::WebWindowHandle::empty();
                    handle.id = 1;
                    handle
                })
            }
        }
        unsafe impl raw_window_handle::HasRawDisplayHandle for WindowHandle {
            fn raw_display_handle(&self) -> raw_window_handle::RawDisplayHandle {
                raw_window_handle::RawDisplayHandle::Web(
                    raw_window_handle::WebDisplayHandle::empty(),
                )
            }
        }

        let surface = unsafe {
            instance
                .create_surface(&WindowHandle)
                .expect("could not create surface from canvas")
        };

        surface_guard = Some(SurfaceGuard { canvas });

        compatible_surface = Some(surface);
    }

    let compatible_surface: Option<&Surface> = compatible_surface.as_ref();
    let adapter = pollster::block_on(wgpu::util::initialize_adapter_from_env_or_default(
        &instance,
        compatible_surface,
    ))
    .expect("could not find suitable adapter on the system");

    (adapter, surface_guard)
}

pub fn initialize_instance() -> Instance {
    let backends = wgpu::util::backend_bits_from_env().unwrap_or_else(Backends::all);
    let dx12_shader_compiler = wgpu::util::dx12_shader_compiler_from_env().unwrap_or_default();
    let gles_minor_version = wgpu::util::gles_minor_version_from_env().unwrap_or_default();
    Instance::new(wgpu::InstanceDescriptor {
        backends,
        flags: wgpu::InstanceFlags::debugging().with_env(),
        dx12_shader_compiler,
        gles_minor_version,
    })
}

// Public because it is used by tests that interact with the canvas.
pub struct SurfaceGuard {
    #[cfg(target_arch = "wasm32")]
    pub canvas: web_sys::HtmlCanvasElement,
}

impl SurfaceGuard {
    fn check_for_unreported_errors(&self) -> bool {
        cfg_if::cfg_if! {
            if #[cfg(all(target_arch = "wasm32", any(target_os = "emscripten", feature = "webgl")))] {
                use wasm_bindgen::JsCast;

                self.canvas
                    .get_context("webgl2")
                    .unwrap()
                    .unwrap()
                    .dyn_into::<web_sys::WebGl2RenderingContext>()
                    .unwrap()
                    .get_error()
                    != web_sys::WebGl2RenderingContext::NO_ERROR
            } else {
                false
            }
        }
    }
}

#[cfg(all(
    target_arch = "wasm32",
    any(target_os = "emscripten", feature = "webgl")
))]
impl Drop for SurfaceGuard {
    fn drop(&mut self) {
        delete_html_canvas();
    }
}

#[cfg(target_arch = "wasm32")]
pub fn create_html_canvas() -> web_sys::HtmlCanvasElement {
    use wasm_bindgen::JsCast;

    web_sys::window()
        .and_then(|win| win.document())
        .and_then(|doc| {
            let body = doc.body().unwrap();
            let canvas = doc.create_element("Canvas").unwrap();
            canvas.set_attribute("data-raw-handle", "1").unwrap();
            canvas.set_id(CANVAS_ID);
            body.append_child(&canvas).unwrap();
            canvas.dyn_into::<web_sys::HtmlCanvasElement>().ok()
        })
        .expect("couldn't append canvas to document body")
}

#[cfg(all(
    target_arch = "wasm32",
    any(target_os = "emscripten", feature = "webgl")
))]
fn delete_html_canvas() {
    if let Some(document) = web_sys::window().and_then(|win| win.document()) {
        if let Some(element) = document.get_element_by_id(CANVAS_ID) {
            element.remove();
        }
    };
}

/// Run some code in an error scope and assert that validation fails.
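///
/// A hypothetical use (the zero-size, `MAP_READ | MAP_WRITE` buffer is just an
/// assumed validation error for illustration):
///
/// ```no_run
/// # use wgpu_test::fail;
/// # fn example(device: &wgpu::Device) {
/// fail(device, || {
///     device.create_buffer(&wgpu::BufferDescriptor {
///         label: None,
///         size: 0,
///         usage: wgpu::BufferUsages::MAP_READ | wgpu::BufferUsages::MAP_WRITE,
///         mapped_at_creation: false,
///     })
/// });
/// # }
/// ```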
pub fn fail<T>(device: &wgpu::Device, callback: impl FnOnce() -> T) -> T {
    device.push_error_scope(wgpu::ErrorFilter::Validation);
    let result = callback();
    assert!(pollster::block_on(device.pop_error_scope()).is_some());

    result
}

/// Run some code in an error scope and assert that validation succeeds.
pub fn valid<T>(device: &wgpu::Device, callback: impl FnOnce() -> T) -> T {
    device.push_error_scope(wgpu::ErrorFilter::Validation);
    let result = callback();
    assert!(pollster::block_on(device.pop_error_scope()).is_none());

    result
}

/// Run some code in an error scope and assert that validation succeeds or fails
/// depending on the provided `should_fail` boolean.
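///
/// A sketch of a conditional check (the `format_unsupported` flag stands in for
/// whatever condition the test actually cares about):
///
/// ```no_run
/// # use wgpu_test::fail_if;
/// # fn example(device: &wgpu::Device, texture: &wgpu::Texture, format_unsupported: bool) {
/// fail_if(device, format_unsupported, || {
///     texture.create_view(&wgpu::TextureViewDescriptor::default())
/// });
/// # }
/// ```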
pub fn fail_if<T>(device: &wgpu::Device, should_fail: bool, callback: impl FnOnce() -> T) -> T {
    if should_fail {
        fail(device, callback)
    } else {
        valid(device, callback)
    }
}