Fix wgpu-info's use of DXC (#7184)

commit 48e2298ed9 (parent 9c0fa2975f)
Author: Connor Fitzgerald, committed by GitHub
Date: 2025-02-20 12:17:50 -05:00


@@ -3,7 +3,8 @@ use std::io;
 use hashbrown::HashMap;
 use serde::{Deserialize, Serialize};
 use wgpu::{
-    AdapterInfo, DownlevelCapabilities, Features, Limits, TextureFormat, TextureFormatFeatures,
+    AdapterInfo, DownlevelCapabilities, Dx12Compiler, Features, Limits, TextureFormat,
+    TextureFormatFeatures,
 };
 
 use crate::texture;
@@ -19,9 +20,10 @@ pub struct GpuReport {
 impl GpuReport {
     pub fn generate() -> Self {
         let instance = wgpu::Instance::new(&{
-            let mut desc = wgpu::InstanceDescriptor::from_env_or_default();
-            desc.flags = wgpu::InstanceFlags::debugging().with_env();
-            desc
+            let mut desc = wgpu::InstanceDescriptor::default();
+            desc.backend_options.dx12.shader_compiler = Dx12Compiler::StaticDxc;
+            desc.flags = wgpu::InstanceFlags::debugging();
+            desc.with_env()
         });
         let adapters = instance.enumerate_adapters(wgpu::Backends::all());
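
The practical effect of the hunk above: the old code seeded the descriptor from the environment (`from_env_or_default()`), so the DX12 shader compiler ended up being whatever the environment or the built-in default selected; the new code starts from `InstanceDescriptor::default()`, explicitly selects the statically linked DXC (`Dx12Compiler::StaticDxc`), and only then applies environment overrides with `with_env()`. A minimal standalone sketch of that pattern, using only the calls visible in the diff (the `make_instance` wrapper is illustrative, not part of the commit):

    // Hypothetical helper, not part of this commit: build an Instance the
    // way wgpu-info now does.
    fn make_instance() -> wgpu::Instance {
        let mut desc = wgpu::InstanceDescriptor::default();
        // Prefer the statically linked DXC shader compiler on the DX12 backend.
        desc.backend_options.dx12.shader_compiler = wgpu::Dx12Compiler::StaticDxc;
        // Enable debugging-oriented instance flags.
        desc.flags = wgpu::InstanceFlags::debugging();
        // Apply environment-variable overrides on top of the explicit settings.
        wgpu::Instance::new(&desc.with_env())
    }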