invalidate the device when we encounter driver-induced device loss or unexpected errors

teoxoy 2024-09-06 15:51:26 +02:00 committed by Teodor Tanasoaia
parent ce6a46ee0c
commit eb47449eb9
14 changed files with 176 additions and 186 deletions
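The pattern this diff applies everywhere below: instead of converting `hal::DeviceError` into `DeviceError` through a blanket `From` impl, call sites now go through `Device::handle_hal_error`, which additionally marks the device as lost for anything other than plain out-of-memory. Here is a minimal, self-contained sketch of that flow; `HalError`, `DeviceError`, and `Device` are simplified stand-ins for the real wgpu-core types, and the `valid` flag stands in for the device's internal validity state.

use std::sync::atomic::{AtomicBool, Ordering};

// Stand-in for hal::DeviceError.
#[allow(dead_code)]
enum HalError {
    Lost,
    OutOfMemory,
    ResourceCreationFailed,
    Unexpected,
}

// Stand-in for wgpu-core's DeviceError.
#[derive(Debug)]
enum DeviceError {
    Lost,
    OutOfMemory,
    ResourceCreationFailed,
}

struct Device {
    valid: AtomicBool,
}

impl Device {
    // Classify a hal error: everything except plain OOM also invalidates the device.
    fn handle_hal_error(&self, error: HalError) -> DeviceError {
        match error {
            HalError::OutOfMemory => {}
            HalError::Lost | HalError::ResourceCreationFailed | HalError::Unexpected => {
                self.lose("hal error");
            }
        }
        DeviceError::from_hal(error)
    }

    // Mark the device invalid; validity checks elsewhere then fail.
    fn lose(&self, _message: &str) {
        self.valid.store(false, Ordering::Release);
    }
}

impl DeviceError {
    // Plain conversion, for contexts where no Device exists yet.
    fn from_hal(error: HalError) -> Self {
        match error {
            HalError::Lost | HalError::Unexpected => Self::Lost,
            HalError::OutOfMemory => Self::OutOfMemory,
            HalError::ResourceCreationFailed => Self::ResourceCreationFailed,
        }
    }
}

fn main() {
    let device = Device { valid: AtomicBool::new(true) };
    // Call-site pattern from the diff: map hal errors through the device
    // instead of relying on a bare From impl.
    let result: Result<(), DeviceError> =
        Err(HalError::Unexpected).map_err(|e| device.handle_hal_error(e));
    assert!(matches!(result, Err(DeviceError::Lost)));
    assert!(!device.valid.load(Ordering::Acquire));
}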

View File

@ -163,7 +163,7 @@ impl Global {
// actual hal barrier & operation
let dst_barrier = dst_pending.map(|pending| pending.into_hal(&dst_buffer, &snatch_guard));
let cmd_buf_raw = cmd_buf_data.encoder.open()?;
let cmd_buf_raw = cmd_buf_data.encoder.open(&cmd_buf.device)?;
unsafe {
cmd_buf_raw.transition_buffers(dst_barrier.as_slice());
cmd_buf_raw.clear_buffer(dst_raw, offset..end_offset);
@ -249,7 +249,7 @@ impl Global {
let device = &cmd_buf.device;
device.check_is_valid()?;
let (encoder, tracker) = cmd_buf_data.open_encoder_and_tracker()?;
let (encoder, tracker) = cmd_buf_data.open_encoder_and_tracker(&cmd_buf.device)?;
let snatch_guard = device.snatchable_lock.read();
clear_texture(

View File

@ -433,10 +433,10 @@ impl Global {
// We automatically keep extending command buffers over time, and because
// we want to insert a command buffer _before_ what we're about to record,
// we need to make sure to close the previous one.
encoder.close().map_pass_err(pass_scope)?;
encoder.close(&cmd_buf.device).map_pass_err(pass_scope)?;
// will be reset to true if recording is done without errors
*status = CommandEncoderStatus::Error;
let raw_encoder = encoder.open().map_pass_err(pass_scope)?;
let raw_encoder = encoder.open(&cmd_buf.device).map_pass_err(pass_scope)?;
let mut state = State {
binder: Binder::new(),
@ -617,12 +617,12 @@ impl Global {
} = state;
// Stop the current command buffer.
encoder.close().map_pass_err(pass_scope)?;
encoder.close(&cmd_buf.device).map_pass_err(pass_scope)?;
// Create a new command buffer, which we will insert _before_ the body of the compute pass.
//
// Use that buffer to insert barriers and clear discarded images.
let transit = encoder.open().map_pass_err(pass_scope)?;
let transit = encoder.open(&cmd_buf.device).map_pass_err(pass_scope)?;
fixup_discarded_surfaces(
pending_discard_init_fixups.into_iter(),
transit,
@ -637,7 +637,9 @@ impl Global {
&snatch_guard,
);
// Close the command buffer, and swap it with the previous.
encoder.close_and_swap().map_pass_err(pass_scope)?;
encoder
.close_and_swap(&cmd_buf.device)
.map_pass_err(pass_scope)?;
Ok(())
}

View File

@ -172,10 +172,10 @@ impl CommandEncoder {
/// [l]: CommandEncoder::list
/// [`transition_buffers`]: hal::CommandEncoder::transition_buffers
/// [`transition_textures`]: hal::CommandEncoder::transition_textures
fn close_and_swap(&mut self) -> Result<(), DeviceError> {
fn close_and_swap(&mut self, device: &Device) -> Result<(), DeviceError> {
if self.is_open {
self.is_open = false;
let new = unsafe { self.raw.end_encoding()? };
let new = unsafe { self.raw.end_encoding() }.map_err(|e| device.handle_hal_error(e))?;
self.list.insert(self.list.len() - 1, new);
}
@ -192,10 +192,11 @@ impl CommandEncoder {
/// On return, the underlying hal encoder is closed.
///
/// [l]: CommandEncoder::list
fn close(&mut self) -> Result<(), DeviceError> {
fn close(&mut self, device: &Device) -> Result<(), DeviceError> {
if self.is_open {
self.is_open = false;
let cmd_buf = unsafe { self.raw.end_encoding()? };
let cmd_buf =
unsafe { self.raw.end_encoding() }.map_err(|e| device.handle_hal_error(e))?;
self.list.push(cmd_buf);
}
@ -215,11 +216,15 @@ impl CommandEncoder {
/// Begin recording a new command buffer, if we haven't already.
///
/// The underlying hal encoder is put in the "recording" state.
pub(crate) fn open(&mut self) -> Result<&mut dyn hal::DynCommandEncoder, DeviceError> {
pub(crate) fn open(
&mut self,
device: &Device,
) -> Result<&mut dyn hal::DynCommandEncoder, DeviceError> {
if !self.is_open {
self.is_open = true;
let hal_label = self.hal_label.as_deref();
unsafe { self.raw.begin_encoding(hal_label)? };
unsafe { self.raw.begin_encoding(hal_label) }
.map_err(|e| device.handle_hal_error(e))?;
}
Ok(self.raw.as_mut())
@ -229,9 +234,9 @@ impl CommandEncoder {
/// its own label.
///
/// The underlying hal encoder is put in the "recording" state.
fn open_pass(&mut self, hal_label: Option<&str>) -> Result<(), DeviceError> {
fn open_pass(&mut self, hal_label: Option<&str>, device: &Device) -> Result<(), DeviceError> {
self.is_open = true;
unsafe { self.raw.begin_encoding(hal_label)? };
unsafe { self.raw.begin_encoding(hal_label) }.map_err(|e| device.handle_hal_error(e))?;
Ok(())
}
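Since the blanket conversion is gone, the encoder helpers above (`close_and_swap`, `close`, `open`, `open_pass`) now take a `&Device` and route `begin_encoding`/`end_encoding` failures through `handle_hal_error`. A rough sketch of that shape with stand-in types, not the real wgpu-core ones:

struct HalError;
struct DeviceError;
struct Device;

impl Device {
    fn handle_hal_error(&self, _error: HalError) -> DeviceError {
        // Invalidate-and-convert, as defined later in this diff.
        DeviceError
    }
}

struct CommandEncoder {
    is_open: bool,
}

impl CommandEncoder {
    // Stand-in for hal::CommandEncoder::begin_encoding; pretend the driver failed.
    fn begin_encoding(&mut self) -> Result<(), HalError> {
        Err(HalError)
    }

    // Before this commit: `fn open(&mut self) -> Result<..., DeviceError>` used `?`
    // with `From<hal::DeviceError>`. Now the device is threaded in explicitly.
    fn open(&mut self, device: &Device) -> Result<(), DeviceError> {
        if !self.is_open {
            self.is_open = true;
            self.begin_encoding().map_err(|e| device.handle_hal_error(e))?;
        }
        Ok(())
    }
}

fn main() {
    let mut encoder = CommandEncoder { is_open: false };
    let device = Device;
    assert!(encoder.open(&device).is_err());
}

Passing `&Device` as a parameter leaves the `CommandEncoder` struct itself unchanged apart from the signatures; every call site in this diff already has `cmd_buf.device` or an equivalent device reference in scope.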
@ -276,8 +281,9 @@ pub struct CommandBufferMutable {
impl CommandBufferMutable {
pub(crate) fn open_encoder_and_tracker(
&mut self,
device: &Device,
) -> Result<(&mut dyn hal::DynCommandEncoder, &mut Tracker), DeviceError> {
let encoder = self.encoder.open()?;
let encoder = self.encoder.open(device)?;
let tracker = &mut self.trackers;
Ok((encoder, tracker))
@ -621,7 +627,7 @@ impl Global {
let cmd_buf_data = cmd_buf_data.as_mut().unwrap();
match cmd_buf_data.status {
CommandEncoderStatus::Recording => {
if let Err(e) = cmd_buf_data.encoder.close() {
if let Err(e) = cmd_buf_data.encoder.close(&cmd_buf.device) {
Some(e.into())
} else {
cmd_buf_data.status = CommandEncoderStatus::Finished;
@ -671,7 +677,7 @@ impl Global {
list.push(TraceCommand::PushDebugGroup(label.to_string()));
}
let cmd_buf_raw = cmd_buf_data.encoder.open()?;
let cmd_buf_raw = cmd_buf_data.encoder.open(&cmd_buf.device)?;
if !self
.instance
.flags
@ -713,7 +719,7 @@ impl Global {
.flags
.contains(wgt::InstanceFlags::DISCARD_HAL_LABELS)
{
let cmd_buf_raw = cmd_buf_data.encoder.open()?;
let cmd_buf_raw = cmd_buf_data.encoder.open(&cmd_buf.device)?;
unsafe {
cmd_buf_raw.insert_debug_marker(label);
}
@ -744,7 +750,7 @@ impl Global {
list.push(TraceCommand::PopDebugGroup);
}
let cmd_buf_raw = cmd_buf_data.encoder.open()?;
let cmd_buf_raw = cmd_buf_data.encoder.open(&cmd_buf.device)?;
if !self
.instance
.flags

View File

@ -346,7 +346,7 @@ impl Global {
let encoder = &mut cmd_buf_data.encoder;
let tracker = &mut cmd_buf_data.trackers;
let raw_encoder = encoder.open()?;
let raw_encoder = encoder.open(&cmd_buf.device)?;
let query_set = hub
.query_sets
@ -397,7 +397,7 @@ impl Global {
let encoder = &mut cmd_buf_data.encoder;
let tracker = &mut cmd_buf_data.trackers;
let buffer_memory_init_actions = &mut cmd_buf_data.buffer_memory_init_actions;
let raw_encoder = encoder.open()?;
let raw_encoder = encoder.open(&cmd_buf.device)?;
if destination_offset % wgt::QUERY_RESOLVE_BUFFER_ALIGNMENT != 0 {
return Err(QueryError::Resolve(ResolveError::BufferOffsetAlignment));

View File

@ -1588,10 +1588,12 @@ impl Global {
// We automatically keep extending command buffers over time, and because
// we want to insert a command buffer _before_ what we're about to record,
// we need to make sure to close the previous one.
encoder.close().map_pass_err(pass_scope)?;
encoder.close(&cmd_buf.device).map_pass_err(pass_scope)?;
// We will reset this to `Recording` if we succeed, acts as a fail-safe.
*status = CommandEncoderStatus::Error;
encoder.open_pass(hal_label).map_pass_err(pass_scope)?;
encoder
.open_pass(hal_label, &cmd_buf.device)
.map_pass_err(pass_scope)?;
let info = RenderPassInfo::start(
device,
@ -1894,7 +1896,7 @@ impl Global {
.finish(state.raw_encoder, state.snatch_guard)
.map_pass_err(pass_scope)?;
encoder.close().map_pass_err(pass_scope)?;
encoder.close(&cmd_buf.device).map_pass_err(pass_scope)?;
(trackers, pending_discard_init_fixups)
};
@ -1906,7 +1908,7 @@ impl Global {
let tracker = &mut cmd_buf_data.trackers;
{
let transit = encoder.open().map_pass_err(pass_scope)?;
let transit = encoder.open(&cmd_buf.device).map_pass_err(pass_scope)?;
fixup_discarded_surfaces(
pending_discard_init_fixups.into_iter(),
@ -1922,7 +1924,9 @@ impl Global {
}
*status = CommandEncoderStatus::Recording;
encoder.close_and_swap().map_pass_err(pass_scope)?;
encoder
.close_and_swap(&cmd_buf.device)
.map_pass_err(pass_scope)?;
Ok(())
}

View File

@ -432,7 +432,7 @@ fn handle_texture_init(
// In rare cases we may need to insert an init operation immediately onto the command buffer.
if !immediate_inits.is_empty() {
let cmd_buf_raw = encoder.open()?;
let cmd_buf_raw = encoder.open(device)?;
for init in immediate_inits {
clear_texture(
&init.texture,
@ -684,7 +684,7 @@ impl Global {
dst_offset: destination_offset,
size: wgt::BufferSize::new(size).unwrap(),
};
let cmd_buf_raw = cmd_buf_data.encoder.open()?;
let cmd_buf_raw = cmd_buf_data.encoder.open(&cmd_buf.device)?;
let barriers = src_barrier
.into_iter()
.chain(dst_barrier)
@ -855,7 +855,7 @@ impl Global {
})
.collect::<Vec<_>>();
let cmd_buf_raw = encoder.open()?;
let cmd_buf_raw = encoder.open(&cmd_buf.device)?;
unsafe {
cmd_buf_raw.transition_textures(&dst_barrier);
cmd_buf_raw.transition_buffers(src_barrier.as_slice());
@ -1030,7 +1030,7 @@ impl Global {
}
})
.collect::<Vec<_>>();
let cmd_buf_raw = encoder.open()?;
let cmd_buf_raw = encoder.open(&cmd_buf.device)?;
unsafe {
cmd_buf_raw.transition_buffers(dst_barrier.as_slice());
cmd_buf_raw.transition_textures(&src_barrier);
@ -1209,7 +1209,7 @@ impl Global {
}
})
.collect::<Vec<_>>();
let cmd_buf_raw = cmd_buf_data.encoder.open()?;
let cmd_buf_raw = cmd_buf_data.encoder.open(&cmd_buf.device)?;
unsafe {
cmd_buf_raw.transition_textures(&barriers);
cmd_buf_raw.copy_texture_to_texture(

View File

@ -270,21 +270,27 @@ impl Global {
let snatch_guard = device.snatchable_lock.read();
let raw_buf = buffer.try_raw(&snatch_guard)?;
unsafe {
let mapping = device
let mapping = unsafe {
device
.raw()
.map_buffer(raw_buf, offset..offset + data.len() as u64)
.map_err(DeviceError::from)?;
std::ptr::copy_nonoverlapping(data.as_ptr(), mapping.ptr.as_ptr(), data.len());
if !mapping.is_coherent {
#[allow(clippy::single_range_in_vec_init)]
}
.map_err(|e| device.handle_hal_error(e))?;
unsafe { std::ptr::copy_nonoverlapping(data.as_ptr(), mapping.ptr.as_ptr(), data.len()) };
if !mapping.is_coherent {
#[allow(clippy::single_range_in_vec_init)]
unsafe {
device
.raw()
.flush_mapped_ranges(raw_buf, &[offset..offset + data.len() as u64]);
}
device.raw().unmap_buffer(raw_buf);
.flush_mapped_ranges(raw_buf, &[offset..offset + data.len() as u64])
};
}
unsafe { device.raw().unmap_buffer(raw_buf) };
Ok(())
}
@ -2006,7 +2012,9 @@ impl Global {
hal::SurfaceError::Outdated | hal::SurfaceError::Lost => {
E::InvalidSurface
}
hal::SurfaceError::Device(error) => E::Device(error.into()),
hal::SurfaceError::Device(error) => {
E::Device(device.handle_hal_error(error))
}
hal::SurfaceError::Other(message) => {
log::error!("surface configuration failed: {}", message);
E::InvalidSurface
@ -2289,16 +2297,6 @@ impl Global {
}
}
pub fn device_mark_lost(&self, device_id: DeviceId, message: &str) {
api_log!("Device::mark_lost {device_id:?}");
let hub = &self.hub;
if let Ok(device) = hub.devices.get(device_id) {
device.lose(message);
}
}
pub fn device_get_internal_counters(&self, device_id: DeviceId) -> wgt::InternalCounters {
let hub = &self.hub;
if let Ok(device) = hub.devices.get(device_id) {

View File

@ -308,7 +308,7 @@ fn map_buffer(
let raw_buffer = buffer.try_raw(snatch_guard)?;
let mapping = unsafe {
raw.map_buffer(raw_buffer, offset..offset + size)
.map_err(DeviceError::from)?
.map_err(|e| buffer.device.handle_hal_error(e))?
};
if !mapping.is_coherent && kind == HostMap::Read {
@ -420,13 +420,16 @@ pub enum DeviceError {
DeviceMismatch(#[from] Box<DeviceMismatch>),
}
impl From<hal::DeviceError> for DeviceError {
fn from(error: hal::DeviceError) -> Self {
impl DeviceError {
/// Only use this function in contexts where there is no `Device`.
///
/// Use [`Device::handle_hal_error`] otherwise.
pub fn from_hal(error: hal::DeviceError) -> Self {
match error {
hal::DeviceError::Lost => DeviceError::Lost,
hal::DeviceError::OutOfMemory => DeviceError::OutOfMemory,
hal::DeviceError::ResourceCreationFailed => DeviceError::ResourceCreationFailed,
hal::DeviceError::Unexpected => DeviceError::Lost,
hal::DeviceError::Lost => Self::Lost,
hal::DeviceError::OutOfMemory => Self::OutOfMemory,
hal::DeviceError::ResourceCreationFailed => Self::ResourceCreationFailed,
hal::DeviceError::Unexpected => Self::Lost,
}
}
}
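As the new doc comment says, `DeviceError::from_hal` is reserved for code paths that have no `Device` to invalidate yet, such as device creation; everything else should use `Device::handle_hal_error`. Note that both `hal::DeviceError::Lost` and `Unexpected` collapse into `DeviceError::Lost`. A small sketch of such a constructor path, with placeholder types only (the real constructor is `Device::new` further down, which now returns `DeviceError` instead of `CreateDeviceError`):

#[allow(dead_code)]
enum HalError {
    OutOfMemory,
}

#[derive(Debug)]
enum DeviceError {
    OutOfMemory,
}

impl DeviceError {
    fn from_hal(error: HalError) -> Self {
        match error {
            HalError::OutOfMemory => Self::OutOfMemory,
        }
    }
}

struct RawDevice;
impl RawDevice {
    // Stand-in for the hal device's create_fence.
    fn create_fence(&self) -> Result<(), HalError> {
        Ok(())
    }
}

struct Device;
impl Device {
    // No `Device` exists yet, so hal failures are converted with the plain from_hal.
    fn new(raw: &RawDevice) -> Result<Self, DeviceError> {
        raw.create_fence().map_err(DeviceError::from_hal)?;
        Ok(Device)
    }
}

fn main() {
    Device::new(&RawDevice).unwrap();
}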

View File

@ -269,17 +269,20 @@ impl PendingWrites {
fn pre_submit(
&mut self,
command_allocator: &CommandAllocator,
device: &dyn hal::DynDevice,
queue: &dyn hal::DynQueue,
device: &Device,
queue: &Queue,
) -> Result<Option<EncoderInFlight>, DeviceError> {
if self.is_recording {
let pending_buffers = mem::take(&mut self.dst_buffers);
let pending_textures = mem::take(&mut self.dst_textures);
let cmd_buf = unsafe { self.command_encoder.end_encoding()? };
let cmd_buf = unsafe { self.command_encoder.end_encoding() }
.map_err(|e| device.handle_hal_error(e))?;
self.is_recording = false;
let new_encoder = command_allocator.acquire_encoder(device, queue)?;
let new_encoder = command_allocator
.acquire_encoder(device.raw(), queue.raw())
.map_err(|e| device.handle_hal_error(e))?;
let encoder = EncoderInFlight {
raw: mem::replace(&mut self.command_encoder, new_encoder),
@ -1194,14 +1197,12 @@ impl Global {
// execute resource transitions
unsafe {
baked
.encoder
.begin_encoding(hal_label(
Some("(wgpu internal) Transit"),
device.instance_flags,
))
.map_err(DeviceError::from)?
};
baked.encoder.begin_encoding(hal_label(
Some("(wgpu internal) Transit"),
device.instance_flags,
))
}
.map_err(|e| device.handle_hal_error(e))?;
//Note: locking the trackers has to be done after the storages
let mut trackers = device.trackers.lock();
@ -1224,14 +1225,12 @@ impl Global {
// but here we have a command encoder by hand, so it's easier to use it.
if !used_surface_textures.is_empty() {
unsafe {
baked
.encoder
.begin_encoding(hal_label(
Some("(wgpu internal) Present"),
device.instance_flags,
))
.map_err(DeviceError::from)?
};
baked.encoder.begin_encoding(hal_label(
Some("(wgpu internal) Present"),
device.instance_flags,
))
}
.map_err(|e| device.handle_hal_error(e))?;
let texture_barriers = trackers
.textures
.set_from_usage_scope_and_drain_transitions(
@ -1299,7 +1298,7 @@ impl Global {
}
if let Some(pending_execution) =
pending_writes.pre_submit(&device.command_allocator, device.raw(), queue.raw())?
pending_writes.pre_submit(&device.command_allocator, device, &queue)?
{
active_executions.insert(0, pending_execution);
}
@ -1324,15 +1323,13 @@ impl Global {
}
unsafe {
queue
.raw()
.submit(
&hal_command_buffers,
&submit_surface_textures,
(fence.as_mut(), submit_index),
)
.map_err(DeviceError::from)?;
queue.raw().submit(
&hal_command_buffers,
&submit_surface_textures,
(fence.as_mut(), submit_index),
)
}
.map_err(|e| device.handle_hal_error(e))?;
// Advance the successful submission index.
device

View File

@ -227,31 +227,29 @@ impl Device {
desc: &DeviceDescriptor,
trace_path: Option<&std::path::Path>,
instance_flags: wgt::InstanceFlags,
) -> Result<Self, CreateDeviceError> {
) -> Result<Self, DeviceError> {
#[cfg(not(feature = "trace"))]
if let Some(_) = trace_path {
log::error!("Feature 'trace' is not enabled");
}
let fence =
unsafe { raw_device.create_fence() }.map_err(|_| CreateDeviceError::OutOfMemory)?;
let fence = unsafe { raw_device.create_fence() }.map_err(DeviceError::from_hal)?;
let command_allocator = command::CommandAllocator::new();
let pending_encoder = command_allocator
.acquire_encoder(raw_device.as_ref(), raw_queue)
.map_err(|_| CreateDeviceError::OutOfMemory)?;
.map_err(DeviceError::from_hal)?;
let mut pending_writes = PendingWrites::new(pending_encoder);
// Create zeroed buffer used for texture clears.
let zero_buffer = unsafe {
raw_device
.create_buffer(&hal::BufferDescriptor {
label: hal_label(Some("(wgpu internal) zero init buffer"), instance_flags),
size: ZERO_BUFFER_SIZE,
usage: hal::BufferUses::COPY_SRC | hal::BufferUses::COPY_DST,
memory_flags: hal::MemoryFlags::empty(),
})
.map_err(DeviceError::from)?
};
raw_device.create_buffer(&hal::BufferDescriptor {
label: hal_label(Some("(wgpu internal) zero init buffer"), instance_flags),
size: ZERO_BUFFER_SIZE,
usage: hal::BufferUses::COPY_SRC | hal::BufferUses::COPY_DST,
memory_flags: hal::MemoryFlags::empty(),
})
}
.map_err(DeviceError::from_hal)?;
pending_writes.activate();
unsafe {
pending_writes
@ -339,6 +337,18 @@ impl Device {
}
}
pub fn handle_hal_error(&self, error: hal::DeviceError) -> DeviceError {
match error {
hal::DeviceError::OutOfMemory => {}
hal::DeviceError::Lost
| hal::DeviceError::ResourceCreationFailed
| hal::DeviceError::Unexpected => {
self.lose(&error.to_string());
}
}
DeviceError::from_hal(error)
}
pub(crate) fn release_queue(&self, queue: Box<dyn hal::DynQueue>) {
assert!(self.queue_to_drop.set(queue).is_ok());
}
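`handle_hal_error` above is the single classification point: out-of-memory is reported as-is, while loss, resource-creation failure, and unexpected errors additionally call `lose`, which marks the device invalid (`lose` becomes private later in this diff, and the public `device_mark_lost` entry point was removed earlier). Subsequent operations that start with `device.check_is_valid()`, like the clear path near the top of this diff, then fail up front. A rough stand-in sketch of that effect, not the actual wgpu-core types:

use std::sync::atomic::{AtomicBool, Ordering};

#[derive(Debug)]
enum DeviceError {
    // Stand-in for the validation error returned once the device is invalid.
    Invalid,
}

struct Device {
    valid: AtomicBool,
}

impl Device {
    fn lose(&self, _message: &str) {
        self.valid.store(false, Ordering::Release);
    }

    // Stand-in for the existing validity check used at the top of most operations.
    fn check_is_valid(&self) -> Result<(), DeviceError> {
        if self.valid.load(Ordering::Acquire) {
            Ok(())
        } else {
            Err(DeviceError::Invalid)
        }
    }
}

fn main() {
    let device = Device { valid: AtomicBool::new(true) };
    assert!(device.check_is_valid().is_ok());

    // A hal error was classified as device loss and handle_hal_error called lose():
    device.lose("unexpected hal error");

    // Every later operation that validates the device now fails early.
    assert!(device.check_is_valid().is_err());
}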
@ -441,11 +451,8 @@ impl Device {
wgt::Maintain::Wait => self
.last_successful_submission_index
.load(Ordering::Acquire),
wgt::Maintain::Poll => unsafe {
self.raw()
.get_fence_value(fence.as_ref())
.map_err(DeviceError::from)?
},
wgt::Maintain::Poll => unsafe { self.raw().get_fence_value(fence.as_ref()) }
.map_err(|e| self.handle_hal_error(e))?,
};
// If necessary, wait for that submission to complete.
@ -453,8 +460,8 @@ impl Device {
unsafe {
self.raw()
.wait(fence.as_ref(), submission_index, CLEANUP_WAIT_MS)
.map_err(DeviceError::from)?
};
}
.map_err(|e| self.handle_hal_error(e))?;
}
log::trace!("Device::maintain: waiting for submission index {submission_index}");
@ -588,7 +595,8 @@ impl Device {
usage,
memory_flags: hal::MemoryFlags::empty(),
};
let buffer = unsafe { self.raw().create_buffer(&hal_desc) }.map_err(DeviceError::from)?;
let buffer =
unsafe { self.raw().create_buffer(&hal_desc) }.map_err(|e| self.handle_hal_error(e))?;
let buffer = Buffer {
raw: Snatchable::new(buffer),
@ -935,11 +943,8 @@ impl Device {
view_formats: hal_view_formats,
};
let raw_texture = unsafe {
self.raw()
.create_texture(&hal_desc)
.map_err(DeviceError::from)?
};
let raw_texture = unsafe { self.raw().create_texture(&hal_desc) }
.map_err(|e| self.handle_hal_error(e))?;
let clear_mode = if hal_usage
.intersects(hal::TextureUses::DEPTH_STENCIL_WRITE | hal::TextureUses::COLOR_TARGET)
@ -982,7 +987,7 @@ impl Device {
unsafe {
self.raw().create_texture_view(raw_texture.as_ref(), &desc)
}
.map_err(DeviceError::from)?,
.map_err(|e| self.handle_hal_error(e))?,
));
};
}
@ -1288,11 +1293,8 @@ impl Device {
range: resolved_range,
};
let raw = unsafe {
self.raw()
.create_texture_view(texture_raw, &hal_desc)
.map_err(|_| resource::CreateTextureViewError::OutOfMemory)?
};
let raw = unsafe { self.raw().create_texture_view(texture_raw, &hal_desc) }
.map_err(|e| self.handle_hal_error(e))?;
let selector = TextureSelector {
mips: desc.range.base_mip_level..mip_level_end,
@ -1423,11 +1425,8 @@ impl Device {
border_color: desc.border_color,
};
let raw = unsafe {
self.raw()
.create_sampler(&hal_desc)
.map_err(DeviceError::from)?
};
let raw = unsafe { self.raw().create_sampler(&hal_desc) }
.map_err(|e| self.handle_hal_error(e))?;
let sampler = Sampler {
raw: ManuallyDrop::new(raw),
@ -1551,7 +1550,7 @@ impl Device {
Err(error) => {
return Err(match error {
hal::ShaderError::Device(error) => {
pipeline::CreateShaderModuleError::Device(error.into())
pipeline::CreateShaderModuleError::Device(self.handle_hal_error(error))
}
hal::ShaderError::Compilation(ref msg) => {
log::error!("Shader error: {}", msg);
@ -1592,7 +1591,7 @@ impl Device {
Err(error) => {
return Err(match error {
hal::ShaderError::Device(error) => {
pipeline::CreateShaderModuleError::Device(error.into())
pipeline::CreateShaderModuleError::Device(self.handle_hal_error(error))
}
hal::ShaderError::Compilation(ref msg) => {
log::error!("Shader error: {}", msg);
@ -1624,7 +1623,8 @@ impl Device {
let encoder = self
.command_allocator
.acquire_encoder(self.raw(), queue.raw())?;
.acquire_encoder(self.raw(), queue.raw())
.map_err(|e| self.handle_hal_error(e))?;
let command_buffer = command::CommandBuffer::new(encoder, self, label);
@ -1856,11 +1856,9 @@ impl Device {
flags: bgl_flags,
entries: &hal_bindings,
};
let raw = unsafe {
self.raw()
.create_bind_group_layout(&hal_desc)
.map_err(DeviceError::from)?
};
let raw = unsafe { self.raw().create_bind_group_layout(&hal_desc) }
.map_err(|e| self.handle_hal_error(e))?;
let mut count_validator = binding_model::BindingTypeMaxCountValidator::default();
for entry in entry_map.values() {
@ -2290,11 +2288,8 @@ impl Device {
textures: &hal_textures,
acceleration_structures: &[],
};
let raw = unsafe {
self.raw()
.create_bind_group(&hal_desc)
.map_err(DeviceError::from)?
};
let raw = unsafe { self.raw().create_bind_group(&hal_desc) }
.map_err(|e| self.handle_hal_error(e))?;
// collect in the order of BGL iteration
let late_buffer_binding_sizes = layout
@ -2588,11 +2583,8 @@ impl Device {
push_constant_ranges: desc.push_constant_ranges.as_ref(),
};
let raw = unsafe {
self.raw()
.create_pipeline_layout(&hal_desc)
.map_err(DeviceError::from)?
};
let raw = unsafe { self.raw().create_pipeline_layout(&hal_desc) }
.map_err(|e| self.handle_hal_error(e))?;
drop(raw_bind_group_layouts);
@ -2746,7 +2738,7 @@ impl Device {
unsafe { self.raw().create_compute_pipeline(&pipeline_desc) }.map_err(
|err| match err {
hal::PipelineError::Device(error) => {
pipeline::CreateComputePipelineError::Device(error.into())
pipeline::CreateComputePipelineError::Device(self.handle_hal_error(error))
}
hal::PipelineError::Linkage(_stages, msg) => {
pipeline::CreateComputePipelineError::Internal(msg)
@ -3326,7 +3318,7 @@ impl Device {
unsafe { self.raw().create_render_pipeline(&pipeline_desc) }.map_err(
|err| match err {
hal::PipelineError::Device(error) => {
pipeline::CreateRenderPipelineError::Device(error.into())
pipeline::CreateRenderPipelineError::Device(self.handle_hal_error(error))
}
hal::PipelineError::Linkage(stage, msg) => {
pipeline::CreateRenderPipelineError::Internal { stage, error: msg }
@ -3449,7 +3441,9 @@ impl Device {
};
let raw = match unsafe { self.raw().create_pipeline_cache(&cache_desc) } {
Ok(raw) => raw,
Err(e) => return Err(e.into()),
Err(e) => match e {
hal::PipelineCacheError::Device(e) => return Err(self.handle_hal_error(e).into()),
},
};
let cache = pipeline::PipelineCache {
device: self.clone(),
@ -3506,9 +3500,11 @@ impl Device {
submission_index: crate::SubmissionIndex,
) -> Result<(), DeviceError> {
let fence = self.fence.read();
let last_done_index = unsafe { self.raw().get_fence_value(fence.as_ref())? };
let last_done_index = unsafe { self.raw().get_fence_value(fence.as_ref()) }
.map_err(|e| self.handle_hal_error(e))?;
if last_done_index < submission_index {
unsafe { self.raw().wait(fence.as_ref(), submission_index, !0)? };
unsafe { self.raw().wait(fence.as_ref(), submission_index, !0) }
.map_err(|e| self.handle_hal_error(e))?;
drop(fence);
let closures = self
.lock_life()
@ -3567,7 +3563,7 @@ impl Device {
Ok(query_set)
}
pub(crate) fn lose(&self, message: &str) {
fn lose(&self, message: &str) {
// Follow the steps at https://gpuweb.github.io/gpuweb/#lose-the-device.
// Mark the device explicitly as invalid. This is checked in various

View File

@ -4,7 +4,7 @@ use std::{borrow::Cow, collections::HashMap};
use crate::hub::Hub;
use crate::{
api_log,
device::{queue::Queue, resource::Device, DeviceDescriptor},
device::{queue::Queue, resource::Device, DeviceDescriptor, DeviceError},
global::Global,
hal_api::HalApi,
id::{markers, AdapterId, DeviceId, Id, Marker, QueueId, SurfaceId},
@ -272,20 +272,19 @@ impl Adapter {
) -> Result<(Arc<Device>, Arc<Queue>), RequestDeviceError> {
api_log!("Adapter::create_device");
if let Ok(device) = Device::new(
let device = Device::new(
hal_device.device,
hal_device.queue.as_ref(),
self,
desc,
trace_path,
instance_flags,
) {
let device = Arc::new(device);
let queue = Arc::new(Queue::new(device.clone(), hal_device.queue));
device.set_queue(&queue);
return Ok((device, queue));
}
Err(RequestDeviceError::OutOfMemory)
)?;
let device = Arc::new(device);
let queue = Arc::new(Queue::new(device.clone(), hal_device.queue));
device.set_queue(&queue);
Ok((device, queue))
}
#[allow(clippy::type_complexity)]
@ -338,12 +337,7 @@ impl Adapter {
&desc.memory_hints,
)
}
.map_err(|err| match err {
hal::DeviceError::Lost => RequestDeviceError::DeviceLost,
hal::DeviceError::OutOfMemory => RequestDeviceError::OutOfMemory,
hal::DeviceError::ResourceCreationFailed => RequestDeviceError::Internal,
hal::DeviceError::Unexpected => RequestDeviceError::DeviceLost,
})?;
.map_err(DeviceError::from_hal)?;
self.create_device_and_queue_from_hal(open, desc, instance_flags, trace_path)
}
@ -377,18 +371,14 @@ pub enum GetSurfaceSupportError {
/// Error when requesting a device from the adaptor
#[non_exhaustive]
pub enum RequestDeviceError {
#[error(transparent)]
Device(#[from] DeviceError),
#[error("Parent adapter is invalid")]
InvalidAdapter,
#[error("Connection to device was lost during initialization")]
DeviceLost,
#[error("Device initialization failed due to implementation specific errors")]
Internal,
#[error(transparent)]
LimitsExceeded(#[from] FailedLimit),
#[error("Device has no queue supporting graphics")]
NoGraphicsQueue,
#[error("Not enough memory left to request device")]
OutOfMemory,
#[error("Unsupported features were requested: {0:?}")]
UnsupportedFeature(wgt::Features),
}
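With `RequestDeviceError` gaining a transparent `Device(#[from] DeviceError)` variant and dropping the old `DeviceLost`/`Internal`/`OutOfMemory` variants, `Adapter::create_device` can propagate failures with `?` instead of the manual `hal::DeviceError` match removed above. A sketch of that conversion, assuming the `thiserror` derive these enums already use; the bodies are trimmed stand-ins, not the full enums:

use thiserror::Error;

#[derive(Debug, Error)]
enum DeviceError {
    #[error("Not enough memory left")]
    OutOfMemory,
}

#[derive(Debug, Error)]
enum RequestDeviceError {
    #[error(transparent)]
    Device(#[from] DeviceError),
}

fn device_new() -> Result<(), DeviceError> {
    // Pretend the hal device ran out of memory during creation.
    Err(DeviceError::OutOfMemory)
}

fn create_device() -> Result<(), RequestDeviceError> {
    // `?` goes through the From<DeviceError> impl generated by #[from].
    device_new()?;
    Ok(())
}

fn main() {
    assert!(matches!(
        create_device(),
        Err(RequestDeviceError::Device(DeviceError::OutOfMemory))
    ));
}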

View File

@ -286,16 +286,6 @@ pub enum CreatePipelineCacheError {
Internal(String),
}
impl From<hal::PipelineCacheError> for CreatePipelineCacheError {
fn from(value: hal::PipelineCacheError) -> Self {
match value {
hal::PipelineCacheError::Device(device) => {
CreatePipelineCacheError::Device(device.into())
}
}
}
}
#[derive(Debug)]
pub struct PipelineCache {
pub(crate) raw: ManuallyDrop<Box<dyn hal::DynPipelineCache>>,

View File

@ -191,7 +191,7 @@ impl Global {
.raw()
.create_texture_view(ast.texture.as_ref().borrow(), &clear_view_desc)
}
.map_err(DeviceError::from)?;
.map_err(|e| device.handle_hal_error(e))?;
let mut presentation = surface.presentation.lock();
let present = presentation.as_mut().unwrap();
@ -238,7 +238,7 @@ impl Global {
match err {
hal::SurfaceError::Lost => Status::Lost,
hal::SurfaceError::Device(err) => {
return Err(DeviceError::from(err).into());
return Err(device.handle_hal_error(err).into());
}
hal::SurfaceError::Outdated => Status::Outdated,
hal::SurfaceError::Other(msg) => {
@ -315,7 +315,9 @@ impl Global {
Ok(()) => Ok(Status::Good),
Err(err) => match err {
hal::SurfaceError::Lost => Ok(Status::Lost),
hal::SurfaceError::Device(err) => Err(SurfaceError::from(DeviceError::from(err))),
hal::SurfaceError::Device(err) => {
Err(SurfaceError::from(device.handle_hal_error(err)))
}
hal::SurfaceError::Outdated => Ok(Status::Outdated),
hal::SurfaceError::Other(msg) => {
log::error!("acquire error: {}", msg);

View File

@ -832,8 +832,10 @@ impl StagingBuffer {
memory_flags: hal::MemoryFlags::TRANSIENT,
};
let raw = unsafe { device.raw().create_buffer(&stage_desc)? };
let mapping = unsafe { device.raw().map_buffer(raw.as_ref(), 0..size.get()) }?;
let raw = unsafe { device.raw().create_buffer(&stage_desc) }
.map_err(|e| device.handle_hal_error(e))?;
let mapping = unsafe { device.raw().map_buffer(raw.as_ref(), 0..size.get()) }
.map_err(|e| device.handle_hal_error(e))?;
let staging_buffer = StagingBuffer {
raw,
@ -1358,7 +1360,7 @@ impl Global {
let cmd_buf_data = cmd_buf_data.as_mut().unwrap();
let cmd_buf_raw = cmd_buf_data
.encoder
.open()
.open(&cmd_buf.device)
.ok()
.and_then(|encoder| encoder.as_any_mut().downcast_mut());
hal_command_encoder_callback(cmd_buf_raw)