Mitigate Stale Data Read for xAPIC vulnerability
To fully mitigate the Stale Data Read for xAPIC vulnerability, reads of userspace memory from an SGX enclave must be 8-byte aligned and performed in 8-byte chunks.

References:
- https://www.intel.com/content/www/us/en/security-center/advisory/intel-sa-00657.html
- https://www.intel.com/content/www/us/en/developer/articles/technical/software-security-guidance/advisory-guidance/stale-data-read-from-xapic.html
parent 25de53f768
commit 2a23d08aae
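The aligned copies in this patch go through `copy_quadwords`, a helper that already exists in `alloc.rs` and is not shown in this diff (it may be implemented differently, e.g. with `rep movsq`). As a rough, hedged illustration of the constraint described in the commit message — not the real helper, and with a name of my choosing — an aligned 8-bytes-at-a-time copy could look like this:

    /// Illustrative sketch only (not the real `copy_quadwords`): copy `len` bytes,
    /// where `len` is a multiple of 8 and `src` is 8-byte aligned, reading the
    /// untrusted side strictly in aligned 8-byte units.
    unsafe fn copy_aligned_quadwords_sketch(src: *const u8, dst: *mut u8, len: usize) {
        debug_assert_eq!(src as usize % 8, 0);
        debug_assert_eq!(len % 8, 0);
        let mut offset = 0;
        while offset < len {
            // Each access to userspace is a single aligned 8-byte load.
            let quad = (src.add(offset) as *const u64).read();
            // The enclave-side destination need not be aligned.
            (dst.add(offset) as *mut u64).write_unaligned(quad);
            offset += 8;
        }
    }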
@@ -313,9 +313,9 @@ where
 //     +--------+
 //     | small1 |  Chunk smaller than 8 bytes
 //     +--------+
-fn region_as_aligned_chunks(ptr: *const u8, len: usize) -> (u8, usize, u8) {
-    let small0_size = (8 - ptr as usize % 8) as u8;
-    let small1_size = ((len - small0_size as usize) % 8) as u8;
+fn region_as_aligned_chunks(ptr: *const u8, len: usize) -> (usize, usize, usize) {
+    let small0_size = if ptr as usize % 8 == 0 { 0 } else { 8 - ptr as usize % 8 };
+    let small1_size = (len - small0_size as usize) % 8;
     let big_size = len - small0_size as usize - small1_size as usize;
 
     (small0_size, big_size, small1_size)
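As a quick worked example of what the reworked helper returns (not part of the diff; the standalone `split` function below just mirrors the logic above for illustration): a region starting 3 bytes past an 8-byte boundary with length 20 splits into 5 + 8 + 7 bytes.

    // Mirrors region_as_aligned_chunks' arithmetic on a plain address, for illustration only.
    fn split(addr: usize, len: usize) -> (usize, usize, usize) {
        let small0 = if addr % 8 == 0 { 0 } else { 8 - addr % 8 };
        let small1 = (len - small0) % 8;
        (small0, len - small0 - small1, small1)
    }

    fn main() {
        // 0x1003 is 3 bytes past an 8-byte boundary.
        assert_eq!(split(0x1003, 20), (5, 8, 7));
        // Already aligned and a multiple of 8: everything lands in `big`.
        assert_eq!(split(0x1000, 32), (0, 32, 0));
    }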
@@ -417,6 +417,106 @@ pub(crate) unsafe fn copy_to_userspace(src: *const u8, dst: *mut u8, len: usize)
     }
 }
 
+/// Copies `len` bytes of data from userspace pointer `src` to enclave pointer `dst`
+///
+/// This function mitigates AEPIC leak vulnerabilities by ensuring all reads from untrusted memory are 8-byte aligned
+///
+/// # Panics
+/// This function panics if:
+///
+/// * The `src` pointer is null
+/// * The `dst` pointer is null
+/// * The `src` memory range is not in user memory
+/// * The `dst` memory range is not in enclave memory
+///
+/// # References
+/// - https://www.intel.com/content/www/us/en/security-center/advisory/intel-sa-00657.html
+/// - https://www.intel.com/content/www/us/en/developer/articles/technical/software-security-guidance/advisory-guidance/stale-data-read-from-xapic.html
+pub(crate) unsafe fn copy_from_userspace(src: *const u8, dst: *mut u8, len: usize) {
+    // Copies memory region `src..src + len` to the enclave at `dst`. The source memory region
+    // is:
+    // - strictly less than 8 bytes in size and may be
+    // - located at a misaligned memory location
+    fn copy_misaligned_chunk_to_enclave(src: *const u8, dst: *mut u8, len: usize) {
+        let mut tmp_buff = [0u8; 16];
+
+        unsafe {
+            // Compute an aligned memory region to read from
+            // +--------+ <-- aligned_src + aligned_len (8B-aligned)
+            // |  pad1  |
+            // +--------+ <-- src + len (misaligned)
+            // |        |
+            // |        |
+            // |        |
+            // +--------+ <-- src (misaligned)
+            // |  pad0  |
+            // +--------+ <-- aligned_src (8B-aligned)
+            let pad0_size = src as usize % 8;
+            let aligned_src = src.sub(pad0_size);
+
+            let pad1_size = 8 - (src.add(len) as usize % 8);
+            let aligned_len = pad0_size + len + pad1_size;
+
+            debug_assert!(len < 8);
+            debug_assert_eq!(aligned_src as usize % 8, 0);
+            debug_assert_eq!(aligned_len % 8, 0);
+            debug_assert!(aligned_len <= 16);
+
+            // Copy the aligned buffer to a temporary buffer
+            // Note: copying from a slightly different memory location is a bit odd. In this case it
+            // can't lead to page faults or inadvertent copying from the enclave as we only ensured
+            // that the `src` pointer is aligned at an 8 byte boundary. As pages are 4096 bytes
+            // aligned, `aligned_src` must be on the same page as `src`. A similar argument can be made
+            // for `src + len`
+            copy_quadwords(aligned_src as _, tmp_buff.as_mut_ptr(), aligned_len);
+
+            // Copy the correct parts of the temporary buffer to the destination
+            ptr::copy(tmp_buff.as_ptr().add(pad0_size), dst, len);
+        }
+    }
+
+    assert!(!src.is_null());
+    assert!(!dst.is_null());
+    assert!(is_user_range(src, len));
+    assert!(is_enclave_range(dst, len));
+    assert!(!(src as usize).overflowing_add(len + 8).1);
+    assert!(!(dst as usize).overflowing_add(len + 8).1);
+
+    if len < 8 {
+        copy_misaligned_chunk_to_enclave(src, dst, len);
+    } else if len % 8 == 0 && src as usize % 8 == 0 {
+        // Copying 8-byte aligned quadwords: copy quad word per quad word
+        unsafe {
+            copy_quadwords(src, dst, len);
+        }
+    } else {
+        // Split copies into three parts:
+        //   +--------+
+        //   | small0 | Chunk smaller than 8 bytes
+        //   +--------+
+        //   |   big  | Chunk 8-byte aligned, and size a multiple of 8 bytes
+        //   +--------+
+        //   | small1 | Chunk smaller than 8 bytes
+        //   +--------+
+        let (small0_size, big_size, small1_size) = region_as_aligned_chunks(dst, len);
+
+        unsafe {
+            // Copy small0
+            copy_misaligned_chunk_to_enclave(src, dst, small0_size);
+
+            // Copy big
+            let big_src = src.add(small0_size);
+            let big_dst = dst.add(small0_size);
+            copy_quadwords(big_src, big_dst, big_size);
+
+            // Copy small1
+            let small1_src = src.add(big_size + small0_size);
+            let small1_dst = dst.add(big_size + small0_size);
+            copy_misaligned_chunk_to_enclave(small1_src, small1_dst, small1_size);
+        }
+    }
+}
+
 #[unstable(feature = "sgx_platform", issue = "56975")]
 impl<T: ?Sized> UserRef<T>
 where
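To make the padding arithmetic in `copy_misaligned_chunk_to_enclave` concrete, here is a small standalone sketch (not part of the patch; the `aligned_window` helper and the addresses are hypothetical) reproducing the `pad0`/`pad1` computation for a misaligned 6-byte read:

    // Mirrors the padding math above on a plain address, for illustration only.
    fn aligned_window(src: usize, len: usize) -> (usize, usize, usize) {
        let pad0_size = src % 8;
        let pad1_size = 8 - ((src + len) % 8);
        let aligned_len = pad0_size + len + pad1_size;
        (pad0_size, pad1_size, aligned_len)
    }

    fn main() {
        // A 6-byte read starting 5 bytes past an 8-byte boundary: the aligned window
        // starts 5 bytes before `src` and ends 5 bytes after `src + len`, 16 bytes in
        // total, which still fits the 16-byte temporary buffer since `len < 8`.
        assert_eq!(aligned_window(0x2005, 6), (5, 5, 16));
    }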
@@ -481,7 +581,7 @@ where
     pub fn copy_to_enclave(&self, dest: &mut T) {
         unsafe {
             assert_eq!(mem::size_of_val(dest), mem::size_of_val(&*self.0.get()));
-            ptr::copy(
+            copy_from_userspace(
                 self.0.get() as *const T as *const u8,
                 dest as *mut T as *mut u8,
                 mem::size_of_val(dest),
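For context (not part of the diff), the user-facing API is unchanged; only the underlying copy now routes through `copy_from_userspace`. A hedged usage sketch, using only calls that appear elsewhere in this patch and compiling only for the x86_64-fortanix-unknown-sgx target:

    use std::os::fortanix_sgx::usercalls::alloc::User;

    fn roundtrip() {
        // Stage 100 bytes in userspace, then pull them back into enclave memory.
        // After this change, the read back into the enclave uses aligned 8-byte accesses.
        let mut user_buf = User::<[u8]>::uninitialized(100);
        user_buf.copy_from_enclave(&[42u8; 100]);

        let mut enclave_buf = [0u8; 100];
        user_buf.copy_to_enclave(&mut enclave_buf);
        assert_eq!(enclave_buf, [42u8; 100]);
    }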
@@ -507,7 +607,11 @@ where
 {
     /// Copies the value from user memory into enclave memory.
     pub fn to_enclave(&self) -> T {
-        unsafe { ptr::read(self.0.get()) }
+        unsafe {
+            let mut data: T = mem::MaybeUninit::uninit().assume_init();
+            copy_from_userspace(self.0.get() as _, &mut data as *mut T as _, mem::size_of::<T>());
+            data
+        }
     }
 }
@@ -1,8 +1,8 @@
-use super::alloc::copy_to_userspace;
 use super::alloc::User;
+use super::alloc::{copy_from_userspace, copy_to_userspace};
 
 #[test]
-fn test_copy_function() {
+fn test_copy_to_userspace_function() {
     let mut src = [0u8; 100];
     let mut dst = User::<[u8]>::uninitialized(100);
@@ -28,3 +28,29 @@ fn test_copy_function() {
         }
     }
 }
+
+#[test]
+fn test_copy_from_userspace_function() {
+    let mut dst = [0u8; 100];
+    let mut src = User::<[u8]>::uninitialized(100);
+
+    src.copy_from_enclave(&[0u8; 100]);
+
+    for size in 0..48 {
+        // For all possible alignments
+        for offset in 0..8 {
+            // Overwrite complete dst
+            dst = [0u8; 100];
+
+            // Copy src[offset..offset + size] to dst
+            unsafe { copy_from_userspace(src.as_ptr().offset(offset), dst.as_mut_ptr(), size) };
+
+            // Verify copy
+            for byte in 0..size {
+                unsafe {
+                    assert_eq!(dst[byte as usize], *src.as_ptr().offset(offset + byte as isize));
+                }
+            }
+        }
+    }
+}