Skip to content

Commit 94811fd

Browse files
committed
Auto merge of rust-lang#98949 - ehuss:update-beta-cargo, r=ehuss
[beta] Beta 1.63 backports:
* Fix data race in thread::scope (rust-lang#98503)
* Mitigate MMIO stale data vulnerability (rust-lang#98126)
* Cargo:
  * [BETA-1.63] Fix zsh completions for add and locate-project (rust-lang/cargo#10811)
  * [BETA-1.63] Bump cargo-util version (rust-lang/cargo#10805)
2 parents 59f577d + 372fa7c commit 94811fd

File tree

7 files changed

+161
-22
lines changed

7 files changed

+161
-22
lines changed

Cargo.lock

+1-1
Original file line number | Diff line number | Diff line change
@@ -471,7 +471,7 @@ dependencies = [
471471

472472
[[package]]
473473
name = "cargo-util"
474-
version = "0.1.4"
474+
version = "0.2.1"
475475
dependencies = [
476476
"anyhow",
477477
"core-foundation",

library/std/src/sys/sgx/abi/usercalls/alloc.rs

+110-10
Original file line number | Diff line number | Diff line change
@@ -1,13 +1,16 @@
11
#![allow(unused)]
22

3+
use crate::arch::asm;
34
use crate::cell::UnsafeCell;
5+
use crate::cmp;
6+
use crate::convert::TryInto;
47
use crate::mem;
58
use crate::ops::{CoerceUnsized, Deref, DerefMut, Index, IndexMut};
69
use crate::ptr::{self, NonNull};
710
use crate::slice;
811
use crate::slice::SliceIndex;
912

10-
use super::super::mem::is_user_range;
13+
use super::super::mem::{is_enclave_range, is_user_range};
1114
use fortanix_sgx_abi::*;
1215

1316
/// A type that can be safely read from or written to userspace.
@@ -210,7 +213,9 @@ where
210213
unsafe {
211214
// Mustn't call alloc with size 0.
212215
let ptr = if size > 0 {
213-
rtunwrap!(Ok, super::alloc(size, T::align_of())) as _
216+
// `copy_to_userspace` is more efficient when data is 8-byte aligned
217+
let alignment = cmp::max(T::align_of(), 8);
218+
rtunwrap!(Ok, super::alloc(size, alignment)) as _
214219
} else {
215220
T::align_of() as _ // dangling pointer ok for size 0
216221
};
@@ -225,13 +230,9 @@ where
225230
/// Copies `val` into freshly allocated space in user memory.
226231
pub fn new_from_enclave(val: &T) -> Self {
227232
unsafe {
228-
let ret = Self::new_uninit_bytes(mem::size_of_val(val));
229-
ptr::copy(
230-
val as *const T as *const u8,
231-
ret.0.as_ptr() as *mut u8,
232-
mem::size_of_val(val),
233-
);
234-
ret
233+
let mut user = Self::new_uninit_bytes(mem::size_of_val(val));
234+
user.copy_from_enclave(val);
235+
user
235236
}
236237
}
237238

@@ -304,6 +305,105 @@ where
304305
}
305306
}
306307

308+
/// Copies `len` bytes of data from enclave pointer `src` to userspace `dst`
309+
///
310+
/// This function mitigates stale data vulnerabilities by ensuring all writes to untrusted memory are either:
311+
/// - preceded by the VERW instruction and followed by the MFENCE; LFENCE instruction sequence
312+
/// - or are in multiples of 8 bytes, aligned to an 8-byte boundary
313+
///
314+
/// # Panics
315+
/// This function panics if:
316+
///
317+
/// * The `src` pointer is null
318+
/// * The `dst` pointer is null
319+
/// * The `src` memory range is not in enclave memory
320+
/// * The `dst` memory range is not in user memory
321+
///
322+
/// # References
323+
/// - https://www.intel.com/content/www/us/en/security-center/advisory/intel-sa-00615.html
324+
/// - https://www.intel.com/content/www/us/en/developer/articles/technical/software-security-guidance/technical-documentation/processor-mmio-stale-data-vulnerabilities.html#inpage-nav-3-2-2
325+
pub(crate) unsafe fn copy_to_userspace(src: *const u8, dst: *mut u8, len: usize) {
326+
unsafe fn copy_bytewise_to_userspace(src: *const u8, dst: *mut u8, len: usize) {
327+
unsafe {
328+
let mut seg_sel: u16 = 0;
329+
for off in 0..len {
330+
asm!("
331+
mov %ds, ({seg_sel})
332+
verw ({seg_sel})
333+
movb {val}, ({dst})
334+
mfence
335+
lfence
336+
",
337+
val = in(reg_byte) *src.offset(off as isize),
338+
dst = in(reg) dst.offset(off as isize),
339+
seg_sel = in(reg) &mut seg_sel,
340+
options(nostack, att_syntax)
341+
);
342+
}
343+
}
344+
}
345+
346+
unsafe fn copy_aligned_quadwords_to_userspace(src: *const u8, dst: *mut u8, len: usize) {
347+
unsafe {
348+
asm!(
349+
"rep movsq (%rsi), (%rdi)",
350+
inout("rcx") len / 8 => _,
351+
inout("rdi") dst => _,
352+
inout("rsi") src => _,
353+
options(att_syntax, nostack, preserves_flags)
354+
);
355+
}
356+
}
357+
assert!(!src.is_null());
358+
assert!(!dst.is_null());
359+
assert!(is_enclave_range(src, len));
360+
assert!(is_user_range(dst, len));
361+
assert!(len < isize::MAX as usize);
362+
assert!(!(src as usize).overflowing_add(len).1);
363+
assert!(!(dst as usize).overflowing_add(len).1);
364+
365+
if len < 8 {
366+
// Can't align on 8 byte boundary: copy safely byte per byte
367+
unsafe {
368+
copy_bytewise_to_userspace(src, dst, len);
369+
}
370+
} else if len % 8 == 0 && dst as usize % 8 == 0 {
371+
// Copying 8-byte aligned quadwords: copy quad word per quad word
372+
unsafe {
373+
copy_aligned_quadwords_to_userspace(src, dst, len);
374+
}
375+
} else {
376+
// Split copies into three parts:
377+
// +--------+
378+
// | small0 | Chunk smaller than 8 bytes
379+
// +--------+
380+
// | big | Chunk 8-byte aligned, and size a multiple of 8 bytes
381+
// +--------+
382+
// | small1 | Chunk smaller than 8 bytes
383+
// +--------+
384+
385+
unsafe {
386+
// Copy small0
387+
let small0_size = (8 - dst as usize % 8) as u8;
388+
let small0_src = src;
389+
let small0_dst = dst;
390+
copy_bytewise_to_userspace(small0_src as _, small0_dst, small0_size as _);
391+
392+
// Copy big
393+
let small1_size = ((len - small0_size as usize) % 8) as u8;
394+
let big_size = len - small0_size as usize - small1_size as usize;
395+
let big_src = src.offset(small0_size as _);
396+
let big_dst = dst.offset(small0_size as _);
397+
copy_aligned_quadwords_to_userspace(big_src as _, big_dst, big_size);
398+
399+
// Copy small1
400+
let small1_src = src.offset(big_size as isize + small0_size as isize);
401+
let small1_dst = dst.offset(big_size as isize + small0_size as isize);
402+
copy_bytewise_to_userspace(small1_src, small1_dst, small1_size as _);
403+
}
404+
}
405+
}
406+
307407
#[unstable(feature = "sgx_platform", issue = "56975")]
308408
impl<T: ?Sized> UserRef<T>
309409
where
@@ -352,7 +452,7 @@ where
352452
pub fn copy_from_enclave(&mut self, val: &T) {
353453
unsafe {
354454
assert_eq!(mem::size_of_val(val), mem::size_of_val(&*self.0.get()));
355-
ptr::copy(
455+
copy_to_userspace(
356456
val as *const T as *const u8,
357457
self.0.get() as *mut T as *mut u8,
358458
mem::size_of_val(val),

library/std/src/sys/sgx/abi/usercalls/mod.rs

+2
Original file line number | Diff line number | Diff line change
@@ -6,6 +6,8 @@ use crate::time::{Duration, Instant};
66
pub(crate) mod alloc;
77
#[macro_use]
88
pub(crate) mod raw;
9+
#[cfg(test)]
10+
mod tests;
911

1012
use self::raw::*;
1113

Original file line number | Diff line number | Diff line change
@@ -0,0 +1,30 @@
1+
use super::alloc::copy_to_userspace;
2+
use super::alloc::User;
3+
4+
#[test]
5+
fn test_copy_function() {
6+
let mut src = [0u8; 100];
7+
let mut dst = User::<[u8]>::uninitialized(100);
8+
9+
for i in 0..src.len() {
10+
src[i] = i as _;
11+
}
12+
13+
for size in 0..48 {
14+
// For all possible alignment
15+
for offset in 0..8 {
16+
// overwrite complete dst
17+
dst.copy_from_enclave(&[0u8; 100]);
18+
19+
// Copy src[0..size] to dst + offset
20+
unsafe { copy_to_userspace(src.as_ptr(), dst.as_mut_ptr().offset(offset), size) };
21+
22+
// Verify copy
23+
for byte in 0..size {
24+
unsafe {
25+
assert_eq!(*dst.as_ptr().offset(offset + byte as isize), src[byte as usize]);
26+
}
27+
}
28+
}
29+
}
30+
}

library/std/src/thread/mod.rs

+11-6
Original file line number | Diff line number | Diff line change
@@ -159,6 +159,7 @@ use crate::cell::UnsafeCell;
159159
use crate::ffi::{CStr, CString};
160160
use crate::fmt;
161161
use crate::io;
162+
use crate::marker::PhantomData;
162163
use crate::mem;
163164
use crate::num::NonZeroU64;
164165
use crate::num::NonZeroUsize;
@@ -462,7 +463,7 @@ impl Builder {
462463
unsafe fn spawn_unchecked_<'a, 'scope, F, T>(
463464
self,
464465
f: F,
465-
scope_data: Option<&'scope scoped::ScopeData>,
466+
scope_data: Option<Arc<scoped::ScopeData>>,
466467
) -> io::Result<JoinInner<'scope, T>>
467468
where
468469
F: FnOnce() -> T,
@@ -479,8 +480,11 @@ impl Builder {
479480
}));
480481
let their_thread = my_thread.clone();
481482

482-
let my_packet: Arc<Packet<'scope, T>> =
483-
Arc::new(Packet { scope: scope_data, result: UnsafeCell::new(None) });
483+
let my_packet: Arc<Packet<'scope, T>> = Arc::new(Packet {
484+
scope: scope_data,
485+
result: UnsafeCell::new(None),
486+
_marker: PhantomData,
487+
});
484488
let their_packet = my_packet.clone();
485489

486490
let output_capture = crate::io::set_output_capture(None);
@@ -507,7 +511,7 @@ impl Builder {
507511
unsafe { *their_packet.result.get() = Some(try_result) };
508512
};
509513

510-
if let Some(scope_data) = scope_data {
514+
if let Some(scope_data) = &my_packet.scope {
511515
scope_data.increment_num_running_threads();
512516
}
513517

@@ -1298,8 +1302,9 @@ pub type Result<T> = crate::result::Result<T, Box<dyn Any + Send + 'static>>;
12981302
// An Arc to the packet is stored into a `JoinInner` which in turns is placed
12991303
// in `JoinHandle`.
13001304
struct Packet<'scope, T> {
1301-
scope: Option<&'scope scoped::ScopeData>,
1305+
scope: Option<Arc<scoped::ScopeData>>,
13021306
result: UnsafeCell<Option<Result<T>>>,
1307+
_marker: PhantomData<Option<&'scope scoped::ScopeData>>,
13031308
}
13041309

13051310
// Due to the usage of `UnsafeCell` we need to manually implement Sync.
@@ -1330,7 +1335,7 @@ impl<'scope, T> Drop for Packet<'scope, T> {
13301335
rtabort!("thread result panicked on drop");
13311336
}
13321337
// Book-keeping so the scope knows when it's done.
1333-
if let Some(scope) = self.scope {
1338+
if let Some(scope) = &self.scope {
13341339
// Now that there will be no more user code running on this thread
13351340
// that can use 'scope, mark the thread as 'finished'.
13361341
// It's important we only do this after the `result` has been dropped,

library/std/src/thread/scoped.rs

+6-4
Original file line number | Diff line number | Diff line change
@@ -11,7 +11,7 @@ use crate::sync::Arc;
1111
/// See [`scope`] for details.
1212
#[stable(feature = "scoped_threads", since = "1.63.0")]
1313
pub struct Scope<'scope, 'env: 'scope> {
14-
data: ScopeData,
14+
data: Arc<ScopeData>,
1515
/// Invariance over 'scope, to make sure 'scope cannot shrink,
1616
/// which is necessary for soundness.
1717
///
@@ -130,12 +130,14 @@ pub fn scope<'env, F, T>(f: F) -> T
130130
where
131131
F: for<'scope> FnOnce(&'scope Scope<'scope, 'env>) -> T,
132132
{
133+
// We put the `ScopeData` into an `Arc` so that other threads can finish their
134+
// `decrement_num_running_threads` even after this function returns.
133135
let scope = Scope {
134-
data: ScopeData {
136+
data: Arc::new(ScopeData {
135137
num_running_threads: AtomicUsize::new(0),
136138
main_thread: current(),
137139
a_thread_panicked: AtomicBool::new(false),
138-
},
140+
}),
139141
env: PhantomData,
140142
scope: PhantomData,
141143
};
@@ -250,7 +252,7 @@ impl Builder {
250252
F: FnOnce() -> T + Send + 'scope,
251253
T: Send + 'scope,
252254
{
253-
Ok(ScopedJoinHandle(unsafe { self.spawn_unchecked_(f, Some(&scope.data)) }?))
255+
Ok(ScopedJoinHandle(unsafe { self.spawn_unchecked_(f, Some(scope.data.clone())) }?))
254256
}
255257
}
256258

0 commit comments

Comments
 (0)