Rollup of 5 pull requests #60030

Merged (24 commits, Apr 17, 2019)

Commits
96404ee
Emit ansi color codes in the `rendered` field of json diagnostics
oli-obk Mar 12, 2019
dc8fec2
Update rustdoc
oli-obk Mar 12, 2019
c26f6db
Remove workaround code for a closed issue
oli-obk Mar 12, 2019
7d4f868
Don't dump diagnostics json if not absolutely necessary
oli-obk Mar 12, 2019
3be13c4
Adjust tests to new enum variant fields
oli-obk Mar 12, 2019
0421c60
Fix forgotten capitalization
oli-obk Mar 13, 2019
0a842e8
Update more unit tests to new API
oli-obk Mar 13, 2019
39b2137
Rename `colorful-json` to `json-rendered` and make it a selection ins…
oli-obk Mar 25, 2019
5fc4149
Do not render colorful json in rustdoc
oli-obk Apr 2, 2019
776c8a3
Update ui tests to latest master
oli-obk Apr 2, 2019
ae2ed21
Improve wording
JohnTitor Apr 2, 2019
325936a
Do not render ascii colors to buffers
oli-obk Apr 9, 2019
dce86f9
Make wording verbose
JohnTitor Apr 15, 2019
4075415
Fix tests
JohnTitor Apr 15, 2019
eb8e426
LLD is not supported on Darwin
petrhosek Apr 15, 2019
5c6a43a
Don't test json with color codes on windows
oli-obk Apr 16, 2019
9b21324
Miri now supports entropy, but is still slow
RalfJung Apr 16, 2019
19485cc
Miri: refactor new allocation tagging
RalfJung Apr 15, 2019
d55e4b7
test sort_unstable in Miri
RalfJung Apr 17, 2019
c89bc54
Rollup merge of #59128 - oli-obk:colorful_json, r=mark-i-m,eddyb
Centril Apr 17, 2019
0349b78
Rollup merge of #59646 - JohnTitor:improve-wording, r=oli-obk
Centril Apr 17, 2019
3049f05
Rollup merge of #59986 - RalfJung:miri-new-alloc, r=oli-obk
Centril Apr 17, 2019
23e8aaf
Rollup merge of #60003 - petrhosek:llvm-lto-lld, r=cramertj
Centril Apr 17, 2019
eb958e1
Rollup merge of #60018 - RalfJung:miri-test-libstd, r=oli-obk
Centril Apr 17, 2019
6 changes: 4 additions & 2 deletions src/bootstrap/native.rs
@@ -156,8 +156,10 @@ impl Step for Llvm {
.define("LLVM_DEFAULT_TARGET_TRIPLE", target);

if builder.config.llvm_thin_lto && !emscripten {
cfg.define("LLVM_ENABLE_LTO", "Thin")
.define("LLVM_ENABLE_LLD", "ON");
cfg.define("LLVM_ENABLE_LTO", "Thin");
if !target.contains("apple") {
cfg.define("LLVM_ENABLE_LLD", "ON");
}
}

// By default, LLVM will automatically find OCaml and, if it finds it,
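In effect, ThinLTO is still requested on every target, but the LLD define is only added for non-Apple targets. A minimal standalone sketch of that gating logic; the `lto_defines` helper and the hard-coded triples are illustrative, not part of the bootstrap code:

```rust
// Illustrative only: mirrors the gating shown in the diff above.
fn lto_defines(target: &str) -> Vec<(&'static str, &'static str)> {
    let mut defines = vec![("LLVM_ENABLE_LTO", "Thin")];
    // LLD is not supported on Darwin, so only enable it for other targets.
    if !target.contains("apple") {
        defines.push(("LLVM_ENABLE_LLD", "ON"));
    }
    defines
}

fn main() {
    assert_eq!(lto_defines("x86_64-apple-darwin"), [("LLVM_ENABLE_LTO", "Thin")]);
    assert_eq!(
        lto_defines("x86_64-unknown-linux-gnu"),
        [("LLVM_ENABLE_LTO", "Thin"), ("LLVM_ENABLE_LLD", "ON")]
    );
}
```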
2 changes: 1 addition & 1 deletion src/liballoc/tests/binary_heap.rs
@@ -282,7 +282,7 @@ fn assert_covariance() {
//
// Destructors must be called exactly once per element.
#[test]
#[cfg(not(miri))] // Miri does not support panics nor entropy
#[cfg(not(miri))] // Miri does not support catching panics
fn panic_safe() {
static DROP_COUNTER: AtomicUsize = AtomicUsize::new(0);

21 changes: 15 additions & 6 deletions src/liballoc/tests/slice.rs
@@ -389,7 +389,7 @@ fn test_reverse() {
}

#[test]
#[cfg(not(miri))] // Miri does not support entropy
#[cfg(not(miri))] // Miri is too slow
fn test_sort() {
let mut rng = thread_rng();

@@ -466,10 +466,19 @@ fn test_sort() {
}

#[test]
#[cfg(not(miri))] // Miri does not support entropy
fn test_sort_stability() {
for len in (2..25).chain(500..510) {
for _ in 0..10 {
#[cfg(not(miri))] // Miri is too slow
let large_range = 500..510;
#[cfg(not(miri))] // Miri is too slow
let rounds = 10;

#[cfg(miri)]
let large_range = 0..0; // empty range
#[cfg(miri)]
let rounds = 1;

for len in (2..25).chain(large_range) {
for _ in 0..rounds {
let mut counts = [0; 10];

// create a vector like [(6, 1), (5, 1), (6, 2), ...],
@@ -1397,7 +1406,7 @@ fn test_box_slice_clone() {
#[test]
#[allow(unused_must_use)] // here, we care about the side effects of `.clone()`
#[cfg_attr(target_os = "emscripten", ignore)]
#[cfg(not(miri))] // Miri does not support threads nor entropy
#[cfg(not(miri))] // Miri does not support threads
fn test_box_slice_clone_panics() {
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering};
@@ -1589,7 +1598,7 @@ thread_local!(static SILENCE_PANIC: Cell<bool> = Cell::new(false));

#[test]
#[cfg_attr(target_os = "emscripten", ignore)] // no threads
#[cfg(not(miri))] // Miri does not support threads nor entropy
#[cfg(not(miri))] // Miri does not support threads
fn panic_safe() {
let prev = panic::take_hook();
panic::set_hook(Box::new(move |info| {
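The idea in these test changes is to shrink the workload under Miri instead of skipping the test outright; the same pattern recurs in the `libcore` slice tests below. A minimal, self-contained sketch of the technique, with hypothetical names and ranges rather than the actual test body:

```rust
#[test]
fn scaled_down_under_miri() {
    // Full workload on the normal test runner...
    #[cfg(not(miri))]
    let (large_range, rounds) = (500..510, 10);
    // ...but a tiny one under Miri, so the test still runs there.
    #[cfg(miri)]
    let (large_range, rounds) = (0..0, 1); // empty range, single round

    for len in (2..25).chain(large_range) {
        for _ in 0..rounds {
            // Exercise the code under test with `len` elements.
            let v: Vec<i32> = (0..len).collect();
            assert_eq!(v.len() as i32, len);
        }
    }
}
```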
17 changes: 13 additions & 4 deletions src/libcore/tests/slice.rs
@@ -1024,22 +1024,31 @@ fn test_rotate_right() {

#[test]
#[cfg(not(target_arch = "wasm32"))]
#[cfg(not(miri))] // Miri does not support entropy
fn sort_unstable() {
use core::cmp::Ordering::{Equal, Greater, Less};
use core::slice::heapsort;
use rand::{FromEntropy, Rng, rngs::SmallRng, seq::SliceRandom};

#[cfg(not(miri))] // Miri is too slow
let large_range = 500..510;
#[cfg(not(miri))] // Miri is too slow
let rounds = 100;

#[cfg(miri)]
let large_range = 0..0; // empty range
#[cfg(miri)]
let rounds = 1;

let mut v = [0; 600];
let mut tmp = [0; 600];
let mut rng = SmallRng::from_entropy();

for len in (2..25).chain(500..510) {
for len in (2..25).chain(large_range) {
let v = &mut v[0..len];
let tmp = &mut tmp[0..len];

for &modulus in &[5, 10, 100, 1000] {
for _ in 0..100 {
for _ in 0..rounds {
for i in 0..len {
v[i] = rng.gen::<i32>() % modulus;
}
@@ -1095,7 +1104,7 @@ fn sort_unstable() {

#[test]
#[cfg(not(target_arch = "wasm32"))]
#[cfg(not(miri))] // Miri does not support entropy
#[cfg(not(miri))] // Miri is too slow
fn partition_at_index() {
use core::cmp::Ordering::{Equal, Greater, Less};
use rand::rngs::SmallRng;
73 changes: 20 additions & 53 deletions src/librustc/mir/interpret/allocation.rs
@@ -45,12 +45,10 @@ pub struct Allocation<Tag=(),Extra=()> {
}


pub trait AllocationExtra<Tag, MemoryExtra>: ::std::fmt::Debug + Clone {
/// Hook to initialize the extra data when an allocation gets created.
fn memory_allocated(
_size: Size,
_memory_extra: &MemoryExtra
) -> Self;
pub trait AllocationExtra<Tag>: ::std::fmt::Debug + Clone {
// There is no constructor in here because the constructor's type depends
// on `MemoryKind`, and making things sufficiently generic leads to painful
// inference failure.

/// Hook for performing extra checks on a memory read access.
///
@@ -88,15 +86,8 @@ pub trait AllocationExtra<Tag, MemoryExtra>: ::std::fmt::Debug + Clone {
}
}

impl AllocationExtra<(), ()> for () {
#[inline(always)]
fn memory_allocated(
_size: Size,
_memory_extra: &()
) -> Self {
()
}
}
// For Tag=() and no extra state, there is a trivial implementation.
impl AllocationExtra<()> for () { }

impl<Tag, Extra> Allocation<Tag, Extra> {
/// Creates a read-only allocation initialized by the given bytes
@@ -159,23 +150,21 @@ impl<'tcx, Tag, Extra> Allocation<Tag, Extra> {
}

/// Byte accessors
impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
/// The last argument controls whether we error out when there are undefined
/// or pointer bytes. You should never call this, call `get_bytes` or
/// `get_bytes_with_undef_and_ptr` instead,
///
/// This function also guarantees that the resulting pointer will remain stable
/// even when new allocations are pushed to the `HashMap`. `copy_repeatedly` relies
/// on that.
fn get_bytes_internal<MemoryExtra>(
fn get_bytes_internal(
&self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
size: Size,
check_defined_and_ptr: bool,
) -> EvalResult<'tcx, &[u8]>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
self.check_bounds(cx, ptr, size)?;

@@ -196,43 +185,37 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
}

#[inline]
pub fn get_bytes<MemoryExtra>(
pub fn get_bytes(
&self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
size: Size,
) -> EvalResult<'tcx, &[u8]>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
self.get_bytes_internal(cx, ptr, size, true)
}

/// It is the caller's responsibility to handle undefined and pointer bytes.
/// However, this still checks that there are no relocations on the *edges*.
#[inline]
pub fn get_bytes_with_undef_and_ptr<MemoryExtra>(
pub fn get_bytes_with_undef_and_ptr(
&self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
size: Size,
) -> EvalResult<'tcx, &[u8]>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
self.get_bytes_internal(cx, ptr, size, false)
}

/// Just calling this already marks everything as defined and removes relocations,
/// so be sure to actually put data there!
pub fn get_bytes_mut<MemoryExtra>(
pub fn get_bytes_mut(
&mut self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
size: Size,
) -> EvalResult<'tcx, &mut [u8]>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
assert_ne!(size.bytes(), 0, "0-sized accesses should never even get a `Pointer`");
self.check_bounds(cx, ptr, size)?;
@@ -250,16 +233,14 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
}

/// Reading and writing
impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
/// Reads bytes until a `0` is encountered. Will error if the end of the allocation is reached
/// before a `0` is found.
pub fn read_c_str<MemoryExtra>(
pub fn read_c_str(
&self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
) -> EvalResult<'tcx, &[u8]>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
assert_eq!(ptr.offset.bytes() as usize as u64, ptr.offset.bytes());
let offset = ptr.offset.bytes() as usize;
@@ -278,15 +259,13 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
/// Validates that `ptr.offset` and `ptr.offset + size` do not point to the middle of a
/// relocation. If `allow_ptr_and_undef` is `false`, also enforces that the memory in the
/// given range contains neither relocations nor undef bytes.
pub fn check_bytes<MemoryExtra>(
pub fn check_bytes(
&self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
size: Size,
allow_ptr_and_undef: bool,
) -> EvalResult<'tcx>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
// Check bounds and relocations on the edges
self.get_bytes_with_undef_and_ptr(cx, ptr, size)?;
@@ -301,30 +280,26 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
/// Writes `src` to the memory starting at `ptr.offset`.
///
/// Will do bounds checks on the allocation.
pub fn write_bytes<MemoryExtra>(
pub fn write_bytes(
&mut self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
src: &[u8],
) -> EvalResult<'tcx>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
let bytes = self.get_bytes_mut(cx, ptr, Size::from_bytes(src.len() as u64))?;
bytes.clone_from_slice(src);
Ok(())
}

/// Sets `count` bytes starting at `ptr.offset` with `val`. Basically `memset`.
pub fn write_repeat<MemoryExtra>(
pub fn write_repeat(
&mut self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
val: u8,
count: Size
) -> EvalResult<'tcx>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
let bytes = self.get_bytes_mut(cx, ptr, count)?;
for b in bytes {
@@ -341,14 +316,12 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
/// being valid for ZSTs
///
/// Note: This function does not do *any* alignment checks, you need to do these before calling
pub fn read_scalar<MemoryExtra>(
pub fn read_scalar(
&self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
size: Size
) -> EvalResult<'tcx, ScalarMaybeUndef<Tag>>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
// get_bytes_unchecked tests relocation edges
let bytes = self.get_bytes_with_undef_and_ptr(cx, ptr, size)?;
@@ -379,13 +352,11 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
}

/// Note: This function does not do *any* alignment checks, you need to do these before calling
pub fn read_ptr_sized<MemoryExtra>(
pub fn read_ptr_sized(
&self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
) -> EvalResult<'tcx, ScalarMaybeUndef<Tag>>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
self.read_scalar(cx, ptr, cx.data_layout().pointer_size)
}
@@ -398,15 +369,13 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
/// being valid for ZSTs
///
/// Note: This function does not do *any* alignment checks, you need to do these before calling
pub fn write_scalar<MemoryExtra>(
pub fn write_scalar(
&mut self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
val: ScalarMaybeUndef<Tag>,
type_size: Size,
) -> EvalResult<'tcx>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
let val = match val {
ScalarMaybeUndef::Scalar(scalar) => scalar,
@@ -446,14 +415,12 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
}

/// Note: This function does not do *any* alignment checks, you need to do these before calling
pub fn write_ptr_sized<MemoryExtra>(
pub fn write_ptr_sized(
&mut self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
val: ScalarMaybeUndef<Tag>
) -> EvalResult<'tcx>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
let ptr_size = cx.data_layout().pointer_size;
self.write_scalar(cx, ptr.into(), val, ptr_size)
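The net effect of this refactor is that `AllocationExtra` no longer carries a `MemoryExtra` type parameter or an allocation-time constructor hook; only the per-access hooks with default implementations remain, and the byte accessors now take `Extra: AllocationExtra<Tag>` directly on the impl block instead of a per-method `where` clause. A simplified, self-contained sketch of that trait shape; the hook names, the `String` error type, and the `usize` offsets are stand-ins, not the real librustc signatures:

```rust
use std::fmt::Debug;

#[allow(dead_code)]
struct Allocation<Tag = (), Extra = ()> {
    bytes: Vec<u8>,
    tag: Tag,
    extra: Extra,
}

// After the refactor the trait is parameterized only by `Tag`; there is no
// constructor hook, just per-access checks with default no-op bodies.
trait AllocationExtra<Tag>: Debug + Clone {
    /// Hook for extra checks on a memory read access.
    fn memory_read(
        _alloc: &Allocation<Tag, Self>,
        _offset: usize,
        _size: usize,
    ) -> Result<(), String>
    where
        Self: Sized,
    {
        Ok(())
    }

    /// Hook for extra checks on a memory write access.
    fn memory_written(
        _alloc: &mut Allocation<Tag, Self>,
        _offset: usize,
        _size: usize,
    ) -> Result<(), String>
    where
        Self: Sized,
    {
        Ok(())
    }
}

// For `Tag = ()` and no extra state, the implementation is trivial.
impl AllocationExtra<()> for () {}
```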
8 changes: 7 additions & 1 deletion src/librustc/mir/interpret/pointer.rs
@@ -94,11 +94,17 @@ impl<'tcx> Pointer<()> {
Pointer { alloc_id, offset, tag: () }
}

#[inline(always)]
pub fn with_tag<Tag>(self, tag: Tag) -> Pointer<Tag>
{
Pointer::new_with_tag(self.alloc_id, self.offset, tag)
}

#[inline(always)]
pub fn with_default_tag<Tag>(self) -> Pointer<Tag>
where Tag: Default
{
Pointer::new_with_tag(self.alloc_id, self.offset, Default::default())
self.with_tag(Tag::default())
}
}
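With `with_tag` in place, `with_default_tag` becomes a one-line delegation, and callers (such as Miri's new allocation tagging) can attach a machine-specific tag directly. A minimal standalone sketch of the pattern; the field types and the example tags are simplified, not the librustc definitions:

```rust
#[derive(Debug, PartialEq)]
struct Pointer<Tag = ()> {
    alloc_id: u64,
    offset: u64,
    tag: Tag,
}

impl Pointer<()> {
    fn new(alloc_id: u64, offset: u64) -> Self {
        Pointer { alloc_id, offset, tag: () }
    }

    /// Attach an arbitrary tag, consuming the untagged pointer.
    fn with_tag<Tag>(self, tag: Tag) -> Pointer<Tag> {
        Pointer { alloc_id: self.alloc_id, offset: self.offset, tag }
    }

    /// The old entry point now just delegates to `with_tag`.
    fn with_default_tag<Tag: Default>(self) -> Pointer<Tag> {
        self.with_tag(Tag::default())
    }
}

fn main() {
    let p = Pointer::new(1, 8).with_tag("borrow-id-42");
    assert_eq!(p.tag, "borrow-id-42");

    let q: Pointer<u32> = Pointer::new(1, 8).with_default_tag();
    assert_eq!(q.tag, 0);
}
```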
