
Rollup of 7 pull requests #114331


Merged Aug 1, 2023 · 18 commits

Commits
0f06b07
Implement UnwindSafe and RefUnwindSafe for Backtrace
KamilaBorowska Aug 12, 2022
b169ee7
fix alignment handling for Repeat expressions
RalfJung Jul 31, 2023
b84942a
[rustc_data_structures][perf] Simplify base_n::push_str.
ttsugriy Jul 31, 2023
5a808d4
Add some line comments to enum `CoverageKind`
Zalathar Jul 4, 2023
1a014d4
Replace `ExpressionOperandId` with enum `Operand`
Zalathar Jun 26, 2023
f103db8
Make coverage expression IDs count up from 0, not down from `u32::MAX`
Zalathar Jun 29, 2023
3920e07
Make coverage counter IDs count up from 0, not 1
Zalathar Jun 29, 2023
3eb5733
Always use parking_lot's RwLock, even without parallel compiler
oli-obk Jul 31, 2023
ad0729e
Improve diagnostic for wrong borrow on binary operations
Urgau Jul 31, 2023
206bfc4
Cover statements for stable_mir
ouz-a Aug 1, 2023
2ff62fd
clean up, use opaque types
ouz-a Aug 1, 2023
c97af34
Rollup merge of #100455 - xfix:backtrace-ref-unwind-safe, r=dtolnay
matthiaskrgr Aug 1, 2023
52bfceb
Rollup merge of #113428 - Zalathar:operand, r=davidtwco
matthiaskrgr Aug 1, 2023
b387180
Rollup merge of #114283 - oli-obk:parkin_lot_rwlock, r=SparrowLii
matthiaskrgr Aug 1, 2023
c726dcb
Rollup merge of #114288 - Urgau:fix-issue-109352, r=b-naber
matthiaskrgr Aug 1, 2023
fa33d6e
Rollup merge of #114296 - RalfJung:interpret-repeat-align, r=oli-obk
matthiaskrgr Aug 1, 2023
00ad3cc
Rollup merge of #114306 - ttsugriy:push_str, r=wesleywiser
matthiaskrgr Aug 1, 2023
41364c7
Rollup merge of #114320 - ouz-a:smir_statements, r=oli-obk
matthiaskrgr Aug 1, 2023
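
Of the pull requests rolled up here, #100455 ("Implement UnwindSafe and RefUnwindSafe for Backtrace") has no diff shown in this view. A minimal sketch of what the new impls permit, assuming a toolchain that includes the change:

```rust
use std::backtrace::Backtrace;
use std::panic::catch_unwind;

fn main() {
    let bt = Backtrace::capture();
    // With `Backtrace: RefUnwindSafe`, a closure that borrows `bt` is
    // UnwindSafe, so no AssertUnwindSafe wrapper is needed.
    let result = catch_unwind(|| format!("{bt}"));
    assert!(result.is_ok());
}
```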
10 changes: 4 additions & 6 deletions compiler/rustc_codegen_llvm/src/coverageinfo/ffi.rs
@@ -1,4 +1,4 @@
use rustc_middle::mir::coverage::{CounterValueReference, MappedExpressionIndex};
use rustc_middle::mir::coverage::{CounterId, MappedExpressionIndex};

/// Must match the layout of `LLVMRustCounterKind`.
#[derive(Copy, Clone, Debug)]
@@ -36,11 +36,9 @@ impl Counter {
Self { kind: CounterKind::Zero, id: 0 }
}

/// Constructs a new `Counter` of kind `CounterValueReference`, and converts
/// the given 1-based counter_id to the required 0-based equivalent for
/// the `Counter` encoding.
pub fn counter_value_reference(counter_id: CounterValueReference) -> Self {
Self { kind: CounterKind::CounterValueReference, id: counter_id.zero_based_index() }
/// Constructs a new `Counter` of kind `CounterValueReference`.
pub fn counter_value_reference(counter_id: CounterId) -> Self {
Self { kind: CounterKind::CounterValueReference, id: counter_id.as_u32() }
}

/// Constructs a new `Counter` of kind `Expression`.
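
The change above (from the #113428 commits) works because coverage counter IDs now count up from 0, matching LLVM's 0-based `Counter` encoding, so the old 1-to-0-based conversion disappears. A minimal sketch of the difference, using a hypothetical `u32`-backed newtype in place of rustc's macro-generated `CounterId`:

```rust
// Illustrative stand-in for rustc's index newtype.
#[derive(Copy, Clone, Debug, PartialEq)]
struct CounterId(u32);

impl CounterId {
    /// Old scheme: counter IDs started at 1, so the 0-based LLVM
    /// encoding needed an explicit subtraction.
    fn zero_based_index(self) -> u32 {
        self.0.checked_sub(1).expect("counter IDs were 1-based")
    }

    /// New scheme: IDs already count up from 0; the conversion is
    /// the identity, as in `Counter::counter_value_reference` above.
    fn as_u32(self) -> u32 {
        self.0
    }
}

fn main() {
    assert_eq!(CounterId(1).zero_based_index(), 0); // old encoding
    assert_eq!(CounterId(0).as_u32(), 0); // new encoding
}
```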
87 changes: 25 additions & 62 deletions compiler/rustc_codegen_llvm/src/coverageinfo/map_data.rs
@@ -3,24 +3,22 @@ pub use super::ffi::*;
use rustc_index::{IndexSlice, IndexVec};
use rustc_middle::bug;
use rustc_middle::mir::coverage::{
CodeRegion, CounterValueReference, ExpressionOperandId, InjectedExpressionId,
InjectedExpressionIndex, MappedExpressionIndex, Op,
CodeRegion, CounterId, ExpressionId, MappedExpressionIndex, Op, Operand,
};
use rustc_middle::ty::Instance;
use rustc_middle::ty::TyCtxt;

#[derive(Clone, Debug, PartialEq)]
pub struct Expression {
lhs: ExpressionOperandId,
lhs: Operand,
op: Op,
rhs: ExpressionOperandId,
rhs: Operand,
region: Option<CodeRegion>,
}

/// Collects all of the coverage regions associated with (a) injected counters, (b) counter
/// expressions (additions or subtraction), and (c) unreachable regions (always counted as zero),
/// for a given Function. Counters and counter expressions have non-overlapping `id`s because they
/// can both be operands in an expression. This struct also stores the `function_source_hash`,
/// for a given Function. This struct also stores the `function_source_hash`,
/// computed during instrumentation, and forwarded with counters.
///
/// Note, it may be important to understand LLVM's definitions of `unreachable` regions versus "gap
@@ -34,8 +32,8 @@ pub struct FunctionCoverage<'tcx> {
instance: Instance<'tcx>,
source_hash: u64,
is_used: bool,
counters: IndexVec<CounterValueReference, Option<CodeRegion>>,
expressions: IndexVec<InjectedExpressionIndex, Option<Expression>>,
counters: IndexVec<CounterId, Option<CodeRegion>>,
expressions: IndexVec<ExpressionId, Option<Expression>>,
unreachable_regions: Vec<CodeRegion>,
}

@@ -82,48 +80,36 @@ impl<'tcx> FunctionCoverage<'tcx> {
}

/// Adds a code region to be counted by an injected counter intrinsic.
pub fn add_counter(&mut self, id: CounterValueReference, region: CodeRegion) {
pub fn add_counter(&mut self, id: CounterId, region: CodeRegion) {
if let Some(previous_region) = self.counters[id].replace(region.clone()) {
assert_eq!(previous_region, region, "add_counter: code region for id changed");
}
}

/// Both counters and "counter expressions" (or simply, "expressions") can be operands in other
/// expressions. Expression IDs start from `u32::MAX` and go down, so the range of expression
/// IDs will not overlap with the range of counter IDs. Counters and expressions can be added in
/// any order, and expressions can still be assigned contiguous (though descending) IDs, without
/// knowing what the last counter ID will be.
///
/// When storing the expression data in the `expressions` vector in the `FunctionCoverage`
/// struct, its vector index is computed, from the given expression ID, by subtracting from
/// `u32::MAX`.
///
/// Since the expression operands (`lhs` and `rhs`) can reference either counters or
/// expressions, an operand that references an expression also uses its original ID, descending
/// from `u32::MAX`. Theses operands are translated only during code generation, after all
/// counters and expressions have been added.
/// expressions. These are tracked as separate variants of `Operand`, so there is no ambiguity
/// between operands that are counter IDs and operands that are expression IDs.
pub fn add_counter_expression(
&mut self,
expression_id: InjectedExpressionId,
lhs: ExpressionOperandId,
expression_id: ExpressionId,
lhs: Operand,
op: Op,
rhs: ExpressionOperandId,
rhs: Operand,
region: Option<CodeRegion>,
) {
debug!(
"add_counter_expression({:?}, lhs={:?}, op={:?}, rhs={:?} at {:?}",
expression_id, lhs, op, rhs, region
);
let expression_index = self.expression_index(u32::from(expression_id));
debug_assert!(
expression_index.as_usize() < self.expressions.len(),
"expression_index {} is out of range for expressions.len() = {}
expression_id.as_usize() < self.expressions.len(),
"expression_id {} is out of range for expressions.len() = {}
for {:?}",
expression_index.as_usize(),
expression_id.as_usize(),
self.expressions.len(),
self,
);
if let Some(previous_expression) = self.expressions[expression_index].replace(Expression {
if let Some(previous_expression) = self.expressions[expression_id].replace(Expression {
lhs,
op,
rhs,
@@ -186,14 +172,11 @@ impl<'tcx> FunctionCoverage<'tcx> {

// This closure converts any `Expression` operand (`lhs` or `rhs` of the `Op::Add` or
// `Op::Subtract` operation) into its native `llvm::coverage::Counter::CounterKind` type
// and value. Operand ID value `0` maps to `CounterKind::Zero`; values in the known range
// of injected LLVM counters map to `CounterKind::CounterValueReference` (and the value
// matches the injected counter index); and any other value is converted into a
// `CounterKind::Expression` with the expression's `new_index`.
// and value.
//
// Expressions will be returned from this function in a sequential vector (array) of
// `CounterExpression`, so the expression IDs must be mapped from their original,
// potentially sparse set of indexes, originally in reverse order from `u32::MAX`.
// potentially sparse set of indexes.
//
// An `Expression` as an operand will have already been encountered as an `Expression` with
// operands, so its new_index will already have been generated (as a 1-up index value).
@@ -206,34 +189,19 @@ impl<'tcx> FunctionCoverage<'tcx> {
// `expression_index`s lower than the referencing `Expression`. Therefore, it is
// reasonable to look up the new index of an expression operand while the `new_indexes`
// vector is only complete up to the current `ExpressionIndex`.
let id_to_counter = |new_indexes: &IndexSlice<
InjectedExpressionIndex,
Option<MappedExpressionIndex>,
>,
id: ExpressionOperandId| {
if id == ExpressionOperandId::ZERO {
Some(Counter::zero())
} else if id.index() < self.counters.len() {
debug_assert!(
id.index() > 0,
"ExpressionOperandId indexes for counters are 1-based, but this id={}",
id.index()
);
// Note: Some codegen-injected Counters may be only referenced by `Expression`s,
// and may not have their own `CodeRegion`s,
let index = CounterValueReference::from(id.index());
// Note, the conversion to LLVM `Counter` adjusts the index to be zero-based.
Some(Counter::counter_value_reference(index))
} else {
let index = self.expression_index(u32::from(id));
type NewIndexes = IndexSlice<ExpressionId, Option<MappedExpressionIndex>>;
let id_to_counter = |new_indexes: &NewIndexes, operand: Operand| match operand {
Operand::Zero => Some(Counter::zero()),
Operand::Counter(id) => Some(Counter::counter_value_reference(id)),
Operand::Expression(id) => {
self.expressions
.get(index)
.get(id)
.expect("expression id is out of range")
.as_ref()
// If an expression was optimized out, assume it would have produced a count
// of zero. This ensures that expressions dependent on optimized-out
// expressions are still valid.
.map_or(Some(Counter::zero()), |_| new_indexes[index].map(Counter::expression))
.map_or(Some(Counter::zero()), |_| new_indexes[id].map(Counter::expression))
}
};

@@ -340,9 +308,4 @@ impl<'tcx> FunctionCoverage<'tcx> {
fn unreachable_regions(&self) -> impl Iterator<Item = (Counter, &CodeRegion)> {
self.unreachable_regions.iter().map(|region| (Counter::zero(), region))
}

fn expression_index(&self, id_descending_from_max: u32) -> InjectedExpressionIndex {
debug_assert!(id_descending_from_max >= self.counters.len() as u32);
InjectedExpressionIndex::from(u32::MAX - id_descending_from_max)
}
}
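
To make the `id_to_counter` closure above concrete: the old code told counters and expressions apart by numeric range (expression IDs descended from `u32::MAX`), while the new `Operand` enum encodes the distinction in the type. A minimal sketch, with plain `u32`s standing in for rustc's `CounterId` and `ExpressionId` newtypes:

```rust
// Hypothetical stand-ins for rustc's index newtypes.
type CounterId = u32;
type ExpressionId = u32;

#[derive(Copy, Clone, Debug)]
enum Operand {
    Zero,
    Counter(CounterId),
    Expression(ExpressionId),
}

fn describe(operand: Operand) -> String {
    // No range arithmetic: each operand kind is its own variant, so
    // counter and expression IDs may overlap numerically without ambiguity.
    match operand {
        Operand::Zero => "zero".into(),
        Operand::Counter(id) => format!("counter #{id}"),
        Operand::Expression(id) => format!("expression #{id}"),
    }
}

fn main() {
    // Counter 0 and expression 0 are distinct operands.
    println!("{}", describe(Operand::Counter(0)));
    println!("{}", describe(Operand::Expression(0)));
}
```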
16 changes: 7 additions & 9 deletions compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs
@@ -16,9 +16,7 @@ use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_llvm::RustString;
use rustc_middle::bug;
use rustc_middle::mir::coverage::{
CodeRegion, CounterValueReference, CoverageKind, ExpressionOperandId, InjectedExpressionId, Op,
};
use rustc_middle::mir::coverage::{CodeRegion, CounterId, CoverageKind, ExpressionId, Op, Operand};
use rustc_middle::mir::Coverage;
use rustc_middle::ty;
use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt};
@@ -33,7 +31,7 @@ mod ffi;
pub(crate) mod map_data;
pub mod mapgen;

const UNUSED_FUNCTION_COUNTER_ID: CounterValueReference = CounterValueReference::START;
const UNUSED_FUNCTION_COUNTER_ID: CounterId = CounterId::START;

const VAR_ALIGN_BYTES: usize = 8;

@@ -125,7 +123,7 @@ impl<'tcx> CoverageInfoBuilderMethods<'tcx> for Builder<'_, '_, 'tcx> {
let fn_name = bx.get_pgo_func_name_var(instance);
let hash = bx.const_u64(function_source_hash);
let num_counters = bx.const_u32(coverageinfo.num_counters);
let index = bx.const_u32(id.zero_based_index());
let index = bx.const_u32(id.as_u32());
debug!(
"codegen intrinsic instrprof.increment(fn_name={:?}, hash={:?}, num_counters={:?}, index={:?})",
fn_name, hash, num_counters, index,
@@ -178,7 +176,7 @@ impl<'tcx> Builder<'_, '_, 'tcx> {
fn add_coverage_counter(
&mut self,
instance: Instance<'tcx>,
id: CounterValueReference,
id: CounterId,
region: CodeRegion,
) -> bool {
if let Some(coverage_context) = self.coverage_context() {
@@ -202,10 +200,10 @@ impl<'tcx> Builder<'_, '_, 'tcx> {
fn add_coverage_counter_expression(
&mut self,
instance: Instance<'tcx>,
id: InjectedExpressionId,
lhs: ExpressionOperandId,
id: ExpressionId,
lhs: Operand,
op: Op,
rhs: ExpressionOperandId,
rhs: Operand,
region: Option<CodeRegion>,
) -> bool {
if let Some(coverage_context) = self.coverage_context() {
8 changes: 4 additions & 4 deletions compiler/rustc_const_eval/src/interpret/step.rs
@@ -208,13 +208,13 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let rest_ptr = first_ptr.offset(elem_size, self)?;
// For the alignment of `rest_ptr`, we crucially do *not* use `first.align` as
// that place might be more aligned than its type mandates (a `u8` array could
// be 4-aligned if it sits at the right spot in a struct). Instead we use
// `first.layout.align`, i.e., the alignment given by the type.
// be 4-aligned if it sits at the right spot in a struct). We have to also factor
// in element size.
self.mem_copy_repeatedly(
first_ptr,
first.align,
dest.align,
rest_ptr,
first.layout.align.abi,
dest.align.restrict_for_offset(elem_size),
elem_size,
length - 1,
/*nonoverlapping:*/ true,
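
The interpreter fix above (#114296) hinges on one fact: the pointer to element 1 of a repeat expression is only as aligned as the destination's alignment restricted by the element size, since it sits `elem_size` bytes past the start. A sketch of that restriction rule, assuming `restrict_for_offset` follows the idea documented in rustc (largest alignment still guaranteed after stepping by `offset` bytes); this illustrates the rule rather than reproducing rustc's `Align` type:

```rust
// Power-of-two alignment in bytes, standing in for rustc's `Align`.
#[derive(Copy, Clone, Debug, PartialEq)]
struct Align(u64);

impl Align {
    fn restrict_for_offset(self, offset_bytes: u64) -> Align {
        if offset_bytes == 0 {
            self
        } else {
            // An address known to be `self`-aligned, plus a multiple of
            // `offset_bytes`, is only guaranteed aligned to the largest
            // power of two dividing `offset_bytes` (capped at `self`).
            Align(self.0.min(1u64 << offset_bytes.trailing_zeros()))
        }
    }
}

fn main() {
    // A 16-aligned `[u16; N]`: element 1 sits 2 bytes in, so the tail
    // copy may only assume 2-byte alignment.
    assert_eq!(Align(16).restrict_for_offset(2), Align(2));
    // Stepping by 32 bytes keeps the full 16-byte guarantee.
    assert_eq!(Align(16).restrict_for_offset(32), Align(16));
}
```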
7 changes: 3 additions & 4 deletions compiler/rustc_data_structures/src/base_n.rs
@@ -16,22 +16,21 @@ const BASE_64: &[u8; MAX_BASE] =
pub fn push_str(mut n: u128, base: usize, output: &mut String) {
debug_assert!(base >= 2 && base <= MAX_BASE);
let mut s = [0u8; 128];
let mut index = 0;
let mut index = s.len();

let base = base as u128;

loop {
index -= 1;
s[index] = BASE_64[(n % base) as usize];
index += 1;
n /= base;

if n == 0 {
break;
}
}
s[0..index].reverse();

output.push_str(str::from_utf8(&s[0..index]).unwrap());
output.push_str(str::from_utf8(&s[index..]).unwrap());
}

#[inline]
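
The `push_str` rewrite above (#114306) drops the final `reverse()` by filling the scratch buffer from the end: the modulo loop emits digits least-significant first, so writing backwards leaves them in reading order. A self-contained sketch of the same technique, using a base-16 digit table in place of rustc's `BASE_64`:

```rust
const DIGITS: &[u8; 16] = b"0123456789abcdef";

fn push_hex(mut n: u128, output: &mut String) {
    let mut buf = [0u8; 128];
    let mut index = buf.len();
    loop {
        // Digits come out least-significant first; writing from the
        // back of the buffer puts them in the correct final order.
        index -= 1;
        buf[index] = DIGITS[(n % 16) as usize];
        n /= 16;
        if n == 0 {
            break;
        }
    }
    output.push_str(std::str::from_utf8(&buf[index..]).unwrap());
}

fn main() {
    let mut s = String::new();
    push_hex(48879, &mut s);
    assert_eq!(s, "beef"); // 48879 == 0xBEEF
}
```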
28 changes: 7 additions & 21 deletions compiler/rustc_data_structures/src/sync/vec.rs
@@ -43,37 +43,23 @@ impl<I: Idx, T: Copy> AppendOnlyIndexVec<I, T> {

#[derive(Default)]
pub struct AppendOnlyVec<T: Copy> {
#[cfg(not(parallel_compiler))]
vec: elsa::vec::FrozenVec<T>,
#[cfg(parallel_compiler)]
vec: elsa::sync::LockFreeFrozenVec<T>,
vec: parking_lot::RwLock<Vec<T>>,
}

impl<T: Copy> AppendOnlyVec<T> {
pub fn new() -> Self {
Self {
#[cfg(not(parallel_compiler))]
vec: elsa::vec::FrozenVec::new(),
#[cfg(parallel_compiler)]
vec: elsa::sync::LockFreeFrozenVec::new(),
}
Self { vec: Default::default() }
}

pub fn push(&self, val: T) -> usize {
#[cfg(not(parallel_compiler))]
let i = self.vec.len();
#[cfg(not(parallel_compiler))]
self.vec.push(val);
#[cfg(parallel_compiler)]
let i = self.vec.push(val);
i
let mut v = self.vec.write();
let n = v.len();
v.push(val);
n
}

pub fn get(&self, i: usize) -> Option<T> {
#[cfg(not(parallel_compiler))]
return self.vec.get_copy(i);
#[cfg(parallel_compiler)]
return self.vec.get(i);
self.vec.read().get(i).copied()
}

pub fn iter_enumerated(&self) -> impl Iterator<Item = (usize, T)> + '_ {
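
The sync/vec.rs change (#114283) replaces the cfg-gated `elsa` storage with a single `parking_lot::RwLock<Vec<T>>` shared by the serial and parallel compiler builds. A minimal standalone sketch of the resulting structure and its use, re-derived from the diff above (the `iter_enumerated` method is omitted; `parking_lot` is assumed as a dependency):

```rust
use parking_lot::RwLock;

#[derive(Default)]
struct AppendOnlyVec<T: Copy> {
    vec: RwLock<Vec<T>>,
}

impl<T: Copy> AppendOnlyVec<T> {
    fn push(&self, val: T) -> usize {
        // Short exclusive critical section: reserve the index, append.
        let mut v = self.vec.write();
        let n = v.len();
        v.push(val);
        n
    }

    fn get(&self, i: usize) -> Option<T> {
        // Readers take a shared lock and never block each other.
        self.vec.read().get(i).copied()
    }
}

fn main() {
    let v = AppendOnlyVec::<u32>::default();
    let i = v.push(7);
    assert_eq!(v.get(i), Some(7));
}
```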