Skip to content
This repository has been archived by the owner on Oct 4, 2022. It is now read-only.

Bugfix and API adjustment for reference types support. #79

Merged
merged 2 commits into from
Jul 2, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 6 additions & 7 deletions bin/minira.rs
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,7 @@ fn main() {
// Just so we can run it later. Not needed for actual allocation.
let original_func = func.clone();

let result = match allocate_registers_with_opts(&mut func, &reg_universe, &None, opts.clone()) {
let result = match allocate_registers_with_opts(&mut func, &reg_universe, None, opts.clone()) {
Err(e) => {
println!("allocation failed: {}", e);
return;
Expand Down Expand Up @@ -274,7 +274,7 @@ mod test_utils {
&reg_universe,
RunStage::BeforeRegalloc,
);
let result = allocate_registers_with_opts(&mut func, &reg_universe, &None, opts)
let result = allocate_registers_with_opts(&mut func, &reg_universe, None, opts)
.unwrap_or_else(|err| {
panic!("allocation failed: {}", err);
});
Expand All @@ -301,7 +301,7 @@ mod test_utils {
allocate_registers(
&mut func,
&reg_universe,
&None,
None,
AlgorithmWithDefaults::LinearScan,
)
}
Expand Down Expand Up @@ -329,7 +329,7 @@ mod test_utils {
.allocate(opts.clone())
.expect("generic allocator failed!");

let result = allocate_registers_with_opts(&mut func, &reg_universe, &None, opts)
let result = allocate_registers_with_opts(&mut func, &reg_universe, None, opts)
.unwrap_or_else(|err| {
panic!("allocation failed: {}", err);
});
Expand Down Expand Up @@ -375,9 +375,8 @@ mod test_utils {
.allocate(opts.clone())
.expect("generic allocator failed!");

let result =
allocate_registers_with_opts(&mut func, &reg_universe, &None, opts.clone())
.expect("regalloc failure");
let result = allocate_registers_with_opts(&mut func, &reg_universe, None, opts.clone())
.expect("regalloc failure");

func.update_from_alloc(result);
func.print("AFTER", &None);
Expand Down
2 changes: 1 addition & 1 deletion fuzz/fuzz_bt.rs
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ fuzz_target!(|func: ir::Func| {
algorithm: regalloc::Algorithm::Backtracking(Default::default()),
};

let ra_result = regalloc::allocate_registers_with_opts(&mut func, &reg_universe, &None, opts);
let ra_result = regalloc::allocate_registers_with_opts(&mut func, &reg_universe, None, opts);

match ra_result {
Ok(result) => {
Expand Down
3 changes: 2 additions & 1 deletion fuzz/fuzz_bt_differential.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,8 @@ fuzz_target!(|func: ir::Func| {
algorithm: regalloc::Algorithm::Backtracking(Default::default()),
};

let result = match regalloc::allocate_registers_with_opts(&mut func, &reg_universe, &None, opts) {
let result = match regalloc::allocate_registers_with_opts(&mut func, &reg_universe, None, opts)
{
Ok(result) => result,
Err(err) => {
if let regalloc::RegAllocError::RegChecker(_) = &err {
Expand Down
2 changes: 1 addition & 1 deletion fuzz/fuzz_lsra.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ fuzz_target!(|func: ir::Func| {
let result = match regalloc::allocate_registers(
&mut func,
&reg_universe,
&None,
None,
regalloc::AlgorithmWithDefaults::LinearScan,
) {
Ok(result) => result,
Expand Down
2 changes: 1 addition & 1 deletion fuzz/fuzz_lsra_differential.rs
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ fuzz_target!(|func: ir::Func| {
let result = match regalloc::allocate_registers(
&mut func,
&reg_universe,
&None,
None,
regalloc::AlgorithmWithDefaults::LinearScan,
) {
Ok(result) => result,
Expand Down
46 changes: 41 additions & 5 deletions lib/src/analysis_data_flow.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,10 +8,10 @@ use std::fmt;
use crate::analysis_control_flow::CFGInfo;
use crate::data_structures::{
BlockIx, InstIx, InstPoint, MoveInfo, MoveInfoElem, Point, Queue, RangeFrag, RangeFragIx,
RangeFragKind, RangeFragMetrics, RealRange, RealRangeIx, RealReg, RealRegUniverse, Reg,
RegClass, RegSets, RegToRangesMaps, RegUsageCollector, RegVecBounds, RegVecs, RegVecsAndBounds,
SortedRangeFragIxs, SortedRangeFrags, SpillCost, TypedIxVec, VirtualRange, VirtualRangeIx,
VirtualReg,
RangeFragKind, RangeFragMetrics, RangeId, RealRange, RealRangeIx, RealReg, RealRegUniverse,
Reg, RegClass, RegSets, RegToRangesMaps, RegUsageCollector, RegVecBounds, RegVecs,
RegVecsAndBounds, SortedRangeFragIxs, SortedRangeFrags, SpillCost, TypedIxVec, VirtualRange,
VirtualRangeIx, VirtualReg,
};
use crate::sparse_set::SparseSet;
use crate::union_find::{ToFromU32, UnionFind};
Expand Down Expand Up @@ -1870,13 +1870,40 @@ pub fn compute_reg_to_ranges_maps<F: Function>(
}
}

// Collect info about registers that are connected by moves:
// Collect info about registers (and optionally Virtual/RealRanges) that are
// connected by moves:
#[inline(never)]
pub fn collect_move_info<F: Function>(
func: &F,
reg_vecs_and_bounds: &RegVecsAndBounds,
est_freqs: &TypedIxVec<BlockIx, u32>,
reg_to_ranges_maps: &RegToRangesMaps,
rlr_env: &TypedIxVec<RealRangeIx, RealRange>,
vlr_env: &TypedIxVec<VirtualRangeIx, VirtualRange>,
fenv: &TypedIxVec<RangeFragIx, RangeFrag>,
want_ranges: bool,
) -> MoveInfo {
// Helper: find the RealRange or VirtualRange for a register at an InstPoint.
let find_range_for_reg = |pt: InstPoint, reg: Reg| {
if !want_ranges {
return RangeId::invalid_value();
}
if reg.is_real() {
for &rlrix in &reg_to_ranges_maps.rreg_to_rlrs_map[reg.get_index() as usize] {
if rlr_env[rlrix].sorted_frags.contains_pt(fenv, pt) {
return RangeId::new_real(rlrix);
}
}
} else {
for &vlrix in &reg_to_ranges_maps.vreg_to_vlrs_map[reg.get_index() as usize] {
if vlr_env[vlrix].sorted_frags.contains_pt(pt) {
return RangeId::new_virtual(vlrix);
}
}
}
RangeId::invalid_value()
};

let mut moves = Vec::<MoveInfoElem>::new();
for b in func.blocks() {
let block_eef = est_freqs[b];
Expand Down Expand Up @@ -1908,9 +1935,18 @@ pub fn collect_move_info<F: Function>(
let dst = wreg.to_reg();
let src = reg;
let est_freq = block_eef;

// Find the ranges for source and dest, if requested.
let (src_range, dst_range) = (
find_range_for_reg(InstPoint::new(iix, Point::Use), src),
find_range_for_reg(InstPoint::new(iix, Point::Def), dst),
);

moves.push(MoveInfoElem {
dst,
dst_range,
src,
src_range,
iix,
est_freq,
});
Expand Down
5 changes: 5 additions & 0 deletions lib/src/analysis_main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -277,6 +277,11 @@ pub fn run_analysis<F: Function>(
func,
&reg_vecs_and_bounds,
&estimated_frequencies,
reg_to_ranges_maps.as_ref().unwrap(),
&rlr_env,
&vlr_env,
&frag_env,
/* want_ranges = */ client_wants_stackmaps,
))
} else {
None
Expand Down
73 changes: 21 additions & 52 deletions lib/src/analysis_reftypes.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@ use crate::data_structures::{
};
use crate::sparse_set::SparseSet;

use log::debug;

pub fn do_reftypes_analysis(
// From dataflow/liveness analysis. Modified by setting their is_ref bit.
rlr_env: &mut TypedIxVec<RealRangeIx, RealRange>,
Expand All @@ -27,63 +29,26 @@ pub fn do_reftypes_analysis(

let mut range_pairs = Vec::<(RangeId, RangeId)>::new(); // (DST, SRC)

for MoveInfoElem {
dst: dst_reg,
src: src_reg,
iix: _,
est_freq: _,
debug!("do_reftypes_analysis starting");

for &MoveInfoElem {
dst,
src,
src_range,
dst_range,
iix,
..
} in &move_info.moves
{
debug_assert!(dst_reg.get_class() == src_reg.get_class());

// Don't waste time processing moves which can't possibly be of reftyped values.
if dst_reg.get_class() != reftype_class {
if dst.get_class() != reftype_class {
continue;
}

// This is kinda tiresome because of the differing representations, but .. construct the
// cartesian product of the range indicies for both the source and destination of the
// move.

let dst_reg_ix = dst_reg.get_index();
let src_reg_ix = src_reg.get_index();
match (dst_reg.is_real(), src_reg.is_real()) {
(true, true) => {
// R <- R
for dst_ix in &reg_to_ranges_maps.rreg_to_rlrs_map[dst_reg_ix] {
for src_ix in &reg_to_ranges_maps.rreg_to_rlrs_map[src_reg_ix] {
range_pairs.push((RangeId::new_real(*dst_ix), RangeId::new_real(*src_ix)));
}
}
}
(true, false) => {
// R <- V
for dst_ix in &reg_to_ranges_maps.rreg_to_rlrs_map[dst_reg_ix] {
for src_ix in &reg_to_ranges_maps.vreg_to_vlrs_map[src_reg_ix] {
range_pairs
.push((RangeId::new_real(*dst_ix), RangeId::new_virtual(*src_ix)));
}
}
}
(false, true) => {
// V <- R
for dst_ix in &reg_to_ranges_maps.vreg_to_vlrs_map[dst_reg_ix] {
for src_ix in &reg_to_ranges_maps.rreg_to_rlrs_map[src_reg_ix] {
range_pairs
.push((RangeId::new_virtual(*dst_ix), RangeId::new_real(*src_ix)));
}
}
}
(false, false) => {
// V <- V
for dst_ix in &reg_to_ranges_maps.vreg_to_vlrs_map[dst_reg_ix] {
for src_ix in &reg_to_ranges_maps.vreg_to_vlrs_map[src_reg_ix] {
range_pairs
.push((RangeId::new_virtual(*dst_ix), RangeId::new_virtual(*src_ix)));
}
}
}
}
debug!(
"move from {:?} (range {:?}) to {:?} (range {:?}) at inst {:?}",
src, src_range, dst, dst_range, iix
);
range_pairs.push((dst_range, src_range));
}

// We have to hand the range-pairs that must be a superset of the moves that could possibly
Expand All @@ -97,6 +62,7 @@ pub fn do_reftypes_analysis(
// is buggy.
debug_assert!(vreg.get_class() == reftype_class);
for vlrix in &reg_to_ranges_maps.vreg_to_vlrs_map[vreg.get_index()] {
debug!("range {:?} is reffy due to reffy vreg {:?}", vlrix, vreg);
reftyped_ranges.insert(RangeId::new_virtual(*vlrix));
}
}
Expand All @@ -113,6 +79,7 @@ pub fn do_reftypes_analysis(

for (dst_lr_id, src_lr_id) in &range_pairs {
if reftyped_ranges.contains(*src_lr_id) {
debug!("reftyped range {:?} -> {:?}", src_lr_id, dst_lr_id);
reftyped_ranges.insert(*dst_lr_id);
}
}
Expand All @@ -132,10 +99,12 @@ pub fn do_reftypes_analysis(
if lr_id.is_real() {
let rrange = &mut rlr_env[lr_id.to_real()];
debug_assert!(!rrange.is_ref);
debug!(" -> rrange {:?} is reffy", lr_id.to_real());
rrange.is_ref = true;
} else {
let vrange = &mut vlr_env[lr_id.to_virtual()];
debug_assert!(!vrange.is_ref);
debug!("  -> vrange {:?} is reffy", lr_id.to_virtual());
vrange.is_ref = true;
}
}
Expand Down
1 change: 1 addition & 0 deletions lib/src/bt_coalescing_analysis.rs
Original file line number Diff line number Diff line change
Expand Up @@ -251,6 +251,7 @@ pub fn do_coalescing_analysis<F: Function>(
src,
iix,
est_freq,
..
} in &move_info.moves
{
debug!(
Expand Down
28 changes: 23 additions & 5 deletions lib/src/bt_main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -453,15 +453,27 @@ fn get_stackmap_artefacts_at(
}
let rci = rci.unwrap();

debug!("computing stackmap info at {:?}", pt);

for rreg_no in rci.first..rci.last + 1 {
// Get the RangeId, if any, assigned for `rreg_no` at `iix.u`. From that we can figure
// out if it is reftyped.
let mb_range_id = per_real_reg[rreg_no].committed.lookup_inst_point(pt);
if let Some(range_id) = mb_range_id {
// `rreg_no` is live at `iix.u`.
let is_ref = if range_id.is_real() {
debug!(
" real reg {:?} is real-range {:?}",
rreg_no,
rlr_env[range_id.to_real()]
);
rlr_env[range_id.to_real()].is_ref
} else {
debug!(
" real reg {:?} is virtual-range {:?}",
rreg_no,
vlr_env[range_id.to_virtual()]
);
vlr_env[range_id.to_virtual()].is_ref
};
if is_ref {
Expand All @@ -472,6 +484,8 @@ fn get_stackmap_artefacts_at(
}
}

debug!("Sbefore = {:?}", s_before);

// Compute Safter.

let mut s_after = s_before.clone();
Expand All @@ -490,6 +504,8 @@ fn get_stackmap_artefacts_at(
}
}

debug!("Safter = {:?}", s_after);

// Create the spill insns, as defined by Sbefore. This has the side effect of recording the
// spill in `spill_slot_allocator`, so we can later ask it to tell us all the reftyped spill
// slots.
Expand Down Expand Up @@ -533,6 +549,8 @@ fn get_stackmap_artefacts_at(

let reftyped_spillslots = spill_slot_allocator.get_reftyped_spillslots_at_inst_point(pt);

debug!("reftyped_spillslots = {:?}", reftyped_spillslots);

// And we're done!

Ok((spill_insns, reload_insns, reftyped_spillslots))
Expand Down Expand Up @@ -641,7 +659,7 @@ impl fmt::Debug for EditListItem {
pub fn alloc_main<F: Function>(
func: &mut F,
reg_universe: &RealRegUniverse,
stackmap_request: &Option<StackmapRequestInfo>,
stackmap_request: Option<&StackmapRequestInfo>,
use_checker: bool,
opts: &BacktrackingOptions,
) -> Result<RegAllocResult<F>, RegAllocError> {
Expand All @@ -650,11 +668,11 @@ pub fn alloc_main<F: Function>(
let empty_vec_iixs = vec![];
let (client_wants_stackmaps, reftype_class, reftyped_vregs, safepoint_insns) =
match stackmap_request {
Some(StackmapRequestInfo {
Some(&StackmapRequestInfo {
reftype_class,
reftyped_vregs,
safepoint_insns,
}) => (true, *reftype_class, reftyped_vregs, safepoint_insns),
ref reftyped_vregs,
ref safepoint_insns,
}) => (true, reftype_class, reftyped_vregs, safepoint_insns),
None => (false, RegClass::INVALID, &empty_vec_vregs, &empty_vec_iixs),
};

Expand Down
2 changes: 1 addition & 1 deletion lib/src/bt_spillslot_allocator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -468,7 +468,7 @@ impl SpillSlotAllocator {
// must succeed. Calling recursively is a bit stupid in the sense that we then search
// again to find the slot we just allocated, but hey.
self.add_new_slot(1 /*word*/);
return self.alloc_reftyped_spillslot_for_frag(frag); // \o/ tailcall \o/
self.alloc_reftyped_spillslot_for_frag(frag) // \o/ tailcall \o/
}

// STACKMAP SUPPORT
Expand Down
Loading