Commit d75f48e

Auto merge of rust-lang#80449 - m-ou-se:rollup-kp2e5n8, r=m-ou-se
Rollup of 11 pull requests

Successful merges:

 - rust-lang#80383 (clarify wrapping ptr arithmetic docs)
 - rust-lang#80390 (BTreeMap: rename the area access methods)
 - rust-lang#80393 (Add links to the source for the rustc and rustdoc books.)
 - rust-lang#80398 (Use raw version of align_of in rc data_offset)
 - rust-lang#80402 (Document `InferTy` & co.)
 - rust-lang#80403 (fix: small typo error in chalk/mod.rs)
 - rust-lang#80410 (rustdoc book: fix example)
 - rust-lang#80419 (Add regression test for rust-lang#80375)
 - rust-lang#80430 (Add "length" as doc alias to len methods)
 - rust-lang#80431 (Add "chr" as doc alias to char::from_u32)
 - rust-lang#80448 (Fix stabilization version of deque_range feature.)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
2 parents 2987785 + e3d26e0 commit d75f48e

25 files changed: +231 -131 lines

compiler/rustc_middle/src/ty/sty.rs (+30 -3)
@@ -1424,28 +1424,33 @@ pub struct EarlyBoundRegion {
     pub name: Symbol,
 }
 
+/// A **ty**pe **v**ariable **ID**.
 #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)]
 pub struct TyVid {
     pub index: u32,
 }
 
+/// A **`const`** **v**ariable **ID**.
 #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)]
 pub struct ConstVid<'tcx> {
     pub index: u32,
     pub phantom: PhantomData<&'tcx ()>,
 }
 
+/// An **int**egral (`u32`, `i32`, `usize`, etc.) type **v**ariable **ID**.
 #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)]
 pub struct IntVid {
     pub index: u32,
 }
 
+/// A **float**ing-point (`f32` or `f64`) type **v**ariable **ID**.
 #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)]
 pub struct FloatVid {
     pub index: u32,
 }
 
 rustc_index::newtype_index! {
+    /// A **region** (lifetime) **v**ariable **ID**.
     pub struct RegionVid {
         DEBUG_FORMAT = custom,
     }
@@ -1457,18 +1462,40 @@ impl Atom for RegionVid {
     }
 }
 
+/// A placeholder for a type that hasn't been inferred yet.
+///
+/// E.g., if we have an empty array (`[]`), then we create a fresh
+/// type variable for the element type since we won't know until it's
+/// used what the element type is supposed to be.
 #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)]
 #[derive(HashStable)]
 pub enum InferTy {
+    /// A type variable.
     TyVar(TyVid),
+    /// An integral type variable (`{integer}`).
+    ///
+    /// These are created when the compiler sees an integer literal like
+    /// `1` that could be several different types (`u8`, `i32`, `u32`, etc.).
+    /// We don't know until it's used what type it's supposed to be, so
+    /// we create a fresh type variable.
     IntVar(IntVid),
+    /// A floating-point type variable (`{float}`).
+    ///
+    /// These are created when the compiler sees a float literal like
+    /// `1.0` that could be either an `f32` or an `f64`.
+    /// We don't know until it's used what type it's supposed to be, so
+    /// we create a fresh type variable.
     FloatVar(FloatVid),
 
-    /// A `FreshTy` is one that is generated as a replacement for an
-    /// unbound type variable. This is convenient for caching etc. See
-    /// `infer::freshen` for more details.
+    /// A [`FreshTy`][Self::FreshTy] is one that is generated as a replacement
+    /// for an unbound type variable. This is convenient for caching etc. See
+    /// `rustc_infer::infer::freshen` for more details.
+    ///
+    /// Compare with [`TyVar`][Self::TyVar].
     FreshTy(u32),
+    /// Like [`FreshTy`][Self::FreshTy], but as a replacement for [`IntVar`][Self::IntVar].
     FreshIntTy(u32),
+    /// Like [`FreshTy`][Self::FreshTy], but as a replacement for [`FloatVar`][Self::FloatVar].
     FreshFloatTy(u32),
 }

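The new `InferTy` docs describe behaviour that is easiest to see from the user's side. A minimal sketch (ordinary user code, not compiler internals) of literals and an empty collection whose types stay as inference variables until a later use pins them down:

// Assumed illustration, not compiler code: the literals below start out as
// the `{integer}` / `{float}` inference variables documented above, and the
// empty vec's element type is a fresh type variable until the later push.
fn main() {
    let n = 1;              // `{integer}`: could be u8, i32, u32, ...
    let x = 1.0;            // `{float}`: could be f32 or f64
    let mut v = Vec::new(); // element type not yet known

    let _: u8 = n;          // now `n` is inferred to be u8
    let _: f32 = x;         // now `x` is inferred to be f32
    v.push("hello");        // now `v` is inferred to be Vec<&str>
}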
compiler/rustc_traits/src/chalk/mod.rs (+1 -1)
@@ -98,7 +98,7 @@ crate fn evaluate_goal<'tcx>(
     let mut solver = chalk_engine::solve::SLGSolver::new(32, None);
     let db = ChalkRustIrDatabase { interner, reempty_placeholder };
     let solution = solver.solve(&db, &lowered_goal);
-    debug!(?obligation, ?solution, "evaluatate goal");
+    debug!(?obligation, ?solution, "evaluate goal");
 
     // Ideally, the code to convert *back* to rustc types would live close to
     // the code to convert *from* rustc types. Right now though, we don't

library/alloc/src/collections/binary_heap.rs (+1)
@@ -915,6 +915,7 @@ impl<T> BinaryHeap<T> {
     ///
     /// assert_eq!(heap.len(), 2);
     /// ```
+    #[doc(alias = "length")]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn len(&self) -> usize {
         self.data.len()

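`#[doc(alias = "length")]` only adds an entry to rustdoc's search index, so searching the generated docs for "length" also surfaces `len`. A minimal sketch of the same attribute on a hypothetical type (`Ring` and its field are made up for illustration):

/// A fixed-size ring buffer (hypothetical example type).
pub struct Ring<T> {
    items: Vec<T>,
}

impl<T> Ring<T> {
    /// Returns the number of elements in the ring.
    ///
    /// Searching rustdoc for "length" will now also find this method.
    #[doc(alias = "length")]
    pub fn len(&self) -> usize {
        self.items.len()
    }
}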
library/alloc/src/collections/btree/map.rs (+1)
@@ -2132,6 +2132,7 @@ impl<K, V> BTreeMap<K, V> {
     /// a.insert(1, "a");
     /// assert_eq!(a.len(), 1);
     /// ```
+    #[doc(alias = "length")]
     #[stable(feature = "rust1", since = "1.0.0")]
     #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
     pub const fn len(&self) -> usize {

library/alloc/src/collections/btree/node.rs (+48 -50)
@@ -489,7 +489,7 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
     ///
     /// # Safety
     /// `index` is in bounds of 0..CAPACITY
-    unsafe fn key_area_mut_at<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
+    unsafe fn key_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
     where
         I: SliceIndex<[MaybeUninit<K>], Output = Output>,
     {
@@ -503,7 +503,7 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
     ///
     /// # Safety
     /// `index` is in bounds of 0..CAPACITY
-    unsafe fn val_area_mut_at<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
+    unsafe fn val_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
     where
         I: SliceIndex<[MaybeUninit<V>], Output = Output>,
     {
@@ -519,7 +519,7 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
     ///
     /// # Safety
     /// `index` is in bounds of 0..CAPACITY + 1
-    unsafe fn edge_area_mut_at<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
+    unsafe fn edge_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
     where
         I: SliceIndex<[MaybeUninit<BoxedNode<K, V>>], Output = Output>,
     {
@@ -583,8 +583,8 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
         assert!(idx < CAPACITY);
         *len += 1;
         unsafe {
-            self.key_area_mut_at(idx).write(key);
-            self.val_area_mut_at(idx).write(val);
+            self.key_area_mut(idx).write(key);
+            self.val_area_mut(idx).write(val);
         }
     }
 
@@ -593,8 +593,8 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
         let new_len = self.len() + 1;
         assert!(new_len <= CAPACITY);
         unsafe {
-            slice_insert(self.key_area_mut_at(..new_len), 0, key);
-            slice_insert(self.val_area_mut_at(..new_len), 0, val);
+            slice_insert(self.key_area_mut(..new_len), 0, key);
+            slice_insert(self.val_area_mut(..new_len), 0, val);
             *self.len_mut() = new_len as u16;
         }
     }
@@ -627,9 +627,9 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
         assert!(idx < CAPACITY);
         *len += 1;
         unsafe {
-            self.key_area_mut_at(idx).write(key);
-            self.val_area_mut_at(idx).write(val);
-            self.edge_area_mut_at(idx + 1).write(edge.node);
+            self.key_area_mut(idx).write(key);
+            self.val_area_mut(idx).write(val);
+            self.edge_area_mut(idx + 1).write(edge.node);
             Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link();
         }
     }
@@ -642,9 +642,9 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
         assert!(new_len <= CAPACITY);
 
         unsafe {
-            slice_insert(self.key_area_mut_at(..new_len), 0, key);
-            slice_insert(self.val_area_mut_at(..new_len), 0, val);
-            slice_insert(self.edge_area_mut_at(..new_len + 1), 0, edge.node);
+            slice_insert(self.key_area_mut(..new_len), 0, key);
+            slice_insert(self.val_area_mut(..new_len), 0, val);
+            slice_insert(self.edge_area_mut(..new_len + 1), 0, edge.node);
             *self.len_mut() = new_len as u16;
         }
 
@@ -662,12 +662,12 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
         let idx = self.len() - 1;
 
         unsafe {
-            let key = self.key_area_mut_at(idx).assume_init_read();
-            let val = self.val_area_mut_at(idx).assume_init_read();
+            let key = self.key_area_mut(idx).assume_init_read();
+            let val = self.val_area_mut(idx).assume_init_read();
             let edge = match self.reborrow_mut().force() {
                 ForceResult::Leaf(_) => None,
                 ForceResult::Internal(mut internal) => {
-                    let node = internal.edge_area_mut_at(idx + 1).assume_init_read();
+                    let node = internal.edge_area_mut(idx + 1).assume_init_read();
                     let mut edge = Root { node, height: internal.height - 1, _marker: PhantomData };
                     // Currently, clearing the parent link is superfluous, because we will
                     // insert the node elsewhere and set its parent link again.
@@ -690,12 +690,12 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
         let old_len = self.len();
 
         unsafe {
-            let key = slice_remove(self.key_area_mut_at(..old_len), 0);
-            let val = slice_remove(self.val_area_mut_at(..old_len), 0);
+            let key = slice_remove(self.key_area_mut(..old_len), 0);
+            let val = slice_remove(self.val_area_mut(..old_len), 0);
             let edge = match self.reborrow_mut().force() {
                 ForceResult::Leaf(_) => None,
                 ForceResult::Internal(mut internal) => {
-                    let node = slice_remove(internal.edge_area_mut_at(..old_len + 1), 0);
+                    let node = slice_remove(internal.edge_area_mut(..old_len + 1), 0);
                     let mut edge = Root { node, height: internal.height - 1, _marker: PhantomData };
                     // Currently, clearing the parent link is superfluous, because we will
                     // insert the node elsewhere and set its parent link again.
@@ -919,11 +919,11 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
         let new_len = self.node.len() + 1;
 
         unsafe {
-            slice_insert(self.node.key_area_mut_at(..new_len), self.idx, key);
-            slice_insert(self.node.val_area_mut_at(..new_len), self.idx, val);
+            slice_insert(self.node.key_area_mut(..new_len), self.idx, key);
+            slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
             *self.node.len_mut() = new_len as u16;
 
-            self.node.val_area_mut_at(self.idx).assume_init_mut()
+            self.node.val_area_mut(self.idx).assume_init_mut()
         }
     }
 }
@@ -978,9 +978,9 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
         let new_len = self.node.len() + 1;
 
         unsafe {
-            slice_insert(self.node.key_area_mut_at(..new_len), self.idx, key);
-            slice_insert(self.node.val_area_mut_at(..new_len), self.idx, val);
-            slice_insert(self.node.edge_area_mut_at(..new_len + 1), self.idx + 1, edge.node);
+            slice_insert(self.node.key_area_mut(..new_len), self.idx, key);
+            slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
+            slice_insert(self.node.edge_area_mut(..new_len + 1), self.idx + 1, edge.node);
             *self.node.len_mut() = new_len as u16;
 
             self.node.correct_childrens_parent_links(self.idx + 1..new_len + 1);
@@ -1085,7 +1085,7 @@ impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Immut<'a>, K, V, NodeTyp
 
 impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
     pub fn key_mut(&mut self) -> &mut K {
-        unsafe { self.node.key_area_mut_at(self.idx).assume_init_mut() }
+        unsafe { self.node.key_area_mut(self.idx).assume_init_mut() }
     }
 
     pub fn into_val_mut(self) -> &'a mut V {
@@ -1127,16 +1127,16 @@ impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>
         let new_len = self.node.len() - self.idx - 1;
         new_node.len = new_len as u16;
         unsafe {
-            let k = self.node.key_area_mut_at(self.idx).assume_init_read();
-            let v = self.node.val_area_mut_at(self.idx).assume_init_read();
+            let k = self.node.key_area_mut(self.idx).assume_init_read();
+            let v = self.node.val_area_mut(self.idx).assume_init_read();
 
             ptr::copy_nonoverlapping(
-                self.node.key_area_mut_at(self.idx + 1..).as_ptr(),
+                self.node.key_area_mut(self.idx + 1..).as_ptr(),
                 new_node.keys.as_mut_ptr(),
                 new_len,
             );
             ptr::copy_nonoverlapping(
-                self.node.val_area_mut_at(self.idx + 1..).as_ptr(),
+                self.node.val_area_mut(self.idx + 1..).as_ptr(),
                 new_node.vals.as_mut_ptr(),
                 new_len,
             );
@@ -1173,8 +1173,8 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
     ) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
         let old_len = self.node.len();
         unsafe {
-            let k = slice_remove(self.node.key_area_mut_at(..old_len), self.idx);
-            let v = slice_remove(self.node.val_area_mut_at(..old_len), self.idx);
+            let k = slice_remove(self.node.key_area_mut(..old_len), self.idx);
+            let v = slice_remove(self.node.val_area_mut(..old_len), self.idx);
             *self.node.len_mut() = (old_len - 1) as u16;
             ((k, v), self.left_edge())
         }
@@ -1195,7 +1195,7 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
             let kv = self.split_leaf_data(&mut new_node.data);
             let new_len = usize::from(new_node.data.len);
             ptr::copy_nonoverlapping(
-                self.node.edge_area_mut_at(self.idx + 1..).as_ptr(),
+                self.node.edge_area_mut(self.idx + 1..).as_ptr(),
                 new_node.edges.as_mut_ptr(),
                 new_len + 1,
             );
@@ -1321,25 +1321,23 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
         unsafe {
             *left_node.len_mut() = new_left_len as u16;
 
-            let parent_key =
-                slice_remove(parent_node.key_area_mut_at(..old_parent_len), parent_idx);
-            left_node.key_area_mut_at(old_left_len).write(parent_key);
+            let parent_key = slice_remove(parent_node.key_area_mut(..old_parent_len), parent_idx);
+            left_node.key_area_mut(old_left_len).write(parent_key);
             ptr::copy_nonoverlapping(
-                right_node.key_area_mut_at(..).as_ptr(),
-                left_node.key_area_mut_at(old_left_len + 1..).as_mut_ptr(),
+                right_node.key_area_mut(..).as_ptr(),
+                left_node.key_area_mut(old_left_len + 1..).as_mut_ptr(),
                 right_len,
             );
 
-            let parent_val =
-                slice_remove(parent_node.val_area_mut_at(..old_parent_len), parent_idx);
-            left_node.val_area_mut_at(old_left_len).write(parent_val);
+            let parent_val = slice_remove(parent_node.val_area_mut(..old_parent_len), parent_idx);
+            left_node.val_area_mut(old_left_len).write(parent_val);
             ptr::copy_nonoverlapping(
-                right_node.val_area_mut_at(..).as_ptr(),
-                left_node.val_area_mut_at(old_left_len + 1..).as_mut_ptr(),
+                right_node.val_area_mut(..).as_ptr(),
+                left_node.val_area_mut(old_left_len + 1..).as_mut_ptr(),
                 right_len,
             );
 
-            slice_remove(&mut parent_node.edge_area_mut_at(..old_parent_len + 1), parent_idx + 1);
+            slice_remove(&mut parent_node.edge_area_mut(..old_parent_len + 1), parent_idx + 1);
             parent_node.correct_childrens_parent_links(parent_idx + 1..old_parent_len);
             *parent_node.len_mut() -= 1;
 
@@ -1349,8 +1347,8 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
             let mut left_node = left_node.reborrow_mut().cast_to_internal_unchecked();
             let mut right_node = right_node.cast_to_internal_unchecked();
             ptr::copy_nonoverlapping(
-                right_node.edge_area_mut_at(..).as_ptr(),
-                left_node.edge_area_mut_at(old_left_len + 1..).as_mut_ptr(),
+                right_node.edge_area_mut(..).as_ptr(),
+                left_node.edge_area_mut(old_left_len + 1..).as_mut_ptr(),
                 right_len + 1,
             );
 
@@ -1458,7 +1456,7 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
         match (left_node.reborrow_mut().force(), right_node.reborrow_mut().force()) {
             (ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
                 // Make room for stolen edges.
-                let right_edges = right.edge_area_mut_at(..).as_mut_ptr();
+                let right_edges = right.edge_area_mut(..).as_mut_ptr();
                 ptr::copy(right_edges, right_edges.add(count), old_right_len + 1);
                 right.correct_childrens_parent_links(count..new_right_len + 1);
 
@@ -1518,7 +1516,7 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
                 move_edges(right.reborrow_mut(), 0, left, old_left_len + 1, count);
 
                 // Fill gap where stolen edges used to be.
-                let right_edges = right.edge_area_mut_at(..).as_mut_ptr();
+                let right_edges = right.edge_area_mut(..).as_mut_ptr();
                 ptr::copy(right_edges.add(count), right_edges, new_right_len + 1);
                 right.correct_childrens_parent_links(0..=new_right_len);
             }
@@ -1551,8 +1549,8 @@ unsafe fn move_edges<'a, K: 'a, V: 'a>(
     count: usize,
 ) {
     unsafe {
-        let source_ptr = source.edge_area_mut_at(..).as_ptr();
-        let dest_ptr = dest.edge_area_mut_at(dest_offset..).as_mut_ptr();
+        let source_ptr = source.edge_area_mut(..).as_ptr();
+        let dest_ptr = dest.edge_area_mut(dest_offset..).as_mut_ptr();
         ptr::copy_nonoverlapping(source_ptr.add(source_offset), dest_ptr, count);
         dest.correct_childrens_parent_links(dest_offset..dest_offset + count);
     }

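The rename above drops the `_at` suffix but keeps the shape of these accessors: an `unsafe fn` generic over `SliceIndex` that hands out a sub-slice (or single slot) of the node's `MaybeUninit` storage. A self-contained sketch of that pattern, using a made-up `LeafNode`, `CAPACITY` value, and `push` helper rather than the real BTreeMap internals:

use core::mem::MaybeUninit;
use core::slice::SliceIndex;

const CAPACITY: usize = 11; // illustrative value only

// A toy node demonstrating the "area" accessor shape, not the real node type.
struct LeafNode<K> {
    len: u16,
    keys: [MaybeUninit<K>; CAPACITY],
}

impl<K> LeafNode<K> {
    fn new() -> Self {
        LeafNode {
            len: 0,
            // An array of MaybeUninit does not require initialization.
            keys: unsafe { MaybeUninit::uninit().assume_init() },
        }
    }

    /// Borrows a sub-area of the (possibly uninitialized) key storage.
    ///
    /// # Safety
    /// `index` is in bounds of 0..CAPACITY.
    unsafe fn key_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<K>], Output = Output>,
    {
        unsafe { self.keys.as_mut_slice().get_unchecked_mut(index) }
    }

    fn push(&mut self, key: K) {
        let idx = usize::from(self.len);
        assert!(idx < CAPACITY);
        // SAFETY: `idx < CAPACITY` was just asserted.
        unsafe { self.key_area_mut(idx).write(key) };
        self.len += 1;
    }
}

fn main() {
    let mut node = LeafNode::new();
    node.push("a");
    node.push("b");
    assert_eq!(node.len, 2);
}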
library/alloc/src/collections/btree/set.rs (+1)
@@ -975,6 +975,7 @@ impl<T> BTreeSet<T> {
     /// v.insert(1);
     /// assert_eq!(v.len(), 1);
    /// ```
+    #[doc(alias = "length")]
     #[stable(feature = "rust1", since = "1.0.0")]
     #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
     pub const fn len(&self) -> usize {

library/alloc/src/collections/linked_list.rs (+1)
@@ -593,6 +593,7 @@ impl<T> LinkedList<T> {
     /// dl.push_back(3);
     /// assert_eq!(dl.len(), 3);
     /// ```
+    #[doc(alias = "length")]
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn len(&self) -> usize {
