@@ -630,7 +630,7 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
         range: AllocRange,
     ) -> &mut [u8] {
         self.mark_init(range, true);
-        self.provenance.clear(range, cx);
+        self.provenance.clear(range, &self.bytes, cx);
 
         &mut self.bytes[range.start.bytes_usize()..range.end().bytes_usize()]
     }
@@ -643,7 +643,7 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
         range: AllocRange,
     ) -> *mut [u8] {
         self.mark_init(range, true);
-        self.provenance.clear(range, cx);
+        self.provenance.clear(range, &self.bytes, cx);
 
         assert!(range.end().bytes_usize() <= self.bytes.len()); // need to do our own bounds-check
         // Crucially, we go via `AllocBytes::as_mut_ptr`, not `AllocBytes::deref_mut`.
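Both call sites above now pass `&self.bytes` into `ProvenanceMap::clear`, whose new signature is implied rather than shown in this diff. A plausible reason the data bytes are needed: when a clear covers only part of a pointer, the surviving bytes are demoted to per-byte fragments, and (as the `frag.bytes` accesses later in this diff suggest) a fragment records the pointer's raw byte data, which can only be read back out of the allocation. Below is a toy model of that behavior; `ToyProvenanceMap`, `Frag`, and the 8-byte little-endian pointer layout are all illustrative stand-ins, not rustc's real types.

```rust
use std::collections::BTreeMap;
use std::ops::Range;

const PTR_SIZE: usize = 8;

#[derive(Clone, Copy, Debug, PartialEq)]
struct Frag {
    prov: u32, // stand-in for `Prov`
    idx: u8,   // which byte of the pointer this fragment was
    addr: u64, // the pointer's value, recovered from the data bytes
}

#[derive(Default)]
struct ToyProvenanceMap {
    ptrs: BTreeMap<usize, u32>,   // whole 8-byte pointers, keyed by start offset
    frags: BTreeMap<usize, Frag>, // per-byte fragments, keyed by offset
}

impl ToyProvenanceMap {
    /// Clear provenance in `range`. `bytes` is the allocation's data; it is
    /// needed to build fragments for pointers that are only partially cleared.
    fn clear(&mut self, range: Range<usize>, bytes: &[u8]) {
        // Collect pointers overlapping the cleared range.
        let overlapping: Vec<usize> = self
            .ptrs
            .range(range.start.saturating_sub(PTR_SIZE - 1)..range.end)
            .map(|(&start, _)| start)
            .collect();
        for start in overlapping {
            let prov = self.ptrs.remove(&start).unwrap();
            // Reading the pointer's address back out is what requires `bytes`.
            let addr = u64::from_le_bytes(bytes[start..start + PTR_SIZE].try_into().unwrap());
            for i in 0..PTR_SIZE {
                let offset = start + i;
                if !range.contains(&offset) {
                    // This byte survives the clear: demote it to a fragment.
                    self.frags.insert(offset, Frag { prov, idx: i as u8, addr });
                }
            }
        }
        // Fragments inside the cleared range are simply dropped.
        self.frags.retain(|offset, _| !range.contains(offset));
    }
}

fn main() {
    let mut map = ToyProvenanceMap::default();
    map.ptrs.insert(0, 7); // one pointer, provenance 7, at offset 0
    let bytes = 0x1000u64.to_le_bytes(); // allocation data = the pointer value
    map.clear(2..4, &bytes); // overwrite bytes 2 and 3
    // The surviving bytes keep provenance 7 as fragments that remember 0x1000.
    assert_eq!(map.frags[&0], Frag { prov: 7, idx: 0, addr: 0x1000 });
    assert!(!map.frags.contains_key(&2));
}
```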
@@ -722,37 +722,44 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
         if self.provenance.range_empty(range, cx) {
             return Ok(Scalar::from_uint(bits, range.size));
         }
-        // If we get here, we have to check per-byte provenance, and join them together.
+        // If we get here, we have to check per-byte provenance.
         let prov = 'prov: {
-            if !Prov::OFFSET_IS_ADDR {
-                // FIXME(#146291): We need to ensure that we don't mix different pointers with
-                // the same provenance.
-                return Err(AllocError::ReadPartialPointer(range.start));
-            }
-            // Initialize with first fragment. Must have index 0.
-            let Some((mut joint_prov, 0)) = self.provenance.get_byte(range.start, cx) else {
-                break 'prov None;
-            };
-            // Update with the remaining fragments.
-            for offset in Size::from_bytes(1)..range.size {
-                // Ensure there is provenance here and it has the right index.
-                let Some((frag_prov, frag_idx)) =
-                    self.provenance.get_byte(range.start + offset, cx)
-                else {
+            // Scan all fragments, and ensure their indices, provenance, and bytes match.
+            // However, we have to ignore wildcard fragments for this (this is needed for Miri's
+            // native-lib mode). Therefore, we will only know the expected provenance and bytes
+            // once we find the first non-wildcard fragment.
+            let mut expected = None;
+            for idx in Size::ZERO..range.size {
+                // Ensure there is provenance here.
+                let Some(frag) = self.provenance.get_byte(range.start + idx, cx) else {
                     break 'prov None;
                 };
-                // Wildcard provenance is allowed to come with any index (this is needed
-                // for Miri's native-lib mode to work).
-                if u64::from(frag_idx) != offset.bytes() && Some(frag_prov) != Prov::WILDCARD {
+                // If this is wildcard provenance, ignore this fragment.
+                if Some(frag.prov) == Prov::WILDCARD {
+                    continue;
+                }
+                // For non-wildcard fragments, the index must match.
+                if u64::from(frag.idx) != idx.bytes() {
                     break 'prov None;
                 }
-                // Merge this byte's provenance with the previous ones.
-                joint_prov = match Prov::join(joint_prov, frag_prov) {
-                    Some(prov) => prov,
-                    None => break 'prov None,
-                };
+                // If there are expectations registered, check them.
+                // If not, record this fragment as setting the expectations.
+                match expected {
+                    Some(expected) => {
+                        if (frag.prov, frag.bytes) != expected {
+                            break 'prov None;
+                        }
+                    }
+                    None => {
+                        expected = Some((frag.prov, frag.bytes));
+                    }
+                }
             }
-            break 'prov Some(joint_prov);
+            // The final provenance is the expected one we found along the way, or wildcard if
+            // we didn't find any.
+            break 'prov Some(
+                expected.map(|(prov, _addr)| prov).or_else(|| Prov::WILDCARD).unwrap(),
+            );
         };
         if prov.is_none() && !Prov::OFFSET_IS_ADDR {
             // There are some bytes with provenance here but overall the provenance does not add up.
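The loop above replaces the old `Prov::join`-based merging with an exact-match rule: every non-wildcard fragment must sit at its own byte index and agree with the others on both provenance and underlying pointer bytes. This addresses the removed FIXME(#146291) about mixing different pointers that share one provenance. The following self-contained sketch restates that rule with stand-in types (`Frag`, a `u32` provenance, `addr` in place of `frag.bytes`); it illustrates the logic, not the real rustc code.

```rust
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Frag {
    prov: u32, // stand-in for `Prov`
    idx: u8,   // which byte of the original pointer this fragment was
    addr: u64, // stand-in for `frag.bytes`: the pointer value the fragment came from
}

const WILDCARD: u32 = 0; // stand-in for `Prov::WILDCARD`

/// Returns the joint provenance for a full-pointer read, or `None` if the
/// fragments do not reassemble into a single pointer.
fn join_fragments(frags: &[Option<Frag>]) -> Option<u32> {
    let mut expected = None;
    for (i, frag) in frags.iter().enumerate() {
        let frag = (*frag)?; // a byte without provenance ends the search
        if frag.prov == WILDCARD {
            continue; // wildcard fragments (Miri native-lib mode) are ignored
        }
        if u64::from(frag.idx) != i as u64 {
            return None; // fragment sits at the wrong byte position
        }
        match expected {
            Some(exp) => {
                if (frag.prov, frag.addr) != exp {
                    return None; // mixing fragments of two different pointers
                }
            }
            None => expected = Some((frag.prov, frag.addr)),
        }
    }
    // All non-wildcard fragments agreed; fall back to wildcard if there were none.
    Some(expected.map(|(prov, _addr)| prov).unwrap_or(WILDCARD))
}

fn main() {
    let frag = |prov, idx, addr| Some(Frag { prov, idx, addr });
    // All four fragments of the same 4-byte pointer, in order: accepted.
    assert_eq!(join_fragments(&[frag(7, 0, 0x1000), frag(7, 1, 0x1000),
                                frag(7, 2, 0x1000), frag(7, 3, 0x1000)]), Some(7));
    // Same provenance but a different pointer value at byte 2: rejected
    // (this is the case the old `Prov::join`-based code could not detect).
    assert_eq!(join_fragments(&[frag(7, 0, 0x1000), frag(7, 1, 0x1000),
                                frag(7, 2, 0x2000), frag(7, 3, 0x1000)]), None);
    // A wildcard byte in the middle is ignored, whatever its index.
    assert_eq!(join_fragments(&[frag(7, 0, 0x1000), frag(WILDCARD, 9, 0),
                                frag(7, 2, 0x1000), frag(7, 3, 0x1000)]), Some(7));
}
```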
@@ -816,7 +823,7 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
     /// Write "uninit" to the given memory range.
     pub fn write_uninit(&mut self, cx: &impl HasDataLayout, range: AllocRange) {
         self.mark_init(range, false);
-        self.provenance.clear(range, cx);
+        self.provenance.clear(range, &self.bytes, cx);
     }
 
     /// Mark all bytes in the given range as initialised and reset the provenance
@@ -831,21 +838,28 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
             size: Size::from_bytes(self.len()),
         });
         self.mark_init(range, true);
-        self.provenance.write_wildcards(cx, range);
+        self.provenance.write_wildcards(cx, &self.bytes, range);
     }
 
     /// Remove all provenance in the given memory range.
     pub fn clear_provenance(&mut self, cx: &impl HasDataLayout, range: AllocRange) {
-        self.provenance.clear(range, cx);
+        self.provenance.clear(range, &self.bytes, cx);
     }
 
     pub fn provenance_merge_bytes(&mut self, cx: &impl HasDataLayout) -> bool {
         self.provenance.merge_bytes(cx)
     }
 
+    pub fn provenance_prepare_copy(
+        &self,
+        range: AllocRange,
+        cx: &impl HasDataLayout,
+    ) -> ProvenanceCopy<Prov> {
+        self.provenance.prepare_copy(range, &self.bytes, cx)
+    }
+
     /// Applies a previously prepared provenance copy.
-    /// The affected range, as defined in the parameters to `provenance().prepare_copy` is expected
-    /// to be clear of provenance.
+    /// The affected range is expected to be clear of provenance.
     ///
     /// This is dangerous to use as it can violate internal `Allocation` invariants!
     /// It only exists to support an efficient implementation of `mem_copy_repeatedly`.
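Since the new `provenance_prepare_copy` only reads the source allocation (note the `&self` receiver, now possible because `prepare_copy` takes the bytes by reference), copying splits naturally into a read-only prepare phase and a mutating apply phase. The sketch below models that two-phase pattern with toy types; the real `ProvenanceCopy<Prov>` and the apply-side signature in rustc are assumptions here and are more involved (e.g. repeated application for `mem_copy_repeatedly`).

```rust
use std::collections::BTreeMap;

/// Toy snapshot: provenance entries re-based to offsets relative to the range start.
struct ToyProvenanceCopy(Vec<(usize, u32)>);

#[derive(Default)]
struct ToyAlloc {
    provenance: BTreeMap<usize, u32>,
}

impl ToyAlloc {
    /// Phase 1: read-only snapshot of the source range (hence `&self`).
    fn prepare_copy(&self, start: usize, len: usize) -> ToyProvenanceCopy {
        ToyProvenanceCopy(
            self.provenance
                .range(start..start + len)
                .map(|(&off, &prov)| (off - start, prov))
                .collect(),
        )
    }

    /// Phase 2: splice the snapshot into a destination range that the caller
    /// has already cleared of provenance (the documented precondition).
    fn apply_copy(&mut self, copy: &ToyProvenanceCopy, dest_start: usize) {
        for &(rel_off, prov) in &copy.0 {
            self.provenance.insert(dest_start + rel_off, prov);
        }
    }
}

fn main() {
    let mut src = ToyAlloc::default();
    src.provenance.insert(4, 7);
    let copy = src.prepare_copy(0, 8);
    let mut dest = ToyAlloc::default();
    // `apply_copy` assumes the destination range is already clear, mirroring
    // the "expected to be clear of provenance" note in the doc comment above.
    dest.apply_copy(&copy, 16);
    assert_eq!(dest.provenance[&20], 7);
}
```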