Commit 4e023d0

const-eval: fix and re-enable pointer fragment support
1 parent 907705a commit 4e023d0

15 files changed: +252 -256 lines changed

compiler/rustc_const_eval/src/interpret/memory.rs

Lines changed: 1 addition & 4 deletions
@@ -1502,10 +1502,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
         // `get_bytes_mut` will clear the provenance, which is correct,
         // since we don't want to keep any provenance at the target.
         // This will also error if copying partial provenance is not supported.
-        let provenance = src_alloc
-            .provenance()
-            .prepare_copy(src_range, self)
-            .map_err(|e| e.to_interp_error(src_alloc_id))?;
+        let provenance = src_alloc.provenance_prepare_copy(src_range, self);
         // Prepare a copy of the initialization mask.
         let init = src_alloc.init_mask().prepare_copy(src_range);
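
As context for this call-site change: `mem_copy` prepares per-range side data (provenance, then the init mask) from the source allocation before the destination bytes are overwritten, and the provenance half of that preparation is now a single, infallible method on the allocation itself (added in the allocation.rs hunk further down). Below is a rough, self-contained sketch of such a prepare step; `PreparedCopy` and `prepare_copy` are invented for the example and are not the rustc API.

// Illustrative only: a "prepare" step that snapshots the provenance entries and
// the init mask covering `start..start + len` of a source allocation, so they
// can be applied to the destination after its raw bytes are written.
struct PreparedCopy<P> {
    provenance: Vec<(usize, P)>, // range-relative offset -> provenance
    init: Vec<bool>,             // per-byte init mask of the copied range
}

fn prepare_copy<P: Clone>(
    src_provenance: &[(usize, P)],
    src_init: &[bool],
    start: usize,
    len: usize,
) -> PreparedCopy<P> {
    PreparedCopy {
        provenance: src_provenance
            .iter()
            .filter(|(off, _)| (start..start + len).contains(off))
            .map(|(off, p)| (off - start, p.clone()))
            .collect(),
        init: src_init[start..start + len].to_vec(),
    }
}

fn main() {
    // Copy bytes 2..6 of a source that has provenance recorded at offset 4.
    let prepared = prepare_copy(&[(4usize, "alloc1")], &[true; 8], 2, 4);
    assert_eq!(prepared.provenance, vec![(2usize, "alloc1")]);
    assert_eq!(prepared.init, vec![true; 4]);
}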

compiler/rustc_middle/src/lib.rs

Lines changed: 1 addition & 0 deletions
@@ -51,6 +51,7 @@
 #![feature(negative_impls)]
 #![feature(never_type)]
 #![feature(ptr_alignment_type)]
+#![feature(range_bounds_is_empty)]
 #![feature(rustc_attrs)]
 #![feature(rustdoc_internals)]
 #![feature(sized_hierarchy)]
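
Aside on the new feature gate: `range_bounds_is_empty` gates the unstable `RangeBounds::is_empty` method in the standard library, presumably used by the provenance changes elsewhere in this commit. A minimal, hedged sketch of what the gate enables follows; the exact method bounds are not shown in this diff.

#![feature(range_bounds_is_empty)]

use std::ops::RangeBounds;

// Generic over any range type, so the unstable trait method (rather than the
// stable inherent `Range::is_empty`) is what gets called.
fn is_empty_range<R: RangeBounds<u64>>(range: R) -> bool {
    range.is_empty()
}

fn main() {
    assert!(is_empty_range(10..10));
    assert!(!is_empty_range(0..=0));
}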

compiler/rustc_middle/src/mir/interpret/allocation.rs

Lines changed: 50 additions & 31 deletions
@@ -630,7 +630,7 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
         range: AllocRange,
     ) -> &mut [u8] {
         self.mark_init(range, true);
-        self.provenance.clear(range, cx);
+        self.provenance.clear(range, &self.bytes, cx);
 
         &mut self.bytes[range.start.bytes_usize()..range.end().bytes_usize()]
     }
@@ -643,7 +643,7 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
         range: AllocRange,
     ) -> *mut [u8] {
         self.mark_init(range, true);
-        self.provenance.clear(range, cx);
+        self.provenance.clear(range, &self.bytes, cx);
 
         assert!(range.end().bytes_usize() <= self.bytes.len()); // need to do our own bounds-check
         // Crucially, we go via `AllocBytes::as_mut_ptr`, not `AllocBytes::deref_mut`.
@@ -722,37 +722,49 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
         if self.provenance.range_empty(range, cx) {
             return Ok(Scalar::from_uint(bits, range.size));
         }
-        // If we get here, we have to check per-byte provenance, and join them together.
+        // If we get here, we have to check whether we can merge per-byte provenance.
         let prov = 'prov: {
-            if !Prov::OFFSET_IS_ADDR {
-                // FIXME(#146291): We need to ensure that we don't mix different pointers with
-                // the same provenance.
-                return Err(AllocError::ReadPartialPointer(range.start));
-            }
-            // Initialize with first fragment. Must have index 0.
-            let Some((mut joint_prov, 0)) = self.provenance.get_byte(range.start, cx) else {
+            // If there is any ptr-sized provenance overlapping with this range,
+            // this is definitely mixing multiple pointers and we can bail.
+            if !self.provenance.range_ptrs_is_empty(range, cx) {
                 break 'prov None;
-            };
-            // Update with the remaining fragments.
-            for offset in Size::from_bytes(1)..range.size {
-                // Ensure there is provenance here and it has the right index.
-                let Some((frag_prov, frag_idx)) =
-                    self.provenance.get_byte(range.start + offset, cx)
-                else {
+            }
+            // Scan all fragments, and ensure their indices, provenance, and bytes match.
+            // However, we have to ignore wildcard fragments for this (this is needed for Miri's
+            // native-lib mode). Therefore, we will only know the expected provenance and bytes
+            // once we find the first non-wildcard fragment.
+            let mut expected = None;
+            for idx in Size::ZERO..range.size {
+                // Ensure there is provenance here.
+                let Some(frag) = self.provenance.get_byte(range.start + idx, cx) else {
                     break 'prov None;
                 };
-                // Wildcard provenance is allowed to come with any index (this is needed
-                // for Miri's native-lib mode to work).
-                if u64::from(frag_idx) != offset.bytes() && Some(frag_prov) != Prov::WILDCARD {
+                // If this is wildcard provenance, ignore this fragment.
+                if Some(frag.prov) == Prov::WILDCARD {
+                    continue;
+                }
+                // For non-wildcard fragments, the index must match.
+                if u64::from(frag.idx) != idx.bytes() {
                     break 'prov None;
                 }
-                // Merge this byte's provenance with the previous ones.
-                joint_prov = match Prov::join(joint_prov, frag_prov) {
-                    Some(prov) => prov,
-                    None => break 'prov None,
-                };
+                // If there are expectations registered, check them.
+                // If not, record this fragment as setting the expectations.
+                match expected {
+                    Some(expected) => {
+                        if (frag.prov, frag.bytes) != expected {
+                            break 'prov None;
+                        }
+                    }
+                    None => {
+                        expected = Some((frag.prov, frag.bytes));
+                    }
+                }
             }
-            break 'prov Some(joint_prov);
+            // The final provenance is the expected one we found along the way, or wildcard if
+            // we didn't find any.
+            break 'prov Some(
+                expected.map(|(prov, _addr)| prov).or_else(|| Prov::WILDCARD).unwrap(),
+            );
         };
         if prov.is_none() && !Prov::OFFSET_IS_ADDR {
             // There are some bytes with provenance here but overall the provenance does not add up.
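
To make the new merge rule easier to follow, here is a simplified, runnable model of the check above. The types are invented for this sketch (they are not the rustc `ProvenanceMap` internals), and the `addr` field is an assumption based on the `_addr` binding in the new code: each fragment is taken to remember which pointer it was split from, via its provenance and its data.

// Toy model: each byte of a pointer-sized read either has no provenance,
// wildcard provenance, or a fragment of a previously stored pointer.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Frag {
    prov: Option<u32>, // None models wildcard provenance
    idx: u8,           // which byte of the original pointer this fragment is
    addr: u64,         // data of the pointer this fragment was split from (assumed)
}

/// Decide whether `frags` (one entry per byte of the read) recombine into a
/// single pointer. Returns `Some(Some(tag))` for a successful merge,
/// `Some(None)` if every fragment was wildcard, and `None` if the fragments
/// do not add up to one pointer.
fn merge_fragments(frags: &[Option<Frag>]) -> Option<Option<u32>> {
    // Expected (provenance, pointer data), known only once the first
    // non-wildcard fragment has been seen.
    let mut expected: Option<(u32, u64)> = None;
    for (idx, frag) in frags.iter().enumerate() {
        // Every byte must carry some provenance.
        let frag = (*frag)?;
        // Wildcard fragments are skipped entirely.
        let Some(prov) = frag.prov else { continue };
        // Non-wildcard fragments must sit at the matching offset...
        if usize::from(frag.idx) != idx {
            return None;
        }
        // ...and must agree with the first non-wildcard fragment.
        match expected {
            Some(exp) if (prov, frag.addr) != exp => return None,
            Some(_) => {}
            None => expected = Some((prov, frag.addr)),
        }
    }
    // All-wildcard reads stay wildcard; otherwise use the shared provenance.
    Some(expected.map(|(prov, _addr)| prov))
}

fn main() {
    let frag = |i| Some(Frag { prov: Some(1), idx: i, addr: 0x1000 });
    assert_eq!(merge_fragments(&[frag(0), frag(1), frag(2), frag(3)]), Some(Some(1)));
    // A byte taken from a pointer with different data breaks the merge.
    let other = Some(Frag { prov: Some(1), idx: 1, addr: 0x2000 });
    assert_eq!(merge_fragments(&[frag(0), other, frag(2), frag(3)]), None);
}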
@@ -816,7 +828,7 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
     /// Write "uninit" to the given memory range.
     pub fn write_uninit(&mut self, cx: &impl HasDataLayout, range: AllocRange) {
         self.mark_init(range, false);
-        self.provenance.clear(range, cx);
+        self.provenance.clear(range, &self.bytes, cx);
     }
 
     /// Mark all bytes in the given range as initialised and reset the provenance
@@ -831,21 +843,28 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
             size: Size::from_bytes(self.len()),
         });
         self.mark_init(range, true);
-        self.provenance.write_wildcards(cx, range);
+        self.provenance.write_wildcards(cx, &self.bytes, range);
     }
 
     /// Remove all provenance in the given memory range.
     pub fn clear_provenance(&mut self, cx: &impl HasDataLayout, range: AllocRange) {
-        self.provenance.clear(range, cx);
+        self.provenance.clear(range, &self.bytes, cx);
     }
 
     pub fn provenance_merge_bytes(&mut self, cx: &impl HasDataLayout) -> bool {
         self.provenance.merge_bytes(cx)
     }
 
+    pub fn provenance_prepare_copy(
+        &self,
+        range: AllocRange,
+        cx: &impl HasDataLayout,
+    ) -> ProvenanceCopy<Prov> {
+        self.provenance.prepare_copy(range, &self.bytes, cx)
+    }
+
     /// Applies a previously prepared provenance copy.
-    /// The affected range, as defined in the parameters to `provenance().prepare_copy` is expected
-    /// to be clear of provenance.
+    /// The affected range is expected to be clear of provenance.
     ///
     /// This is dangerous to use as it can violate internal `Allocation` invariants!
     /// It only exists to support an efficient implementation of `mem_copy_repeatedly`.