From d495b84a9a5a0bfcfe353c53b332664c7414e315 Mon Sep 17 00:00:00 2001 From: Xiangfei Ding Date: Mon, 13 May 2024 03:16:31 +0800 Subject: [PATCH 01/21] PinCoerceUnsized trait into core --- compiler/rustc_span/src/symbol.rs | 1 + library/alloc/src/boxed.rs | 5 +- library/alloc/src/lib.rs | 1 + library/alloc/src/rc.rs | 10 ++++ library/alloc/src/sync.rs | 8 ++- library/alloc/tests/arc.rs | 14 ++++++ library/alloc/tests/boxed.rs | 37 ++++++++++++++ library/alloc/tests/lib.rs | 1 + library/alloc/tests/rc.rs | 17 +++++++ library/core/src/cell.rs | 19 +++++++ library/core/src/pin.rs | 50 ++++++++++++++++++- library/core/src/ptr/non_null.rs | 4 ++ library/core/src/ptr/unique.rs | 4 ++ library/core/tests/pin.rs | 46 +++++++++++++++++ library/std/src/lib.rs | 1 + .../src/sys/pal/sgx/abi/usercalls/alloc.rs | 4 ++ 16 files changed, 218 insertions(+), 4 deletions(-) diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index e8563f50158a5..9bacb60eca98b 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -276,6 +276,7 @@ symbols! { Path, PathBuf, Pending, + PinCoerceUnsized, Pointer, Poll, ProcMacro, diff --git a/library/alloc/src/boxed.rs b/library/alloc/src/boxed.rs index ef40d3b203f48..4b8706a8426e6 100644 --- a/library/alloc/src/boxed.rs +++ b/library/alloc/src/boxed.rs @@ -200,7 +200,7 @@ use core::ops::{ AsyncFn, AsyncFnMut, AsyncFnOnce, CoerceUnsized, Coroutine, CoroutineState, Deref, DerefMut, DerefPure, DispatchFromDyn, Receiver, }; -use core::pin::Pin; +use core::pin::{Pin, PinCoerceUnsized}; use core::ptr::{self, addr_of_mut, NonNull, Unique}; use core::task::{Context, Poll}; use core::{borrow, fmt, slice}; @@ -2724,3 +2724,6 @@ impl core::error::Error for Box { core::error::Error::provide(&**self, request); } } + +#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")] +unsafe impl PinCoerceUnsized for Box {} diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs index 28b08ef561143..3e44adf73f045 100644 --- a/library/alloc/src/lib.rs +++ b/library/alloc/src/lib.rs @@ -138,6 +138,7 @@ #![feature(maybe_uninit_uninit_array_transpose)] #![feature(panic_internals)] #![feature(pattern)] +#![feature(pin_coerce_unsized_trait)] #![feature(ptr_internals)] #![feature(ptr_metadata)] #![feature(ptr_sub_ptr)] diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index bc0874fc13f46..9352af99cdd45 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -256,6 +256,7 @@ use core::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn, Rece use core::panic::{RefUnwindSafe, UnwindSafe}; #[cfg(not(no_global_oom_handling))] use core::pin::Pin; +use core::pin::PinCoerceUnsized; use core::ptr::{self, drop_in_place, NonNull}; #[cfg(not(no_global_oom_handling))] use core::slice::from_raw_parts_mut; @@ -2176,6 +2177,12 @@ impl Deref for Rc { } } +#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")] +unsafe impl PinCoerceUnsized for Rc {} + +#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")] +unsafe impl PinCoerceUnsized for Weak {} + #[unstable(feature = "deref_pure_trait", issue = "87121")] unsafe impl DerefPure for Rc {} @@ -3689,6 +3696,9 @@ impl Deref for UniqueRc { } } +#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")] +unsafe impl PinCoerceUnsized for UniqueRc {} + #[unstable(feature = "unique_rc_arc", issue = "112566")] impl DerefMut for UniqueRc { fn deref_mut(&mut self) -> &mut T { diff --git a/library/alloc/src/sync.rs 
b/library/alloc/src/sync.rs index 3ad0dae77dbde..2c0d19b0ada09 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -20,7 +20,7 @@ use core::marker::{PhantomData, Unsize}; use core::mem::{self, align_of_val_raw, ManuallyDrop}; use core::ops::{CoerceUnsized, Deref, DerefPure, DispatchFromDyn, Receiver}; use core::panic::{RefUnwindSafe, UnwindSafe}; -use core::pin::Pin; +use core::pin::{Pin, PinCoerceUnsized}; use core::ptr::{self, NonNull}; #[cfg(not(no_global_oom_handling))] use core::slice::from_raw_parts_mut; @@ -2142,6 +2142,12 @@ impl Deref for Arc { } } +#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")] +unsafe impl PinCoerceUnsized for Arc {} + +#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")] +unsafe impl PinCoerceUnsized for Weak {} + #[unstable(feature = "deref_pure_trait", issue = "87121")] unsafe impl DerefPure for Arc {} diff --git a/library/alloc/tests/arc.rs b/library/alloc/tests/arc.rs index c37a80dca95c8..dc27c578b57ef 100644 --- a/library/alloc/tests/arc.rs +++ b/library/alloc/tests/arc.rs @@ -227,3 +227,17 @@ fn make_mut_unsized() { assert_eq!(*data, [11, 21, 31]); assert_eq!(*other_data, [110, 20, 30]); } + +#[allow(unused)] +mod pin_coerce_unsized { + use alloc::sync::Arc; + use core::pin::Pin; + + pub trait MyTrait {} + impl MyTrait for String {} + + // Pin coercion should work for Arc + pub fn pin_arc(arg: Pin>) -> Pin> { + arg + } +} diff --git a/library/alloc/tests/boxed.rs b/library/alloc/tests/boxed.rs index 4cacee0414d7d..faee64b2f6738 100644 --- a/library/alloc/tests/boxed.rs +++ b/library/alloc/tests/boxed.rs @@ -179,3 +179,40 @@ unsafe impl Allocator for ConstAllocator { self } } + +#[allow(unused)] +mod pin_coerce_unsized { + use alloc::boxed::Box; + use core::pin::Pin; + + trait MyTrait { + fn action(&self) -> &str; + } + impl MyTrait for String { + fn action(&self) -> &str { + &*self + } + } + struct MyStruct; + impl MyTrait for MyStruct { + fn action(&self) -> &str { + "MyStruct" + } + } + + // Pin coercion should work for Box + fn pin_box(arg: Pin>) -> Pin> { + arg + } + + #[test] + fn pin_coerce_unsized_box() { + let my_string = "my string"; + let a_string = Box::pin(String::from(my_string)); + let pin_box_str = pin_box(a_string); + assert_eq!(pin_box_str.as_ref().action(), my_string); + let a_struct = Box::pin(MyStruct); + let pin_box_struct = pin_box(a_struct); + assert_eq!(pin_box_struct.as_ref().action(), "MyStruct"); + } +} diff --git a/library/alloc/tests/lib.rs b/library/alloc/tests/lib.rs index 89538f272f069..3d4add6fae452 100644 --- a/library/alloc/tests/lib.rs +++ b/library/alloc/tests/lib.rs @@ -40,6 +40,7 @@ #![feature(drain_keep_rest)] #![feature(local_waker)] #![feature(vec_pop_if)] +#![feature(unique_rc_arc)] #![allow(internal_features)] #![deny(fuzzy_provenance_casts)] #![deny(unsafe_op_in_unsafe_fn)] diff --git a/library/alloc/tests/rc.rs b/library/alloc/tests/rc.rs index 499740e738ab0..29dbdcf225eb5 100644 --- a/library/alloc/tests/rc.rs +++ b/library/alloc/tests/rc.rs @@ -205,3 +205,20 @@ fn weak_may_dangle() { // `val` dropped here while still borrowed // borrow might be used here, when `val` is dropped and runs the `Drop` code for type `std::rc::Weak` } + +#[allow(unused)] +mod pin_coerce_unsized { + use alloc::rc::{Rc, UniqueRc}; + use core::pin::Pin; + + pub trait MyTrait {} + impl MyTrait for String {} + + // Pin coercion should work for Rc + pub fn pin_rc(arg: Pin>) -> Pin> { + arg + } + pub fn pin_unique_rc(arg: Pin>) -> Pin> { + arg + } +} diff --git 
a/library/core/src/cell.rs b/library/core/src/cell.rs index 0d66c2b52c84e..d860f3415d982 100644 --- a/library/core/src/cell.rs +++ b/library/core/src/cell.rs @@ -255,6 +255,7 @@ use crate::fmt::{self, Debug, Display}; use crate::marker::{PhantomData, Unsize}; use crate::mem; use crate::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn}; +use crate::pin::PinCoerceUnsized; use crate::ptr::{self, NonNull}; mod lazy; @@ -2396,3 +2397,21 @@ fn assert_coerce_unsized( let _: Cell<&dyn Send> = c; let _: RefCell<&dyn Send> = d; } + +#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")] +unsafe impl PinCoerceUnsized for UnsafeCell {} + +#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")] +unsafe impl PinCoerceUnsized for SyncUnsafeCell {} + +#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")] +unsafe impl PinCoerceUnsized for Cell {} + +#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")] +unsafe impl PinCoerceUnsized for RefCell {} + +#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")] +unsafe impl<'b, T: ?Sized> PinCoerceUnsized for Ref<'b, T> {} + +#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")] +unsafe impl<'b, T: ?Sized> PinCoerceUnsized for RefMut<'b, T> {} diff --git a/library/core/src/pin.rs b/library/core/src/pin.rs index d752151d10cc8..0569b8b762433 100644 --- a/library/core/src/pin.rs +++ b/library/core/src/pin.rs @@ -1715,10 +1715,56 @@ impl fmt::Pointer for Pin { // for other reasons, though, so we just need to take care not to allow such // impls to land in std. #[stable(feature = "pin", since = "1.33.0")] -impl CoerceUnsized> for Pin where Ptr: CoerceUnsized {} +impl CoerceUnsized> for Pin +where + Ptr: CoerceUnsized + PinCoerceUnsized, + U: PinCoerceUnsized, +{ +} + +#[stable(feature = "pin", since = "1.33.0")] +impl DispatchFromDyn> for Pin +where + Ptr: DispatchFromDyn + PinCoerceUnsized, + U: PinCoerceUnsized, +{ +} + +#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")] +/// Trait that indicates that this is a pointer or a wrapper for one, where +/// unsizing can be performed on the pointee when it is pinned. +/// +/// # Safety +/// +/// If this type implements `Deref`, then the concrete type returned by `deref` +/// and `deref_mut` must not change without a modification. The following +/// operations are not considered modifications: +/// +/// * Moving the pointer. +/// * Performing unsizing coercions on the pointer. +/// * Performing dynamic dispatch with the pointer. +/// * Calling `deref` or `deref_mut` on the pointer. +/// +/// The concrete type of a trait object is the type that the vtable corresponds +/// to. The concrete type of a slice is an array of the same element type and +/// the length specified in the metadata. The concrete type of a sized type +/// is the type itself. +pub unsafe trait PinCoerceUnsized {} + +#[stable(feature = "pin", since = "1.33.0")] +unsafe impl<'a, T: ?Sized> PinCoerceUnsized for &'a T {} + +#[stable(feature = "pin", since = "1.33.0")] +unsafe impl<'a, T: ?Sized> PinCoerceUnsized for &'a mut T {} + +#[stable(feature = "pin", since = "1.33.0")] +unsafe impl PinCoerceUnsized for Pin {} + +#[stable(feature = "pin", since = "1.33.0")] +unsafe impl PinCoerceUnsized for *const T {} #[stable(feature = "pin", since = "1.33.0")] -impl DispatchFromDyn> for Pin where Ptr: DispatchFromDyn {} +unsafe impl PinCoerceUnsized for *mut T {} /// Constructs a [Pin]<[&mut] T>, by pinning a `value: T` locally. 
/// diff --git a/library/core/src/ptr/non_null.rs b/library/core/src/ptr/non_null.rs index 4a716a7503964..44db227b79efd 100644 --- a/library/core/src/ptr/non_null.rs +++ b/library/core/src/ptr/non_null.rs @@ -3,6 +3,7 @@ use crate::marker::Unsize; use crate::mem::{MaybeUninit, SizedTypeProperties}; use crate::num::NonZero; use crate::ops::{CoerceUnsized, DispatchFromDyn}; +use crate::pin::PinCoerceUnsized; use crate::ptr::Unique; use crate::slice::{self, SliceIndex}; use crate::ub_checks::assert_unsafe_precondition; @@ -1724,6 +1725,9 @@ impl CoerceUnsized> for NonNull where T: Uns #[unstable(feature = "dispatch_from_dyn", issue = "none")] impl DispatchFromDyn> for NonNull where T: Unsize {} +#[stable(feature = "pin", since = "1.33.0")] +unsafe impl PinCoerceUnsized for NonNull {} + #[stable(feature = "nonnull", since = "1.25.0")] impl fmt::Debug for NonNull { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { diff --git a/library/core/src/ptr/unique.rs b/library/core/src/ptr/unique.rs index b74d691e45427..4810ebe01f9bb 100644 --- a/library/core/src/ptr/unique.rs +++ b/library/core/src/ptr/unique.rs @@ -1,6 +1,7 @@ use crate::fmt; use crate::marker::{PhantomData, Unsize}; use crate::ops::{CoerceUnsized, DispatchFromDyn}; +use crate::pin::PinCoerceUnsized; use crate::ptr::NonNull; /// A wrapper around a raw non-null `*mut T` that indicates that the possessor @@ -166,6 +167,9 @@ impl CoerceUnsized> for Unique where T: Unsiz #[unstable(feature = "ptr_internals", issue = "none")] impl DispatchFromDyn> for Unique where T: Unsize {} +#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")] +unsafe impl PinCoerceUnsized for Unique {} + #[unstable(feature = "ptr_internals", issue = "none")] impl fmt::Debug for Unique { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { diff --git a/library/core/tests/pin.rs b/library/core/tests/pin.rs index 6f617c8d0c297..7a6af46a74323 100644 --- a/library/core/tests/pin.rs +++ b/library/core/tests/pin.rs @@ -29,3 +29,49 @@ fn pin_const() { pin_mut_const(); } + +#[allow(unused)] +mod pin_coerce_unsized { + use core::cell::{Cell, RefCell, UnsafeCell}; + use core::pin::Pin; + use core::ptr::NonNull; + + pub trait MyTrait {} + impl MyTrait for String {} + + // These Pins should continue to compile. + // Do note that these instances of Pin types cannot be used + // meaningfully because all methods require a Deref/DerefMut + // bounds on the pointer type and Cell, RefCell and UnsafeCell + // do not implement Deref/DerefMut. + + pub fn cell(arg: Pin>>) -> Pin>> { + arg + } + pub fn ref_cell(arg: Pin>>) -> Pin>> { + arg + } + pub fn unsafe_cell(arg: Pin>>) -> Pin>> { + arg + } + + // These sensible Pin coercions are possible. 
+ pub fn pin_mut_ref(arg: Pin<&mut String>) -> Pin<&mut dyn MyTrait> { + arg + } + pub fn pin_ref(arg: Pin<&String>) -> Pin<&dyn MyTrait> { + arg + } + pub fn pin_ptr(arg: Pin<*const String>) -> Pin<*const dyn MyTrait> { + arg + } + pub fn pin_ptr_mut(arg: Pin<*mut String>) -> Pin<*mut dyn MyTrait> { + arg + } + pub fn pin_non_null(arg: Pin>) -> Pin> { + arg + } + pub fn nesting_pins(arg: Pin>) -> Pin> { + arg + } +} diff --git a/library/std/src/lib.rs b/library/std/src/lib.rs index 6bd9c59a949c2..58d3c4a994b35 100644 --- a/library/std/src/lib.rs +++ b/library/std/src/lib.rs @@ -342,6 +342,7 @@ #![feature(maybe_uninit_write_slice)] #![feature(panic_can_unwind)] #![feature(panic_internals)] +#![feature(pin_coerce_unsized_trait)] #![feature(pointer_is_aligned_to)] #![feature(portable_simd)] #![feature(prelude_2024)] diff --git a/library/std/src/sys/pal/sgx/abi/usercalls/alloc.rs b/library/std/src/sys/pal/sgx/abi/usercalls/alloc.rs index 298095257396a..5069ab82ccc90 100644 --- a/library/std/src/sys/pal/sgx/abi/usercalls/alloc.rs +++ b/library/std/src/sys/pal/sgx/abi/usercalls/alloc.rs @@ -8,6 +8,7 @@ use crate::cell::UnsafeCell; use crate::convert::TryInto; use crate::mem::{self, ManuallyDrop}; use crate::ops::{CoerceUnsized, Deref, DerefMut, Index, IndexMut}; +use crate::pin::PinCoerceUnsized; use crate::ptr::{self, NonNull}; use crate::slice::SliceIndex; use crate::{cmp, intrinsics, slice}; @@ -751,6 +752,9 @@ where #[unstable(feature = "sgx_platform", issue = "56975")] impl, U> CoerceUnsized> for UserRef {} +#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")] +unsafe impl PinCoerceUnsized for UserRef {} + #[unstable(feature = "sgx_platform", issue = "56975")] impl Index for UserRef<[T]> where From c2c46e3e305f82b7f4ce0afa4ff7977075421318 Mon Sep 17 00:00:00 2001 From: B I Mohammed Abbas Date: Sat, 3 Aug 2024 08:00:39 +0530 Subject: [PATCH 02/21] Forbid unsafe_op_in_unsafe_fn in vxworks specific os and sys files --- library/std/src/os/vxworks/mod.rs | 1 + library/std/src/sys/pal/unix/process/process_vxworks.rs | 1 + 2 files changed, 2 insertions(+) diff --git a/library/std/src/os/vxworks/mod.rs b/library/std/src/os/vxworks/mod.rs index 0a7ac641dd3e1..b09aa72f72693 100644 --- a/library/std/src/os/vxworks/mod.rs +++ b/library/std/src/os/vxworks/mod.rs @@ -1,6 +1,7 @@ //! VxWorks-specific definitions #![stable(feature = "raw_ext", since = "1.1.0")] +#![forbid(unsafe_op_in_unsafe_fn)] pub mod fs; pub mod raw; diff --git a/library/std/src/sys/pal/unix/process/process_vxworks.rs b/library/std/src/sys/pal/unix/process/process_vxworks.rs index 6a9d8fab1d412..0477b3d9a70da 100644 --- a/library/std/src/sys/pal/unix/process/process_vxworks.rs +++ b/library/std/src/sys/pal/unix/process/process_vxworks.rs @@ -1,3 +1,4 @@ +#![forbid(unsafe_op_in_unsafe_fn)] use libc::{self, c_char, c_int, RTP_ID}; use crate::io::{self, ErrorKind}; From c369c2034f7b4fb4af2c5e28806aaec65f38adc9 Mon Sep 17 00:00:00 2001 From: Trevor Gross Date: Mon, 5 Aug 2024 12:39:35 -0500 Subject: [PATCH 03/21] Add a triagebot mention for `library/Cargo.lock` Since [1], `Cargo.lock` was split into `Cargo.lock` and `library/Cargo.lock`. Update Triagebot to give the same warning for both. 
[1]: https://github.com/rust-lang/rust/pull/128534 --- triagebot.toml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/triagebot.toml b/triagebot.toml index 2795f93728470..33108f743cb94 100644 --- a/triagebot.toml +++ b/triagebot.toml @@ -778,6 +778,14 @@ If this was unintentional then you should revert the changes before this PR is m Otherwise, you can ignore this comment. """ +[mentions."library/Cargo.lock"] +message = """ +These commits modify the `library/Cargo.lock` file. Unintentional changes to `library/Cargo.lock` can be introduced when switching branches and rebasing PRs. + +If this was unintentional then you should revert the changes before this PR is merged. +Otherwise, you can ignore this comment. +""" + [mentions."src/tools/x"] message = "`src/tools/x` was changed. Bump version of Cargo.toml in `src/tools/x` so tidy will suggest installing the new version." From 4af77dfea5290b0053efcb31cad774cabf71af59 Mon Sep 17 00:00:00 2001 From: binarycat Date: Tue, 30 Jul 2024 14:27:20 -0400 Subject: [PATCH 04/21] implement BufReader::peek --- library/std/src/io/buffered/bufreader.rs | 34 +++++++++++++++++++ .../std/src/io/buffered/bufreader/buffer.rs | 21 ++++++++++++ 2 files changed, 55 insertions(+) diff --git a/library/std/src/io/buffered/bufreader.rs b/library/std/src/io/buffered/bufreader.rs index f11dd50c5e2b7..0b12e5777c840 100644 --- a/library/std/src/io/buffered/bufreader.rs +++ b/library/std/src/io/buffered/bufreader.rs @@ -94,6 +94,40 @@ impl BufReader { pub fn with_capacity(capacity: usize, inner: R) -> BufReader { BufReader { inner, buf: Buffer::with_capacity(capacity) } } + + /// Attempt to look ahead `n` bytes. + /// + /// `n` must be less than `capacity`. + /// + /// ## Examples + /// + /// ```rust + /// #![feature(bufreader_peek)] + /// use std::io::{Read, BufReader}; + /// + /// let mut bytes = &b"oh, hello"[..]; + /// let mut rdr = BufReader::with_capacity(6, &mut bytes); + /// assert_eq!(rdr.peek(2).unwrap(), b"oh"); + /// let mut buf = [0; 4]; + /// rdr.read(&mut buf[..]).unwrap(); + /// assert_eq!(&buf, b"oh, "); + /// assert_eq!(rdr.peek(2).unwrap(), b"he"); + /// let mut s = String::new(); + /// rdr.read_to_string(&mut s).unwrap(); + /// assert_eq!(&s, "hello"); + /// ``` + #[unstable(feature = "bufreader_peek", issue = "128405")] + pub fn peek(&mut self, n: usize) -> io::Result<&[u8]> { + assert!(n <= self.capacity()); + while n > self.buf.buffer().len() { + if self.buf.pos() > 0 { + self.buf.backshift(); + } + self.buf.read_more(&mut self.inner)?; + debug_assert_eq!(self.buf.pos(), 0); + } + Ok(&self.buf.buffer()[..n]) + } } impl BufReader { diff --git a/library/std/src/io/buffered/bufreader/buffer.rs b/library/std/src/io/buffered/bufreader/buffer.rs index 796137c0123e7..ccd67fafb45b4 100644 --- a/library/std/src/io/buffered/bufreader/buffer.rs +++ b/library/std/src/io/buffered/bufreader/buffer.rs @@ -97,6 +97,27 @@ impl Buffer { self.pos = self.pos.saturating_sub(amt); } + /// Read more bytes into the buffer without discarding any of its contents + pub fn read_more(&mut self, mut reader: impl Read) -> io::Result<()> { + let mut buf = BorrowedBuf::from(&mut self.buf[self.pos..]); + let old_init = self.initialized - self.pos; + unsafe { + buf.set_init(old_init); + } + reader.read_buf(buf.unfilled())?; + self.filled += buf.len(); + self.initialized += buf.init_len() - old_init; + Ok(()) + } + + /// Remove bytes that have already been read from the buffer. 
+ pub fn backshift(&mut self) { + self.buf.copy_within(self.pos.., 0); + self.initialized -= self.pos; + self.filled -= self.pos; + self.pos = 0; + } + #[inline] pub fn fill_buf(&mut self, mut reader: impl Read) -> io::Result<&[u8]> { // If we've reached the end of our internal buffer then we need to fetch From c8d50ef2ee71b6b586e52f0834657a7fd4b42800 Mon Sep 17 00:00:00 2001 From: Chris Denton Date: Mon, 5 Aug 2024 22:13:12 +0000 Subject: [PATCH 05/21] Windows: Test if `\\.\NUL` works as an input file --- tests/run-make/dos-device-input/rmake.rs | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 tests/run-make/dos-device-input/rmake.rs diff --git a/tests/run-make/dos-device-input/rmake.rs b/tests/run-make/dos-device-input/rmake.rs new file mode 100644 index 0000000000000..cc34dd551071d --- /dev/null +++ b/tests/run-make/dos-device-input/rmake.rs @@ -0,0 +1,9 @@ +//@ only-windows +// Reason: dos devices are a Windows thing + +use run_make_support::rustc; + +fn main() { + rustc().input(r"\\.\NUL").crate_type("staticlib").run(); + rustc().input(r"\\?\NUL").crate_type("staticlib").run(); +} From c6d94821f06320108b9fd35219a13c5ec4094ed4 Mon Sep 17 00:00:00 2001 From: Chris Denton Date: Mon, 5 Aug 2024 22:30:13 +0000 Subject: [PATCH 06/21] Don't ICE if getting the input's file_stem fails --- compiler/rustc_session/src/config.rs | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/compiler/rustc_session/src/config.rs b/compiler/rustc_session/src/config.rs index f58a991a616da..a60af3f2d7137 100644 --- a/compiler/rustc_session/src/config.rs +++ b/compiler/rustc_session/src/config.rs @@ -838,10 +838,14 @@ pub enum Input { impl Input { pub fn filestem(&self) -> &str { - match *self { - Input::File(ref ifile) => ifile.file_stem().unwrap().to_str().unwrap(), - Input::Str { .. } => "rust_out", + if let Input::File(ifile) = self { + // If for some reason getting the file stem as a UTF-8 string fails, + // then fallback to a fixed name. 
+ if let Some(name) = ifile.file_stem().and_then(OsStr::to_str) { + return name; + } } + "rust_out" } pub fn source_name(&self) -> FileName { From 3fd645e254564b0f71027f42b940625670bc35ff Mon Sep 17 00:00:00 2001 From: Chris Denton Date: Mon, 5 Aug 2024 23:50:15 +0000 Subject: [PATCH 07/21] Check staticlib name falls back to `rust_out` --- tests/run-make/dos-device-input/rmake.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/run-make/dos-device-input/rmake.rs b/tests/run-make/dos-device-input/rmake.rs index cc34dd551071d..dee3b86f09591 100644 --- a/tests/run-make/dos-device-input/rmake.rs +++ b/tests/run-make/dos-device-input/rmake.rs @@ -1,9 +1,13 @@ //@ only-windows // Reason: dos devices are a Windows thing -use run_make_support::rustc; +use std::path::Path; + +use run_make_support::{rustc, static_lib_name}; fn main() { rustc().input(r"\\.\NUL").crate_type("staticlib").run(); rustc().input(r"\\?\NUL").crate_type("staticlib").run(); + + assert!(Path::new(&static_lib_name("rust_out")).exists()); } From fdb64b94786a7e090100c6dbc411a00678a3e971 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=AE=B8=E6=9D=B0=E5=8F=8B=20Jieyou=20Xu=20=28Joe=29?= Date: Tue, 6 Aug 2024 04:27:30 +0000 Subject: [PATCH 08/21] tests: add regression test to make sure `cfg_attr` isn't considered unhandled --- tests/ui/attributes/check-cfg_attr-ice.rs | 68 ++++++++++++ tests/ui/attributes/check-cfg_attr-ice.stderr | 101 ++++++++++++++++++ 2 files changed, 169 insertions(+) create mode 100644 tests/ui/attributes/check-cfg_attr-ice.rs create mode 100644 tests/ui/attributes/check-cfg_attr-ice.stderr diff --git a/tests/ui/attributes/check-cfg_attr-ice.rs b/tests/ui/attributes/check-cfg_attr-ice.rs new file mode 100644 index 0000000000000..5bf8baab14140 --- /dev/null +++ b/tests/ui/attributes/check-cfg_attr-ice.rs @@ -0,0 +1,68 @@ +//! I missed a `cfg_attr` match in #128581, it should have had the same treatment as `cfg`. If +//! an invalid attribute starting with `cfg_attr` is passed, then it would trigger an ICE because +//! it was not considered "checked" (e.g. `#[cfg_attr::skip]` or `#[cfg_attr::no_such_thing]`). +//! +//! This test is not exhaustive, there's too many possible positions to check, instead it just does +//! a basic smoke test in a few select positions to make sure we don't ICE for e.g. +//! `#[cfg_attr::no_such_thing]`. +//! +//! 
issue: rust-lang/rust#128716 +#![crate_type = "lib"] + +#[cfg_attr::no_such_thing] +//~^ ERROR failed to resolve +mod we_are_no_strangers_to_love {} + +#[cfg_attr::no_such_thing] +//~^ ERROR failed to resolve +struct YouKnowTheRules { + #[cfg_attr::no_such_thing] + //~^ ERROR failed to resolve + and_so_do_i: u8, +} + +#[cfg_attr::no_such_thing] +//~^ ERROR failed to resolve +fn a_full_commitment() { + #[cfg_attr::no_such_thing] + //~^ ERROR failed to resolve + let is_what_i_am_thinking_of = (); +} + +#[cfg_attr::no_such_thing] +//~^ ERROR failed to resolve +union AnyOtherGuy { + owo: () +} +struct This; + +#[cfg_attr(FALSE, doc = "you wouldn't get this")] +impl From for This { + #[cfg_attr::no_such_thing] + //~^ ERROR failed to resolve + fn from(#[cfg_attr::no_such_thing] any_other_guy: AnyOtherGuy) -> This { + //~^ ERROR failed to resolve + #[cfg_attr::no_such_thing] + //~^ ERROR attributes on expressions are experimental + //~| ERROR failed to resolve + unreachable!() + } +} + +#[cfg_attr::no_such_thing] +//~^ ERROR failed to resolve +enum NeverGonna { + #[cfg_attr::no_such_thing] + //~^ ERROR failed to resolve + GiveYouUp(#[cfg_attr::no_such_thing] u8), + //~^ ERROR failed to resolve + LetYouDown { + #![cfg_attr::no_such_thing] + //~^ ERROR an inner attribute is not permitted in this context + never_gonna: (), + round_around: (), + #[cfg_attr::no_such_thing] + //~^ ERROR failed to resolve + and_desert_you: (), + }, +} diff --git a/tests/ui/attributes/check-cfg_attr-ice.stderr b/tests/ui/attributes/check-cfg_attr-ice.stderr new file mode 100644 index 0000000000000..dbdf32597f7b2 --- /dev/null +++ b/tests/ui/attributes/check-cfg_attr-ice.stderr @@ -0,0 +1,101 @@ +error: an inner attribute is not permitted in this context + --> $DIR/check-cfg_attr-ice.rs:60:9 + | +LL | #![cfg_attr::no_such_thing] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: inner attributes, like `#![no_std]`, annotate the item enclosing them, and are usually found at the beginning of source files + = note: outer attributes, like `#[test]`, annotate the item following them + +error[E0658]: attributes on expressions are experimental + --> $DIR/check-cfg_attr-ice.rs:45:9 + | +LL | #[cfg_attr::no_such_thing] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: see issue #15701 for more information + = help: add `#![feature(stmt_expr_attributes)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error[E0433]: failed to resolve: use of undeclared crate or module `cfg_attr` + --> $DIR/check-cfg_attr-ice.rs:52:3 + | +LL | #[cfg_attr::no_such_thing] + | ^^^^^^^^ use of undeclared crate or module `cfg_attr` + +error[E0433]: failed to resolve: use of undeclared crate or module `cfg_attr` + --> $DIR/check-cfg_attr-ice.rs:55:7 + | +LL | #[cfg_attr::no_such_thing] + | ^^^^^^^^ use of undeclared crate or module `cfg_attr` + +error[E0433]: failed to resolve: use of undeclared crate or module `cfg_attr` + --> $DIR/check-cfg_attr-ice.rs:57:17 + | +LL | GiveYouUp(#[cfg_attr::no_such_thing] u8), + | ^^^^^^^^ use of undeclared crate or module `cfg_attr` + +error[E0433]: failed to resolve: use of undeclared crate or module `cfg_attr` + --> $DIR/check-cfg_attr-ice.rs:64:11 + | +LL | #[cfg_attr::no_such_thing] + | ^^^^^^^^ use of undeclared crate or module `cfg_attr` + +error[E0433]: failed to resolve: use of undeclared crate or module `cfg_attr` + --> $DIR/check-cfg_attr-ice.rs:41:7 + | +LL | #[cfg_attr::no_such_thing] + | ^^^^^^^^ use of undeclared crate or module 
`cfg_attr` + +error[E0433]: failed to resolve: use of undeclared crate or module `cfg_attr` + --> $DIR/check-cfg_attr-ice.rs:43:15 + | +LL | fn from(#[cfg_attr::no_such_thing] any_other_guy: AnyOtherGuy) -> This { + | ^^^^^^^^ use of undeclared crate or module `cfg_attr` + +error[E0433]: failed to resolve: use of undeclared crate or module `cfg_attr` + --> $DIR/check-cfg_attr-ice.rs:45:11 + | +LL | #[cfg_attr::no_such_thing] + | ^^^^^^^^ use of undeclared crate or module `cfg_attr` + +error[E0433]: failed to resolve: use of undeclared crate or module `cfg_attr` + --> $DIR/check-cfg_attr-ice.rs:32:3 + | +LL | #[cfg_attr::no_such_thing] + | ^^^^^^^^ use of undeclared crate or module `cfg_attr` + +error[E0433]: failed to resolve: use of undeclared crate or module `cfg_attr` + --> $DIR/check-cfg_attr-ice.rs:24:3 + | +LL | #[cfg_attr::no_such_thing] + | ^^^^^^^^ use of undeclared crate or module `cfg_attr` + +error[E0433]: failed to resolve: use of undeclared crate or module `cfg_attr` + --> $DIR/check-cfg_attr-ice.rs:27:7 + | +LL | #[cfg_attr::no_such_thing] + | ^^^^^^^^ use of undeclared crate or module `cfg_attr` + +error[E0433]: failed to resolve: use of undeclared crate or module `cfg_attr` + --> $DIR/check-cfg_attr-ice.rs:16:3 + | +LL | #[cfg_attr::no_such_thing] + | ^^^^^^^^ use of undeclared crate or module `cfg_attr` + +error[E0433]: failed to resolve: use of undeclared crate or module `cfg_attr` + --> $DIR/check-cfg_attr-ice.rs:19:7 + | +LL | #[cfg_attr::no_such_thing] + | ^^^^^^^^ use of undeclared crate or module `cfg_attr` + +error[E0433]: failed to resolve: use of undeclared crate or module `cfg_attr` + --> $DIR/check-cfg_attr-ice.rs:12:3 + | +LL | #[cfg_attr::no_such_thing] + | ^^^^^^^^ use of undeclared crate or module `cfg_attr` + +error: aborting due to 15 previous errors + +Some errors have detailed explanations: E0433, E0658. +For more information about an error, try `rustc --explain E0433`. 
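
The full test above exercises many attribute positions; reduced to its essence, the problem is any attribute path that merely starts with `cfg_attr` without being the built-in `cfg_attr(predicate, attribute)` form. The sketch below is illustrative only (the item names are made up and it is not part of the PR's test suite); per the commit message, such code used to ICE in `check_attr`, and with this PR it should instead report plain E0433 resolution errors, which is what the expected stderr above records. The next commit makes that happen by listing `sym::cfg_attr` alongside `sym::cfg` in `check_attr`, so both are treated as already-handled attributes.

// Minimal, hypothetical repro (not taken from the PR): a path attribute that
// begins with `cfg_attr` but does not resolve to anything.
#[cfg_attr::no_such_thing] // previously an ICE, now "failed to resolve" (E0433)
fn reduced_case() {}

// The real built-in form continues to work as before.
#[cfg_attr(debug_assertions, allow(dead_code))]
fn still_fine() {}
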
From 97cbc2083fd2a961f49016cd5ce9c9a757b51f9e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=AE=B8=E6=9D=B0=E5=8F=8B=20Jieyou=20Xu=20=28Joe=29?= Date: Tue, 6 Aug 2024 04:26:37 +0000 Subject: [PATCH 09/21] check_attr: treat cfg_attr like cfg --- compiler/rustc_passes/src/check_attr.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/compiler/rustc_passes/src/check_attr.rs b/compiler/rustc_passes/src/check_attr.rs index a0d0d80b9572e..cf7d724ab63ae 100644 --- a/compiler/rustc_passes/src/check_attr.rs +++ b/compiler/rustc_passes/src/check_attr.rs @@ -250,6 +250,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { | sym::deny | sym::forbid | sym::cfg + | sym::cfg_attr // need to be fixed | sym::cfi_encoding // FIXME(cfi_encoding) | sym::may_dangle // FIXME(dropck_eyepatch) From 522af10cccd9c23f5147d123cd6373ae0aa0795d Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Mon, 5 Aug 2024 17:34:44 +0200 Subject: [PATCH 10/21] interpret: refactor function call handling to be better-abstracted --- .../src/const_eval/eval_queries.rs | 4 +- .../src/const_eval/machine.rs | 17 +- .../src/interpret/{terminator.rs => call.rs} | 809 ++++++++---------- .../src/interpret/eval_context.rs | 763 +---------------- .../rustc_const_eval/src/interpret/machine.rs | 2 +- .../rustc_const_eval/src/interpret/mod.rs | 8 +- .../rustc_const_eval/src/interpret/operand.rs | 1 + .../rustc_const_eval/src/interpret/stack.rs | 638 ++++++++++++++ .../rustc_const_eval/src/interpret/step.rs | 250 +++++- src/tools/miri/src/concurrency/thread.rs | 4 +- src/tools/miri/src/eval.rs | 23 +- src/tools/miri/src/helpers.rs | 55 +- src/tools/miri/src/shims/panic.rs | 26 +- src/tools/miri/src/shims/tls.rs | 8 +- src/tools/miri/src/shims/unix/thread.rs | 3 +- .../libc_pthread_create_too_few_args.rs | 3 +- .../libc_pthread_create_too_few_args.stderr | 10 +- .../libc_pthread_create_too_many_args.rs | 3 +- .../libc_pthread_create_too_many_args.stderr | 10 +- .../fail/function_calls/check_callback_abi.rs | 2 +- .../function_calls/check_callback_abi.stderr | 4 +- .../miri/tests/pass/alloc-access-tracking.rs | 4 +- 22 files changed, 1334 insertions(+), 1313 deletions(-) rename compiler/rustc_const_eval/src/interpret/{terminator.rs => call.rs} (59%) create mode 100644 compiler/rustc_const_eval/src/interpret/stack.rs diff --git a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs index 6d5bca5731331..ff27e4000163a 100644 --- a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs +++ b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs @@ -73,7 +73,9 @@ fn eval_body_using_ecx<'tcx, R: InterpretationResult<'tcx>>( cid.promoted.map_or_else(String::new, |p| format!("::{p:?}")) ); - ecx.push_stack_frame( + // This can't use `init_stack_frame` since `body` is not a function, + // so computing its ABI would fail. It's also not worth it since there are no arguments to pass. 
+ ecx.push_stack_frame_raw( cid.instance, body, &ret.clone().into(), diff --git a/compiler/rustc_const_eval/src/const_eval/machine.rs b/compiler/rustc_const_eval/src/const_eval/machine.rs index 79e8e2127765a..a075bdc191181 100644 --- a/compiler/rustc_const_eval/src/const_eval/machine.rs +++ b/compiler/rustc_const_eval/src/const_eval/machine.rs @@ -24,8 +24,9 @@ use crate::errors::{LongRunning, LongRunningWarn}; use crate::fluent_generated as fluent; use crate::interpret::{ self, compile_time_machine, err_ub, throw_exhaust, throw_inval, throw_ub_custom, throw_unsup, - throw_unsup_format, AllocId, AllocRange, ConstAllocation, CtfeProvenance, FnArg, FnVal, Frame, + throw_unsup_format, AllocId, AllocRange, ConstAllocation, CtfeProvenance, FnArg, Frame, GlobalAlloc, ImmTy, InterpCx, InterpResult, MPlaceTy, OpTy, Pointer, PointerArithmetic, Scalar, + StackPopCleanup, }; /// When hitting this many interpreted terminators we emit a deny by default lint @@ -306,17 +307,15 @@ impl<'tcx> CompileTimeInterpCx<'tcx> { let align = ImmTy::from_uint(target_align, args[1].layout).into(); let fn_abi = self.fn_abi_of_instance(instance, ty::List::empty())?; - // We replace the entire function call with a "tail call". - // Note that this happens before the frame of the original function - // is pushed on the stack. - self.eval_fn_call( - FnVal::Instance(instance), - (CallAbi::Rust, fn_abi), + // Push the stack frame with our own adjusted arguments. + self.init_stack_frame( + instance, + self.load_mir(instance.def, None)?, + fn_abi, &[FnArg::Copy(addr), FnArg::Copy(align)], /* with_caller_location = */ false, dest, - ret, - mir::UnwindAction::Unreachable, + StackPopCleanup::Goto { ret, unwind: mir::UnwindAction::Unreachable }, )?; Ok(ControlFlow::Break(())) } else { diff --git a/compiler/rustc_const_eval/src/interpret/terminator.rs b/compiler/rustc_const_eval/src/interpret/call.rs similarity index 59% rename from compiler/rustc_const_eval/src/interpret/terminator.rs rename to compiler/rustc_const_eval/src/interpret/call.rs index 47d0e22b527d3..9d2870a4a3130 100644 --- a/compiler/rustc_const_eval/src/interpret/terminator.rs +++ b/compiler/rustc_const_eval/src/interpret/call.rs @@ -1,24 +1,23 @@ +//! Manages calling a concrete function (with known MIR body) with argument passing, +//! and returning the return value to the caller. use std::borrow::Cow; -use either::Either; +use either::{Left, Right}; use rustc_middle::ty::layout::{FnAbiOf, IntegerExt, LayoutOf, TyAndLayout}; use rustc_middle::ty::{self, AdtDef, Instance, Ty}; use rustc_middle::{bug, mir, span_bug}; -use rustc_span::source_map::Spanned; use rustc_span::sym; use rustc_target::abi::call::{ArgAbi, FnAbi, PassMode}; use rustc_target::abi::{self, FieldIdx, Integer}; use rustc_target::spec::abi::Abi; -use tracing::trace; +use tracing::{info, instrument, trace}; use super::{ throw_ub, throw_ub_custom, throw_unsup_format, CtfeProvenance, FnVal, ImmTy, InterpCx, - InterpResult, MPlaceTy, Machine, OpTy, PlaceTy, Projectable, Provenance, Scalar, - StackPopCleanup, + InterpResult, MPlaceTy, Machine, OpTy, PlaceTy, Projectable, Provenance, ReturnAction, Scalar, + StackPopCleanup, StackPopInfo, }; use crate::fluent_generated as fluent; -use crate::interpret::eval_context::StackPopInfo; -use crate::interpret::ReturnAction; /// An argment passed to a function. 
#[derive(Clone, Debug)] @@ -39,15 +38,6 @@ impl<'tcx, Prov: Provenance> FnArg<'tcx, Prov> { } } -struct EvaluatedCalleeAndArgs<'tcx, M: Machine<'tcx>> { - callee: FnVal<'tcx, M::ExtraFnVal>, - args: Vec>, - fn_sig: ty::FnSig<'tcx>, - fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>, - /// True if the function is marked as `#[track_caller]` ([`ty::InstanceKind::requires_caller_location`]) - with_caller_location: bool, -} - impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { /// Make a copy of the given fn_arg. Any `InPlace` are degenerated to copies, no protection of the /// original memory occurs. @@ -67,7 +57,8 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { args.iter().map(|fn_arg| self.copy_fn_arg(fn_arg)).collect() } - pub fn fn_arg_field( + /// Helper function for argument untupling. + pub(super) fn fn_arg_field( &self, arg: &FnArg<'tcx, M::Provenance>, field: usize, @@ -78,190 +69,6 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { }) } - pub(super) fn eval_terminator( - &mut self, - terminator: &mir::Terminator<'tcx>, - ) -> InterpResult<'tcx> { - use rustc_middle::mir::TerminatorKind::*; - match terminator.kind { - Return => { - self.return_from_current_stack_frame(/* unwinding */ false)? - } - - Goto { target } => self.go_to_block(target), - - SwitchInt { ref discr, ref targets } => { - let discr = self.read_immediate(&self.eval_operand(discr, None)?)?; - trace!("SwitchInt({:?})", *discr); - - // Branch to the `otherwise` case by default, if no match is found. - let mut target_block = targets.otherwise(); - - for (const_int, target) in targets.iter() { - // Compare using MIR BinOp::Eq, to also support pointer values. - // (Avoiding `self.binary_op` as that does some redundant layout computation.) - let res = self.binary_op( - mir::BinOp::Eq, - &discr, - &ImmTy::from_uint(const_int, discr.layout), - )?; - if res.to_scalar().to_bool()? { - target_block = target; - break; - } - } - - self.go_to_block(target_block); - } - - Call { - ref func, - ref args, - destination, - target, - unwind, - call_source: _, - fn_span: _, - } => { - let old_stack = self.frame_idx(); - let old_loc = self.frame().loc; - - let EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location } = - self.eval_callee_and_args(terminator, func, args)?; - - let destination = self.force_allocation(&self.eval_place(destination)?)?; - self.eval_fn_call( - callee, - (fn_sig.abi, fn_abi), - &args, - with_caller_location, - &destination, - target, - if fn_abi.can_unwind { unwind } else { mir::UnwindAction::Unreachable }, - )?; - // Sanity-check that `eval_fn_call` either pushed a new frame or - // did a jump to another block. 
- if self.frame_idx() == old_stack && self.frame().loc == old_loc { - span_bug!(terminator.source_info.span, "evaluating this call made no progress"); - } - } - - TailCall { ref func, ref args, fn_span: _ } => { - let old_frame_idx = self.frame_idx(); - - let EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location } = - self.eval_callee_and_args(terminator, func, args)?; - - self.eval_fn_tail_call(callee, (fn_sig.abi, fn_abi), &args, with_caller_location)?; - - if self.frame_idx() != old_frame_idx { - span_bug!( - terminator.source_info.span, - "evaluating this tail call pushed a new stack frame" - ); - } - } - - Drop { place, target, unwind, replace: _ } => { - let place = self.eval_place(place)?; - let instance = Instance::resolve_drop_in_place(*self.tcx, place.layout.ty); - if let ty::InstanceKind::DropGlue(_, None) = instance.def { - // This is the branch we enter if and only if the dropped type has no drop glue - // whatsoever. This can happen as a result of monomorphizing a drop of a - // generic. In order to make sure that generic and non-generic code behaves - // roughly the same (and in keeping with Mir semantics) we do nothing here. - self.go_to_block(target); - return Ok(()); - } - trace!("TerminatorKind::drop: {:?}, type {}", place, place.layout.ty); - self.drop_in_place(&place, instance, target, unwind)?; - } - - Assert { ref cond, expected, ref msg, target, unwind } => { - let ignored = - M::ignore_optional_overflow_checks(self) && msg.is_optional_overflow_check(); - let cond_val = self.read_scalar(&self.eval_operand(cond, None)?)?.to_bool()?; - if ignored || expected == cond_val { - self.go_to_block(target); - } else { - M::assert_panic(self, msg, unwind)?; - } - } - - UnwindTerminate(reason) => { - M::unwind_terminate(self, reason)?; - } - - // When we encounter Resume, we've finished unwinding - // cleanup for the current stack frame. We pop it in order - // to continue unwinding the next frame - UnwindResume => { - trace!("unwinding: resuming from cleanup"); - // By definition, a Resume terminator means - // that we're unwinding - self.return_from_current_stack_frame(/* unwinding */ true)?; - return Ok(()); - } - - // It is UB to ever encounter this. - Unreachable => throw_ub!(Unreachable), - - // These should never occur for MIR we actually run. - FalseEdge { .. } | FalseUnwind { .. } | Yield { .. } | CoroutineDrop => span_bug!( - terminator.source_info.span, - "{:#?} should have been eliminated by MIR pass", - terminator.kind - ), - - InlineAsm { template, ref operands, options, ref targets, .. } => { - M::eval_inline_asm(self, template, operands, options, targets)?; - } - } - - Ok(()) - } - - /// Evaluate the arguments of a function call - pub(super) fn eval_fn_call_arguments( - &self, - ops: &[Spanned>], - ) -> InterpResult<'tcx, Vec>> { - ops.iter() - .map(|op| { - let arg = match &op.node { - mir::Operand::Copy(_) | mir::Operand::Constant(_) => { - // Make a regular copy. - let op = self.eval_operand(&op.node, None)?; - FnArg::Copy(op) - } - mir::Operand::Move(place) => { - // If this place lives in memory, preserve its location. - // We call `place_to_op` which will be an `MPlaceTy` whenever there exists - // an mplace for this place. (This is in contrast to `PlaceTy::as_mplace_or_local` - // which can return a local even if that has an mplace.) 
- let place = self.eval_place(*place)?; - let op = self.place_to_op(&place)?; - - match op.as_mplace_or_imm() { - Either::Left(mplace) => FnArg::InPlace(mplace), - Either::Right(_imm) => { - // This argument doesn't live in memory, so there's no place - // to make inaccessible during the call. - // We rely on there not being any stray `PlaceTy` that would let the - // caller directly access this local! - // This is also crucial for tail calls, where we want the `FnArg` to - // stay valid when the old stack frame gets popped. - FnArg::Copy(op) - } - } - } - }; - - Ok(arg) - }) - .collect() - } - /// Find the wrapped inner type of a transparent wrapper. /// Must not be called on 1-ZST (as they don't have a uniquely defined "wrapped field"). /// @@ -503,46 +310,209 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { Ok(()) } - /// Shared part of `Call` and `TailCall` implementation — finding and evaluating all the - /// necessary information about callee and arguments to make a call. - fn eval_callee_and_args( - &self, - terminator: &mir::Terminator<'tcx>, - func: &mir::Operand<'tcx>, - args: &[Spanned>], - ) -> InterpResult<'tcx, EvaluatedCalleeAndArgs<'tcx, M>> { - let func = self.eval_operand(func, None)?; - let args = self.eval_fn_call_arguments(args)?; - - let fn_sig_binder = func.layout.ty.fn_sig(*self.tcx); - let fn_sig = self.tcx.normalize_erasing_late_bound_regions(self.param_env, fn_sig_binder); - let extra_args = &args[fn_sig.inputs().len()..]; - let extra_args = - self.tcx.mk_type_list_from_iter(extra_args.iter().map(|arg| arg.layout().ty)); - - let (callee, fn_abi, with_caller_location) = match *func.layout.ty.kind() { - ty::FnPtr(_sig) => { - let fn_ptr = self.read_pointer(&func)?; - let fn_val = self.get_ptr_fn(fn_ptr)?; - (fn_val, self.fn_abi_of_fn_ptr(fn_sig_binder, extra_args)?, false) - } - ty::FnDef(def_id, args) => { - let instance = self.resolve(def_id, args)?; - ( - FnVal::Instance(instance), - self.fn_abi_of_instance(instance, extra_args)?, - instance.def.requires_caller_location(*self.tcx), + fn check_fn_target_features(&self, instance: ty::Instance<'tcx>) -> InterpResult<'tcx, ()> { + // Calling functions with `#[target_feature]` is not unsafe on WASM, see #84988 + let attrs = self.tcx.codegen_fn_attrs(instance.def_id()); + if !self.tcx.sess.target.is_like_wasm + && attrs + .target_features + .iter() + .any(|feature| !self.tcx.sess.target_features.contains(feature)) + { + throw_ub_custom!( + fluent::const_eval_unavailable_target_features_for_fn, + unavailable_feats = attrs + .target_features + .iter() + .filter(|&feature| !self.tcx.sess.target_features.contains(feature)) + .fold(String::new(), |mut s, feature| { + if !s.is_empty() { + s.push_str(", "); + } + s.push_str(feature.as_str()); + s + }), + ); + } + Ok(()) + } + + /// The main entry point for creating a new stack frame: performs ABI checks and initializes + /// arguments. + #[instrument(skip(self), level = "trace")] + pub fn init_stack_frame( + &mut self, + instance: Instance<'tcx>, + body: &'tcx mir::Body<'tcx>, + caller_fn_abi: &FnAbi<'tcx, Ty<'tcx>>, + args: &[FnArg<'tcx, M::Provenance>], + with_caller_location: bool, + destination: &MPlaceTy<'tcx, M::Provenance>, + mut stack_pop: StackPopCleanup, + ) -> InterpResult<'tcx> { + // Compute callee information. + // FIXME: for variadic support, do we have to somehow determine callee's extra_args? 
+ let callee_fn_abi = self.fn_abi_of_instance(instance, ty::List::empty())?; + + if callee_fn_abi.c_variadic || caller_fn_abi.c_variadic { + throw_unsup_format!("calling a c-variadic function is not supported"); + } + + if M::enforce_abi(self) { + if caller_fn_abi.conv != callee_fn_abi.conv { + throw_ub_custom!( + fluent::const_eval_incompatible_calling_conventions, + callee_conv = format!("{:?}", callee_fn_abi.conv), + caller_conv = format!("{:?}", caller_fn_abi.conv), ) } - _ => { - span_bug!(terminator.source_info.span, "invalid callee of type {}", func.layout.ty) + } + + // Check that all target features required by the callee (i.e., from + // the attribute `#[target_feature(enable = ...)]`) are enabled at + // compile time. + self.check_fn_target_features(instance)?; + + if !callee_fn_abi.can_unwind { + // The callee cannot unwind, so force the `Unreachable` unwind handling. + match &mut stack_pop { + StackPopCleanup::Root { .. } => {} + StackPopCleanup::Goto { unwind, .. } => { + *unwind = mir::UnwindAction::Unreachable; + } + } + } + + self.push_stack_frame_raw(instance, body, destination, stack_pop)?; + + // If an error is raised here, pop the frame again to get an accurate backtrace. + // To this end, we wrap it all in a `try` block. + let res: InterpResult<'tcx> = try { + trace!( + "caller ABI: {:#?}, args: {:#?}", + caller_fn_abi, + args.iter() + .map(|arg| ( + arg.layout().ty, + match arg { + FnArg::Copy(op) => format!("copy({op:?})"), + FnArg::InPlace(mplace) => format!("in-place({mplace:?})"), + } + )) + .collect::>() + ); + trace!( + "spread_arg: {:?}, locals: {:#?}", + body.spread_arg, + body.args_iter() + .map(|local| ( + local, + self.layout_of_local(self.frame(), local, None).unwrap().ty, + )) + .collect::>() + ); + + // In principle, we have two iterators: Where the arguments come from, and where + // they go to. + + // The "where they come from" part is easy, we expect the caller to do any special handling + // that might be required here (e.g. for untupling). + // If `with_caller_location` is set we pretend there is an extra argument (that + // we will not pass). + assert_eq!( + args.len() + if with_caller_location { 1 } else { 0 }, + caller_fn_abi.args.len(), + "mismatch between caller ABI and caller arguments", + ); + let mut caller_args = args + .iter() + .zip(caller_fn_abi.args.iter()) + .filter(|arg_and_abi| !matches!(arg_and_abi.1.mode, PassMode::Ignore)); + + // Now we have to spread them out across the callee's locals, + // taking into account the `spread_arg`. If we could write + // this is a single iterator (that handles `spread_arg`), then + // `pass_argument` would be the loop body. It takes care to + // not advance `caller_iter` for ignored arguments. + let mut callee_args_abis = callee_fn_abi.args.iter(); + for local in body.args_iter() { + // Construct the destination place for this argument. At this point all + // locals are still dead, so we cannot construct a `PlaceTy`. + let dest = mir::Place::from(local); + // `layout_of_local` does more than just the instantiation we need to get the + // type, but the result gets cached so this avoids calling the instantiation + // query *again* the next time this local is accessed. + let ty = self.layout_of_local(self.frame(), local, None)?.ty; + if Some(local) == body.spread_arg { + // Make the local live once, then fill in the value field by field. 
+ self.storage_live(local)?; + // Must be a tuple + let ty::Tuple(fields) = ty.kind() else { + span_bug!(self.cur_span(), "non-tuple type for `spread_arg`: {ty}") + }; + for (i, field_ty) in fields.iter().enumerate() { + let dest = dest.project_deeper( + &[mir::ProjectionElem::Field(FieldIdx::from_usize(i), field_ty)], + *self.tcx, + ); + let callee_abi = callee_args_abis.next().unwrap(); + self.pass_argument( + &mut caller_args, + callee_abi, + &dest, + field_ty, + /* already_live */ true, + )?; + } + } else { + // Normal argument. Cannot mark it as live yet, it might be unsized! + let callee_abi = callee_args_abis.next().unwrap(); + self.pass_argument( + &mut caller_args, + callee_abi, + &dest, + ty, + /* already_live */ false, + )?; + } + } + // If the callee needs a caller location, pretend we consume one more argument from the ABI. + if instance.def.requires_caller_location(*self.tcx) { + callee_args_abis.next().unwrap(); + } + // Now we should have no more caller args or callee arg ABIs + assert!( + callee_args_abis.next().is_none(), + "mismatch between callee ABI and callee body arguments" + ); + if caller_args.next().is_some() { + throw_ub_custom!(fluent::const_eval_too_many_caller_args); + } + // Don't forget to check the return type! + if !self.check_argument_compat(&caller_fn_abi.ret, &callee_fn_abi.ret)? { + throw_ub!(AbiMismatchReturn { + caller_ty: caller_fn_abi.ret.layout.ty, + callee_ty: callee_fn_abi.ret.layout.ty + }); } - }; - Ok(EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location }) + // Protect return place for in-place return value passing. + M::protect_in_place_function_argument(self, &destination)?; + + // Don't forget to mark "initially live" locals as live. + self.storage_live_for_always_live_locals()?; + }; + match res { + Err(err) => { + // Don't show the incomplete stack frame in the error stacktrace. + self.stack_mut().pop(); + Err(err) + } + Ok(()) => Ok(()), + } } - /// Call this function -- pushing the stack frame and initializing the arguments. + /// Initiate a call to this function -- pushing the stack frame and initializing the arguments. /// /// `caller_fn_abi` is used to determine if all the arguments are passed the proper way. /// However, we also need `caller_abi` to determine if we need to do untupling of arguments. @@ -550,7 +520,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { /// `with_caller_location` indicates whether the caller passed a caller location. Miri /// implements caller locations without argument passing, but to match `FnAbi` we need to know /// when those arguments are present. - pub(crate) fn eval_fn_call( + pub(super) fn init_fn_call( &mut self, fn_val: FnVal<'tcx, M::ExtraFnVal>, (caller_abi, caller_fn_abi): (Abi, &FnAbi<'tcx, Ty<'tcx>>), @@ -558,9 +528,9 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { with_caller_location: bool, destination: &MPlaceTy<'tcx, M::Provenance>, target: Option, - mut unwind: mir::UnwindAction, + unwind: mir::UnwindAction, ) -> InterpResult<'tcx> { - trace!("eval_fn_call: {:#?}", fn_val); + trace!("init_fn_call: {:#?}", fn_val); let instance = match fn_val { FnVal::Instance(instance) => instance, @@ -591,7 +561,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { )? 
{ assert!(!self.tcx.intrinsic(fallback.def_id()).unwrap().must_be_overridden); assert!(matches!(fallback.def, ty::InstanceKind::Item(_))); - return self.eval_fn_call( + return self.init_fn_call( FnVal::Instance(fallback), (caller_abi, caller_fn_abi), args, @@ -630,189 +600,35 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { return Ok(()); }; - // Compute callee information using the `instance` returned by - // `find_mir_or_eval_fn`. - // FIXME: for variadic support, do we have to somehow determine callee's extra_args? - let callee_fn_abi = self.fn_abi_of_instance(instance, ty::List::empty())?; - - if callee_fn_abi.c_variadic || caller_fn_abi.c_variadic { - throw_unsup_format!("calling a c-variadic function is not supported"); - } - - if M::enforce_abi(self) { - if caller_fn_abi.conv != callee_fn_abi.conv { - throw_ub_custom!( - fluent::const_eval_incompatible_calling_conventions, - callee_conv = format!("{:?}", callee_fn_abi.conv), - caller_conv = format!("{:?}", caller_fn_abi.conv), + // Special handling for the closure ABI: untuple the last argument. + let args: Cow<'_, [FnArg<'tcx, M::Provenance>]> = + if caller_abi == Abi::RustCall && !args.is_empty() { + // Untuple + let (untuple_arg, args) = args.split_last().unwrap(); + trace!("init_fn_call: Will pass last argument by untupling"); + Cow::from( + args.iter() + .map(|a| Ok(a.clone())) + .chain( + (0..untuple_arg.layout().fields.count()) + .map(|i| self.fn_arg_field(untuple_arg, i)), + ) + .collect::>>()?, ) - } - } - - // Check that all target features required by the callee (i.e., from - // the attribute `#[target_feature(enable = ...)]`) are enabled at - // compile time. - self.check_fn_target_features(instance)?; - - if !callee_fn_abi.can_unwind { - // The callee cannot unwind, so force the `Unreachable` unwind handling. - unwind = mir::UnwindAction::Unreachable; - } + } else { + // Plain arg passing + Cow::from(args) + }; - self.push_stack_frame( + self.init_stack_frame( instance, body, + caller_fn_abi, + &args, + with_caller_location, destination, StackPopCleanup::Goto { ret: target, unwind }, - )?; - - // If an error is raised here, pop the frame again to get an accurate backtrace. - // To this end, we wrap it all in a `try` block. - let res: InterpResult<'tcx> = try { - trace!( - "caller ABI: {:?}, args: {:#?}", - caller_abi, - args.iter() - .map(|arg| ( - arg.layout().ty, - match arg { - FnArg::Copy(op) => format!("copy({op:?})"), - FnArg::InPlace(mplace) => format!("in-place({mplace:?})"), - } - )) - .collect::>() - ); - trace!( - "spread_arg: {:?}, locals: {:#?}", - body.spread_arg, - body.args_iter() - .map(|local| ( - local, - self.layout_of_local(self.frame(), local, None).unwrap().ty, - )) - .collect::>() - ); - - // In principle, we have two iterators: Where the arguments come from, and where - // they go to. - - // For where they come from: If the ABI is RustCall, we untuple the - // last incoming argument. These two iterators do not have the same type, - // so to keep the code paths uniform we accept an allocation - // (for RustCall ABI only). 
- let caller_args: Cow<'_, [FnArg<'tcx, M::Provenance>]> = - if caller_abi == Abi::RustCall && !args.is_empty() { - // Untuple - let (untuple_arg, args) = args.split_last().unwrap(); - trace!("eval_fn_call: Will pass last argument by untupling"); - Cow::from( - args.iter() - .map(|a| Ok(a.clone())) - .chain( - (0..untuple_arg.layout().fields.count()) - .map(|i| self.fn_arg_field(untuple_arg, i)), - ) - .collect::>>()?, - ) - } else { - // Plain arg passing - Cow::from(args) - }; - // If `with_caller_location` is set we pretend there is an extra argument (that - // we will not pass). - assert_eq!( - caller_args.len() + if with_caller_location { 1 } else { 0 }, - caller_fn_abi.args.len(), - "mismatch between caller ABI and caller arguments", - ); - let mut caller_args = caller_args - .iter() - .zip(caller_fn_abi.args.iter()) - .filter(|arg_and_abi| !matches!(arg_and_abi.1.mode, PassMode::Ignore)); - - // Now we have to spread them out across the callee's locals, - // taking into account the `spread_arg`. If we could write - // this is a single iterator (that handles `spread_arg`), then - // `pass_argument` would be the loop body. It takes care to - // not advance `caller_iter` for ignored arguments. - let mut callee_args_abis = callee_fn_abi.args.iter(); - for local in body.args_iter() { - // Construct the destination place for this argument. At this point all - // locals are still dead, so we cannot construct a `PlaceTy`. - let dest = mir::Place::from(local); - // `layout_of_local` does more than just the instantiation we need to get the - // type, but the result gets cached so this avoids calling the instantiation - // query *again* the next time this local is accessed. - let ty = self.layout_of_local(self.frame(), local, None)?.ty; - if Some(local) == body.spread_arg { - // Make the local live once, then fill in the value field by field. - self.storage_live(local)?; - // Must be a tuple - let ty::Tuple(fields) = ty.kind() else { - span_bug!(self.cur_span(), "non-tuple type for `spread_arg`: {ty}") - }; - for (i, field_ty) in fields.iter().enumerate() { - let dest = dest.project_deeper( - &[mir::ProjectionElem::Field( - FieldIdx::from_usize(i), - field_ty, - )], - *self.tcx, - ); - let callee_abi = callee_args_abis.next().unwrap(); - self.pass_argument( - &mut caller_args, - callee_abi, - &dest, - field_ty, - /* already_live */ true, - )?; - } - } else { - // Normal argument. Cannot mark it as live yet, it might be unsized! - let callee_abi = callee_args_abis.next().unwrap(); - self.pass_argument( - &mut caller_args, - callee_abi, - &dest, - ty, - /* already_live */ false, - )?; - } - } - // If the callee needs a caller location, pretend we consume one more argument from the ABI. - if instance.def.requires_caller_location(*self.tcx) { - callee_args_abis.next().unwrap(); - } - // Now we should have no more caller args or callee arg ABIs - assert!( - callee_args_abis.next().is_none(), - "mismatch between callee ABI and callee body arguments" - ); - if caller_args.next().is_some() { - throw_ub_custom!(fluent::const_eval_too_many_caller_args); - } - // Don't forget to check the return type! - if !self.check_argument_compat(&caller_fn_abi.ret, &callee_fn_abi.ret)? { - throw_ub!(AbiMismatchReturn { - caller_ty: caller_fn_abi.ret.layout.ty, - callee_ty: callee_fn_abi.ret.layout.ty - }); - } - - // Protect return place for in-place return value passing. - M::protect_in_place_function_argument(self, &destination)?; - - // Don't forget to mark "initially live" locals as live. 
- self.storage_live_for_always_live_locals()?; - }; - match res { - Err(err) => { - self.stack_mut().pop(); - Err(err) - } - Ok(()) => Ok(()), - } + ) } // `InstanceKind::Virtual` does not have callable MIR. Calls to `Virtual` instances must be // codegen'd / interpreted as virtual calls through the vtable. @@ -935,7 +751,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { caller_fn_abi.args[0].layout.ty = receiver_ty; // recurse with concrete function - self.eval_fn_call( + self.init_fn_call( FnVal::Instance(fn_inst), (caller_abi, &caller_fn_abi), &args, @@ -948,30 +764,33 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { } } - pub(crate) fn eval_fn_tail_call( + /// Initiate a tail call to this function -- popping the current stack frame, pushing the new + /// stack frame and initializing the arguments. + pub(super) fn init_fn_tail_call( &mut self, fn_val: FnVal<'tcx, M::ExtraFnVal>, (caller_abi, caller_fn_abi): (Abi, &FnAbi<'tcx, Ty<'tcx>>), args: &[FnArg<'tcx, M::Provenance>], with_caller_location: bool, ) -> InterpResult<'tcx> { - trace!("eval_fn_call: {:#?}", fn_val); + trace!("init_fn_tail_call: {:#?}", fn_val); // This is the "canonical" implementation of tails calls, // a pop of the current stack frame, followed by a normal call // which pushes a new stack frame, with the return address from // the popped stack frame. // - // Note that we are using `pop_stack_frame` and not `return_from_current_stack_frame`, + // Note that we are using `pop_stack_frame_raw` and not `return_from_current_stack_frame`, // as the latter "executes" the goto to the return block, but we don't want to, // only the tail called function should return to the current return block. M::before_stack_pop(self, self.frame())?; let StackPopInfo { return_action, return_to_block, return_place } = - self.pop_stack_frame(false)?; + self.pop_stack_frame_raw(false)?; assert_eq!(return_action, ReturnAction::Normal); + // Take the "stack pop cleanup" info, and use that to initiate the next call. let StackPopCleanup::Goto { ret, unwind } = return_to_block else { bug!("can't tailcall as root"); }; @@ -980,7 +799,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { // we should check if both caller&callee can/n't unwind, // see - self.eval_fn_call( + self.init_fn_call( fn_val, (caller_abi, caller_fn_abi), args, @@ -991,41 +810,14 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { ) } - fn check_fn_target_features(&self, instance: ty::Instance<'tcx>) -> InterpResult<'tcx, ()> { - // Calling functions with `#[target_feature]` is not unsafe on WASM, see #84988 - let attrs = self.tcx.codegen_fn_attrs(instance.def_id()); - if !self.tcx.sess.target.is_like_wasm - && attrs - .target_features - .iter() - .any(|feature| !self.tcx.sess.target_features.contains(feature)) - { - throw_ub_custom!( - fluent::const_eval_unavailable_target_features_for_fn, - unavailable_feats = attrs - .target_features - .iter() - .filter(|&feature| !self.tcx.sess.target_features.contains(feature)) - .fold(String::new(), |mut s, feature| { - if !s.is_empty() { - s.push_str(", "); - } - s.push_str(feature.as_str()); - s - }), - ); - } - Ok(()) - } - - fn drop_in_place( + pub(super) fn init_drop_in_place_call( &mut self, place: &PlaceTy<'tcx, M::Provenance>, instance: ty::Instance<'tcx>, target: mir::BasicBlock, unwind: mir::UnwindAction, ) -> InterpResult<'tcx> { - trace!("drop_in_place: {:?},\n instance={:?}", place, instance); + trace!("init_drop_in_place_call: {:?},\n instance={:?}", place, instance); // We take the address of the object. 
This may well be unaligned, which is fine // for us here. However, unaligned accesses will probably make the actual drop // implementation fail -- a problem shared by rustc. @@ -1060,7 +852,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { let arg = self.mplace_to_ref(&place)?; let ret = MPlaceTy::fake_alloc_zst(self.layout_of(self.tcx.types.unit)?); - self.eval_fn_call( + self.init_fn_call( FnVal::Instance(instance), (Abi::Rust, fn_abi), &[FnArg::Copy(arg.into())], @@ -1070,4 +862,117 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { unwind, ) } + + /// Pops the current frame from the stack, copies the return value to the caller, deallocates + /// the memory for allocated locals, and jumps to an appropriate place. + /// + /// If `unwinding` is `false`, then we are performing a normal return + /// from a function. In this case, we jump back into the frame of the caller, + /// and continue execution as normal. + /// + /// If `unwinding` is `true`, then we are in the middle of a panic, + /// and need to unwind this frame. In this case, we jump to the + /// `cleanup` block for the function, which is responsible for running + /// `Drop` impls for any locals that have been initialized at this point. + /// The cleanup block ends with a special `Resume` terminator, which will + /// cause us to continue unwinding. + #[instrument(skip(self), level = "trace")] + pub(super) fn return_from_current_stack_frame( + &mut self, + unwinding: bool, + ) -> InterpResult<'tcx> { + info!( + "popping stack frame ({})", + if unwinding { "during unwinding" } else { "returning from function" } + ); + + // Check `unwinding`. + assert_eq!( + unwinding, + match self.frame().loc { + Left(loc) => self.body().basic_blocks[loc.block].is_cleanup, + Right(_) => true, + } + ); + if unwinding && self.frame_idx() == 0 { + throw_ub_custom!(fluent::const_eval_unwind_past_top); + } + + M::before_stack_pop(self, self.frame())?; + + // Copy return value. Must of course happen *before* we deallocate the locals. + // Must be *after* `before_stack_pop` as otherwise the return place might still be protected. + let copy_ret_result = if !unwinding { + let op = self + .local_to_op(mir::RETURN_PLACE, None) + .expect("return place should always be live"); + let dest = self.frame().return_place.clone(); + let err = if self.stack().len() == 1 { + // The initializer of constants and statics will get validated separately + // after the constant has been fully evaluated. While we could fall back to the default + // code path, that will cause -Zenforce-validity to cycle on static initializers. + // Reading from a static's memory is not allowed during its evaluation, and will always + // trigger a cycle error. Validation must read from the memory of the current item. + // For Miri this means we do not validate the root frame return value, + // but Miri anyway calls `read_target_isize` on that so separate validation + // is not needed. + self.copy_op_no_dest_validation(&op, &dest) + } else { + self.copy_op_allow_transmute(&op, &dest) + }; + trace!("return value: {:?}", self.dump_place(&dest.into())); + // We delay actually short-circuiting on this error until *after* the stack frame is + // popped, since we want this error to be attributed to the caller, whose type defines + // this transmute. + err + } else { + Ok(()) + }; + + // All right, now it is time to actually pop the frame. + let stack_pop_info = self.pop_stack_frame_raw(unwinding)?; + + // Report error from return value copy, if any. 
+ copy_ret_result?; + + match stack_pop_info.return_action { + ReturnAction::Normal => {} + ReturnAction::NoJump => { + // The hook already did everything. + return Ok(()); + } + ReturnAction::NoCleanup => { + // If we are not doing cleanup, also skip everything else. + assert!(self.stack().is_empty(), "only the topmost frame should ever be leaked"); + assert!(!unwinding, "tried to skip cleanup during unwinding"); + // Skip machine hook. + return Ok(()); + } + } + + // Normal return, figure out where to jump. + if unwinding { + // Follow the unwind edge. + let unwind = match stack_pop_info.return_to_block { + StackPopCleanup::Goto { unwind, .. } => unwind, + StackPopCleanup::Root { .. } => { + panic!("encountered StackPopCleanup::Root when unwinding!") + } + }; + // This must be the very last thing that happens, since it can in fact push a new stack frame. + self.unwind_to_block(unwind) + } else { + // Follow the normal return edge. + match stack_pop_info.return_to_block { + StackPopCleanup::Goto { ret, .. } => self.return_to_block(ret), + StackPopCleanup::Root { .. } => { + assert!( + self.stack().is_empty(), + "only the topmost frame can have StackPopCleanup::Root" + ); + Ok(()) + } + } + } + } } diff --git a/compiler/rustc_const_eval/src/interpret/eval_context.rs b/compiler/rustc_const_eval/src/interpret/eval_context.rs index 18d585e4a7a7e..7a6bbdfdcb5e5 100644 --- a/compiler/rustc_const_eval/src/interpret/eval_context.rs +++ b/compiler/rustc_const_eval/src/interpret/eval_context.rs @@ -1,40 +1,29 @@ -use std::cell::Cell; -use std::{fmt, mem}; - -use either::{Either, Left, Right}; +use either::{Left, Right}; use rustc_errors::DiagCtxtHandle; use rustc_hir::def_id::DefId; -use rustc_hir::definitions::DefPathData; -use rustc_hir::{self as hir}; -use rustc_index::IndexVec; use rustc_infer::infer::at::ToTrace; use rustc_infer::infer::TyCtxtInferExt; use rustc_infer::traits::ObligationCause; -use rustc_middle::mir::interpret::{ - CtfeProvenance, ErrorHandled, InvalidMetaKind, ReportedErrorInfo, -}; +use rustc_middle::mir::interpret::{ErrorHandled, InvalidMetaKind, ReportedErrorInfo}; use rustc_middle::query::TyCtxtAt; use rustc_middle::ty::layout::{ - self, FnAbiError, FnAbiOfHelpers, FnAbiRequest, LayoutError, LayoutOf, LayoutOfHelpers, - TyAndLayout, + self, FnAbiError, FnAbiOfHelpers, FnAbiRequest, LayoutError, LayoutOfHelpers, TyAndLayout, }; use rustc_middle::ty::{self, GenericArgsRef, ParamEnv, Ty, TyCtxt, TypeFoldable, Variance}; -use rustc_middle::{bug, mir, span_bug}; -use rustc_mir_dataflow::storage::always_storage_live_locals; +use rustc_middle::{mir, span_bug}; use rustc_session::Limit; use rustc_span::Span; use rustc_target::abi::call::FnAbi; use rustc_target::abi::{Align, HasDataLayout, Size, TargetDataLayout}; use rustc_trait_selection::traits::ObligationCtxt; -use tracing::{debug, info, info_span, instrument, trace}; +use tracing::{debug, trace}; use super::{ - err_inval, throw_inval, throw_ub, throw_ub_custom, throw_unsup, GlobalId, Immediate, - InterpErrorInfo, InterpResult, MPlaceTy, Machine, MemPlace, MemPlaceMeta, Memory, MemoryKind, - OpTy, Operand, Place, PlaceTy, Pointer, PointerArithmetic, Projectable, Provenance, - ReturnAction, Scalar, + err_inval, throw_inval, throw_ub, throw_ub_custom, Frame, FrameInfo, GlobalId, InterpErrorInfo, + InterpResult, MPlaceTy, Machine, MemPlaceMeta, Memory, OpTy, Place, PlaceTy, PointerArithmetic, + Projectable, Provenance, }; -use crate::{errors, fluent_generated as fluent, util, ReportErrorExt}; +use crate::{fluent_generated as 
fluent, util, ReportErrorExt}; pub struct InterpCx<'tcx, M: Machine<'tcx>> { /// Stores the `Machine` instance. @@ -57,314 +46,6 @@ pub struct InterpCx<'tcx, M: Machine<'tcx>> { pub recursion_limit: Limit, } -// The Phantomdata exists to prevent this type from being `Send`. If it were sent across a thread -// boundary and dropped in the other thread, it would exit the span in the other thread. -struct SpanGuard(tracing::Span, std::marker::PhantomData<*const u8>); - -impl SpanGuard { - /// By default a `SpanGuard` does nothing. - fn new() -> Self { - Self(tracing::Span::none(), std::marker::PhantomData) - } - - /// If a span is entered, we exit the previous span (if any, normally none) and enter the - /// new span. This is mainly so we don't have to use `Option` for the `tracing_span` field of - /// `Frame` by creating a dummy span to being with and then entering it once the frame has - /// been pushed. - fn enter(&mut self, span: tracing::Span) { - // This executes the destructor on the previous instance of `SpanGuard`, ensuring that - // we never enter or exit more spans than vice versa. Unless you `mem::leak`, then we - // can't protect the tracing stack, but that'll just lead to weird logging, no actual - // problems. - *self = Self(span, std::marker::PhantomData); - self.0.with_subscriber(|(id, dispatch)| { - dispatch.enter(id); - }); - } -} - -impl Drop for SpanGuard { - fn drop(&mut self) { - self.0.with_subscriber(|(id, dispatch)| { - dispatch.exit(id); - }); - } -} - -/// A stack frame. -pub struct Frame<'tcx, Prov: Provenance = CtfeProvenance, Extra = ()> { - //////////////////////////////////////////////////////////////////////////////// - // Function and callsite information - //////////////////////////////////////////////////////////////////////////////// - /// The MIR for the function called on this frame. - pub body: &'tcx mir::Body<'tcx>, - - /// The def_id and args of the current function. - pub instance: ty::Instance<'tcx>, - - /// Extra data for the machine. - pub extra: Extra, - - //////////////////////////////////////////////////////////////////////////////// - // Return place and locals - //////////////////////////////////////////////////////////////////////////////// - /// Work to perform when returning from this function. - pub return_to_block: StackPopCleanup, - - /// The location where the result of the current stack frame should be written to, - /// and its layout in the caller. - pub return_place: MPlaceTy<'tcx, Prov>, - - /// The list of locals for this stack frame, stored in order as - /// `[return_ptr, arguments..., variables..., temporaries...]`. - /// The locals are stored as `Option`s. - /// `None` represents a local that is currently dead, while a live local - /// can either directly contain `Scalar` or refer to some part of an `Allocation`. - /// - /// Do *not* access this directly; always go through the machine hook! - pub locals: IndexVec>, - - /// The span of the `tracing` crate is stored here. - /// When the guard is dropped, the span is exited. This gives us - /// a full stack trace on all tracing statements. - tracing_span: SpanGuard, - - //////////////////////////////////////////////////////////////////////////////// - // Current position within the function - //////////////////////////////////////////////////////////////////////////////// - /// If this is `Right`, we are not currently executing any particular statement in - /// this frame (can happen e.g. during frame initialization, and during unwinding on - /// frames without cleanup code). 
- /// - /// Needs to be public because ConstProp does unspeakable things to it. - pub loc: Either, -} - -/// What we store about a frame in an interpreter backtrace. -#[derive(Clone, Debug)] -pub struct FrameInfo<'tcx> { - pub instance: ty::Instance<'tcx>, - pub span: Span, -} - -#[derive(Clone, Copy, Eq, PartialEq, Debug)] // Miri debug-prints these -pub enum StackPopCleanup { - /// Jump to the next block in the caller, or cause UB if None (that's a function - /// that may never return). Also store layout of return place so - /// we can validate it at that layout. - /// `ret` stores the block we jump to on a normal return, while `unwind` - /// stores the block used for cleanup during unwinding. - Goto { ret: Option, unwind: mir::UnwindAction }, - /// The root frame of the stack: nowhere else to jump to. - /// `cleanup` says whether locals are deallocated. Static computation - /// wants them leaked to intern what they need (and just throw away - /// the entire `ecx` when it is done). - Root { cleanup: bool }, -} - -/// Return type of [`InterpCx::pop_stack_frame`]. -pub struct StackPopInfo<'tcx, Prov: Provenance> { - /// Additional information about the action to be performed when returning from the popped - /// stack frame. - pub return_action: ReturnAction, - - /// [`return_to_block`](Frame::return_to_block) of the popped stack frame. - pub return_to_block: StackPopCleanup, - - /// [`return_place`](Frame::return_place) of the popped stack frame. - pub return_place: MPlaceTy<'tcx, Prov>, -} - -/// State of a local variable including a memoized layout -#[derive(Clone)] -pub struct LocalState<'tcx, Prov: Provenance = CtfeProvenance> { - value: LocalValue, - /// Don't modify if `Some`, this is only used to prevent computing the layout twice. - /// Avoids computing the layout of locals that are never actually initialized. - layout: Cell>>, -} - -impl std::fmt::Debug for LocalState<'_, Prov> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("LocalState") - .field("value", &self.value) - .field("ty", &self.layout.get().map(|l| l.ty)) - .finish() - } -} - -/// Current value of a local variable -/// -/// This does not store the type of the local; the type is given by `body.local_decls` and can never -/// change, so by not storing here we avoid having to maintain that as an invariant. -#[derive(Copy, Clone, Debug)] // Miri debug-prints these -pub(super) enum LocalValue { - /// This local is not currently alive, and cannot be used at all. - Dead, - /// A normal, live local. - /// Mostly for convenience, we re-use the `Operand` type here. - /// This is an optimization over just always having a pointer here; - /// we can thus avoid doing an allocation when the local just stores - /// immediate values *and* never has its address taken. - Live(Operand), -} - -impl<'tcx, Prov: Provenance> LocalState<'tcx, Prov> { - pub fn make_live_uninit(&mut self) { - self.value = LocalValue::Live(Operand::Immediate(Immediate::Uninit)); - } - - /// This is a hack because Miri needs a way to visit all the provenance in a `LocalState` - /// without having a layout or `TyCtxt` available, and we want to keep the `Operand` type - /// private. 
- pub fn as_mplace_or_imm( - &self, - ) -> Option>, MemPlaceMeta), Immediate>> { - match self.value { - LocalValue::Dead => None, - LocalValue::Live(Operand::Indirect(mplace)) => Some(Left((mplace.ptr, mplace.meta))), - LocalValue::Live(Operand::Immediate(imm)) => Some(Right(imm)), - } - } - - /// Read the local's value or error if the local is not yet live or not live anymore. - #[inline(always)] - pub(super) fn access(&self) -> InterpResult<'tcx, &Operand> { - match &self.value { - LocalValue::Dead => throw_ub!(DeadLocal), // could even be "invalid program"? - LocalValue::Live(val) => Ok(val), - } - } - - /// Overwrite the local. If the local can be overwritten in place, return a reference - /// to do so; otherwise return the `MemPlace` to consult instead. - #[inline(always)] - pub(super) fn access_mut(&mut self) -> InterpResult<'tcx, &mut Operand> { - match &mut self.value { - LocalValue::Dead => throw_ub!(DeadLocal), // could even be "invalid program"? - LocalValue::Live(val) => Ok(val), - } - } -} - -impl<'tcx, Prov: Provenance> Frame<'tcx, Prov> { - pub fn with_extra(self, extra: Extra) -> Frame<'tcx, Prov, Extra> { - Frame { - body: self.body, - instance: self.instance, - return_to_block: self.return_to_block, - return_place: self.return_place, - locals: self.locals, - loc: self.loc, - extra, - tracing_span: self.tracing_span, - } - } -} - -impl<'tcx, Prov: Provenance, Extra> Frame<'tcx, Prov, Extra> { - /// Get the current location within the Frame. - /// - /// If this is `Right`, we are not currently executing any particular statement in - /// this frame (can happen e.g. during frame initialization, and during unwinding on - /// frames without cleanup code). - /// - /// Used by priroda. - pub fn current_loc(&self) -> Either { - self.loc - } - - /// Return the `SourceInfo` of the current instruction. - pub fn current_source_info(&self) -> Option<&mir::SourceInfo> { - self.loc.left().map(|loc| self.body.source_info(loc)) - } - - pub fn current_span(&self) -> Span { - match self.loc { - Left(loc) => self.body.source_info(loc).span, - Right(span) => span, - } - } - - pub fn lint_root(&self, tcx: TyCtxt<'tcx>) -> Option { - // We first try to get a HirId via the current source scope, - // and fall back to `body.source`. - self.current_source_info() - .and_then(|source_info| match &self.body.source_scopes[source_info.scope].local_data { - mir::ClearCrossCrate::Set(data) => Some(data.lint_root), - mir::ClearCrossCrate::Clear => None, - }) - .or_else(|| { - let def_id = self.body.source.def_id().as_local(); - def_id.map(|def_id| tcx.local_def_id_to_hir_id(def_id)) - }) - } - - /// Returns the address of the buffer where the locals are stored. This is used by `Place` as a - /// sanity check to detect bugs where we mix up which stack frame a place refers to. - #[inline(always)] - pub(super) fn locals_addr(&self) -> usize { - self.locals.raw.as_ptr().addr() - } - - #[must_use] - pub fn generate_stacktrace_from_stack(stack: &[Self]) -> Vec> { - let mut frames = Vec::new(); - // This deliberately does *not* honor `requires_caller_location` since it is used for much - // more than just panics. - for frame in stack.iter().rev() { - let span = match frame.loc { - Left(loc) => { - // If the stacktrace passes through MIR-inlined source scopes, add them. 
- let mir::SourceInfo { mut span, scope } = *frame.body.source_info(loc); - let mut scope_data = &frame.body.source_scopes[scope]; - while let Some((instance, call_span)) = scope_data.inlined { - frames.push(FrameInfo { span, instance }); - span = call_span; - scope_data = &frame.body.source_scopes[scope_data.parent_scope.unwrap()]; - } - span - } - Right(span) => span, - }; - frames.push(FrameInfo { span, instance: frame.instance }); - } - trace!("generate stacktrace: {:#?}", frames); - frames - } -} - -// FIXME: only used by miri, should be removed once translatable. -impl<'tcx> fmt::Display for FrameInfo<'tcx> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - ty::tls::with(|tcx| { - if tcx.def_key(self.instance.def_id()).disambiguated_data.data == DefPathData::Closure { - write!(f, "inside closure") - } else { - // Note: this triggers a `must_produce_diag` state, which means that if we ever - // get here we must emit a diagnostic. We should never display a `FrameInfo` unless - // we actually want to emit a warning or error to the user. - write!(f, "inside `{}`", self.instance) - } - }) - } -} - -impl<'tcx> FrameInfo<'tcx> { - pub fn as_note(&self, tcx: TyCtxt<'tcx>) -> errors::FrameNote { - let span = self.span; - if tcx.def_key(self.instance.def_id()).disambiguated_data.data == DefPathData::Closure { - errors::FrameNote { where_: "closure", span, instance: String::new(), times: 0 } - } else { - let instance = format!("{}", self.instance); - // Note: this triggers a `must_produce_diag` state, which means that if we ever get - // here we must emit a diagnostic. We should never display a `FrameInfo` unless we - // actually want to emit a warning or error to the user. - errors::FrameNote { where_: "instance", span, instance, times: 0 } - } - } -} - impl<'tcx, M: Machine<'tcx>> HasDataLayout for InterpCx<'tcx, M> { #[inline] fn data_layout(&self) -> &TargetDataLayout { @@ -703,30 +384,6 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { span_bug!(self.cur_span(), "no non-`#[track_caller]` frame found") } - #[inline(always)] - pub(super) fn layout_of_local( - &self, - frame: &Frame<'tcx, M::Provenance, M::FrameExtra>, - local: mir::Local, - layout: Option>, - ) -> InterpResult<'tcx, TyAndLayout<'tcx>> { - let state = &frame.locals[local]; - if let Some(layout) = state.layout.get() { - return Ok(layout); - } - - let layout = from_known_layout(self.tcx, self.param_env, layout, || { - let local_ty = frame.body.local_decls[local].ty; - let local_ty = - self.instantiate_from_frame_and_normalize_erasing_regions(frame, local_ty)?; - self.layout_of(local_ty) - })?; - - // Layouts of locals are requested a lot, so we cache them. - state.layout.set(Some(layout)); - Ok(layout) - } - /// Returns the actual dynamic size and alignment of the place at the given type. /// Only the "meta" (metadata) part of the place matters. /// This can fail to provide an answer for extern types. 
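The hunk above removes `layout_of_local` from `eval_context.rs`; it reappears unchanged in the new `stack.rs` further down. Its caching strategy (a `Cell<Option<..>>` per local, filled on first use, since local layouts are requested a lot) can be illustrated by the following standalone sketch; the `Layout` and `LocalSlot` names are hypothetical stand-ins rather than the interpreter's real types.

use std::cell::Cell;

// Hypothetical stand-in for `TyAndLayout`; only here to keep the sketch self-contained.
#[derive(Copy, Clone, Debug, PartialEq)]
struct Layout {
    size: usize,
}

// One slot per local, mirroring `LocalState::layout: Cell<Option<TyAndLayout>>`.
struct LocalSlot {
    layout: Cell<Option<Layout>>,
}

impl LocalSlot {
    // `Cell` lets the cache be filled through a shared reference, which is why the
    // interpreter can memoize layouts while only holding `&Frame`.
    fn layout_of(&self, compute: impl FnOnce() -> Layout) -> Layout {
        if let Some(cached) = self.layout.get() {
            return cached; // fast path: layout was computed before
        }
        let layout = compute(); // slow path: run the expensive computation once
        self.layout.set(Some(layout));
        layout
    }
}

fn main() {
    let slot = LocalSlot { layout: Cell::new(None) };
    let first = slot.layout_of(|| Layout { size: 8 });
    // The second access must not recompute; the closure would panic if it ran.
    let second = slot.layout_of(|| unreachable!("layout is already cached"));
    assert_eq!(first, second);
}

In the patch itself this lives on `LocalState::layout` and is indexed per `mir::Local` via `frame.locals[local]`; the sketch only shows the single-slot case.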
@@ -825,132 +482,6 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { self.size_and_align_of(&mplace.meta(), &mplace.layout) } - #[instrument(skip(self, body, return_place, return_to_block), level = "debug")] - pub fn push_stack_frame( - &mut self, - instance: ty::Instance<'tcx>, - body: &'tcx mir::Body<'tcx>, - return_place: &MPlaceTy<'tcx, M::Provenance>, - return_to_block: StackPopCleanup, - ) -> InterpResult<'tcx> { - trace!("body: {:#?}", body); - - // First push a stack frame so we have access to the local args - self.push_new_stack_frame(instance, body, return_to_block, return_place.clone())?; - - self.after_stack_frame_push(instance, body)?; - - Ok(()) - } - - /// Creates a new stack frame, initializes it and pushes it onto the stack. - /// A private helper for [`push_stack_frame`](InterpCx::push_stack_frame). - fn push_new_stack_frame( - &mut self, - instance: ty::Instance<'tcx>, - body: &'tcx mir::Body<'tcx>, - return_to_block: StackPopCleanup, - return_place: MPlaceTy<'tcx, M::Provenance>, - ) -> InterpResult<'tcx> { - let dead_local = LocalState { value: LocalValue::Dead, layout: Cell::new(None) }; - let locals = IndexVec::from_elem(dead_local, &body.local_decls); - let pre_frame = Frame { - body, - loc: Right(body.span), // Span used for errors caused during preamble. - return_to_block, - return_place, - locals, - instance, - tracing_span: SpanGuard::new(), - extra: (), - }; - let frame = M::init_frame(self, pre_frame)?; - self.stack_mut().push(frame); - - Ok(()) - } - - /// A private helper for [`push_stack_frame`](InterpCx::push_stack_frame). - fn after_stack_frame_push( - &mut self, - instance: ty::Instance<'tcx>, - body: &'tcx mir::Body<'tcx>, - ) -> InterpResult<'tcx> { - // Make sure all the constants required by this frame evaluate successfully (post-monomorphization check). - for &const_ in body.required_consts() { - let c = - self.instantiate_from_current_frame_and_normalize_erasing_regions(const_.const_)?; - c.eval(*self.tcx, self.param_env, const_.span).map_err(|err| { - err.emit_note(*self.tcx); - err - })?; - } - - // done - M::after_stack_push(self)?; - self.frame_mut().loc = Left(mir::Location::START); - - let span = info_span!("frame", "{}", instance); - self.frame_mut().tracing_span.enter(span); - - Ok(()) - } - - /// Pops a stack frame from the stack and returns some information about it. - /// - /// This also deallocates locals, if necessary. - /// - /// [`M::before_stack_pop`] should be called before calling this function. - /// [`M::after_stack_pop`] is called by this function automatically. - /// - /// [`M::before_stack_pop`]: Machine::before_stack_pop - /// [`M::after_stack_pop`]: Machine::after_stack_pop - pub fn pop_stack_frame( - &mut self, - unwinding: bool, - ) -> InterpResult<'tcx, StackPopInfo<'tcx, M::Provenance>> { - let cleanup = self.cleanup_current_frame_locals()?; - - let frame = - self.stack_mut().pop().expect("tried to pop a stack frame, but there were none"); - - let return_to_block = frame.return_to_block; - let return_place = frame.return_place.clone(); - - let return_action; - if cleanup { - return_action = M::after_stack_pop(self, frame, unwinding)?; - assert_ne!(return_action, ReturnAction::NoCleanup); - } else { - return_action = ReturnAction::NoCleanup; - }; - - Ok(StackPopInfo { return_action, return_to_block, return_place }) - } - - /// A private helper for [`pop_stack_frame`](InterpCx::pop_stack_frame). - /// Returns `true` if cleanup has been done, `false` otherwise. 
- fn cleanup_current_frame_locals(&mut self) -> InterpResult<'tcx, bool> { - // Cleanup: deallocate locals. - // Usually we want to clean up (deallocate locals), but in a few rare cases we don't. - // We do this while the frame is still on the stack, so errors point to the callee. - let return_to_block = self.frame().return_to_block; - let cleanup = match return_to_block { - StackPopCleanup::Goto { .. } => true, - StackPopCleanup::Root { cleanup, .. } => cleanup, - }; - - if cleanup { - // We need to take the locals out, since we need to mutate while iterating. - let locals = mem::take(&mut self.frame_mut().locals); - for local in &locals { - self.deallocate_local(local.value)?; - } - } - - Ok(cleanup) - } - /// Jump to the given block. #[inline] pub fn go_to_block(&mut self, target: mir::BasicBlock) { @@ -997,248 +528,6 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { Ok(()) } - /// Pops the current frame from the stack, deallocating the - /// memory for allocated locals, and jumps to an appropriate place. - /// - /// If `unwinding` is `false`, then we are performing a normal return - /// from a function. In this case, we jump back into the frame of the caller, - /// and continue execution as normal. - /// - /// If `unwinding` is `true`, then we are in the middle of a panic, - /// and need to unwind this frame. In this case, we jump to the - /// `cleanup` block for the function, which is responsible for running - /// `Drop` impls for any locals that have been initialized at this point. - /// The cleanup block ends with a special `Resume` terminator, which will - /// cause us to continue unwinding. - #[instrument(skip(self), level = "debug")] - pub(super) fn return_from_current_stack_frame( - &mut self, - unwinding: bool, - ) -> InterpResult<'tcx> { - info!( - "popping stack frame ({})", - if unwinding { "during unwinding" } else { "returning from function" } - ); - - // Check `unwinding`. - assert_eq!( - unwinding, - match self.frame().loc { - Left(loc) => self.body().basic_blocks[loc.block].is_cleanup, - Right(_) => true, - } - ); - if unwinding && self.frame_idx() == 0 { - throw_ub_custom!(fluent::const_eval_unwind_past_top); - } - - M::before_stack_pop(self, self.frame())?; - - // Copy return value. Must of course happen *before* we deallocate the locals. - let copy_ret_result = if !unwinding { - let op = self - .local_to_op(mir::RETURN_PLACE, None) - .expect("return place should always be live"); - let dest = self.frame().return_place.clone(); - let err = if self.stack().len() == 1 { - // The initializer of constants and statics will get validated separately - // after the constant has been fully evaluated. While we could fall back to the default - // code path, that will cause -Zenforce-validity to cycle on static initializers. - // Reading from a static's memory is not allowed during its evaluation, and will always - // trigger a cycle error. Validation must read from the memory of the current item. - // For Miri this means we do not validate the root frame return value, - // but Miri anyway calls `read_target_isize` on that so separate validation - // is not needed. - self.copy_op_no_dest_validation(&op, &dest) - } else { - self.copy_op_allow_transmute(&op, &dest) - }; - trace!("return value: {:?}", self.dump_place(&dest.into())); - // We delay actually short-circuiting on this error until *after* the stack frame is - // popped, since we want this error to be attributed to the caller, whose type defines - // this transmute. 
- err - } else { - Ok(()) - }; - - // All right, now it is time to actually pop the frame. - let stack_pop_info = self.pop_stack_frame(unwinding)?; - - // Report error from return value copy, if any. - copy_ret_result?; - - match stack_pop_info.return_action { - ReturnAction::Normal => {} - ReturnAction::NoJump => { - // The hook already did everything. - return Ok(()); - } - ReturnAction::NoCleanup => { - // If we are not doing cleanup, also skip everything else. - assert!(self.stack().is_empty(), "only the topmost frame should ever be leaked"); - assert!(!unwinding, "tried to skip cleanup during unwinding"); - // Skip machine hook. - return Ok(()); - } - } - - // Normal return, figure out where to jump. - if unwinding { - // Follow the unwind edge. - let unwind = match stack_pop_info.return_to_block { - StackPopCleanup::Goto { unwind, .. } => unwind, - StackPopCleanup::Root { .. } => { - panic!("encountered StackPopCleanup::Root when unwinding!") - } - }; - // This must be the very last thing that happens, since it can in fact push a new stack frame. - self.unwind_to_block(unwind) - } else { - // Follow the normal return edge. - match stack_pop_info.return_to_block { - StackPopCleanup::Goto { ret, .. } => self.return_to_block(ret), - StackPopCleanup::Root { .. } => { - assert!( - self.stack().is_empty(), - "only the topmost frame can have StackPopCleanup::Root" - ); - Ok(()) - } - } - } - } - - /// In the current stack frame, mark all locals as live that are not arguments and don't have - /// `Storage*` annotations (this includes the return place). - pub fn storage_live_for_always_live_locals(&mut self) -> InterpResult<'tcx> { - self.storage_live(mir::RETURN_PLACE)?; - - let body = self.body(); - let always_live = always_storage_live_locals(body); - for local in body.vars_and_temps_iter() { - if always_live.contains(local) { - self.storage_live(local)?; - } - } - Ok(()) - } - - pub fn storage_live_dyn( - &mut self, - local: mir::Local, - meta: MemPlaceMeta, - ) -> InterpResult<'tcx> { - trace!("{:?} is now live", local); - - // We avoid `ty.is_trivially_sized` since that does something expensive for ADTs. - fn is_very_trivially_sized(ty: Ty<'_>) -> bool { - match ty.kind() { - ty::Infer(ty::IntVar(_) | ty::FloatVar(_)) - | ty::Uint(_) - | ty::Int(_) - | ty::Bool - | ty::Float(_) - | ty::FnDef(..) - | ty::FnPtr(_) - | ty::RawPtr(..) - | ty::Char - | ty::Ref(..) - | ty::Coroutine(..) - | ty::CoroutineWitness(..) - | ty::Array(..) - | ty::Closure(..) - | ty::CoroutineClosure(..) - | ty::Never - | ty::Error(_) - | ty::Dynamic(_, _, ty::DynStar) => true, - - ty::Str | ty::Slice(_) | ty::Dynamic(_, _, ty::Dyn) | ty::Foreign(..) => false, - - ty::Tuple(tys) => tys.last().is_none_or(|ty| is_very_trivially_sized(*ty)), - - ty::Pat(ty, ..) => is_very_trivially_sized(*ty), - - // We don't want to do any queries, so there is not much we can do with ADTs. - ty::Adt(..) => false, - - ty::Alias(..) | ty::Param(_) | ty::Placeholder(..) => false, - - ty::Infer(ty::TyVar(_)) => false, - - ty::Bound(..) - | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => { - bug!("`is_very_trivially_sized` applied to unexpected type: {}", ty) - } - } - } - - // This is a hot function, we avoid computing the layout when possible. - // `unsized_` will be `None` for sized types and `Some(layout)` for unsized types. - let unsized_ = if is_very_trivially_sized(self.body().local_decls[local].ty) { - None - } else { - // We need the layout. 
- let layout = self.layout_of_local(self.frame(), local, None)?; - if layout.is_sized() { None } else { Some(layout) } - }; - - let local_val = LocalValue::Live(if let Some(layout) = unsized_ { - if !meta.has_meta() { - throw_unsup!(UnsizedLocal); - } - // Need to allocate some memory, since `Immediate::Uninit` cannot be unsized. - let dest_place = self.allocate_dyn(layout, MemoryKind::Stack, meta)?; - Operand::Indirect(*dest_place.mplace()) - } else { - assert!(!meta.has_meta()); // we're dropping the metadata - // Just make this an efficient immediate. - // Note that not calling `layout_of` here does have one real consequence: - // if the type is too big, we'll only notice this when the local is actually initialized, - // which is a bit too late -- we should ideally notice this already here, when the memory - // is conceptually allocated. But given how rare that error is and that this is a hot function, - // we accept this downside for now. - Operand::Immediate(Immediate::Uninit) - }); - - // If the local is already live, deallocate its old memory. - let old = mem::replace(&mut self.frame_mut().locals[local].value, local_val); - self.deallocate_local(old)?; - Ok(()) - } - - /// Mark a storage as live, killing the previous content. - #[inline(always)] - pub fn storage_live(&mut self, local: mir::Local) -> InterpResult<'tcx> { - self.storage_live_dyn(local, MemPlaceMeta::None) - } - - pub fn storage_dead(&mut self, local: mir::Local) -> InterpResult<'tcx> { - assert!(local != mir::RETURN_PLACE, "Cannot make return place dead"); - trace!("{:?} is now dead", local); - - // If the local is already dead, this is a NOP. - let old = mem::replace(&mut self.frame_mut().locals[local].value, LocalValue::Dead); - self.deallocate_local(old)?; - Ok(()) - } - - #[instrument(skip(self), level = "debug")] - fn deallocate_local(&mut self, local: LocalValue) -> InterpResult<'tcx> { - if let LocalValue::Live(Operand::Indirect(MemPlace { ptr, .. })) = local { - // All locals have a backing allocation, even if the allocation is empty - // due to the local having ZST type. Hence we can `unwrap`. - trace!( - "deallocating local {:?}: {:?}", - local, - // Locals always have a `alloc_id` (they are never the result of a int2ptr). - self.dump_alloc(ptr.provenance.unwrap().get_alloc_id().unwrap()) - ); - self.deallocate_ptr(ptr, None, MemoryKind::Stack)?; - }; - Ok(()) - } - /// Call a query that can return `ErrorHandled`. Should be used for statics and other globals. /// (`mir::Const`/`ty::Const` have `eval` methods that can be used directly instead.) pub fn ctfe_query( @@ -1328,39 +617,7 @@ impl<'a, 'tcx, M: Machine<'tcx>> std::fmt::Debug for PlacePrinter<'a, 'tcx, M> { } write!(fmt, ":")?; - match self.ecx.frame().locals[local].value { - LocalValue::Dead => write!(fmt, " is dead")?, - LocalValue::Live(Operand::Immediate(Immediate::Uninit)) => { - write!(fmt, " is uninitialized")? 
- } - LocalValue::Live(Operand::Indirect(mplace)) => { - write!( - fmt, - " by {} ref {:?}:", - match mplace.meta { - MemPlaceMeta::Meta(meta) => format!(" meta({meta:?})"), - MemPlaceMeta::None => String::new(), - }, - mplace.ptr, - )?; - allocs.extend(mplace.ptr.provenance.map(Provenance::get_alloc_id)); - } - LocalValue::Live(Operand::Immediate(Immediate::Scalar(val))) => { - write!(fmt, " {val:?}")?; - if let Scalar::Ptr(ptr, _size) = val { - allocs.push(ptr.provenance.get_alloc_id()); - } - } - LocalValue::Live(Operand::Immediate(Immediate::ScalarPair(val1, val2))) => { - write!(fmt, " ({val1:?}, {val2:?})")?; - if let Scalar::Ptr(ptr, _size) = val1 { - allocs.push(ptr.provenance.get_alloc_id()); - } - if let Scalar::Ptr(ptr, _size) = val2 { - allocs.push(ptr.provenance.get_alloc_id()); - } - } - } + self.ecx.frame().locals[local].print(&mut allocs, fmt)?; write!(fmt, ": {:?}", self.ecx.dump_allocs(allocs.into_iter().flatten().collect())) } diff --git a/compiler/rustc_const_eval/src/interpret/machine.rs b/compiler/rustc_const_eval/src/interpret/machine.rs index bdce8253b2e76..7af4e0c285b0b 100644 --- a/compiler/rustc_const_eval/src/interpret/machine.rs +++ b/compiler/rustc_const_eval/src/interpret/machine.rs @@ -37,7 +37,7 @@ pub enum ReturnAction { /// took care of everything. NoJump, - /// Returned by [`InterpCx::pop_stack_frame`] when no cleanup should be done. + /// Returned by [`InterpCx::pop_stack_frame_raw`] when no cleanup should be done. NoCleanup, } diff --git a/compiler/rustc_const_eval/src/interpret/mod.rs b/compiler/rustc_const_eval/src/interpret/mod.rs index afa2303e38715..511756e3f86c9 100644 --- a/compiler/rustc_const_eval/src/interpret/mod.rs +++ b/compiler/rustc_const_eval/src/interpret/mod.rs @@ -1,5 +1,6 @@ //! An interpreter for MIR used in CTFE and by miri +mod call; mod cast; mod discriminant; mod eval_context; @@ -11,8 +12,8 @@ mod operand; mod operator; mod place; mod projection; +mod stack; mod step; -mod terminator; mod traits; mod util; mod validity; @@ -22,7 +23,8 @@ use eval_context::{from_known_layout, mir_assign_valid_types}; #[doc(no_inline)] pub use rustc_middle::mir::interpret::*; // have all the `interpret` symbols in one place: here -pub use self::eval_context::{format_interp_error, Frame, FrameInfo, InterpCx, StackPopCleanup}; +pub use self::call::FnArg; +pub use self::eval_context::{format_interp_error, InterpCx}; pub use self::intern::{ intern_const_alloc_for_constprop, intern_const_alloc_recursive, HasStaticRootDefId, InternKind, InternResult, @@ -35,7 +37,7 @@ pub use self::operand::{ImmTy, Immediate, OpTy, Readable}; pub use self::place::{MPlaceTy, MemPlaceMeta, PlaceTy, Writeable}; use self::place::{MemPlace, Place}; pub use self::projection::{OffsetMode, Projectable}; -pub use self::terminator::FnArg; +pub use self::stack::{Frame, FrameInfo, LocalState, StackPopCleanup, StackPopInfo}; pub(crate) use self::util::create_static_alloc; pub use self::validity::{CtfeValidationMode, RefTracking}; pub use self::visitor::ValueVisitor; diff --git a/compiler/rustc_const_eval/src/interpret/operand.rs b/compiler/rustc_const_eval/src/interpret/operand.rs index d4559e1e8c64e..4ced009ab39b6 100644 --- a/compiler/rustc_const_eval/src/interpret/operand.rs +++ b/compiler/rustc_const_eval/src/interpret/operand.rs @@ -184,6 +184,7 @@ impl<'tcx, Prov: Provenance> ImmTy<'tcx, Prov> { #[inline] pub fn from_scalar(val: Scalar, layout: TyAndLayout<'tcx>) -> Self { debug_assert!(layout.abi.is_scalar(), "`ImmTy::from_scalar` on non-scalar layout"); + 
debug_assert_eq!(val.size(), layout.size); ImmTy { imm: val.into(), layout } } diff --git a/compiler/rustc_const_eval/src/interpret/stack.rs b/compiler/rustc_const_eval/src/interpret/stack.rs new file mode 100644 index 0000000000000..b5f55434d5afa --- /dev/null +++ b/compiler/rustc_const_eval/src/interpret/stack.rs @@ -0,0 +1,638 @@ +//! Manages the low-level pushing and popping of stack frames and the (de)allocation of local variables. +//! For hadling of argument passing and return values, see the `call` module. +use std::cell::Cell; +use std::{fmt, mem}; + +use either::{Either, Left, Right}; +use rustc_hir as hir; +use rustc_hir::definitions::DefPathData; +use rustc_index::IndexVec; +use rustc_middle::ty::layout::{LayoutOf, TyAndLayout}; +use rustc_middle::ty::{self, Ty, TyCtxt}; +use rustc_middle::{bug, mir}; +use rustc_mir_dataflow::storage::always_storage_live_locals; +use rustc_span::Span; +use tracing::{info_span, instrument, trace}; + +use super::{ + from_known_layout, throw_ub, throw_unsup, AllocId, CtfeProvenance, Immediate, InterpCx, + InterpResult, MPlaceTy, Machine, MemPlace, MemPlaceMeta, MemoryKind, Operand, Pointer, + Provenance, ReturnAction, Scalar, +}; +use crate::errors; + +// The Phantomdata exists to prevent this type from being `Send`. If it were sent across a thread +// boundary and dropped in the other thread, it would exit the span in the other thread. +struct SpanGuard(tracing::Span, std::marker::PhantomData<*const u8>); + +impl SpanGuard { + /// By default a `SpanGuard` does nothing. + fn new() -> Self { + Self(tracing::Span::none(), std::marker::PhantomData) + } + + /// If a span is entered, we exit the previous span (if any, normally none) and enter the + /// new span. This is mainly so we don't have to use `Option` for the `tracing_span` field of + /// `Frame` by creating a dummy span to being with and then entering it once the frame has + /// been pushed. + fn enter(&mut self, span: tracing::Span) { + // This executes the destructor on the previous instance of `SpanGuard`, ensuring that + // we never enter or exit more spans than vice versa. Unless you `mem::leak`, then we + // can't protect the tracing stack, but that'll just lead to weird logging, no actual + // problems. + *self = Self(span, std::marker::PhantomData); + self.0.with_subscriber(|(id, dispatch)| { + dispatch.enter(id); + }); + } +} + +impl Drop for SpanGuard { + fn drop(&mut self) { + self.0.with_subscriber(|(id, dispatch)| { + dispatch.exit(id); + }); + } +} + +/// A stack frame. +pub struct Frame<'tcx, Prov: Provenance = CtfeProvenance, Extra = ()> { + //////////////////////////////////////////////////////////////////////////////// + // Function and callsite information + //////////////////////////////////////////////////////////////////////////////// + /// The MIR for the function called on this frame. + pub body: &'tcx mir::Body<'tcx>, + + /// The def_id and args of the current function. + pub instance: ty::Instance<'tcx>, + + /// Extra data for the machine. + pub extra: Extra, + + //////////////////////////////////////////////////////////////////////////////// + // Return place and locals + //////////////////////////////////////////////////////////////////////////////// + /// Work to perform when returning from this function. + pub return_to_block: StackPopCleanup, + + /// The location where the result of the current stack frame should be written to, + /// and its layout in the caller. 
+ pub return_place: MPlaceTy<'tcx, Prov>, + + /// The list of locals for this stack frame, stored in order as + /// `[return_ptr, arguments..., variables..., temporaries...]`. + /// The locals are stored as `Option`s. + /// `None` represents a local that is currently dead, while a live local + /// can either directly contain `Scalar` or refer to some part of an `Allocation`. + /// + /// Do *not* access this directly; always go through the machine hook! + pub locals: IndexVec>, + + /// The span of the `tracing` crate is stored here. + /// When the guard is dropped, the span is exited. This gives us + /// a full stack trace on all tracing statements. + tracing_span: SpanGuard, + + //////////////////////////////////////////////////////////////////////////////// + // Current position within the function + //////////////////////////////////////////////////////////////////////////////// + /// If this is `Right`, we are not currently executing any particular statement in + /// this frame (can happen e.g. during frame initialization, and during unwinding on + /// frames without cleanup code). + /// + /// Needs to be public because ConstProp does unspeakable things to it. + pub loc: Either, +} + +#[derive(Clone, Copy, Eq, PartialEq, Debug)] // Miri debug-prints these +pub enum StackPopCleanup { + /// Jump to the next block in the caller, or cause UB if None (that's a function + /// that may never return). Also store layout of return place so + /// we can validate it at that layout. + /// `ret` stores the block we jump to on a normal return, while `unwind` + /// stores the block used for cleanup during unwinding. + Goto { ret: Option, unwind: mir::UnwindAction }, + /// The root frame of the stack: nowhere else to jump to. + /// `cleanup` says whether locals are deallocated. Static computation + /// wants them leaked to intern what they need (and just throw away + /// the entire `ecx` when it is done). + Root { cleanup: bool }, +} + +/// Return type of [`InterpCx::pop_stack_frame_raw`]. +pub struct StackPopInfo<'tcx, Prov: Provenance> { + /// Additional information about the action to be performed when returning from the popped + /// stack frame. + pub return_action: ReturnAction, + + /// [`return_to_block`](Frame::return_to_block) of the popped stack frame. + pub return_to_block: StackPopCleanup, + + /// [`return_place`](Frame::return_place) of the popped stack frame. + pub return_place: MPlaceTy<'tcx, Prov>, +} + +/// State of a local variable including a memoized layout +#[derive(Clone)] +pub struct LocalState<'tcx, Prov: Provenance = CtfeProvenance> { + value: LocalValue, + /// Don't modify if `Some`, this is only used to prevent computing the layout twice. + /// Avoids computing the layout of locals that are never actually initialized. + layout: Cell>>, +} + +impl std::fmt::Debug for LocalState<'_, Prov> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("LocalState") + .field("value", &self.value) + .field("ty", &self.layout.get().map(|l| l.ty)) + .finish() + } +} + +/// Current value of a local variable +/// +/// This does not store the type of the local; the type is given by `body.local_decls` and can never +/// change, so by not storing here we avoid having to maintain that as an invariant. +#[derive(Copy, Clone, Debug)] // Miri debug-prints these +pub(super) enum LocalValue { + /// This local is not currently alive, and cannot be used at all. + Dead, + /// A normal, live local. + /// Mostly for convenience, we re-use the `Operand` type here. 
+ /// This is an optimization over just always having a pointer here; + /// we can thus avoid doing an allocation when the local just stores + /// immediate values *and* never has its address taken. + Live(Operand), +} + +impl<'tcx, Prov: Provenance> LocalState<'tcx, Prov> { + pub fn make_live_uninit(&mut self) { + self.value = LocalValue::Live(Operand::Immediate(Immediate::Uninit)); + } + + /// This is a hack because Miri needs a way to visit all the provenance in a `LocalState` + /// without having a layout or `TyCtxt` available, and we want to keep the `Operand` type + /// private. + pub fn as_mplace_or_imm( + &self, + ) -> Option>, MemPlaceMeta), Immediate>> { + match self.value { + LocalValue::Dead => None, + LocalValue::Live(Operand::Indirect(mplace)) => Some(Left((mplace.ptr, mplace.meta))), + LocalValue::Live(Operand::Immediate(imm)) => Some(Right(imm)), + } + } + + /// Read the local's value or error if the local is not yet live or not live anymore. + #[inline(always)] + pub(super) fn access(&self) -> InterpResult<'tcx, &Operand> { + match &self.value { + LocalValue::Dead => throw_ub!(DeadLocal), // could even be "invalid program"? + LocalValue::Live(val) => Ok(val), + } + } + + /// Overwrite the local. If the local can be overwritten in place, return a reference + /// to do so; otherwise return the `MemPlace` to consult instead. + #[inline(always)] + pub(super) fn access_mut(&mut self) -> InterpResult<'tcx, &mut Operand> { + match &mut self.value { + LocalValue::Dead => throw_ub!(DeadLocal), // could even be "invalid program"? + LocalValue::Live(val) => Ok(val), + } + } +} + +/// What we store about a frame in an interpreter backtrace. +#[derive(Clone, Debug)] +pub struct FrameInfo<'tcx> { + pub instance: ty::Instance<'tcx>, + pub span: Span, +} + +// FIXME: only used by miri, should be removed once translatable. +impl<'tcx> fmt::Display for FrameInfo<'tcx> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + ty::tls::with(|tcx| { + if tcx.def_key(self.instance.def_id()).disambiguated_data.data == DefPathData::Closure { + write!(f, "inside closure") + } else { + // Note: this triggers a `must_produce_diag` state, which means that if we ever + // get here we must emit a diagnostic. We should never display a `FrameInfo` unless + // we actually want to emit a warning or error to the user. + write!(f, "inside `{}`", self.instance) + } + }) + } +} + +impl<'tcx> FrameInfo<'tcx> { + pub fn as_note(&self, tcx: TyCtxt<'tcx>) -> errors::FrameNote { + let span = self.span; + if tcx.def_key(self.instance.def_id()).disambiguated_data.data == DefPathData::Closure { + errors::FrameNote { where_: "closure", span, instance: String::new(), times: 0 } + } else { + let instance = format!("{}", self.instance); + // Note: this triggers a `must_produce_diag` state, which means that if we ever get + // here we must emit a diagnostic. We should never display a `FrameInfo` unless we + // actually want to emit a warning or error to the user. + errors::FrameNote { where_: "instance", span, instance, times: 0 } + } + } +} + +impl<'tcx, Prov: Provenance> Frame<'tcx, Prov> { + pub fn with_extra(self, extra: Extra) -> Frame<'tcx, Prov, Extra> { + Frame { + body: self.body, + instance: self.instance, + return_to_block: self.return_to_block, + return_place: self.return_place, + locals: self.locals, + loc: self.loc, + extra, + tracing_span: self.tracing_span, + } + } +} + +impl<'tcx, Prov: Provenance, Extra> Frame<'tcx, Prov, Extra> { + /// Get the current location within the Frame. 
+ /// + /// If this is `Right`, we are not currently executing any particular statement in + /// this frame (can happen e.g. during frame initialization, and during unwinding on + /// frames without cleanup code). + /// + /// Used by priroda. + pub fn current_loc(&self) -> Either { + self.loc + } + + /// Return the `SourceInfo` of the current instruction. + pub fn current_source_info(&self) -> Option<&mir::SourceInfo> { + self.loc.left().map(|loc| self.body.source_info(loc)) + } + + pub fn current_span(&self) -> Span { + match self.loc { + Left(loc) => self.body.source_info(loc).span, + Right(span) => span, + } + } + + pub fn lint_root(&self, tcx: TyCtxt<'tcx>) -> Option { + // We first try to get a HirId via the current source scope, + // and fall back to `body.source`. + self.current_source_info() + .and_then(|source_info| match &self.body.source_scopes[source_info.scope].local_data { + mir::ClearCrossCrate::Set(data) => Some(data.lint_root), + mir::ClearCrossCrate::Clear => None, + }) + .or_else(|| { + let def_id = self.body.source.def_id().as_local(); + def_id.map(|def_id| tcx.local_def_id_to_hir_id(def_id)) + }) + } + + /// Returns the address of the buffer where the locals are stored. This is used by `Place` as a + /// sanity check to detect bugs where we mix up which stack frame a place refers to. + #[inline(always)] + pub(super) fn locals_addr(&self) -> usize { + self.locals.raw.as_ptr().addr() + } + + #[must_use] + pub fn generate_stacktrace_from_stack(stack: &[Self]) -> Vec> { + let mut frames = Vec::new(); + // This deliberately does *not* honor `requires_caller_location` since it is used for much + // more than just panics. + for frame in stack.iter().rev() { + let span = match frame.loc { + Left(loc) => { + // If the stacktrace passes through MIR-inlined source scopes, add them. + let mir::SourceInfo { mut span, scope } = *frame.body.source_info(loc); + let mut scope_data = &frame.body.source_scopes[scope]; + while let Some((instance, call_span)) = scope_data.inlined { + frames.push(FrameInfo { span, instance }); + span = call_span; + scope_data = &frame.body.source_scopes[scope_data.parent_scope.unwrap()]; + } + span + } + Right(span) => span, + }; + frames.push(FrameInfo { span, instance: frame.instance }); + } + trace!("generate stacktrace: {:#?}", frames); + frames + } +} + +impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { + /// Very low-level helper that pushes a stack frame without initializing + /// the arguments or local variables. + #[instrument(skip(self, body, return_place, return_to_block), level = "debug")] + pub(crate) fn push_stack_frame_raw( + &mut self, + instance: ty::Instance<'tcx>, + body: &'tcx mir::Body<'tcx>, + return_place: &MPlaceTy<'tcx, M::Provenance>, + return_to_block: StackPopCleanup, + ) -> InterpResult<'tcx> { + trace!("body: {:#?}", body); + + // We can push a `Root` frame if and only if the stack is empty. + debug_assert_eq!( + self.stack().is_empty(), + matches!(return_to_block, StackPopCleanup::Root { .. }) + ); + + // First push a stack frame so we have access to `instantiate_from_current_frame` and other + // `self.frame()`-based functions. + let dead_local = LocalState { value: LocalValue::Dead, layout: Cell::new(None) }; + let locals = IndexVec::from_elem(dead_local, &body.local_decls); + let pre_frame = Frame { + body, + loc: Right(body.span), // Span used for errors caused during preamble. 
+ return_to_block, + return_place: return_place.clone(), + locals, + instance, + tracing_span: SpanGuard::new(), + extra: (), + }; + let frame = M::init_frame(self, pre_frame)?; + self.stack_mut().push(frame); + + // Make sure all the constants required by this frame evaluate successfully (post-monomorphization check). + for &const_ in body.required_consts() { + let c = + self.instantiate_from_current_frame_and_normalize_erasing_regions(const_.const_)?; + c.eval(*self.tcx, self.param_env, const_.span).map_err(|err| { + err.emit_note(*self.tcx); + err + })?; + } + + // Finish things up. + M::after_stack_push(self)?; + self.frame_mut().loc = Left(mir::Location::START); + let span = info_span!("frame", "{}", instance); + self.frame_mut().tracing_span.enter(span); + + Ok(()) + } + + /// Pops a stack frame from the stack and returns some information about it. + /// + /// This also deallocates locals, if necessary. + /// + /// [`M::before_stack_pop`] should be called before calling this function. + /// [`M::after_stack_pop`] is called by this function automatically. + /// + /// [`M::before_stack_pop`]: Machine::before_stack_pop + /// [`M::after_stack_pop`]: Machine::after_stack_pop + pub(super) fn pop_stack_frame_raw( + &mut self, + unwinding: bool, + ) -> InterpResult<'tcx, StackPopInfo<'tcx, M::Provenance>> { + let cleanup = self.cleanup_current_frame_locals()?; + + let frame = + self.stack_mut().pop().expect("tried to pop a stack frame, but there were none"); + + let return_to_block = frame.return_to_block; + let return_place = frame.return_place.clone(); + + let return_action; + if cleanup { + return_action = M::after_stack_pop(self, frame, unwinding)?; + assert_ne!(return_action, ReturnAction::NoCleanup); + } else { + return_action = ReturnAction::NoCleanup; + }; + + Ok(StackPopInfo { return_action, return_to_block, return_place }) + } + + /// A private helper for [`pop_stack_frame_raw`](InterpCx::pop_stack_frame_raw). + /// Returns `true` if cleanup has been done, `false` otherwise. + fn cleanup_current_frame_locals(&mut self) -> InterpResult<'tcx, bool> { + // Cleanup: deallocate locals. + // Usually we want to clean up (deallocate locals), but in a few rare cases we don't. + // We do this while the frame is still on the stack, so errors point to the callee. + let return_to_block = self.frame().return_to_block; + let cleanup = match return_to_block { + StackPopCleanup::Goto { .. } => true, + StackPopCleanup::Root { cleanup, .. } => cleanup, + }; + + if cleanup { + // We need to take the locals out, since we need to mutate while iterating. + let locals = mem::take(&mut self.frame_mut().locals); + for local in &locals { + self.deallocate_local(local.value)?; + } + } + + Ok(cleanup) + } + + /// In the current stack frame, mark all locals as live that are not arguments and don't have + /// `Storage*` annotations (this includes the return place). + pub(crate) fn storage_live_for_always_live_locals(&mut self) -> InterpResult<'tcx> { + self.storage_live(mir::RETURN_PLACE)?; + + let body = self.body(); + let always_live = always_storage_live_locals(body); + for local in body.vars_and_temps_iter() { + if always_live.contains(local) { + self.storage_live(local)?; + } + } + Ok(()) + } + + pub fn storage_live_dyn( + &mut self, + local: mir::Local, + meta: MemPlaceMeta, + ) -> InterpResult<'tcx> { + trace!("{:?} is now live", local); + + // We avoid `ty.is_trivially_sized` since that does something expensive for ADTs. 
+ fn is_very_trivially_sized(ty: Ty<'_>) -> bool { + match ty.kind() { + ty::Infer(ty::IntVar(_) | ty::FloatVar(_)) + | ty::Uint(_) + | ty::Int(_) + | ty::Bool + | ty::Float(_) + | ty::FnDef(..) + | ty::FnPtr(_) + | ty::RawPtr(..) + | ty::Char + | ty::Ref(..) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) + | ty::Array(..) + | ty::Closure(..) + | ty::CoroutineClosure(..) + | ty::Never + | ty::Error(_) + | ty::Dynamic(_, _, ty::DynStar) => true, + + ty::Str | ty::Slice(_) | ty::Dynamic(_, _, ty::Dyn) | ty::Foreign(..) => false, + + ty::Tuple(tys) => tys.last().is_none_or(|ty| is_very_trivially_sized(*ty)), + + ty::Pat(ty, ..) => is_very_trivially_sized(*ty), + + // We don't want to do any queries, so there is not much we can do with ADTs. + ty::Adt(..) => false, + + ty::Alias(..) | ty::Param(_) | ty::Placeholder(..) => false, + + ty::Infer(ty::TyVar(_)) => false, + + ty::Bound(..) + | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => { + bug!("`is_very_trivially_sized` applied to unexpected type: {}", ty) + } + } + } + + // This is a hot function, we avoid computing the layout when possible. + // `unsized_` will be `None` for sized types and `Some(layout)` for unsized types. + let unsized_ = if is_very_trivially_sized(self.body().local_decls[local].ty) { + None + } else { + // We need the layout. + let layout = self.layout_of_local(self.frame(), local, None)?; + if layout.is_sized() { None } else { Some(layout) } + }; + + let local_val = LocalValue::Live(if let Some(layout) = unsized_ { + if !meta.has_meta() { + throw_unsup!(UnsizedLocal); + } + // Need to allocate some memory, since `Immediate::Uninit` cannot be unsized. + let dest_place = self.allocate_dyn(layout, MemoryKind::Stack, meta)?; + Operand::Indirect(*dest_place.mplace()) + } else { + assert!(!meta.has_meta()); // we're dropping the metadata + // Just make this an efficient immediate. + // Note that not calling `layout_of` here does have one real consequence: + // if the type is too big, we'll only notice this when the local is actually initialized, + // which is a bit too late -- we should ideally notice this already here, when the memory + // is conceptually allocated. But given how rare that error is and that this is a hot function, + // we accept this downside for now. + Operand::Immediate(Immediate::Uninit) + }); + + // If the local is already live, deallocate its old memory. + let old = mem::replace(&mut self.frame_mut().locals[local].value, local_val); + self.deallocate_local(old)?; + Ok(()) + } + + /// Mark a storage as live, killing the previous content. + #[inline(always)] + pub fn storage_live(&mut self, local: mir::Local) -> InterpResult<'tcx> { + self.storage_live_dyn(local, MemPlaceMeta::None) + } + + pub fn storage_dead(&mut self, local: mir::Local) -> InterpResult<'tcx> { + assert!(local != mir::RETURN_PLACE, "Cannot make return place dead"); + trace!("{:?} is now dead", local); + + // If the local is already dead, this is a NOP. + let old = mem::replace(&mut self.frame_mut().locals[local].value, LocalValue::Dead); + self.deallocate_local(old)?; + Ok(()) + } + + fn deallocate_local(&mut self, local: LocalValue) -> InterpResult<'tcx> { + if let LocalValue::Live(Operand::Indirect(MemPlace { ptr, .. })) = local { + // All locals have a backing allocation, even if the allocation is empty + // due to the local having ZST type. Hence we can `unwrap`. + trace!( + "deallocating local {:?}: {:?}", + local, + // Locals always have a `alloc_id` (they are never the result of a int2ptr). 
+ self.dump_alloc(ptr.provenance.unwrap().get_alloc_id().unwrap()) + ); + self.deallocate_ptr(ptr, None, MemoryKind::Stack)?; + }; + Ok(()) + } + + #[inline(always)] + pub(super) fn layout_of_local( + &self, + frame: &Frame<'tcx, M::Provenance, M::FrameExtra>, + local: mir::Local, + layout: Option>, + ) -> InterpResult<'tcx, TyAndLayout<'tcx>> { + let state = &frame.locals[local]; + if let Some(layout) = state.layout.get() { + return Ok(layout); + } + + let layout = from_known_layout(self.tcx, self.param_env, layout, || { + let local_ty = frame.body.local_decls[local].ty; + let local_ty = + self.instantiate_from_frame_and_normalize_erasing_regions(frame, local_ty)?; + self.layout_of(local_ty) + })?; + + // Layouts of locals are requested a lot, so we cache them. + state.layout.set(Some(layout)); + Ok(layout) + } +} + +impl<'tcx, Prov: Provenance> LocalState<'tcx, Prov> { + pub(super) fn print( + &self, + allocs: &mut Vec>, + fmt: &mut std::fmt::Formatter<'_>, + ) -> std::fmt::Result { + match self.value { + LocalValue::Dead => write!(fmt, " is dead")?, + LocalValue::Live(Operand::Immediate(Immediate::Uninit)) => { + write!(fmt, " is uninitialized")? + } + LocalValue::Live(Operand::Indirect(mplace)) => { + write!( + fmt, + " by {} ref {:?}:", + match mplace.meta { + MemPlaceMeta::Meta(meta) => format!(" meta({meta:?})"), + MemPlaceMeta::None => String::new(), + }, + mplace.ptr, + )?; + allocs.extend(mplace.ptr.provenance.map(Provenance::get_alloc_id)); + } + LocalValue::Live(Operand::Immediate(Immediate::Scalar(val))) => { + write!(fmt, " {val:?}")?; + if let Scalar::Ptr(ptr, _size) = val { + allocs.push(ptr.provenance.get_alloc_id()); + } + } + LocalValue::Live(Operand::Immediate(Immediate::ScalarPair(val1, val2))) => { + write!(fmt, " ({val1:?}, {val2:?})")?; + if let Scalar::Ptr(ptr, _size) = val1 { + allocs.push(ptr.provenance.get_alloc_id()); + } + if let Scalar::Ptr(ptr, _size) = val2 { + allocs.push(ptr.provenance.get_alloc_id()); + } + } + } + + Ok(()) + } +} diff --git a/compiler/rustc_const_eval/src/interpret/step.rs b/compiler/rustc_const_eval/src/interpret/step.rs index 28cf1068f4081..654c10765450f 100644 --- a/compiler/rustc_const_eval/src/interpret/step.rs +++ b/compiler/rustc_const_eval/src/interpret/step.rs @@ -4,15 +4,29 @@ use either::Either; use rustc_index::IndexSlice; -use rustc_middle::{bug, mir}; +use rustc_middle::ty::layout::FnAbiOf; +use rustc_middle::ty::{self, Instance, Ty}; +use rustc_middle::{bug, mir, span_bug}; +use rustc_span::source_map::Spanned; +use rustc_target::abi::call::FnAbi; use rustc_target::abi::{FieldIdx, FIRST_VARIANT}; use tracing::{info, instrument, trace}; use super::{ - ImmTy, Immediate, InterpCx, InterpResult, Machine, MemPlaceMeta, PlaceTy, Projectable, Scalar, + throw_ub, FnArg, FnVal, ImmTy, Immediate, InterpCx, InterpResult, Machine, MemPlaceMeta, + PlaceTy, Projectable, Scalar, }; use crate::util; +struct EvaluatedCalleeAndArgs<'tcx, M: Machine<'tcx>> { + callee: FnVal<'tcx, M::ExtraFnVal>, + args: Vec>, + fn_sig: ty::FnSig<'tcx>, + fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>, + /// True if the function is marked as `#[track_caller]` ([`ty::InstanceKind::requires_caller_location`]) + with_caller_location: bool, +} + impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { /// Returns `true` as long as there are more things to do. 
/// @@ -36,7 +50,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { if let Some(stmt) = basic_block.statements.get(loc.statement_index) { let old_frames = self.frame_idx(); - self.statement(stmt)?; + self.eval_statement(stmt)?; // Make sure we are not updating `statement_index` of the wrong frame. assert_eq!(old_frames, self.frame_idx()); // Advance the program counter. @@ -47,7 +61,12 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { M::before_terminator(self)?; let terminator = basic_block.terminator(); - self.terminator(terminator)?; + self.eval_terminator(terminator)?; + if !self.stack().is_empty() { + if let Either::Left(loc) = self.frame().loc { + info!("// executing {:?}", loc.block); + } + } Ok(true) } @@ -55,7 +74,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { /// statement counter. /// /// This does NOT move the statement counter forward, the caller has to do that! - pub fn statement(&mut self, stmt: &mir::Statement<'tcx>) -> InterpResult<'tcx> { + pub fn eval_statement(&mut self, stmt: &mir::Statement<'tcx>) -> InterpResult<'tcx> { info!("{:?}", stmt); use rustc_middle::mir::StatementKind::*; @@ -349,16 +368,225 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { Ok(()) } - /// Evaluate the given terminator. Will also adjust the stack frame and statement position accordingly. - fn terminator(&mut self, terminator: &mir::Terminator<'tcx>) -> InterpResult<'tcx> { + /// Evaluate the arguments of a function call + fn eval_fn_call_arguments( + &self, + ops: &[Spanned>], + ) -> InterpResult<'tcx, Vec>> { + ops.iter() + .map(|op| { + let arg = match &op.node { + mir::Operand::Copy(_) | mir::Operand::Constant(_) => { + // Make a regular copy. + let op = self.eval_operand(&op.node, None)?; + FnArg::Copy(op) + } + mir::Operand::Move(place) => { + // If this place lives in memory, preserve its location. + // We call `place_to_op` which will be an `MPlaceTy` whenever there exists + // an mplace for this place. (This is in contrast to `PlaceTy::as_mplace_or_local` + // which can return a local even if that has an mplace.) + let place = self.eval_place(*place)?; + let op = self.place_to_op(&place)?; + + match op.as_mplace_or_imm() { + Either::Left(mplace) => FnArg::InPlace(mplace), + Either::Right(_imm) => { + // This argument doesn't live in memory, so there's no place + // to make inaccessible during the call. + // We rely on there not being any stray `PlaceTy` that would let the + // caller directly access this local! + // This is also crucial for tail calls, where we want the `FnArg` to + // stay valid when the old stack frame gets popped. + FnArg::Copy(op) + } + } + } + }; + + Ok(arg) + }) + .collect() + } + + /// Shared part of `Call` and `TailCall` implementation — finding and evaluating all the + /// necessary information about callee and arguments to make a call. 
+ fn eval_callee_and_args( + &self, + terminator: &mir::Terminator<'tcx>, + func: &mir::Operand<'tcx>, + args: &[Spanned>], + ) -> InterpResult<'tcx, EvaluatedCalleeAndArgs<'tcx, M>> { + let func = self.eval_operand(func, None)?; + let args = self.eval_fn_call_arguments(args)?; + + let fn_sig_binder = func.layout.ty.fn_sig(*self.tcx); + let fn_sig = self.tcx.normalize_erasing_late_bound_regions(self.param_env, fn_sig_binder); + let extra_args = &args[fn_sig.inputs().len()..]; + let extra_args = + self.tcx.mk_type_list_from_iter(extra_args.iter().map(|arg| arg.layout().ty)); + + let (callee, fn_abi, with_caller_location) = match *func.layout.ty.kind() { + ty::FnPtr(_sig) => { + let fn_ptr = self.read_pointer(&func)?; + let fn_val = self.get_ptr_fn(fn_ptr)?; + (fn_val, self.fn_abi_of_fn_ptr(fn_sig_binder, extra_args)?, false) + } + ty::FnDef(def_id, args) => { + let instance = self.resolve(def_id, args)?; + ( + FnVal::Instance(instance), + self.fn_abi_of_instance(instance, extra_args)?, + instance.def.requires_caller_location(*self.tcx), + ) + } + _ => { + span_bug!(terminator.source_info.span, "invalid callee of type {}", func.layout.ty) + } + }; + + Ok(EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location }) + } + + fn eval_terminator(&mut self, terminator: &mir::Terminator<'tcx>) -> InterpResult<'tcx> { info!("{:?}", terminator.kind); - self.eval_terminator(terminator)?; - if !self.stack().is_empty() { - if let Either::Left(loc) = self.frame().loc { - info!("// executing {:?}", loc.block); + use rustc_middle::mir::TerminatorKind::*; + match terminator.kind { + Return => { + self.return_from_current_stack_frame(/* unwinding */ false)? + } + + Goto { target } => self.go_to_block(target), + + SwitchInt { ref discr, ref targets } => { + let discr = self.read_immediate(&self.eval_operand(discr, None)?)?; + trace!("SwitchInt({:?})", *discr); + + // Branch to the `otherwise` case by default, if no match is found. + let mut target_block = targets.otherwise(); + + for (const_int, target) in targets.iter() { + // Compare using MIR BinOp::Eq, to also support pointer values. + // (Avoiding `self.binary_op` as that does some redundant layout computation.) + let res = self.binary_op( + mir::BinOp::Eq, + &discr, + &ImmTy::from_uint(const_int, discr.layout), + )?; + if res.to_scalar().to_bool()? { + target_block = target; + break; + } + } + + self.go_to_block(target_block); + } + + Call { + ref func, + ref args, + destination, + target, + unwind, + call_source: _, + fn_span: _, + } => { + let old_stack = self.frame_idx(); + let old_loc = self.frame().loc; + + let EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location } = + self.eval_callee_and_args(terminator, func, args)?; + + let destination = self.force_allocation(&self.eval_place(destination)?)?; + self.init_fn_call( + callee, + (fn_sig.abi, fn_abi), + &args, + with_caller_location, + &destination, + target, + if fn_abi.can_unwind { unwind } else { mir::UnwindAction::Unreachable }, + )?; + // Sanity-check that `eval_fn_call` either pushed a new frame or + // did a jump to another block. 
+ if self.frame_idx() == old_stack && self.frame().loc == old_loc { + span_bug!(terminator.source_info.span, "evaluating this call made no progress"); + } + } + + TailCall { ref func, ref args, fn_span: _ } => { + let old_frame_idx = self.frame_idx(); + + let EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location } = + self.eval_callee_and_args(terminator, func, args)?; + + self.init_fn_tail_call(callee, (fn_sig.abi, fn_abi), &args, with_caller_location)?; + + if self.frame_idx() != old_frame_idx { + span_bug!( + terminator.source_info.span, + "evaluating this tail call pushed a new stack frame" + ); + } + } + + Drop { place, target, unwind, replace: _ } => { + let place = self.eval_place(place)?; + let instance = Instance::resolve_drop_in_place(*self.tcx, place.layout.ty); + if let ty::InstanceKind::DropGlue(_, None) = instance.def { + // This is the branch we enter if and only if the dropped type has no drop glue + // whatsoever. This can happen as a result of monomorphizing a drop of a + // generic. In order to make sure that generic and non-generic code behaves + // roughly the same (and in keeping with Mir semantics) we do nothing here. + self.go_to_block(target); + return Ok(()); + } + trace!("TerminatorKind::drop: {:?}, type {}", place, place.layout.ty); + self.init_drop_in_place_call(&place, instance, target, unwind)?; + } + + Assert { ref cond, expected, ref msg, target, unwind } => { + let ignored = + M::ignore_optional_overflow_checks(self) && msg.is_optional_overflow_check(); + let cond_val = self.read_scalar(&self.eval_operand(cond, None)?)?.to_bool()?; + if ignored || expected == cond_val { + self.go_to_block(target); + } else { + M::assert_panic(self, msg, unwind)?; + } + } + + UnwindTerminate(reason) => { + M::unwind_terminate(self, reason)?; + } + + // When we encounter Resume, we've finished unwinding + // cleanup for the current stack frame. We pop it in order + // to continue unwinding the next frame + UnwindResume => { + trace!("unwinding: resuming from cleanup"); + // By definition, a Resume terminator means + // that we're unwinding + self.return_from_current_stack_frame(/* unwinding */ true)?; + return Ok(()); + } + + // It is UB to ever encounter this. + Unreachable => throw_ub!(Unreachable), + + // These should never occur for MIR we actually run. + FalseEdge { .. } | FalseUnwind { .. } | Yield { .. } | CoroutineDrop => span_bug!( + terminator.source_info.span, + "{:#?} should have been eliminated by MIR pass", + terminator.kind + ), + + InlineAsm { template, ref operands, options, ref targets, .. } => { + M::eval_inline_asm(self, template, operands, options, targets)?; } } + Ok(()) } } diff --git a/src/tools/miri/src/concurrency/thread.rs b/src/tools/miri/src/concurrency/thread.rs index a53dd7eac1e9b..afa91df78f982 100644 --- a/src/tools/miri/src/concurrency/thread.rs +++ b/src/tools/miri/src/concurrency/thread.rs @@ -256,7 +256,7 @@ pub struct Thread<'tcx> { /// which then forwards it to 'Resume'. However this argument is implicit in MIR, /// so we have to store it out-of-band. When there are multiple active unwinds, /// the innermost one is always caught first, so we can store them as a stack. - pub(crate) panic_payloads: Vec, + pub(crate) panic_payloads: Vec>, /// Last OS error location in memory. It is a 32-bit integer. 
pub(crate) last_error: Option>, @@ -952,7 +952,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.call_function( instance, start_abi, - &[*func_arg], + &[func_arg], Some(&ret_place), StackPopCleanup::Root { cleanup: true }, )?; diff --git a/src/tools/miri/src/eval.rs b/src/tools/miri/src/eval.rs index 53e877517089c..1173da4697564 100644 --- a/src/tools/miri/src/eval.rs +++ b/src/tools/miri/src/eval.rs @@ -307,7 +307,8 @@ pub fn create_ecx<'tcx>( // First argument is constructed later, because it's skipped if the entry function uses #[start]. // Second argument (argc): length of `config.args`. - let argc = Scalar::from_target_usize(u64::try_from(config.args.len()).unwrap(), &ecx); + let argc = + ImmTy::from_int(i64::try_from(config.args.len()).unwrap(), ecx.machine.layouts.isize); // Third argument (`argv`): created from `config.args`. let argv = { // Put each argument in memory, collect pointers. @@ -334,13 +335,11 @@ pub fn create_ecx<'tcx>( ecx.write_immediate(arg, &place)?; } ecx.mark_immutable(&argvs_place); - // A pointer to that place is the 3rd argument for main. - let argv = argvs_place.to_ref(&ecx); // Store `argc` and `argv` for macOS `_NSGetArg{c,v}`. { let argc_place = ecx.allocate(ecx.machine.layouts.isize, MiriMemoryKind::Machine.into())?; - ecx.write_scalar(argc, &argc_place)?; + ecx.write_immediate(*argc, &argc_place)?; ecx.mark_immutable(&argc_place); ecx.machine.argc = Some(argc_place.ptr()); @@ -348,7 +347,7 @@ pub fn create_ecx<'tcx>( ecx.layout_of(Ty::new_imm_ptr(tcx, tcx.types.unit))?, MiriMemoryKind::Machine.into(), )?; - ecx.write_immediate(argv, &argv_place)?; + ecx.write_pointer(argvs_place.ptr(), &argv_place)?; ecx.mark_immutable(&argv_place); ecx.machine.argv = Some(argv_place.ptr()); } @@ -369,7 +368,7 @@ pub fn create_ecx<'tcx>( } ecx.mark_immutable(&cmd_place); } - argv + ecx.mplace_to_ref(&argvs_place)? }; // Return place (in static memory so that it does not count as leak). 
@@ -405,10 +404,14 @@ pub fn create_ecx<'tcx>( start_instance, Abi::Rust, &[ - Scalar::from_pointer(main_ptr, &ecx).into(), - argc.into(), + ImmTy::from_scalar( + Scalar::from_pointer(main_ptr, &ecx), + // FIXME use a proper fn ptr type + ecx.machine.layouts.const_raw_ptr, + ), + argc, argv, - Scalar::from_u8(sigpipe).into(), + ImmTy::from_uint(sigpipe, ecx.machine.layouts.u8), ], Some(&ret_place), StackPopCleanup::Root { cleanup: true }, @@ -418,7 +421,7 @@ pub fn create_ecx<'tcx>( ecx.call_function( entry_instance, Abi::Rust, - &[argc.into(), argv], + &[argc, argv], Some(&ret_place), StackPopCleanup::Root { cleanup: true }, )?; diff --git a/src/tools/miri/src/helpers.rs b/src/tools/miri/src/helpers.rs index 8bc8188f05314..5c89889506f10 100644 --- a/src/tools/miri/src/helpers.rs +++ b/src/tools/miri/src/helpers.rs @@ -12,13 +12,14 @@ use rustc_apfloat::Float; use rustc_hir::{ def::{DefKind, Namespace}, def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE}, + Safety, }; use rustc_index::IndexVec; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; use rustc_middle::middle::dependency_format::Linkage; use rustc_middle::middle::exported_symbols::ExportedSymbol; use rustc_middle::mir; -use rustc_middle::ty::layout::MaybeResult; +use rustc_middle::ty::layout::{FnAbiOf, MaybeResult}; use rustc_middle::ty::{ self, layout::{LayoutOf, TyAndLayout}, @@ -492,48 +493,38 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { &mut self, f: ty::Instance<'tcx>, caller_abi: Abi, - args: &[Immediate], + args: &[ImmTy<'tcx>], dest: Option<&MPlaceTy<'tcx>>, stack_pop: StackPopCleanup, ) -> InterpResult<'tcx> { let this = self.eval_context_mut(); - let param_env = ty::ParamEnv::reveal_all(); // in Miri this is always the param_env we use... and this.param_env is private. - let callee_abi = f.ty(*this.tcx, param_env).fn_sig(*this.tcx).abi(); - if callee_abi != caller_abi { - throw_ub_format!( - "calling a function with ABI {} using caller ABI {}", - callee_abi.name(), - caller_abi.name() - ) - } - // Push frame. + // Get MIR. let mir = this.load_mir(f.def, None)?; let dest = match dest { Some(dest) => dest.clone(), None => MPlaceTy::fake_alloc_zst(this.layout_of(mir.return_ty())?), }; - this.push_stack_frame(f, mir, &dest, stack_pop)?; - - // Initialize arguments. - let mut callee_args = this.frame().body.args_iter(); - for arg in args { - let local = callee_args - .next() - .ok_or_else(|| err_ub_format!("callee has fewer arguments than expected"))?; - // Make the local live, and insert the initial value. - this.storage_live(local)?; - let callee_arg = this.local_to_place(local)?; - this.write_immediate(*arg, &callee_arg)?; - } - if callee_args.next().is_some() { - throw_ub_format!("callee has more arguments than expected"); - } - - // Initialize remaining locals. - this.storage_live_for_always_live_locals()?; - Ok(()) + // Construct a function pointer type representing the caller perspective. 
+ let sig = this.tcx.mk_fn_sig( + args.iter().map(|a| a.layout.ty), + dest.layout.ty, + /*c_variadic*/ false, + Safety::Safe, + caller_abi, + ); + let caller_fn_abi = this.fn_abi_of_fn_ptr(ty::Binder::dummy(sig), ty::List::empty())?; + + this.init_stack_frame( + f, + mir, + caller_fn_abi, + &args.iter().map(|a| FnArg::Copy(a.clone().into())).collect::>(), + /*with_caller_location*/ false, + &dest, + stack_pop, + ) } /// Visits the memory covered by `place`, sensitive to freezing: the 2nd parameter diff --git a/src/tools/miri/src/shims/panic.rs b/src/tools/miri/src/shims/panic.rs index 306dce5edcd3d..2fb0319a843b2 100644 --- a/src/tools/miri/src/shims/panic.rs +++ b/src/tools/miri/src/shims/panic.rs @@ -25,7 +25,7 @@ pub struct CatchUnwindData<'tcx> { /// The `catch_fn` callback to call in case of a panic. catch_fn: Pointer, /// The `data` argument for that callback. - data: Scalar, + data: ImmTy<'tcx>, /// The return place from the original call to `try`. dest: MPlaceTy<'tcx>, /// The return block from the original call to `try`. @@ -50,7 +50,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { trace!("miri_start_unwind: {:?}", this.frame().instance); - let payload = this.read_scalar(payload)?; + let payload = this.read_immediate(payload)?; let thread = this.active_thread_mut(); thread.panic_payloads.push(payload); @@ -80,7 +80,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Get all the arguments. let [try_fn, data, catch_fn] = check_arg_count(args)?; let try_fn = this.read_pointer(try_fn)?; - let data = this.read_scalar(data)?; + let data = this.read_immediate(data)?; let catch_fn = this.read_pointer(catch_fn)?; // Now we make a function call, and pass `data` as first and only argument. @@ -89,7 +89,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.call_function( f_instance, Abi::Rust, - &[data.into()], + &[data.clone()], None, // Directly return to caller. StackPopCleanup::Goto { ret, unwind: mir::UnwindAction::Continue }, @@ -140,7 +140,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.call_function( f_instance, Abi::Rust, - &[catch_unwind.data.into(), payload.into()], + &[catch_unwind.data, payload], None, // Directly return to caller of `try`. StackPopCleanup::Goto { @@ -169,7 +169,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.call_function( panic, Abi::Rust, - &[msg.to_ref(this)], + &[this.mplace_to_ref(&msg)?], None, StackPopCleanup::Goto { ret: None, unwind }, ) @@ -188,7 +188,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.call_function( panic, Abi::Rust, - &[msg.to_ref(this)], + &[this.mplace_to_ref(&msg)?], None, StackPopCleanup::Goto { ret: None, unwind: mir::UnwindAction::Unreachable }, ) @@ -207,9 +207,9 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Forward to `panic_bounds_check` lang item. // First arg: index. - let index = this.read_scalar(&this.eval_operand(index, None)?)?; + let index = this.read_immediate(&this.eval_operand(index, None)?)?; // Second arg: len. - let len = this.read_scalar(&this.eval_operand(len, None)?)?; + let len = this.read_immediate(&this.eval_operand(len, None)?)?; // Call the lang item. 
let panic_bounds_check = this.tcx.lang_items().panic_bounds_check_fn().unwrap(); @@ -217,7 +217,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.call_function( panic_bounds_check, Abi::Rust, - &[index.into(), len.into()], + &[index, len], None, StackPopCleanup::Goto { ret: None, unwind }, )?; @@ -226,9 +226,9 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Forward to `panic_misaligned_pointer_dereference` lang item. // First arg: required. - let required = this.read_scalar(&this.eval_operand(required, None)?)?; + let required = this.read_immediate(&this.eval_operand(required, None)?)?; // Second arg: found. - let found = this.read_scalar(&this.eval_operand(found, None)?)?; + let found = this.read_immediate(&this.eval_operand(found, None)?)?; // Call the lang item. let panic_misaligned_pointer_dereference = @@ -238,7 +238,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.call_function( panic_misaligned_pointer_dereference, Abi::Rust, - &[required.into(), found.into()], + &[required, found], None, StackPopCleanup::Goto { ret: None, unwind }, )?; diff --git a/src/tools/miri/src/shims/tls.rs b/src/tools/miri/src/shims/tls.rs index 8707446878955..52d83cd72997e 100644 --- a/src/tools/miri/src/shims/tls.rs +++ b/src/tools/miri/src/shims/tls.rs @@ -315,6 +315,8 @@ trait EvalContextPrivExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // FIXME: Technically, the reason should be `DLL_PROCESS_DETACH` when the main thread exits // but std treats both the same. let reason = this.eval_windows("c", "DLL_THREAD_DETACH"); + let null_ptr = + ImmTy::from_scalar(Scalar::null_ptr(this), this.machine.layouts.const_raw_ptr); // The signature of this function is `unsafe extern "system" fn(h: c::LPVOID, dwReason: c::DWORD, pv: c::LPVOID)`. 
// FIXME: `h` should be a handle to the current module and what `pv` should be is unknown @@ -322,7 +324,7 @@ trait EvalContextPrivExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.call_function( thread_callback, Abi::System { unwind: false }, - &[Scalar::null_ptr(this).into(), reason.into(), Scalar::null_ptr(this).into()], + &[null_ptr.clone(), ImmTy::from_scalar(reason, this.machine.layouts.u32), null_ptr], None, StackPopCleanup::Root { cleanup: true }, )?; @@ -343,7 +345,7 @@ trait EvalContextPrivExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.call_function( instance, Abi::C { unwind: false }, - &[data.into()], + &[ImmTy::from_scalar(data, this.machine.layouts.mut_raw_ptr)], None, StackPopCleanup::Root { cleanup: true }, )?; @@ -380,7 +382,7 @@ trait EvalContextPrivExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.call_function( instance, Abi::C { unwind: false }, - &[ptr.into()], + &[ImmTy::from_scalar(ptr, this.machine.layouts.mut_raw_ptr)], None, StackPopCleanup::Root { cleanup: true }, )?; diff --git a/src/tools/miri/src/shims/unix/thread.rs b/src/tools/miri/src/shims/unix/thread.rs index 83bb95c797d97..56e8270aa62ac 100644 --- a/src/tools/miri/src/shims/unix/thread.rs +++ b/src/tools/miri/src/shims/unix/thread.rs @@ -1,5 +1,4 @@ use crate::*; -use rustc_middle::ty::layout::LayoutOf; use rustc_target::spec::abi::Abi; impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {} @@ -24,7 +23,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { start_routine, Abi::C { unwind: false }, func_arg, - this.layout_of(this.tcx.types.usize)?, + this.machine.layouts.mut_raw_ptr, )?; Ok(()) diff --git a/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_create_too_few_args.rs b/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_create_too_few_args.rs index 96dd99e884428..39b1c3007cb00 100644 --- a/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_create_too_few_args.rs +++ b/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_create_too_few_args.rs @@ -1,11 +1,12 @@ //@ignore-target-windows: No pthreads on Windows +//~^ERROR: calling a function with more arguments than it expected //! The thread function must have exactly one argument. 
use std::{mem, ptr}; extern "C" fn thread_start() -> *mut libc::c_void { - panic!() //~ ERROR: callee has fewer arguments than expected + panic!() } fn main() { diff --git a/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_create_too_few_args.stderr b/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_create_too_few_args.stderr index ca6a05ac7dd43..aa67420c7538a 100644 --- a/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_create_too_few_args.stderr +++ b/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_create_too_few_args.stderr @@ -1,14 +1,10 @@ -error: Undefined Behavior: callee has fewer arguments than expected - --> $DIR/libc_pthread_create_too_few_args.rs:LL:CC - | -LL | panic!() - | ^^^^^^^^ callee has fewer arguments than expected +error: Undefined Behavior: calling a function with more arguments than it expected | + = note: calling a function with more arguments than it expected + = note: (no span available) = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information = note: BACKTRACE on thread `unnamed-ID`: - = note: inside `thread_start` at RUSTLIB/core/src/panic.rs:LL:CC - = note: this error originates in the macro `$crate::panic::panic_2021` which comes from the expansion of the macro `panic` (in Nightly builds, run with -Z macro-backtrace for more info) error: aborting due to 1 previous error diff --git a/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_create_too_many_args.rs b/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_create_too_many_args.rs index d8fbc68d344cd..fc2ab71dff735 100644 --- a/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_create_too_many_args.rs +++ b/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_create_too_many_args.rs @@ -1,11 +1,12 @@ //@ignore-target-windows: No pthreads on Windows +//~^ERROR: calling a function with fewer arguments than it requires //! The thread function must have exactly one argument. 
use std::{mem, ptr}; extern "C" fn thread_start(_null: *mut libc::c_void, _x: i32) -> *mut libc::c_void { - panic!() //~ ERROR: callee has more arguments than expected + panic!() } fn main() { diff --git a/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_create_too_many_args.stderr b/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_create_too_many_args.stderr index 6ab48a7666673..4de947b1694bf 100644 --- a/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_create_too_many_args.stderr +++ b/src/tools/miri/tests/fail-dep/concurrency/libc_pthread_create_too_many_args.stderr @@ -1,14 +1,10 @@ -error: Undefined Behavior: callee has more arguments than expected - --> $DIR/libc_pthread_create_too_many_args.rs:LL:CC - | -LL | panic!() - | ^^^^^^^^ callee has more arguments than expected +error: Undefined Behavior: calling a function with fewer arguments than it requires | + = note: calling a function with fewer arguments than it requires + = note: (no span available) = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information = note: BACKTRACE on thread `unnamed-ID`: - = note: inside `thread_start` at RUSTLIB/core/src/panic.rs:LL:CC - = note: this error originates in the macro `$crate::panic::panic_2021` which comes from the expansion of the macro `panic` (in Nightly builds, run with -Z macro-backtrace for more info) error: aborting due to 1 previous error diff --git a/src/tools/miri/tests/fail/function_calls/check_callback_abi.rs b/src/tools/miri/tests/fail/function_calls/check_callback_abi.rs index fd667fbe454c1..6a7a26710d16b 100644 --- a/src/tools/miri/tests/fail/function_calls/check_callback_abi.rs +++ b/src/tools/miri/tests/fail/function_calls/check_callback_abi.rs @@ -9,7 +9,7 @@ fn main() { // Make sure we check the ABI when Miri itself invokes a function // as part of a shim implementation. 
std::intrinsics::catch_unwind( - //~^ ERROR: calling a function with ABI C using caller ABI Rust + //~^ ERROR: calling a function with calling convention C using calling convention Rust std::mem::transmute::(try_fn), std::ptr::null_mut(), |_, _| unreachable!(), diff --git a/src/tools/miri/tests/fail/function_calls/check_callback_abi.stderr b/src/tools/miri/tests/fail/function_calls/check_callback_abi.stderr index 501f17c86d69c..890fed09e4819 100644 --- a/src/tools/miri/tests/fail/function_calls/check_callback_abi.stderr +++ b/src/tools/miri/tests/fail/function_calls/check_callback_abi.stderr @@ -1,4 +1,4 @@ -error: Undefined Behavior: calling a function with ABI C using caller ABI Rust +error: Undefined Behavior: calling a function with calling convention C using calling convention Rust --> $DIR/check_callback_abi.rs:LL:CC | LL | / std::intrinsics::catch_unwind( @@ -7,7 +7,7 @@ LL | | std::mem::transmute::(try_fn), LL | | std::ptr::null_mut(), LL | | |_, _| unreachable!(), LL | | ); - | |_________^ calling a function with ABI C using caller ABI Rust + | |_________^ calling a function with calling convention C using calling convention Rust | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information diff --git a/src/tools/miri/tests/pass/alloc-access-tracking.rs b/src/tools/miri/tests/pass/alloc-access-tracking.rs index a226783155b42..40b8e23a33c47 100644 --- a/src/tools/miri/tests/pass/alloc-access-tracking.rs +++ b/src/tools/miri/tests/pass/alloc-access-tracking.rs @@ -1,7 +1,7 @@ #![feature(start)] #![no_std] -//@compile-flags: -Zmiri-track-alloc-id=20 -Zmiri-track-alloc-accesses -Cpanic=abort -//@normalize-stderr-test: "id 20" -> "id $$ALLOC" +//@compile-flags: -Zmiri-track-alloc-id=21 -Zmiri-track-alloc-accesses -Cpanic=abort +//@normalize-stderr-test: "id 21" -> "id $$ALLOC" //@only-target-linux: alloc IDs differ between OSes (due to extern static allocations) extern "Rust" { From 5783e73f4662595ccb3261c3cb42cdc45eb157e0 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Mon, 5 Aug 2024 21:05:51 +0200 Subject: [PATCH 11/21] make some Frame fields more private --- compiler/rustc_const_eval/src/interpret/stack.rs | 16 ++++++++++++---- src/tools/miri/src/concurrency/thread.rs | 4 ---- src/tools/miri/src/helpers.rs | 10 +++++----- src/tools/miri/src/machine.rs | 8 ++++---- src/tools/miri/src/shims/backtrace.rs | 4 ++-- src/tools/miri/src/shims/panic.rs | 4 ++-- 6 files changed, 25 insertions(+), 21 deletions(-) diff --git a/compiler/rustc_const_eval/src/interpret/stack.rs b/compiler/rustc_const_eval/src/interpret/stack.rs index b5f55434d5afa..5733e2316fd85 100644 --- a/compiler/rustc_const_eval/src/interpret/stack.rs +++ b/compiler/rustc_const_eval/src/interpret/stack.rs @@ -61,10 +61,10 @@ pub struct Frame<'tcx, Prov: Provenance = CtfeProvenance, Extra = ()> { // Function and callsite information //////////////////////////////////////////////////////////////////////////////// /// The MIR for the function called on this frame. - pub body: &'tcx mir::Body<'tcx>, + pub(super) body: &'tcx mir::Body<'tcx>, /// The def_id and args of the current function. - pub instance: ty::Instance<'tcx>, + pub(super) instance: ty::Instance<'tcx>, /// Extra data for the machine. 
pub extra: Extra, @@ -73,7 +73,7 @@ pub struct Frame<'tcx, Prov: Provenance = CtfeProvenance, Extra = ()> { // Return place and locals //////////////////////////////////////////////////////////////////////////////// /// Work to perform when returning from this function. - pub return_to_block: StackPopCleanup, + return_to_block: StackPopCleanup, /// The location where the result of the current stack frame should be written to, /// and its layout in the caller. @@ -101,7 +101,7 @@ pub struct Frame<'tcx, Prov: Provenance = CtfeProvenance, Extra = ()> { /// frames without cleanup code). /// /// Needs to be public because ConstProp does unspeakable things to it. - pub loc: Either, + pub(super) loc: Either, } #[derive(Clone, Copy, Eq, PartialEq, Debug)] // Miri debug-prints these @@ -269,6 +269,14 @@ impl<'tcx, Prov: Provenance, Extra> Frame<'tcx, Prov, Extra> { self.loc } + pub fn body(&self) -> &'tcx mir::Body<'tcx> { + self.body + } + + pub fn instance(&self) -> ty::Instance<'tcx> { + self.instance + } + /// Return the `SourceInfo` of the current instruction. pub fn current_source_info(&self) -> Option<&mir::SourceInfo> { self.loc.left().map(|loc| self.body.source_info(loc)) diff --git a/src/tools/miri/src/concurrency/thread.rs b/src/tools/miri/src/concurrency/thread.rs index afa91df78f982..f72591f0c4bdf 100644 --- a/src/tools/miri/src/concurrency/thread.rs +++ b/src/tools/miri/src/concurrency/thread.rs @@ -377,10 +377,6 @@ impl VisitProvenance for Frame<'_, Provenance, FrameExtra<'_>> { return_place, locals, extra, - body: _, - instance: _, - return_to_block: _, - loc: _, // There are some private fields we cannot access; they contain no tags. .. } = self; diff --git a/src/tools/miri/src/helpers.rs b/src/tools/miri/src/helpers.rs index 5c89889506f10..1bdf9f06dcdb0 100644 --- a/src/tools/miri/src/helpers.rs +++ b/src/tools/miri/src/helpers.rs @@ -1105,12 +1105,12 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Make an attempt to get at the instance of the function this is inlined from. let instance: Option<_> = try { let scope = frame.current_source_info()?.scope; - let inlined_parent = frame.body.source_scopes[scope].inlined_parent_scope?; - let source = &frame.body.source_scopes[inlined_parent]; + let inlined_parent = frame.body().source_scopes[scope].inlined_parent_scope?; + let source = &frame.body().source_scopes[inlined_parent]; source.inlined.expect("inlined_parent_scope points to scope without inline info").0 }; // Fall back to the instance of the function itself. - let instance = instance.unwrap_or(frame.instance); + let instance = instance.unwrap_or(frame.instance()); // Now check the crate it is in. We could try to be clever here and e.g. check if this is // the same crate as `start_fn`, but that would not work for running std tests in Miri, so // we'd need some more hacks anyway. So we just check the name of the crate. If someone @@ -1350,9 +1350,9 @@ impl<'tcx> MiriMachine<'tcx> { /// This is the source of truth for the `is_user_relevant` flag in our `FrameExtra`. 
pub fn is_user_relevant(&self, frame: &Frame<'tcx, Provenance>) -> bool { - let def_id = frame.instance.def_id(); + let def_id = frame.instance().def_id(); (def_id.is_local() || self.local_crates.contains(&def_id.krate)) - && !frame.instance.def.requires_caller_location(self.tcx) + && !frame.instance().def.requires_caller_location(self.tcx) } } diff --git a/src/tools/miri/src/machine.rs b/src/tools/miri/src/machine.rs index 3e45a3a7e1a12..94598e7d2e3cc 100644 --- a/src/tools/miri/src/machine.rs +++ b/src/tools/miri/src/machine.rs @@ -1352,7 +1352,7 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { ) -> InterpResult<'tcx, Frame<'tcx, Provenance, FrameExtra<'tcx>>> { // Start recording our event before doing anything else let timing = if let Some(profiler) = ecx.machine.profiler.as_ref() { - let fn_name = frame.instance.to_string(); + let fn_name = frame.instance().to_string(); let entry = ecx.machine.string_cache.entry(fn_name.clone()); let name = entry.or_insert_with(|| profiler.alloc_string(&*fn_name)); @@ -1443,7 +1443,7 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { // tracing-tree can autoamtically annotate scope changes, but it gets very confused by our // concurrency and what it prints is just plain wrong. So we print our own information // instead. (Cc https://github.com/rust-lang/miri/issues/2266) - info!("Leaving {}", ecx.frame().instance); + info!("Leaving {}", ecx.frame().instance()); Ok(()) } @@ -1473,7 +1473,7 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { // Needs to be done after dropping frame to show up on the right nesting level. // (Cc https://github.com/rust-lang/miri/issues/2266) if !ecx.active_thread_stack().is_empty() { - info!("Continuing in {}", ecx.frame().instance); + info!("Continuing in {}", ecx.frame().instance()); } res } @@ -1486,7 +1486,7 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { let Some(Provenance::Concrete { alloc_id, .. }) = mplace.ptr().provenance else { panic!("after_local_allocated should only be called on fresh allocations"); }; - let local_decl = &ecx.frame().body.local_decls[local]; + let local_decl = &ecx.frame().body().local_decls[local]; let span = local_decl.source_info.span; ecx.machine.allocation_spans.borrow_mut().insert(alloc_id, (span, None)); Ok(()) diff --git a/src/tools/miri/src/shims/backtrace.rs b/src/tools/miri/src/shims/backtrace.rs index 42babb4c78dc5..9bb6777a9b047 100644 --- a/src/tools/miri/src/shims/backtrace.rs +++ b/src/tools/miri/src/shims/backtrace.rs @@ -46,8 +46,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let mut data = Vec::new(); for frame in this.active_thread_stack().iter().rev() { // Match behavior of debuginfo (`FunctionCx::adjusted_span_and_dbg_scope`). 
- let span = hygiene::walk_chain_collapsed(frame.current_span(), frame.body.span); - data.push((frame.instance, span.lo())); + let span = hygiene::walk_chain_collapsed(frame.current_span(), frame.body().span); + data.push((frame.instance(), span.lo())); } let ptrs: Vec<_> = data diff --git a/src/tools/miri/src/shims/panic.rs b/src/tools/miri/src/shims/panic.rs index 2fb0319a843b2..ab705ddccab6e 100644 --- a/src/tools/miri/src/shims/panic.rs +++ b/src/tools/miri/src/shims/panic.rs @@ -48,7 +48,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { fn handle_miri_start_unwind(&mut self, payload: &OpTy<'tcx>) -> InterpResult<'tcx> { let this = self.eval_context_mut(); - trace!("miri_start_unwind: {:?}", this.frame().instance); + trace!("miri_start_unwind: {:?}", this.frame().instance()); let payload = this.read_immediate(payload)?; let thread = this.active_thread_mut(); @@ -124,7 +124,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // and we are unwinding, so we should catch that. trace!( "unwinding: found catch_panic frame during unwinding: {:?}", - this.frame().instance + this.frame().instance() ); // We set the return value of `try` to 1, since there was a panic. From 1c2705c6229c16d68c8769e570b5500a123aebfb Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Tue, 6 Aug 2024 13:38:54 +0200 Subject: [PATCH 12/21] various cleanups based on review --- .../rustc_const_eval/src/interpret/call.rs | 31 ++++----- .../rustc_const_eval/src/interpret/stack.rs | 9 ++- .../rustc_const_eval/src/interpret/step.rs | 69 +++++++++---------- 3 files changed, 54 insertions(+), 55 deletions(-) diff --git a/compiler/rustc_const_eval/src/interpret/call.rs b/compiler/rustc_const_eval/src/interpret/call.rs index 9d2870a4a3130..2c5147678e8cb 100644 --- a/compiler/rustc_const_eval/src/interpret/call.rs +++ b/compiler/rustc_const_eval/src/interpret/call.rs @@ -418,7 +418,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { // The "where they come from" part is easy, we expect the caller to do any special handling // that might be required here (e.g. for untupling). // If `with_caller_location` is set we pretend there is an extra argument (that - // we will not pass). + // we will not pass; our `caller_location` intrinsic implementation walks the stack instead). assert_eq!( args.len() + if with_caller_location { 1 } else { 0 }, caller_fn_abi.args.len(), @@ -502,14 +502,10 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { // Don't forget to mark "initially live" locals as live. self.storage_live_for_always_live_locals()?; }; - match res { - Err(err) => { - // Don't show the incomplete stack frame in the error stacktrace. - self.stack_mut().pop(); - Err(err) - } - Ok(()) => Ok(()), - } + res.inspect_err(|_| { + // Don't show the incomplete stack frame in the error stacktrace. + self.stack_mut().pop(); + }) } /// Initiate a call to this function -- pushing the stack frame and initializing the arguments. @@ -907,7 +903,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { .local_to_op(mir::RETURN_PLACE, None) .expect("return place should always be live"); let dest = self.frame().return_place.clone(); - let err = if self.stack().len() == 1 { + let res = if self.stack().len() == 1 { // The initializer of constants and statics will get validated separately // after the constant has been fully evaluated. While we could fall back to the default // code path, that will cause -Zenforce-validity to cycle on static initializers. 
@@ -924,7 +920,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { // We delay actually short-circuiting on this error until *after* the stack frame is // popped, since we want this error to be attributed to the caller, whose type defines // this transmute. - err + res } else { Ok(()) }; @@ -953,14 +949,15 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { // Normal return, figure out where to jump. if unwinding { // Follow the unwind edge. - let unwind = match stack_pop_info.return_to_block { - StackPopCleanup::Goto { unwind, .. } => unwind, + match stack_pop_info.return_to_block { + StackPopCleanup::Goto { unwind, .. } => { + // This must be the very last thing that happens, since it can in fact push a new stack frame. + self.unwind_to_block(unwind) + } StackPopCleanup::Root { .. } => { panic!("encountered StackPopCleanup::Root when unwinding!") } - }; - // This must be the very last thing that happens, since it can in fact push a new stack frame. - self.unwind_to_block(unwind) + } } else { // Follow the normal return edge. match stack_pop_info.return_to_block { @@ -968,7 +965,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { StackPopCleanup::Root { .. } => { assert!( self.stack().is_empty(), - "only the topmost frame can have StackPopCleanup::Root" + "only the bottommost frame can have StackPopCleanup::Root" ); Ok(()) } diff --git a/compiler/rustc_const_eval/src/interpret/stack.rs b/compiler/rustc_const_eval/src/interpret/stack.rs index 5733e2316fd85..50dbced6a2a6a 100644 --- a/compiler/rustc_const_eval/src/interpret/stack.rs +++ b/compiler/rustc_const_eval/src/interpret/stack.rs @@ -264,7 +264,7 @@ impl<'tcx, Prov: Provenance, Extra> Frame<'tcx, Prov, Extra> { /// this frame (can happen e.g. during frame initialization, and during unwinding on /// frames without cleanup code). /// - /// Used by priroda. + /// Used by [priroda](https://github.com/oli-obk/priroda). pub fn current_loc(&self) -> Either { self.loc } @@ -340,6 +340,8 @@ impl<'tcx, Prov: Provenance, Extra> Frame<'tcx, Prov, Extra> { impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { /// Very low-level helper that pushes a stack frame without initializing /// the arguments or local variables. + /// + /// The high-level version of this is `init_stack_frame`. #[instrument(skip(self, body, return_place, return_to_block), level = "debug")] pub(crate) fn push_stack_frame_raw( &mut self, @@ -392,13 +394,16 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { Ok(()) } - /// Pops a stack frame from the stack and returns some information about it. + /// Low-level helper that pops a stack frame from the stack and returns some information about + /// it. /// /// This also deallocates locals, if necessary. /// /// [`M::before_stack_pop`] should be called before calling this function. /// [`M::after_stack_pop`] is called by this function automatically. /// + /// The high-level version of this is `return_from_current_stack_frame`. 
+ /// /// [`M::before_stack_pop`]: Machine::before_stack_pop /// [`M::after_stack_pop`]: Machine::after_stack_pop pub(super) fn pop_stack_frame_raw( diff --git a/compiler/rustc_const_eval/src/interpret/step.rs b/compiler/rustc_const_eval/src/interpret/step.rs index 654c10765450f..2527eca344620 100644 --- a/compiler/rustc_const_eval/src/interpret/step.rs +++ b/compiler/rustc_const_eval/src/interpret/step.rs @@ -369,44 +369,38 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { } /// Evaluate the arguments of a function call - fn eval_fn_call_arguments( + fn eval_fn_call_argument( &self, - ops: &[Spanned>], - ) -> InterpResult<'tcx, Vec>> { - ops.iter() - .map(|op| { - let arg = match &op.node { - mir::Operand::Copy(_) | mir::Operand::Constant(_) => { - // Make a regular copy. - let op = self.eval_operand(&op.node, None)?; + op: &mir::Operand<'tcx>, + ) -> InterpResult<'tcx, FnArg<'tcx, M::Provenance>> { + Ok(match op { + mir::Operand::Copy(_) | mir::Operand::Constant(_) => { + // Make a regular copy. + let op = self.eval_operand(op, None)?; + FnArg::Copy(op) + } + mir::Operand::Move(place) => { + // If this place lives in memory, preserve its location. + // We call `place_to_op` which will be an `MPlaceTy` whenever there exists + // an mplace for this place. (This is in contrast to `PlaceTy::as_mplace_or_local` + // which can return a local even if that has an mplace.) + let place = self.eval_place(*place)?; + let op = self.place_to_op(&place)?; + + match op.as_mplace_or_imm() { + Either::Left(mplace) => FnArg::InPlace(mplace), + Either::Right(_imm) => { + // This argument doesn't live in memory, so there's no place + // to make inaccessible during the call. + // We rely on there not being any stray `PlaceTy` that would let the + // caller directly access this local! + // This is also crucial for tail calls, where we want the `FnArg` to + // stay valid when the old stack frame gets popped. FnArg::Copy(op) } - mir::Operand::Move(place) => { - // If this place lives in memory, preserve its location. - // We call `place_to_op` which will be an `MPlaceTy` whenever there exists - // an mplace for this place. (This is in contrast to `PlaceTy::as_mplace_or_local` - // which can return a local even if that has an mplace.) - let place = self.eval_place(*place)?; - let op = self.place_to_op(&place)?; - - match op.as_mplace_or_imm() { - Either::Left(mplace) => FnArg::InPlace(mplace), - Either::Right(_imm) => { - // This argument doesn't live in memory, so there's no place - // to make inaccessible during the call. - // We rely on there not being any stray `PlaceTy` that would let the - // caller directly access this local! - // This is also crucial for tail calls, where we want the `FnArg` to - // stay valid when the old stack frame gets popped. 
- FnArg::Copy(op) - } - } - } - }; - - Ok(arg) - }) - .collect() + } + } + }) } /// Shared part of `Call` and `TailCall` implementation — finding and evaluating all the @@ -418,7 +412,10 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { args: &[Spanned>], ) -> InterpResult<'tcx, EvaluatedCalleeAndArgs<'tcx, M>> { let func = self.eval_operand(func, None)?; - let args = self.eval_fn_call_arguments(args)?; + let args = args + .iter() + .map(|arg| self.eval_fn_call_argument(&arg.node)) + .collect::>>()?; let fn_sig_binder = func.layout.ty.fn_sig(*self.tcx); let fn_sig = self.tcx.normalize_erasing_late_bound_regions(self.param_env, fn_sig_binder); From db6c05f9b93ffa2375773d8a5e69ca59b22d3c85 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Esteban=20K=C3=BCber?= Date: Thu, 9 May 2024 20:55:32 +0000 Subject: [PATCH 13/21] Maintain highlighting in `note` and `help` even when they have a span --- compiler/rustc_errors/src/diagnostic.rs | 21 +++++++++++++++++++++ compiler/rustc_errors/src/emitter.rs | 7 +++++-- 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/compiler/rustc_errors/src/diagnostic.rs b/compiler/rustc_errors/src/diagnostic.rs index e1dcbf5b2805b..67ca6d50cca4b 100644 --- a/compiler/rustc_errors/src/diagnostic.rs +++ b/compiler/rustc_errors/src/diagnostic.rs @@ -741,6 +741,16 @@ impl<'a, G: EmissionGuarantee> Diag<'a, G> { self } + #[rustc_lint_diagnostics] + pub fn highlighted_span_note( + &mut self, + span: impl Into, + msg: Vec, + ) -> &mut Self { + self.sub_with_highlights(Level::Note, msg, span.into()); + self + } + /// This is like [`Diag::note()`], but it's only printed once. #[rustc_lint_diagnostics] pub fn note_once(&mut self, msg: impl Into) -> &mut Self { @@ -815,6 +825,17 @@ impl<'a, G: EmissionGuarantee> Diag<'a, G> { self } + /// Add a help message attached to this diagnostic with a customizable highlighted message. + #[rustc_lint_diagnostics] + pub fn highlighted_span_help( + &mut self, + span: impl Into, + msg: Vec, + ) -> &mut Self { + self.sub_with_highlights(Level::Help, msg, span.into()); + self + } + /// Prints the span with some help above it. /// This is like [`Diag::help()`], but it gets its own span. #[rustc_lint_diagnostics] diff --git a/compiler/rustc_errors/src/emitter.rs b/compiler/rustc_errors/src/emitter.rs index 483b757f20c18..8df3b685829df 100644 --- a/compiler/rustc_errors/src/emitter.rs +++ b/compiler/rustc_errors/src/emitter.rs @@ -1347,7 +1347,7 @@ impl HumanEmitter { label_width += 2; } let mut line = 0; - for (text, _) in msgs.iter() { + for (text, style) in msgs.iter() { let text = self.translate_message(text, args).map_err(Report::new).unwrap(); // Account for newlines to align output to its label. for text in normalize_whitespace(&text).lines() { @@ -1358,7 +1358,10 @@ impl HumanEmitter { if line == 0 { String::new() } else { " ".repeat(label_width) }, text ), - header_style, + match style { + Style::Highlight => *style, + _ => header_style, + }, ); line += 1; } From 4868680ee9156e16a71c3bf197c19bdc59871001 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Esteban=20K=C3=BCber?= Date: Thu, 9 May 2024 21:05:05 +0000 Subject: [PATCH 14/21] On trait bound mismatch, detect multiple crate versions in dep tree When encountering an E0277, if the type and the trait both come from a crate with the same name but different crate number, we explain that there are multiple crate versions in the dependency tree. 
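For illustration, the shape of the failing setup is roughly the following (crate and item names mirror the UI test added later in this series, not any real crates; the three crates are sketched together in one block and are not a single compilable file):

```rust
// crate `dependency` v1 (the version `bar` is compiled against)
pub struct Type;
pub trait Trait {}
impl Trait for Type {}
pub fn do_something<X: Trait>(_: X) {}

// crate `bar`, which depends on `dependency` v1
extern crate dependency;
pub use dependency::do_something;

// the user's crate, which depends on `bar` and on `dependency` v2
use bar::do_something;
use dependency::Type; // resolves to the v2 `Type`, not the v1 `Type` that `bar` expects

fn main() {
    do_something(Type); // E0277: `Type: dependency::Trait` is not satisfied
}
```

The two `Type`s are nominally identical but come from different instantiations of the crate, so the `impl Trait for Type` in v1 does not apply to the v2 value the user passes.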
If there's a type that fulfills the bound, and it has the same name as the passed in type and has the same crate name, we explain that the same type in two different versions of the same crate *are different*. ``` error[E0277]: the trait bound `Type: dependency::Trait` is not satisfied --> src/main.rs:4:18 | 4 | do_something(Type); | ------------ ^^^^ the trait `dependency::Trait` is not implemented for `Type` | | | required by a bound introduced by this call | help: you have multiple different versions of crate `dependency` in your dependency graph --> src/main.rs:1:5 | 1 | use bar::do_something; | ^^^ one version of crate `dependency` is used here, as a dependency of crate `bar` 2 | use dependency::Type; | ^^^^^^^^^^ one version of crate `dependency` is used here, as a direct dependency of the current crate note: two types coming from two different versions of the same crate are different types even if they look the same --> /home/gh-estebank/crate_versions/baz-2/src/lib.rs:1:1 | 1 | pub struct Type; | ^^^^^^^^^^^^^^^ this type doesn't implement the required trait | ::: /home/gh-estebank/crate_versions/baz/src/lib.rs:1:1 | 1 | pub struct Type; | ^^^^^^^^^^^^^^^ this type implements the required trait 2 | pub trait Trait {} | --------------- this is the required trait note: required by a bound in `bar::do_something` --> /home/gh-estebank/crate_versions/baz/src/lib.rs:4:24 | 4 | pub fn do_something(_: X) {} | ^^^^^ required by this bound in `do_something` ``` Address #22750. --- .../traits/fulfillment_errors.rs | 163 +++++++++++++----- 1 file changed, 124 insertions(+), 39 deletions(-) diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs index 2ce1b955af521..03b93f1c139b5 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs @@ -1624,9 +1624,130 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { other: bool, param_env: ty::ParamEnv<'tcx>, ) -> bool { - // If we have a single implementation, try to unify it with the trait ref - // that failed. This should uncover a better hint for what *is* implemented. + let alternative_candidates = |def_id: DefId| { + let mut impl_candidates: Vec<_> = self + .tcx + .all_impls(def_id) + // ignore `do_not_recommend` items + .filter(|def_id| { + !self + .tcx + .has_attrs_with_path(*def_id, &[sym::diagnostic, sym::do_not_recommend]) + }) + // Ignore automatically derived impls and `!Trait` impls. + .filter_map(|def_id| self.tcx.impl_trait_header(def_id)) + .filter_map(|header| { + (header.polarity != ty::ImplPolarity::Negative + || self.tcx.is_automatically_derived(def_id)) + .then(|| header.trait_ref.instantiate_identity()) + }) + .filter(|trait_ref| { + let self_ty = trait_ref.self_ty(); + // Avoid mentioning type parameters. + if let ty::Param(_) = self_ty.kind() { + false + } + // Avoid mentioning types that are private to another crate + else if let ty::Adt(def, _) = self_ty.peel_refs().kind() { + // FIXME(compiler-errors): This could be generalized, both to + // be more granular, and probably look past other `#[fundamental]` + // types, too. 
+ self.tcx.visibility(def.did()).is_accessible_from(body_def_id, self.tcx) + } else { + true + } + }) + .collect(); + + impl_candidates.sort_by_key(|tr| tr.to_string()); + impl_candidates.dedup(); + impl_candidates + }; + + // We'll check for the case where the reason for the mismatch is that the trait comes from + // one crate version and the type comes from another crate version, even though they both + // are from the same crate. + let trait_def_id = trait_ref.def_id(); + if let ty::Adt(def, _) = trait_ref.self_ty().skip_binder().peel_refs().kind() + && let found_type = def.did() + && trait_def_id.krate != found_type.krate + && self.tcx.crate_name(trait_def_id.krate) == self.tcx.crate_name(found_type.krate) + { + let name = self.tcx.crate_name(trait_def_id.krate); + let spans: Vec<_> = [trait_def_id, found_type] + .into_iter() + .filter_map(|def_id| self.tcx.extern_crate(def_id)) + .map(|data| { + let dependency = if data.dependency_of == LOCAL_CRATE { + "direct dependency of the current crate".to_string() + } else { + let dep = self.tcx.crate_name(data.dependency_of); + format!("dependency of crate `{dep}`") + }; + ( + data.span, + format!("one version of crate `{name}` is used here, as a {dependency}"), + ) + }) + .collect(); + let mut span: MultiSpan = spans.iter().map(|(sp, _)| *sp).collect::<Vec<Span>>().into(); + for (sp, label) in spans.into_iter() { + span.push_span_label(sp, label); + } + err.highlighted_span_help( + span, + vec![ + StringPart::normal("you have ".to_string()), + StringPart::highlighted("multiple different versions".to_string()), + StringPart::normal(" of crate `".to_string()), + StringPart::highlighted(format!("{name}")), + StringPart::normal("` in your dependency graph".to_string()), + ], + ); + let candidates = if impl_candidates.is_empty() { + alternative_candidates(trait_def_id) + } else { + impl_candidates.into_iter().map(|cand| cand.trait_ref).collect() + }; + if let Some((sp_candidate, sp_found)) = candidates.iter().find_map(|trait_ref| { + if let ty::Adt(def, _) = trait_ref.self_ty().peel_refs().kind() + && let candidate_def_id = def.did() + && let Some(name) = self.tcx.opt_item_name(candidate_def_id) + && let Some(found) = self.tcx.opt_item_name(found_type) + && name == found + && candidate_def_id.krate != found_type.krate + && self.tcx.crate_name(candidate_def_id.krate) + == self.tcx.crate_name(found_type.krate) + { + // A candidate was found of an item with the same name, from two separate + // versions of the same crate, let's clarify. + Some((self.tcx.def_span(candidate_def_id), self.tcx.def_span(found_type))) + } else { + None + } + }) { + let mut span: MultiSpan = vec![sp_candidate, sp_found].into(); + span.push_span_label(self.tcx.def_span(trait_def_id), "this is the required trait"); + span.push_span_label(sp_candidate, "this type implements the required trait"); + span.push_span_label(sp_found, "this type doesn't implement the required trait"); + err.highlighted_span_note( + span, + vec![ + StringPart::normal( + "two types coming from two different versions of the same crate are \ + different types " + .to_string(), + ), + StringPart::highlighted("even if they look the same".to_string()), + ], + ); + } + return true; + } + if let [single] = &impl_candidates { + // If we have a single implementation, try to unify it with the trait ref + // that failed. This should uncover a better hint for what *is* implemented.
if self.probe(|_| { let ocx = ObligationCtxt::new(self); @@ -1798,43 +1919,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { // Mentioning implementers of `Copy`, `Debug` and friends is not useful. return false; } - let mut impl_candidates: Vec<_> = self - .tcx - .all_impls(def_id) - // ignore `do_not_recommend` items - .filter(|def_id| { - !self - .tcx - .has_attrs_with_path(*def_id, &[sym::diagnostic, sym::do_not_recommend]) - }) - // Ignore automatically derived impls and `!Trait` impls. - .filter_map(|def_id| self.tcx.impl_trait_header(def_id)) - .filter_map(|header| { - (header.polarity != ty::ImplPolarity::Negative - || self.tcx.is_automatically_derived(def_id)) - .then(|| header.trait_ref.instantiate_identity()) - }) - .filter(|trait_ref| { - let self_ty = trait_ref.self_ty(); - // Avoid mentioning type parameters. - if let ty::Param(_) = self_ty.kind() { - false - } - // Avoid mentioning types that are private to another crate - else if let ty::Adt(def, _) = self_ty.peel_refs().kind() { - // FIXME(compiler-errors): This could be generalized, both to - // be more granular, and probably look past other `#[fundamental]` - // types, too. - self.tcx.visibility(def.did()).is_accessible_from(body_def_id, self.tcx) - } else { - true - } - }) - .collect(); - - impl_candidates.sort_by_key(|tr| tr.to_string()); - impl_candidates.dedup(); - return report(impl_candidates, err); + return report(alternative_candidates(def_id), err); } // Sort impl candidates so that ordering is consistent for UI tests. From ba32673215847a27d4cab381f161a43a59002ee8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Esteban=20K=C3=BCber?= Date: Mon, 13 May 2024 04:19:18 +0000 Subject: [PATCH 15/21] Add test for mixing types from two incompatible crate versions --- .../crate-loading/auxiliary/dep-2-reexport.rs | 4 + .../auxiliary/multiple-dep-versions-1.rs | 7 ++ .../auxiliary/multiple-dep-versions-2.rs | 7 ++ .../ui/crate-loading/multiple-dep-versions.rs | 14 +++ .../crate-loading/multiple-dep-versions.svg | 101 ++++++++++++++++++ 5 files changed, 133 insertions(+) create mode 100644 tests/ui/crate-loading/auxiliary/dep-2-reexport.rs create mode 100644 tests/ui/crate-loading/auxiliary/multiple-dep-versions-1.rs create mode 100644 tests/ui/crate-loading/auxiliary/multiple-dep-versions-2.rs create mode 100644 tests/ui/crate-loading/multiple-dep-versions.rs create mode 100644 tests/ui/crate-loading/multiple-dep-versions.svg diff --git a/tests/ui/crate-loading/auxiliary/dep-2-reexport.rs b/tests/ui/crate-loading/auxiliary/dep-2-reexport.rs new file mode 100644 index 0000000000000..3a793bbbb106e --- /dev/null +++ b/tests/ui/crate-loading/auxiliary/dep-2-reexport.rs @@ -0,0 +1,4 @@ +//@ edition:2021 +//@ aux-build:multiple-dep-versions-2.rs +extern crate dependency; +pub use dependency::do_something; diff --git a/tests/ui/crate-loading/auxiliary/multiple-dep-versions-1.rs b/tests/ui/crate-loading/auxiliary/multiple-dep-versions-1.rs new file mode 100644 index 0000000000000..b96bb6d9e6443 --- /dev/null +++ b/tests/ui/crate-loading/auxiliary/multiple-dep-versions-1.rs @@ -0,0 +1,7 @@ +#![crate_name="dependency"] +//@ edition:2021 +//@ compile-flags: -C metadata=1 -C extra-filename=-1 +pub struct Type; +pub trait Trait {} +impl Trait for Type {} +pub fn do_something<X: Trait>(_: X) { } diff --git a/tests/ui/crate-loading/auxiliary/multiple-dep-versions-2.rs b/tests/ui/crate-loading/auxiliary/multiple-dep-versions-2.rs new file mode 100644 index 0000000000000..c79157fa4634f --- /dev/null +++ 
b/tests/ui/crate-loading/auxiliary/multiple-dep-versions-2.rs @@ -0,0 +1,7 @@ +#![crate_name="dependency"] +//@ edition:2021 +//@ compile-flags: -C metadata=2 -C extra-filename=-2 +pub struct Type(pub i32); +pub trait Trait {} +impl Trait for Type {} +pub fn do_something<X: Trait>(_: X) {} diff --git a/tests/ui/crate-loading/multiple-dep-versions.rs b/tests/ui/crate-loading/multiple-dep-versions.rs new file mode 100644 index 0000000000000..9e0a5dc5c5dc9 --- /dev/null +++ b/tests/ui/crate-loading/multiple-dep-versions.rs @@ -0,0 +1,14 @@ +//@ aux-build:dep-2-reexport.rs +//@ aux-build:multiple-dep-versions-1.rs +//@ edition:2021 +//@ compile-flags: --error-format=human --color=always --crate-type bin --extern dependency={{build-base}}/crate-loading/multiple-dep-versions/auxiliary/libdependency-1.so --extern dep_2_reexport={{build-base}}/crate-loading/multiple-dep-versions/auxiliary/libdep_2_reexport.so +//@ ignore-windows + +extern crate dependency; +extern crate dep_2_reexport; +use dependency::Type; +use dep_2_reexport::do_something; + +fn main() { + do_something(Type); +} diff --git a/tests/ui/crate-loading/multiple-dep-versions.svg b/tests/ui/crate-loading/multiple-dep-versions.svg new file mode 100644 index 0000000000000..6b89ca691e0bc --- /dev/null +++ b/tests/ui/crate-loading/multiple-dep-versions.svg @@ -0,0 +1,101 @@ + + + + + + + error[E0277]: the trait bound `Type: dependency::Trait` is not satisfied + + --> $DIR/multiple-dep-versions.rs:13:18 + + | + + LL | do_something(Type); + + | ------------ ^^^^ the trait `dependency::Trait` is not implemented for `Type` + + | | + + | required by a bound introduced by this call + + | + + help: you have multiple different versions of crate `dependency` in your dependency graph + + --> $DIR/multiple-dep-versions.rs:7:1 + + | + + LL | extern crate dependency; + + | ^^^^^^^^^^^^^^^^^^^^^^^^ one version of crate `dependency` is used here, as a direct dependency of the current crate + + LL | extern crate dep_2_reexport; + + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ one version of crate `dependency` is used here, as a dependency of crate `dep_2_reexport` + + note: two types coming from two different versions of the same crate are different types even if they look the same + + --> $DIR/auxiliary/multiple-dep-versions-1.rs:4:1 + + | + + LL | pub struct Type; + + | ^^^^^^^^^^^^^^^ this type doesn't implement the required trait + + | + + ::: $DIR/auxiliary/multiple-dep-versions-2.rs:4:1 + + | + + LL | pub struct Type(pub i32); + + | ^^^^^^^^^^^^^^^ this type implements the required trait + + LL | pub trait Trait {} + + | --------------- this is the required trait + + note: required by a bound in `dep_2_reexport::do_something` + + --> $DIR/auxiliary/multiple-dep-versions-2.rs:7:24 + + | + + LL | pub fn do_something&lt;X: Trait&gt;(_: X) {} + + | ^^^^^ required by this bound in `do_something` + + + + error: aborting due to 1 previous error + + + + For more information about this error, try `rustc --explain E0277`. 
+ + + + + + From d8b07718f4a32820d4d3c46a3c82ca0af74cfed3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Esteban=20K=C3=BCber?= Date: Mon, 13 May 2024 04:30:14 +0000 Subject: [PATCH 16/21] Add `help` about using `cargo tree` --- .../traits/fulfillment_errors.rs | 1 + .../crate-loading/multiple-dep-versions.svg | 24 ++++++++++--------- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs index 03b93f1c139b5..5a25d9a7ecdac 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs @@ -1742,6 +1742,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { ], ); } + err.help("you can use `cargo tree` to explore your dependency tree"); return true; } diff --git a/tests/ui/crate-loading/multiple-dep-versions.svg b/tests/ui/crate-loading/multiple-dep-versions.svg index 6b89ca691e0bc..671f38d3eb013 100644 --- a/tests/ui/crate-loading/multiple-dep-versions.svg +++ b/tests/ui/crate-loading/multiple-dep-versions.svg @@ -1,4 +1,4 @@ - +