diff --git a/compiler/rustc_codegen_gcc/src/consts.rs b/compiler/rustc_codegen_gcc/src/consts.rs
index 356c03ee3c189..7de39cdb08590 100644
--- a/compiler/rustc_codegen_gcc/src/consts.rs
+++ b/compiler/rustc_codegen_gcc/src/consts.rs
@@ -32,7 +32,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
 }
 
 impl<'gcc, 'tcx> StaticMethods for CodegenCx<'gcc, 'tcx> {
-    fn static_addr_of(&self, cv: RValue<'gcc>, align: Align, kind: Option<&str>) -> RValue<'gcc> {
+    fn static_addr_of(&self, cv: RValue<'gcc>, align: Align, needs_name: Option<(DefId, usize)>) -> RValue<'gcc> {
         // TODO(antoyo): implement a proper rvalue comparison in libgccjit instead of doing the
         // following:
         for (value, variable) in &*self.const_globals.borrow() {
@@ -46,7 +46,7 @@ impl<'gcc, 'tcx> StaticMethods for CodegenCx<'gcc, 'tcx> {
                 return *variable;
             }
         }
-        let global_value = self.static_addr_of_mut(cv, align, kind);
+        let global_value = self.static_addr_of_mut(cv, align, needs_name);
         #[cfg(feature = "master")]
         self.global_lvalues.borrow().get(&global_value)
             .expect("`static_addr_of_mut` did not add the global to `self.global_lvalues`")
@@ -165,10 +165,11 @@ impl<'gcc, 'tcx> StaticMethods for CodegenCx<'gcc, 'tcx> {
 }
 
 impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
-    pub fn static_addr_of_mut(&self, cv: RValue<'gcc>, align: Align, kind: Option<&str>) -> RValue<'gcc> {
-        let global = match kind {
-            Some(kind) if !self.tcx.sess.fewer_names() => {
+    pub fn static_addr_of_mut(&self, cv: RValue<'gcc>, align: Align, needs_name: Option<(DefId, usize)>) -> RValue<'gcc> {
+        let global = match needs_name {
+            Some((item, idx)) => {
+                let kind = &format!("{:?}[{}]", item, idx);
                 let name = self.generate_local_symbol_name(kind);
                 // TODO(antoyo): check if it's okay that no link_section is set.
diff --git a/compiler/rustc_codegen_llvm/src/common.rs b/compiler/rustc_codegen_llvm/src/common.rs index 13e437cfbf7fb..a1a0eb7b24e03 100644 --- a/compiler/rustc_codegen_llvm/src/common.rs +++ b/compiler/rustc_codegen_llvm/src/common.rs @@ -12,7 +12,7 @@ use rustc_codegen_ssa::mir::place::PlaceRef; use rustc_codegen_ssa::traits::*; use rustc_hir::def_id::DefId; use rustc_middle::bug; -use rustc_middle::mir::interpret::{ConstAllocation, GlobalAlloc, Scalar}; +use rustc_middle::mir::interpret::{ConstAllocation, GlobalAlloc, Scalar, StaticAllocation}; use rustc_middle::ty::layout::{LayoutOf, TyAndLayout}; use rustc_middle::ty::TyCtxt; use rustc_session::cstore::{DllCallingConvention, DllImport, PeImportNameType}; @@ -248,12 +248,17 @@ impl<'ll, 'tcx> ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> { GlobalAlloc::Memory(alloc) => { let init = const_alloc_to_llvm(self, alloc); let alloc = alloc.inner(); + let name = alloc.extra.map(|StaticAllocation { item, local_index }| { + let item_name = self.get_static_name(item); + format!("{item_name}[{local_index}]") + }); + let name = name.as_deref(); let value = match alloc.mutability { - Mutability::Mut => self.static_addr_of_mut(init, alloc.align, None), - _ => self.static_addr_of(init, alloc.align, None), + Mutability::Mut => self.static_addr_of_mut(init, alloc.align, name), + _ => self.static_addr_of(init, alloc.align, name), }; if !self.sess().fewer_names() { - llvm::set_value_name(value, format!("{:?}", alloc_id).as_bytes()); + self.set_value_name(value, &format!("{:?}", alloc_id)); } (value, AddressSpace::DATA) } diff --git a/compiler/rustc_codegen_llvm/src/consts.rs b/compiler/rustc_codegen_llvm/src/consts.rs index a559f7f3d5703..55681c2b95abf 100644 --- a/compiler/rustc_codegen_llvm/src/consts.rs +++ b/compiler/rustc_codegen_llvm/src/consts.rs @@ -229,7 +229,7 @@ impl<'ll> CodegenCx<'ll, '_> { ) -> &'ll Value { unsafe { let gv = match kind { - Some(kind) if !self.tcx.sess.fewer_names() => { + Some(kind) => { let name = 
self.generate_local_symbol_name(kind); let gv = self.define_global(&name, self.val_ty(cv)).unwrap_or_else(|| { bug!("symbol `{}` is already defined", name); @@ -246,6 +246,11 @@ impl<'ll> CodegenCx<'ll, '_> { } } + pub(crate) fn get_static_name(&self, def_id: DefId) -> &str { + let instance = Instance::mono(self.tcx, def_id); + self.tcx.symbol_name(instance).name + } + pub(crate) fn get_static(&self, def_id: DefId) -> &'ll Value { let instance = Instance::mono(self.tcx, def_id); if let Some(&g) = self.instances.borrow().get(&instance) { @@ -262,7 +267,7 @@ impl<'ll> CodegenCx<'ll, '_> { ); let ty = instance.ty(self.tcx, ty::ParamEnv::reveal_all()); - let sym = self.tcx.symbol_name(instance).name; + let sym = self.get_static_name(def_id); let fn_attrs = self.tcx.codegen_fn_attrs(def_id); debug!("get_static: sym={} instance={:?} fn_attrs={:?}", sym, instance, fn_attrs); @@ -351,6 +356,10 @@ impl<'ll> CodegenCx<'ll, '_> { } impl<'ll> StaticMethods for CodegenCx<'ll, '_> { + fn set_value_name(&self, cv: Self::Value, name: &str) { + crate::llvm::set_value_name(cv, name.as_bytes()) + } + fn static_addr_of(&self, cv: &'ll Value, align: Align, kind: Option<&str>) -> &'ll Value { if let Some(&gv) = self.const_globals.borrow().get(&cv) { unsafe { diff --git a/compiler/rustc_codegen_ssa/src/meth.rs b/compiler/rustc_codegen_ssa/src/meth.rs index f8e982b775189..01c038b957936 100644 --- a/compiler/rustc_codegen_ssa/src/meth.rs +++ b/compiler/rustc_codegen_ssa/src/meth.rs @@ -104,7 +104,11 @@ pub fn get_vtable<'tcx, Cx: CodegenMethods<'tcx>>( let vtable_allocation = tcx.global_alloc(vtable_alloc_id).unwrap_memory(); let vtable_const = cx.const_data_from_alloc(vtable_allocation); let align = cx.data_layout().pointer_align.abi; - let vtable = cx.static_addr_of(vtable_const, align, Some("vtable")); + let vtable = cx.static_addr_of(vtable_const, align, None); + + if !cx.sess().fewer_names() { + cx.set_value_name(vtable, "vtable"); + } cx.create_vtable_debuginfo(ty, trait_ref, 
vtable); cx.vtables().borrow_mut().insert((ty, trait_ref), vtable); diff --git a/compiler/rustc_codegen_ssa/src/traits/statics.rs b/compiler/rustc_codegen_ssa/src/traits/statics.rs index 413d31bb94293..351c4c84138b0 100644 --- a/compiler/rustc_codegen_ssa/src/traits/statics.rs +++ b/compiler/rustc_codegen_ssa/src/traits/statics.rs @@ -4,6 +4,7 @@ use rustc_target::abi::Align; pub trait StaticMethods: BackendTypes { fn static_addr_of(&self, cv: Self::Value, align: Align, kind: Option<&str>) -> Self::Value; + fn set_value_name(&self, cv: Self::Value, name: &str); fn codegen_static(&self, def_id: DefId, is_mutable: bool); /// Mark the given global value as "used", to prevent the compiler and linker from potentially diff --git a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs index a2f14e753aebe..a5bdfc620b8c7 100644 --- a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs +++ b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs @@ -70,7 +70,7 @@ fn eval_body_using_ecx<'mir, 'tcx>( InternKind::Promoted } else { match tcx.static_mutability(cid.instance.def_id()) { - Some(m) => InternKind::Static(m), + Some(m) => InternKind::Static(m, cid.instance.def_id()), None => InternKind::Constant, } }; diff --git a/compiler/rustc_const_eval/src/interpret/intern.rs b/compiler/rustc_const_eval/src/interpret/intern.rs index 24dbc769529c3..f4d34d4389c4b 100644 --- a/compiler/rustc_const_eval/src/interpret/intern.rs +++ b/compiler/rustc_const_eval/src/interpret/intern.rs @@ -15,10 +15,11 @@ //! that contains allocations whose mutability we cannot identify.) 
use super::validity::RefTracking; +use hir::def_id::DefId; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_errors::ErrorGuaranteed; use rustc_hir as hir; -use rustc_middle::mir::interpret::InterpResult; +use rustc_middle::mir::interpret::{InterpResult, StaticAllocation}; use rustc_middle::ty::{self, layout::TyAndLayout, Ty}; use rustc_ast::Mutability; @@ -36,7 +37,7 @@ pub trait CompileTimeMachine<'mir, 'tcx, T> = Machine< Provenance = AllocId, ExtraFnVal = !, FrameExtra = (), - AllocExtra = (), + AllocExtra = Option, MemoryMap = FxHashMap, Allocation)>, >; @@ -48,6 +49,9 @@ struct InternVisitor<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_ev /// A list of all encountered allocations. After type-based interning, we traverse this list to /// also intern allocations that are only referenced by a raw pointer or inside a union. leftover_allocations: &'rt mut FxHashSet, + /// A counter to generate ids for allocations. These are used to uniquely identify an allocation + /// that is part of a static item. + running_item_local_index: &'rt mut usize, /// The root kind of the value that we're looking at. This field is never mutated for a /// particular allocation. It is primarily used to make as many allocations as possible /// read-only so LLVM can place them in const memory. @@ -62,7 +66,7 @@ enum InternMode { /// A static and its current mutability. Below shared references inside a `static mut`, /// this is *immutable*, and below mutable references inside an `UnsafeCell`, this /// is *mutable*. - Static(hir::Mutability), + Static(hir::Mutability, DefId), /// A `const`. 
Const, } @@ -80,6 +84,7 @@ struct IsStaticOrFn; fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>>( ecx: &'rt mut InterpCx<'mir, 'tcx, M>, leftover_allocations: &'rt mut FxHashSet, + running_item_local_index: &mut usize, alloc_id: AllocId, mode: InternMode, ty: Option>, @@ -111,7 +116,7 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval: // Set allocation mutability as appropriate. This is used by LLVM to put things into // read-only memory, and also by Miri when evaluating other globals that // access this one. - if let InternMode::Static(mutability) = mode { + if let InternMode::Static(mutability, item) = mode { // For this, we need to take into account `UnsafeCell`. When `ty` is `None`, we assume // no interior mutability. let frozen = ty.map_or(true, |ty| ty.is_freeze(ecx.tcx, ecx.param_env)); @@ -125,6 +130,11 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval: // Just making sure we are not "upgrading" an immutable allocation to mutable. assert_eq!(alloc.mutability, Mutability::Mut); } + // Part of a static (e.g. `static FOO: &&i32 = &&42;`). Make sure all pointers point to + // what essentially amounts to an "inline static". So unlike anonymous statics, these + // have names and thus don't get (de)duplicated. + alloc.extra = Some(StaticAllocation { item, local_index: *running_item_local_index }); + *running_item_local_index += 1; } else { // No matter what, *constants are never mutable*. Mutating them is UB. 
// See const_eval::machine::MemoryExtra::can_access_statics for why @@ -149,7 +159,14 @@ impl<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>> mode: InternMode, ty: Option>, ) -> Option { - intern_shallow(self.ecx, self.leftover_allocations, alloc_id, mode, ty) + intern_shallow( + self.ecx, + self.leftover_allocations, + self.running_item_local_index, + alloc_id, + mode, + ty, + ) } } @@ -263,7 +280,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory // Compute the mode with which we intern this. Our goal here is to make as many // statics as we can immutable so they can be placed in read-only memory by LLVM. let ref_mode = match self.mode { - InternMode::Static(mutbl) => { + InternMode::Static(mutbl, item_id) => { // In statics, merge outer mutability with reference mutability and // take into account whether we are in an `UnsafeCell`. @@ -276,7 +293,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory _ if self.inside_unsafe_cell => { // Inside an `UnsafeCell` is like inside a `static mut`, the "outer" // mutability does not matter. - InternMode::Static(ref_mutability) + InternMode::Static(ref_mutability, item_id) } Mutability::Not => { // A shared reference, things become immutable. @@ -287,11 +304,11 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory // `UnsafeCell`). This makes sure that `&(&i32, &Cell)` still // has the left inner reference interned into a read-only // allocation. - InternMode::Static(Mutability::Not) + InternMode::Static(Mutability::Not, item_id) } Mutability::Mut => { // Mutable reference. 
- InternMode::Static(mutbl) + InternMode::Static(mutbl, item_id) } } } @@ -323,7 +340,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory #[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)] pub enum InternKind { /// The `mutability` of the static, ignoring the type which may have interior mutability. - Static(hir::Mutability), + Static(hir::Mutability, DefId), Constant, Promoted, } @@ -346,7 +363,7 @@ pub fn intern_const_alloc_recursive< ) -> Result<(), ErrorGuaranteed> { let tcx = ecx.tcx; let base_intern_mode = match intern_kind { - InternKind::Static(mutbl) => InternMode::Static(mutbl), + InternKind::Static(mutbl, item_id) => InternMode::Static(mutbl, item_id), // `Constant` includes array lengths. InternKind::Constant | InternKind::Promoted => InternMode::Const, }; @@ -358,11 +375,13 @@ pub fn intern_const_alloc_recursive< // be available in a typed way. They get interned at the end. let mut ref_tracking = RefTracking::empty(); let leftover_allocations = &mut FxHashSet::default(); + let mut running_item_local_index = 0; // start with the outermost allocation intern_shallow( ecx, leftover_allocations, + &mut running_item_local_index, // The outermost allocation must exist, because we allocated it with // `Memory::allocate`. ret.ptr.provenance.unwrap(), @@ -379,6 +398,7 @@ pub fn intern_const_alloc_recursive< mode, leftover_allocations, inside_unsafe_cell: false, + running_item_local_index: &mut running_item_local_index, } .visit_value(&mplace); // We deliberately *ignore* interpreter errors here. When there is a problem, the remaining @@ -413,7 +433,11 @@ pub fn intern_const_alloc_recursive< // Statics may point to mutable allocations. // Even for immutable statics it would be ok to have mutable allocations behind // raw pointers, e.g. for `static FOO: *const AtomicUsize = &AtomicUsize::new(42)`. 
-            InternKind::Static(_) => {}
+            InternKind::Static(_, item) => {
+                alloc.extra =
+                    Some(StaticAllocation { item, local_index: running_item_local_index });
+                running_item_local_index += 1;
+            }
             // Raw pointers in promoteds may only point to immutable things so we mark
             // everything as immutable.
             // It is UB to mutate through a raw pointer obtained via an immutable reference:
diff --git a/compiler/rustc_const_eval/src/interpret/machine.rs b/compiler/rustc_const_eval/src/interpret/machine.rs
index 5aabb14fba884..b9e8c056a1b84 100644
--- a/compiler/rustc_const_eval/src/interpret/machine.rs
+++ b/compiler/rustc_const_eval/src/interpret/machine.rs
@@ -15,6 +15,7 @@ use rustc_target::spec::abi::Abi as CallAbi;
 use super::{
     AllocId, AllocRange, Allocation, ConstAllocation, Frame, ImmTy, InterpCx, InterpResult,
     MemoryKind, OpTy, Operand, PlaceTy, Pointer, Provenance, Scalar, StackPopUnwind,
+    StaticAllocation,
 };
 
 /// Data returned by Machine::stack_pop,
@@ -419,7 +420,7 @@ pub macro compile_time_machine(<$mir: lifetime, $tcx: lifetime>) {
         rustc_data_structures::fx::FxHashMap<AllocId, (MemoryKind<Self::MemoryKind>, Allocation)>;
     const GLOBAL_KIND: Option<Self::MemoryKind> = None; // no copying of globals from `tcx` to machine memory
 
-    type AllocExtra = ();
+    type AllocExtra = Option<StaticAllocation>;
     type FrameExtra = ();
 
     #[inline(always)]
diff --git a/compiler/rustc_middle/src/mir/interpret/allocation.rs b/compiler/rustc_middle/src/mir/interpret/allocation.rs
index 37ec04b07f847..1c2cc247cc433 100644
--- a/compiler/rustc_middle/src/mir/interpret/allocation.rs
+++ b/compiler/rustc_middle/src/mir/interpret/allocation.rs
@@ -11,6 +11,7 @@ use std::ptr;
 use rustc_ast::Mutability;
 use rustc_data_structures::intern::Interned;
 use rustc_data_structures::sorted_map::SortedMap;
+use rustc_hir::def_id::DefId;
 use rustc_span::DUMMY_SP;
 use rustc_target::abi::{Align, HasDataLayout, Size};
 
@@ -21,6 +22,16 @@ use super::{
 };
 use crate::ty;
 
+/// Allocations inside statics must not get (de)duplicated. So we mark them with the static
+/// that they are part of and a running item-local index. This allows backends to give
+/// these allocations generated unique names, just like with the main static item.
+#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)]
+#[derive(HashStable)]
+pub struct StaticAllocation {
+    pub item: DefId,
+    pub local_index: usize,
+}
+
 /// This type represents an Allocation in the Miri/CTFE core engine.
 ///
 /// Its public API is rather low-level, working directly with allocation offsets and a custom error
@@ -30,7 +42,7 @@ use crate::ty;
 // hashed. (see the `Hash` impl below for more details), so the impl is not derived.
#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, TyEncodable, TyDecodable)]
 #[derive(HashStable)]
-pub struct Allocation<Prov = AllocId, Extra = ()> {
+pub struct Allocation<Prov = AllocId, Extra = Option<StaticAllocation>> {
     /// The actual bytes of the allocation.
     /// Note that the bytes of a pointer represent the offset of the pointer.
     bytes: Box<[u8]>,
@@ -102,7 +114,7 @@ impl hash::Hash for Allocation {
     /// (`ConstAllocation`) are used quite a bit.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, HashStable)] #[rustc_pass_by_value] -pub struct ConstAllocation<'tcx, Prov = AllocId, Extra = ()>( +pub struct ConstAllocation<'tcx, Prov = AllocId, Extra = Option>( pub Interned<'tcx, Allocation>, ); @@ -220,7 +232,7 @@ impl Allocation { init_mask: InitMask::new(size, true), align, mutability, - extra: (), + extra: None, } } @@ -255,7 +267,7 @@ impl Allocation { init_mask: InitMask::new(size, false), align, mutability: Mutability::Mut, - extra: (), + extra: None, }) } } diff --git a/compiler/rustc_middle/src/mir/interpret/mod.rs b/compiler/rustc_middle/src/mir/interpret/mod.rs index 0fc1217d57196..22b48ccd7af8b 100644 --- a/compiler/rustc_middle/src/mir/interpret/mod.rs +++ b/compiler/rustc_middle/src/mir/interpret/mod.rs @@ -128,7 +128,7 @@ pub use self::value::{get_slice_bytes, ConstAlloc, ConstValue, Scalar}; pub use self::allocation::{ alloc_range, AllocRange, Allocation, ConstAllocation, InitChunk, InitChunkIter, InitMask, - ProvenanceMap, + ProvenanceMap, StaticAllocation, }; pub use self::pointer::{Pointer, PointerArithmetic, Provenance}; diff --git a/src/test/ui/auxiliary/const_mut_refs_crate.rs b/src/test/ui/auxiliary/const_mut_refs_crate.rs new file mode 100644 index 0000000000000..b87d27f774edf --- /dev/null +++ b/src/test/ui/auxiliary/const_mut_refs_crate.rs @@ -0,0 +1,19 @@ +// This is a support file for ../const-mut-refs-crate.rs + +// This is to test that static inners from an external +// crate like this one, still preserves the alloc. +// That is, the address from the standpoint of rustc+llvm +// is the same. +// The need for this test originated from the GH issue +// https://github.com/rust-lang/rust/issues/57349 + +// See also ../const-mut-refs-crate.rs for more details +// about this test. 
+ +pub static FOO: &'static i32 = &42; +pub static BAR: &'static i32 = FOO; + +pub mod inner { + pub static INNER_MOD_FOO: &'static i32 = &43; + pub static INNER_MOD_BAR: &'static i32 = INNER_MOD_FOO; +} diff --git a/src/test/ui/const-mut-refs-crate.rs b/src/test/ui/const-mut-refs-crate.rs new file mode 100644 index 0000000000000..68673b2d89d4e --- /dev/null +++ b/src/test/ui/const-mut-refs-crate.rs @@ -0,0 +1,25 @@ +// run-pass +// aux-build:const_mut_refs_crate.rs + +extern crate const_mut_refs_crate as other; + +use other::{ + inner::{INNER_MOD_BAR, INNER_MOD_FOO}, + BAR, FOO, +}; + +pub static LOCAL_FOO: &'static i32 = &41; +pub static LOCAL_BAR: &'static i32 = LOCAL_FOO; +pub static LOCAL_BAZ: &'static i32 = FOO; + +static DOUBLE_REF: &&i32 = &&99; +static ONE_STEP_ABOVE: &i32 = *DOUBLE_REF; + +pub fn main() { + assert_eq!(FOO as *const i32, BAR as *const i32); + assert_eq!(INNER_MOD_FOO as *const i32, INNER_MOD_BAR as *const i32); + assert_eq!(LOCAL_FOO as *const i32, LOCAL_BAR as *const i32); + assert_eq!(*DOUBLE_REF as *const i32, ONE_STEP_ABOVE as *const i32); + + assert_eq!(FOO as *const i32, LOCAL_BAZ as *const i32); +}