diff --git a/compiler/rustc_codegen_gcc/src/common.rs b/compiler/rustc_codegen_gcc/src/common.rs
index 0d3e7083d56ad..d55a5c18516b3 100644
--- a/compiler/rustc_codegen_gcc/src/common.rs
+++ b/compiler/rustc_codegen_gcc/src/common.rs
@@ -64,6 +64,11 @@ impl<'gcc, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
         if type_is_pointer(typ) { self.context.new_null(typ) } else { self.const_int(typ, 0) }
     }
 
+    fn is_undef(&self, _val: RValue<'gcc>) -> bool {
+        // FIXME: actually check for undef
+        false
+    }
+
     fn const_undef(&self, typ: Type<'gcc>) -> RValue<'gcc> {
         let local = self.current_func.borrow().expect("func").new_local(None, typ, "undefined");
         if typ.is_struct().is_some() {
diff --git a/compiler/rustc_codegen_llvm/src/common.rs b/compiler/rustc_codegen_llvm/src/common.rs
index adfe8aeb5c539..b4e9b9f44f4ab 100644
--- a/compiler/rustc_codegen_llvm/src/common.rs
+++ b/compiler/rustc_codegen_llvm/src/common.rs
@@ -126,6 +126,10 @@ impl<'ll, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> {
         unsafe { llvm::LLVMGetUndef(t) }
     }
 
+    fn is_undef(&self, v: &'ll Value) -> bool {
+        unsafe { llvm::LLVMIsUndef(v) == True }
+    }
+
     fn const_poison(&self, t: &'ll Type) -> &'ll Value {
         unsafe { llvm::LLVMGetPoison(t) }
     }
diff --git a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs
index cb4a8c9a5f21c..57ed47c47e155 100644
--- a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs
+++ b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs
@@ -917,6 +917,7 @@ unsafe extern "C" {
     pub fn LLVMMetadataTypeInContext(C: &Context) -> &Type;
 
     // Operations on all values
+    pub fn LLVMIsUndef(Val: &Value) -> Bool;
    pub fn LLVMTypeOf(Val: &Value) -> &Type;
     pub fn LLVMGetValueName2(Val: &Value, Length: *mut size_t) -> *const c_char;
     pub fn LLVMSetValueName2(Val: &Value, Name: *const c_char, NameLen: size_t);
diff --git a/compiler/rustc_codegen_ssa/src/mir/operand.rs b/compiler/rustc_codegen_ssa/src/mir/operand.rs
index 19101ec2d1ba3..11512931c4f53 100644
--- a/compiler/rustc_codegen_ssa/src/mir/operand.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/operand.rs
@@ -205,13 +205,22 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
         assert!(alloc_align >= layout.align.abi);
 
         let read_scalar = |start, size, s: abi::Scalar, ty| {
+            let range = alloc_range(start, size);
             match alloc.0.read_scalar(
                 bx,
-                alloc_range(start, size),
+                range,
                 /*read_provenance*/ matches!(s.primitive(), abi::Primitive::Pointer(_)),
             ) {
-                Ok(val) => bx.scalar_to_backend(val, s, ty),
-                Err(_) => bx.const_poison(ty),
+                Ok(val) => Some(bx.scalar_to_backend(val, s, ty)),
+                Err(_) => {
+                    if alloc.0.provenance().range_empty(range, &bx.tcx())
+                        && alloc.0.init_mask().is_range_initialized(range).unwrap_err() == range
+                    {
+                        Some(bx.const_undef(ty))
+                    } else {
+                        None
+                    }
+                }
             }
         };
 
@@ -222,16 +231,14 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
         // check that walks over the type of `mplace` to make sure it is truly correct to treat this
         // like a `Scalar` (or `ScalarPair`).
         match layout.backend_repr {
-            BackendRepr::Scalar(s @ abi::Scalar::Initialized { .. }) => {
+            BackendRepr::Scalar(s) => {
                 let size = s.size(bx);
                 assert_eq!(size, layout.size, "abi::Scalar size does not match layout size");
-                let val = read_scalar(offset, size, s, bx.immediate_backend_type(layout));
-                OperandRef { val: OperandValue::Immediate(val), layout }
+                if let Some(val) = read_scalar(offset, size, s, bx.immediate_backend_type(layout)) {
+                    return OperandRef { val: OperandValue::Immediate(val), layout };
+                }
             }
-            BackendRepr::ScalarPair(
-                a @ abi::Scalar::Initialized { .. },
-                b @ abi::Scalar::Initialized { .. },
-            ) => {
+            BackendRepr::ScalarPair(a, b) => {
                 let (a_size, b_size) = (a.size(bx), b.size(bx));
                 let b_offset = (offset + a_size).align_to(b.align(bx).abi);
                 assert!(b_offset.bytes() > 0);
@@ -247,20 +254,21 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
                     b,
                     bx.scalar_pair_element_backend_type(layout, 1, true),
                 );
-                OperandRef { val: OperandValue::Pair(a_val, b_val), layout }
-            }
-            _ if layout.is_zst() => OperandRef::zero_sized(layout),
-            _ => {
-                // Neither a scalar nor scalar pair. Load from a place
-                // FIXME: should we cache `const_data_from_alloc` to avoid repeating this for the
-                // same `ConstAllocation`?
-                let init = bx.const_data_from_alloc(alloc);
-                let base_addr = bx.static_addr_of(init, alloc_align, None);
-
-                let llval = bx.const_ptr_byte_offset(base_addr, offset);
-                bx.load_operand(PlaceRef::new_sized(llval, layout))
+                if let (Some(a_val), Some(b_val)) = (a_val, b_val) {
+                    return OperandRef { val: OperandValue::Pair(a_val, b_val), layout };
+                }
             }
+            _ if layout.is_zst() => return OperandRef::zero_sized(layout),
+            _ => {}
         }
+        // Neither a scalar nor scalar pair. Load from a place
+        // FIXME: should we cache `const_data_from_alloc` to avoid repeating this for the
+        // same `ConstAllocation`?
+        let init = bx.const_data_from_alloc(alloc);
+        let base_addr = bx.static_addr_of(init, alloc_align, None);
+
+        let llval = bx.const_ptr_byte_offset(base_addr, offset);
+        bx.load_operand(PlaceRef::new_sized(llval, layout))
     }
 
     /// Asserts that this operand refers to a scalar and returns
diff --git a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
index 31793641d75ed..379b711995aba 100644
--- a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
@@ -8,7 +8,7 @@ use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
 use rustc_middle::{bug, mir, span_bug};
 use rustc_session::config::OptLevel;
 use rustc_span::{DUMMY_SP, Span};
-use tracing::{debug, instrument};
+use tracing::{debug, instrument, trace};
 
 use super::FunctionCx;
 use super::operand::{OperandRef, OperandValue};
@@ -117,13 +117,33 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                     false
                 };
 
+                trace!(?cg_elem.val);
                 match cg_elem.val {
                     OperandValue::Immediate(v) => {
                         if try_init_all_same(bx, v) {
                             return;
                         }
                     }
-                    _ => (),
+                    OperandValue::Pair(a, b) => {
+                        let a_is_undef = bx.cx().is_undef(a);
+                        match (a_is_undef, bx.cx().is_undef(b)) {
+                            // Can happen for uninit unions
+                            (true, true) => {
+                                // FIXME: can we produce better output here?
+                            }
+                            (false, true) | (true, false) => {
+                                let val = if a_is_undef { b } else { a };
+                                if try_init_all_same(bx, val) {
+                                    return;
+                                }
+                            }
+                            (false, false) => {
+                                // FIXME: if both are the same value, use try_init_all_same
+                            }
+                        }
+                    }
+                    OperandValue::ZeroSized => unreachable!("checked above"),
+                    OperandValue::Ref(..) => {}
                 }
 
                 let count = self
diff --git a/compiler/rustc_codegen_ssa/src/traits/consts.rs b/compiler/rustc_codegen_ssa/src/traits/consts.rs
index 9af463a691af0..d0de7ff0b5ff1 100644
--- a/compiler/rustc_codegen_ssa/src/traits/consts.rs
+++ b/compiler/rustc_codegen_ssa/src/traits/consts.rs
@@ -9,6 +9,7 @@ pub trait ConstCodegenMethods<'tcx>: BackendTypes {
     /// Generate an uninitialized value (matching uninitialized memory in MIR).
     /// Whether memory is initialized or not is tracked byte-for-byte.
     fn const_undef(&self, t: Self::Type) -> Self::Value;
+    fn is_undef(&self, v: Self::Value) -> bool;
     /// Generate a fake value. Poison always affects the entire value, even if just a single byte is
     /// poison. This can only be used in codepaths that are already UB, i.e., UB-free Rust code
     /// (including code that e.g. copies uninit memory with `MaybeUninit`) can never encounter a
diff --git a/compiler/rustc_middle/src/mir/interpret/allocation.rs b/compiler/rustc_middle/src/mir/interpret/allocation.rs
index d6f8fed755f08..1b07846e0cf6c 100644
--- a/compiler/rustc_middle/src/mir/interpret/allocation.rs
+++ b/compiler/rustc_middle/src/mir/interpret/allocation.rs
@@ -222,7 +222,7 @@ impl AllocError {
 }
 
 /// The information that makes up a memory access: offset and size.
-#[derive(Copy, Clone)]
+#[derive(Copy, Clone, PartialEq)]
 pub struct AllocRange {
     pub start: Size,
     pub size: Size,
diff --git a/tests/codegen/overaligned-constant.rs b/tests/codegen/overaligned-constant.rs
index 7cd8d19c21135..e5540aca38780 100644
--- a/tests/codegen/overaligned-constant.rs
+++ b/tests/codegen/overaligned-constant.rs
@@ -17,8 +17,6 @@ pub fn overaligned_constant() {
     // CHECK-LABEL: @overaligned_constant
     // CHECK: [[full:%_.*]] = alloca [32 x i8], align 8
     // CHECK: call void @llvm.memcpy.p0.p0.i64(ptr align 8 [[full]], ptr align 8 @0, i64 32, i1 false)
-    // CHECK: %b.0 = load i32, ptr @0, align 4
-    // CHECK: %b.1 = load i32, ptr getelementptr inbounds ({{.*}}), align 4
     let mut s = S(1);
 
     s.0 = 3;
diff --git a/tests/codegen/slice-init.rs b/tests/codegen/slice-init.rs
index 1c2dd3e887555..aa5f5a2478dee 100644
--- a/tests/codegen/slice-init.rs
+++ b/tests/codegen/slice-init.rs
@@ -76,16 +76,14 @@ pub fn u16_init_one_bytes() -> [u16; N] {
     [const { u16::from_be_bytes([1, 1]) }; N]
 }
 
-// FIXME: undef bytes can just be initialized with the same value as the
-// defined bytes, if the defines bytes are all the same.
 // CHECK-LABEL: @option_none_init
 #[no_mangle]
 pub fn option_none_init() -> [Option<u8>; N] {
     // CHECK-NOT: select
-    // CHECK: br label %repeat_loop_header{{.*}}
+    // CHECK-NOT: br
     // CHECK-NOT: switch
-    // CHECK: icmp
-    // CHECK-NOT: call void @llvm.memset.p0
+    // CHECK-NOT: icmp
+    // CHECK: call void @llvm.memset.p0
     [None; N]
 }
 
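
For context (not part of the patch itself): the codegen-test change in tests/codegen/slice-init.rs reflects the new lowering for array repeats whose element is a `ScalarPair` with one undef half. A minimal user-level sketch of that pattern follows; the function name, array length, and the IR shape in the comments are illustrative assumptions rather than text from the patch, and the exact output still depends on target layout and optimization level.

    // Illustrative sketch mirroring the `option_none_init` codegen test.
    // `None::<u8>` becomes an `OperandValue::Pair(tag, payload)` where the tag is 0
    // and the payload is undef. With `is_undef` reporting the undef half, the repeat
    // lowering can run `try_init_all_same` on the defined half and emit a single
    // memset of 0 for the whole array instead of a `repeat_loop_header` loop.
    pub fn fill_none() -> [Option<u8>; 1024] {
        // Hedged expectation at -O, analogous to the updated FileCheck lines:
        //   call void @llvm.memset.p0{{.*}}(ptr {{.*}}, i8 0, i64 2048, i1 false)
        [None; 1024]
    }

    fn main() {
        // Sanity check: the memset-initialized array really is all `None`.
        assert!(fill_none().iter().all(Option::is_none));
    }

The same `(defined, undef)` case is what the new `OperandValue::Pair` arm in rvalue.rs handles, which is why the FIXME about undef bytes could be removed from the test.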