|
| 1 | +use rustc_hir::LangItem; |
| 2 | +use rustc_middle::mir; |
| 3 | +use rustc_middle::mir::visit::Visitor; |
| 4 | +use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext}; |
| 5 | +use rustc_span::Span; |
| 6 | + |
| 7 | +use super::FunctionCx; |
| 8 | +use crate::base; |
| 9 | +use crate::common; |
| 10 | +use crate::mir::OperandValue; |
| 11 | +use crate::traits::*; |
| 12 | + |
| 13 | +pub fn pointers_to_check<F>( |
| 14 | + statement: &mir::Statement<'_>, |
| 15 | + required_align_of: F, |
| 16 | +) -> Vec<(mir::Local, u64)> |
| 17 | +where |
| 18 | + F: Fn(mir::Local) -> Option<u64>, |
| 19 | +{ |
| 20 | + let mut finder = PointerFinder { required_align_of, pointers: Vec::new() }; |
| 21 | + finder.visit_statement(statement, rustc_middle::mir::Location::START); |
| 22 | + finder.pointers |
| 23 | +} |
| 24 | + |
/// MIR visitor that accumulates `(pointer local, required alignment)` pairs
/// for every indirect place access it encounters.
struct PointerFinder<F> {
    // Collected results; may contain the same local more than once if it is
    // dereferenced multiple times within the visited statement.
    pointers: Vec<(mir::Local, u64)>,
    // Maps a local to the alignment (in bytes) its pointee type requires,
    // or `None` when the local is not a checkable pointer.
    required_align_of: F,
}
| 29 | + |
| 30 | +impl<'tcx, F> Visitor<'tcx> for PointerFinder<F> |
| 31 | +where |
| 32 | + F: Fn(mir::Local) -> Option<u64>, |
| 33 | +{ |
| 34 | + fn visit_place( |
| 35 | + &mut self, |
| 36 | + place: &mir::Place<'tcx>, |
| 37 | + context: PlaceContext, |
| 38 | + location: mir::Location, |
| 39 | + ) { |
| 40 | + // We want to only check reads and writes to Places, so we specifically exclude |
| 41 | + // Borrows and AddressOf. |
| 42 | + match context { |
| 43 | + PlaceContext::MutatingUse( |
| 44 | + MutatingUseContext::Store |
| 45 | + | MutatingUseContext::AsmOutput |
| 46 | + | MutatingUseContext::Call |
| 47 | + | MutatingUseContext::Yield |
| 48 | + | MutatingUseContext::Drop, |
| 49 | + ) => {} |
| 50 | + PlaceContext::NonMutatingUse( |
| 51 | + NonMutatingUseContext::Copy | NonMutatingUseContext::Move, |
| 52 | + ) => {} |
| 53 | + _ => { |
| 54 | + return; |
| 55 | + } |
| 56 | + } |
| 57 | + |
| 58 | + if !place.is_indirect() { |
| 59 | + return; |
| 60 | + } |
| 61 | + |
| 62 | + let pointer = place.local; |
| 63 | + let Some(required_alignment) = (self.required_align_of)(pointer) else { |
| 64 | + return; |
| 65 | + }; |
| 66 | + |
| 67 | + if required_alignment == 1 { |
| 68 | + return; |
| 69 | + } |
| 70 | + |
| 71 | + // Ensure that this place is based on an aligned pointer. |
| 72 | + self.pointers.push((pointer, required_alignment)); |
| 73 | + |
| 74 | + self.super_place(place, context, location); |
| 75 | + } |
| 76 | +} |
| 77 | + |
impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    /// Emit a runtime alignment check for `pointer`, diverging into the
    /// `PanicMisalignedPointerDereference` lang item when the address is not
    /// a multiple of `required_alignment`.
    ///
    /// `required_alignment` is in bytes and is assumed to be a power of two
    /// greater than 1 (callers filter out 1-aligned accesses — see
    /// `pointers_to_check`). On return the builder is positioned in the
    /// freshly created success block, so codegen of the checked access can
    /// continue directly.
    #[instrument(level = "debug", skip(self, bx))]
    pub fn codegen_alignment_check(
        &mut self,
        bx: &mut Bx,
        pointer: mir::Operand<'tcx>,
        required_alignment: u64,
        source_info: mir::SourceInfo,
    ) {
        // Compute the alignment mask: for power-of-two A,
        // addr & (A - 1) == 0 exactly when addr is A-aligned.
        let mask = bx.const_usize(required_alignment - 1);
        let zero = bx.const_usize(0);
        let required_alignment = bx.const_usize(required_alignment);

        // And the pointer with the mask. For a wide pointer (Pair), only the
        // data-pointer component carries the address to check.
        let pointer = match self.codegen_operand(bx, &pointer).val {
            OperandValue::Immediate(imm) => imm,
            OperandValue::Pair(ptr, _) => ptr,
            _ => {
                // Checked operands are always thin or wide pointers; any
                // other operand value here indicates a codegen bug.
                unreachable!("{pointer:?}");
            }
        };
        let addr = bx.ptrtoint(pointer, bx.cx().type_isize());
        let masked = bx.and(addr, mask);

        // Branch on whether the masked value is zero (i.e. aligned).
        let is_zero = bx.icmp(
            base::bin_op_to_icmp_predicate(mir::BinOp::Eq.to_hir_binop(), false),
            masked,
            zero,
        );

        // Create destination blocks, branching on is_zero.
        let panic = bx.append_sibling_block("panic");
        let success = bx.append_sibling_block("success");
        bx.cond_br(is_zero, success, panic);

        // Switch to the failure block and codegen a call to the panic lang
        // item, passing the required alignment, the failing address, and the
        // caller location used in the panic message.
        bx.switch_to_block(panic);
        self.set_debug_loc(bx, source_info);
        let location = self.get_caller_location(bx, source_info).immediate();
        self.codegen_nounwind_panic(
            bx,
            LangItem::PanicMisalignedPointerDereference,
            &[required_alignment, addr, location],
            source_info.span,
        );

        // Continue codegen in the success block.
        bx.switch_to_block(success);
        self.set_debug_loc(bx, source_info);
    }

    /// Emit a call to a diverging and `rustc_nounwind` panic helper.
    ///
    /// `args` must match the signature of the lang item's panic function.
    /// Since the helper never returns, the call is followed by `unreachable`.
    #[instrument(level = "debug", skip(self, bx))]
    fn codegen_nounwind_panic(
        &mut self,
        bx: &mut Bx,
        lang_item: LangItem,
        args: &[Bx::Value],
        span: Span,
    ) {
        let (fn_abi, fn_ptr, instance) = common::build_langcall(bx, Some(span), lang_item);
        let fn_ty = bx.fn_decl_backend_type(&fn_abi);
        // Propagate the enclosing function's codegen attributes (when it has
        // any) to the call site, matching how other panic calls are emitted.
        let fn_attrs = if bx.tcx().def_kind(self.instance.def_id()).has_codegen_attrs() {
            Some(bx.tcx().codegen_fn_attrs(self.instance.def_id()))
        } else {
            None
        };

        // bx.call requires that the call not unwind. Double-check that this LangItem can't unwind.
        assert!(!fn_abi.can_unwind);

        bx.call(
            fn_ty,
            fn_attrs,
            Some(&fn_abi),
            fn_ptr,
            args,
            None, /* funclet */
            Some(instance),
        );
        bx.unreachable();
    }
}
0 commit comments