use rustc::hir::def_id::DefId;
use rustc::ty::layout::Layout;
use rustc::ty::subst::{Substs, Kind};
use rustc::ty::{self, Ty};
use rustc::mir;
use syntax::codemap::Span;

use error::{EvalError, EvalResult};
use eval_context::{EvalContext, monomorphize_field_ty, StackPopCleanup};
use lvalue::{Lvalue, LvalueExtra};
use memory::Pointer;
use value::PrimVal;
use value::Value;
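
// Drop-glue evaluation: `drop` walks a value's type and collects the drop
// impls that have to run, and `eval_drop_impls` then pushes one stack frame
// per impl so the interpreter executes them in the collected order.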

impl<'a, 'tcx> EvalContext<'a, 'tcx> {

    /// Creates stack frames for all drop impls. See `drop` for the actual content.
    pub fn eval_drop_impls(&mut self, drops: Vec<(DefId, Value, &'tcx Substs<'tcx>)>, span: Span) -> EvalResult<'tcx> {
        // Add them to the stack in reverse order, because the impl that needs
        // to run last must be at the bottom of the stack.
        for (drop_def_id, self_arg, substs) in drops.into_iter().rev() {
            let mir = self.load_mir(drop_def_id)?;
            trace!("substs for drop glue: {:?}", substs);
            self.push_stack_frame(
                drop_def_id,
                span,
                mir,
                substs,
                Lvalue::from_ptr(Pointer::zst_ptr()),
                StackPopCleanup::None,
                Vec::new(),
            )?;
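            // A drop impl takes exactly one argument, `self`; write the value
            // being dropped into the new frame's only argument local.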
            let mut arg_locals = self.frame().mir.args_iter();
            let first = arg_locals.next().expect("drop impl has self arg");
            assert!(arg_locals.next().is_none(), "drop impl should have only one arg");
            let dest = self.eval_lvalue(&mir::Lvalue::Local(first))?;
            let ty = self.frame().mir.local_decls[first].ty;
            self.write_value(self_arg, dest, ty)?;
        }
        Ok(())
    }

    /// Pushes the `DefId`s of drop impls and their arguments onto the given vector.
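    /// For example, dropping a `Box<Vec<u8>>` queues the `Vec`'s drop impl before
    /// `box_free`, so the contents are dropped before the box's allocation is freed.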
    pub fn drop(
        &mut self,
        lval: Lvalue<'tcx>,
        ty: Ty<'tcx>,
        drop: &mut Vec<(DefId, Value, &'tcx Substs<'tcx>)>,
    ) -> EvalResult<'tcx> {
        if !self.type_needs_drop(ty) {
            debug!("no need to drop {:?}", ty);
            return Ok(());
        }
        trace!("need to drop {:?} at {:?}", ty, lval);

        match ty.sty {
            // special-case `Box` to deallocate the inner allocation
            ty::TyAdt(ref def, _) if def.is_box() => {
                let contents_ty = ty.boxed_ty();
                let val = self.read_lvalue(lval);
                // We go through the `read_value` path here, because it already does
                // all the checks for trait object types; we'd only be repeating
                // ourselves otherwise.
                let val = self.follow_by_ref_value(val, ty)?;
                trace!("box dealloc on {:?}", val);
                match val {
                    Value::ByRef(_) => bug!("follow_by_ref_value can't result in ByRef"),
                    Value::ByVal(ptr) => {
                        assert!(self.type_is_sized(contents_ty));
                        let contents_ptr = ptr.to_ptr()?;
                        self.drop(Lvalue::from_ptr(contents_ptr), contents_ty, drop)?;
                    },
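                    // A fat pointer: the second component is either a vtable or a length.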
                    Value::ByValPair(prim_ptr, extra) => {
                        let ptr = prim_ptr.to_ptr()?;
                        let extra = match self.tcx.struct_tail(contents_ty).sty {
                            ty::TyDynamic(..) => LvalueExtra::Vtable(extra.to_ptr()?),
                            ty::TyStr | ty::TySlice(_) => LvalueExtra::Length(extra.to_u64()?),
                            _ => bug!("invalid fat pointer type: {}", ty),
                        };
                        self.drop(Lvalue::Ptr { ptr, extra }, contents_ty, drop)?;
                    },
                }
                // We cannot use Box's destructor, because it is a no-op and only exists
                // to reduce the number of hacks required in the compiler around the Box type.
                let box_free_fn = self.tcx.lang_items.box_free_fn().expect("no box_free lang item");
                let substs = self.tcx.intern_substs(&[Kind::from(contents_ty)]);
                // this is somewhat hacky, but hey, there's no representation difference
                // between pointers, `Box`es and references, so
                // #[lang = "box_free"] unsafe fn box_free<T>(ptr: *mut T)
                // is the same as
                // fn drop(&mut self) if Self is Box<T>
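                // The contents' drop calls were queued above, so they run before
                // `box_free` releases the allocation.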
                drop.push((box_free_fn, val, substs));
            },

            ty::TyAdt(adt_def, substs) => {
                // FIXME: some structs are represented as ByValPair
                let lval = self.force_allocation(lval)?;
                let adt_ptr = match lval {
                    Lvalue::Ptr { ptr, .. } => ptr,
                    _ => bug!("force allocation can only yield Lvalue::Ptr"),
                };
                // run drop impl before the fields' drop impls
                if let Some(drop_def_id) = adt_def.destructor() {
                    drop.push((drop_def_id, Value::ByVal(PrimVal::Ptr(adt_ptr)), substs));
                }
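                // Determine which fields still need dropping, together with their
                // offsets, from the type's layout.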
                let layout = self.type_layout(ty)?;
                let fields = match *layout {
                    Layout::Univariant { ref variant, .. } => {
                        adt_def.struct_variant().fields.iter().zip(&variant.offsets)
                    },
                    Layout::General { ref variants, .. } => {
                        let discr_val = self.read_discriminant_value(adt_ptr, ty)? as u128;
                        match adt_def.variants.iter().position(|v| discr_val == v.disr_val.to_u128_unchecked()) {
                            // start at offset 1, to skip over the discriminant
                            Some(i) => adt_def.variants[i].fields.iter().zip(&variants[i].offsets[1..]),
                            None => return Err(EvalError::InvalidDiscriminant),
                        }
                    },
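                    // Nullable-pointer optimization: the "null" variant is zero sized,
                    // so only the non-null variant can have fields that need dropping.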
                    Layout::StructWrappedNullablePointer { nndiscr, ref nonnull, .. } => {
                        let discr = self.read_discriminant_value(adt_ptr, ty)?;
                        if discr == nndiscr as u128 {
                            assert_eq!(discr as usize as u128, discr);
                            adt_def.variants[discr as usize].fields.iter().zip(&nonnull.offsets)
                        } else {
                            // FIXME: the zst variant might contain zst types that impl Drop
                            return Ok(()); // nothing to do, this is zero sized (e.g. `None`)
                        }
                    },
                    Layout::RawNullablePointer { nndiscr, .. } => {
                        let discr = self.read_discriminant_value(adt_ptr, ty)?;
                        if discr == nndiscr as u128 {
                            assert_eq!(discr as usize as u128, discr);
                            assert_eq!(adt_def.variants[discr as usize].fields.len(), 1);
                            let field_ty = &adt_def.variants[discr as usize].fields[0];
                            let field_ty = monomorphize_field_ty(self.tcx, field_ty, substs);
                            // FIXME: once read_discriminant_value works with lvalue, don't force
                            // alloc in the RawNullablePointer case
                            self.drop(lval, field_ty, drop)?;
                            return Ok(());
                        } else {
                            // FIXME: the zst variant might contain zst types that impl Drop
                            return Ok(()); // nothing to do, this is zero sized (e.g. `None`)
                        }
                    },
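                    // C-like enums carry no data, so there is nothing further to drop.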
                    Layout::CEnum { .. } => return Ok(()),
                    _ => bug!("{:?} is not an adt layout", layout),
                };
                let tcx = self.tcx;
                self.drop_fields(
                    fields.map(|(ty, &offset)| (monomorphize_field_ty(tcx, ty, substs), offset)),
                    lval,
                    drop,
                )?;
            },
            ty::TyTuple(fields, _) => {
                let offsets = match *self.type_layout(ty)? {
                    Layout::Univariant { ref variant, .. } => &variant.offsets,
                    _ => bug!("tuples must be univariant"),
                };
                self.drop_fields(fields.iter().cloned().zip(offsets.iter().cloned()), lval, drop)?;
            },
            ty::TyDynamic(..) => {
                let (ptr, vtable) = match lval {
                    Lvalue::Ptr { ptr, extra: LvalueExtra::Vtable(vtable) } => (ptr, vtable),
                    _ => bug!("expected an lvalue with a vtable"),
                };
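                // The drop glue function pointer sits in the first slot of the vtable.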
                let drop_fn = self.memory.read_ptr(vtable)?;
                // some types don't need a drop impl to be called, in which case
                // the vtable entry is a null pointer
                if drop_fn != Pointer::from_int(0) {
                    let real_ty = self.memory.get_fn(drop_fn.alloc_id)?.expect_drop_glue_real_ty()?;
                    self.drop(Lvalue::from_ptr(ptr), real_ty, drop)?;
                } else {
                    // just a sanity check
                    assert_eq!(drop_fn.offset, 0);
                }
            },
            ty::TySlice(elem_ty) => {
                let (ptr, len) = match lval {
                    Lvalue::Ptr { ptr, extra: LvalueExtra::Length(len) } => (ptr, len),
                    _ => bug!("expected an lvalue with a length"),
                };
                let size = self.type_size(elem_ty)?.expect("slice element must be sized");
                // FIXME: this creates a lot of stack frames if the element type has
                // a drop impl
                for i in 0..len {
                    self.drop(Lvalue::from_ptr(ptr.offset(i * size)), elem_ty, drop)?;
                }
            },
            ty::TyArray(elem_ty, len) => {
                let lval = self.force_allocation(lval)?;
                let (ptr, extra) = match lval {
                    Lvalue::Ptr { ptr, extra } => (ptr, extra),
                    _ => bug!("expected an lvalue with optional extra data"),
                };
                let size = self.type_size(elem_ty)?.expect("array element cannot be unsized");
                // FIXME: this creates a lot of stack frames if the element type has
                // a drop impl
                for i in 0..(len as u64) {
                    self.drop(Lvalue::Ptr { ptr: ptr.offset(i * size), extra }, elem_ty, drop)?;
                }
            },
            // FIXME: what about TyClosure and TyAnon?
            // all other types need no drop processing
            _ => {},
        }

        Ok(())
    }

    fn drop_fields<I>(
        &mut self,
        mut fields: I,
        lval: Lvalue<'tcx>,
        drop: &mut Vec<(DefId, Value, &'tcx Substs<'tcx>)>,
    ) -> EvalResult<'tcx>
        where I: Iterator<Item = (Ty<'tcx>, ty::layout::Size)>,
    {
        // FIXME: some aggregates may be represented by Value::ByValPair
        let (adt_ptr, extra) = self.force_allocation(lval)?.to_ptr_and_extra();
        // manual iteration, because we need to be careful about the last field if it is unsized
        while let Some((field_ty, offset)) = fields.next() {
            let ptr = adt_ptr.offset(offset.bytes());
            if self.type_is_sized(field_ty) {
                self.drop(Lvalue::from_ptr(ptr), field_ty, drop)?;
            } else {
                self.drop(Lvalue::Ptr { ptr, extra }, field_ty, drop)?;
                break; // if it is not sized, then this is the last field anyway
            }
        }
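        // An unsized field can only ever be the last one, so nothing may follow it.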
        assert!(fields.next().is_none());
        Ok(())
    }
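
    /// Returns whether values of this type need any drop handling at all.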
    fn type_needs_drop(&self, ty: Ty<'tcx>) -> bool {
        self.tcx.type_needs_drop_given_env(ty, &self.tcx.empty_parameter_environment())
    }
}