rustc_mir_transform/
elaborate_drop.rs

1use std::{fmt, iter, mem};
2
3use rustc_abi::{FIRST_VARIANT, FieldIdx, VariantIdx};
4use rustc_hir::def::DefKind;
5use rustc_hir::lang_items::LangItem;
6use rustc_index::Idx;
7use rustc_middle::mir::*;
8use rustc_middle::ty::adjustment::PointerCoercion;
9use rustc_middle::ty::util::IntTypeExt;
10use rustc_middle::ty::{self, GenericArg, GenericArgsRef, Ty, TyCtxt};
11use rustc_middle::{bug, span_bug, traits};
12use rustc_span::DUMMY_SP;
13use rustc_span::source_map::{Spanned, dummy_spanned};
14use tracing::{debug, instrument};
15
16use crate::patch::MirPatch;
17
/// Describes how/if a value should be dropped.
///
/// Chosen by [`DropElaborator::drop_style`] and consumed when elaborating a
/// single drop terminator.
#[derive(Debug)]
pub(crate) enum DropStyle {
    /// The value is already dead at the drop location, no drop will be executed.
    Dead,

    /// The value is known to always be initialized at the drop location, drop will always be
    /// executed.
    Static,

    /// Whether the value needs to be dropped depends on its drop flag.
    Conditional,

    /// An "open" drop is one where only the fields of a value are dropped.
    ///
    /// For example, this happens when moving out of a struct field: The rest of the struct will be
    /// dropped in such an "open" drop. It is also used to generate drop glue for the individual
    /// components of a value, for example for dropping array elements.
    Open,
}
38
/// Which drop flags to affect/check with an operation.
///
/// Passed to [`DropElaborator::drop_style`] and [`DropElaborator::clear_drop_flag`].
#[derive(Debug)]
pub(crate) enum DropFlagMode {
    /// Only affect the top-level drop flag, not that of any contained fields.
    Shallow,
    /// Affect all nested drop flags in addition to the top-level one.
    Deep,
}
47
/// Describes if unwinding is necessary and where to unwind to if a panic occurs.
#[derive(Copy, Clone, Debug)]
pub(crate) enum Unwind {
    /// Unwind to this block.
    To(BasicBlock),
    /// Already in an unwind path, any panic will cause an abort.
    InCleanup,
}
56
57impl Unwind {
58    fn is_cleanup(self) -> bool {
59        match self {
60            Unwind::To(..) => false,
61            Unwind::InCleanup => true,
62        }
63    }
64
65    fn into_action(self) -> UnwindAction {
66        match self {
67            Unwind::To(bb) => UnwindAction::Cleanup(bb),
68            Unwind::InCleanup => UnwindAction::Terminate(UnwindTerminateReason::InCleanup),
69        }
70    }
71
72    fn map<F>(self, f: F) -> Self
73    where
74        F: FnOnce(BasicBlock) -> BasicBlock,
75    {
76        match self {
77            Unwind::To(bb) => Unwind::To(f(bb)),
78            Unwind::InCleanup => Unwind::InCleanup,
79        }
80    }
81}
82
pub(crate) trait DropElaborator<'a, 'tcx>: fmt::Debug {
    /// The type representing paths that can be moved out of.
    ///
    /// Users can move out of individual fields of a struct, such as `a.b.c`. This type is used to
    /// represent such move paths. Sometimes tracking individual move paths is not necessary, in
    /// which case this may be set to (for example) `()`.
    type Path: Copy + fmt::Debug;

    // Accessors

    /// Read-only access to the MIR patch in which all edits are accumulated.
    fn patch_ref(&self) -> &MirPatch<'tcx>;
    /// Mutable access to the MIR patch in which all edits are accumulated.
    fn patch(&mut self) -> &mut MirPatch<'tcx>;
    /// The (unpatched) body whose drops are being elaborated.
    fn body(&self) -> &'a Body<'tcx>;
    fn tcx(&self) -> TyCtxt<'tcx>;
    fn typing_env(&self) -> ty::TypingEnv<'tcx>;
    /// Whether drops may be expanded into async drops for this body.
    fn allow_async_drops(&self) -> bool;

    /// The location of `bb`'s terminator.
    fn terminator_loc(&self, bb: BasicBlock) -> Location;

    // Drop logic

    /// Returns how `path` should be dropped, given `mode`.
    fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle;

    /// Returns the drop flag of `path` as a MIR `Operand` (or `None` if `path` has no drop flag).
    fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>>;

    /// Modifies the MIR patch so that the drop flag of `path` (if any) is cleared at `location`.
    ///
    /// If `mode` is deep, drop flags of all child paths should also be cleared by inserting
    /// additional statements.
    fn clear_drop_flag(&mut self, location: Location, path: Self::Path, mode: DropFlagMode);

    // Subpaths

    /// Returns the subpath of a field of `path` (or `None` if there is no dedicated subpath).
    ///
    /// If this returns `None`, `field` will not get a dedicated drop flag.
    fn field_subpath(&self, path: Self::Path, field: FieldIdx) -> Option<Self::Path>;

    /// Returns the subpath of a dereference of `path` (or `None` if there is no dedicated subpath).
    ///
    /// If this returns `None`, `*path` will not get a dedicated drop flag.
    ///
    /// This is only relevant for `Box<T>`, where the contained `T` can be moved out of the box.
    fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path>;

    /// Returns the subpath of downcasting `path` to one of its variants.
    ///
    /// If this returns `None`, the downcast of `path` will not get a dedicated drop flag.
    fn downcast_subpath(&self, path: Self::Path, variant: VariantIdx) -> Option<Self::Path>;

    /// Returns the subpath of indexing a fixed-size array `path`.
    ///
    /// If this returns `None`, elements of `path` will not get a dedicated drop flag.
    ///
    /// This is only relevant for array patterns, which can move out of individual array elements.
    fn array_subpath(&self, path: Self::Path, index: u64, size: u64) -> Option<Self::Path>;
}
142
/// State carried while elaborating a single drop.
#[derive(Debug)]
struct DropCtxt<'a, 'b, 'tcx, D>
where
    D: DropElaborator<'b, 'tcx>,
{
    elaborator: &'a mut D,

    // Source info attached to every statement/terminator emitted for this drop.
    source_info: SourceInfo,

    // The place being dropped and its move path.
    place: Place<'tcx>,
    path: D::Path,
    // Block to continue to once the drop completed successfully.
    succ: BasicBlock,
    // Where to unwind to if the drop panics.
    unwind: Unwind,
    // Target used for the coroutine-drop ("dropline") path of async drops, if any.
    dropline: Option<BasicBlock>,
}
158
159/// "Elaborates" a drop of `place`/`path` and patches `bb`'s terminator to execute it.
160///
161/// The passed `elaborator` is used to determine what should happen at the drop terminator. It
162/// decides whether the drop can be statically determined or whether it needs a dynamic drop flag,
163/// and whether the drop is "open", ie. should be expanded to drop all subfields of the dropped
164/// value.
165///
166/// When this returns, the MIR patch in the `elaborator` contains the necessary changes.
167pub(crate) fn elaborate_drop<'b, 'tcx, D>(
168    elaborator: &mut D,
169    source_info: SourceInfo,
170    place: Place<'tcx>,
171    path: D::Path,
172    succ: BasicBlock,
173    unwind: Unwind,
174    bb: BasicBlock,
175    dropline: Option<BasicBlock>,
176) where
177    D: DropElaborator<'b, 'tcx>,
178    'tcx: 'b,
179{
180    DropCtxt { elaborator, source_info, place, path, succ, unwind, dropline }.elaborate_drop(bb)
181}
182
183impl<'a, 'b, 'tcx, D> DropCtxt<'a, 'b, 'tcx, D>
184where
185    D: DropElaborator<'b, 'tcx>,
186    'tcx: 'b,
187{
    /// Returns the type of `place`, also handling locals that only exist in the
    /// pending `MirPatch` and are therefore absent from the body's `local_decls`.
    #[instrument(level = "trace", skip(self), ret)]
    fn place_ty(&self, place: Place<'tcx>) -> Ty<'tcx> {
        if place.local < self.elaborator.body().local_decls.next_index() {
            place.ty(self.elaborator.body(), self.tcx()).ty
        } else {
            // We don't have a slice with all the locals, since some are in the patch.
            // Look the base local's type up in the patch and apply the projections manually.
            PlaceTy::from_ty(self.elaborator.patch_ref().local_ty(place.local))
                .multi_projection_ty(self.elaborator.tcx(), place.projection)
                .ty
        }
    }
199
    /// Convenience accessor for the type context.
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.elaborator.tcx()
    }
203
    // Generates three blocks:
    // * #1:pin_obj_bb:   call Pin<ObjTy>::new_unchecked(&mut obj)
    // * #2:call_drop_bb: fut = call obj.<AsyncDrop::drop>() OR call async_drop_in_place<T>(obj)
    // * #3:drop_term_bb: drop (obj, fut, ...)
    // We keep async drop unexpanded to poll-loop here, to expand it later, at StateTransform -
    //   into states expand.
    // call_destructor_only - to call only AsyncDrop::drop, not full async_drop_in_place glue
    fn build_async_drop(
        &mut self,
        place: Place<'tcx>,
        drop_ty: Ty<'tcx>,
        bb: Option<BasicBlock>,
        succ: BasicBlock,
        unwind: Unwind,
        dropline: Option<BasicBlock>,
        call_destructor_only: bool,
    ) -> BasicBlock {
        let tcx = self.tcx();
        let span = self.source_info.span;

        // Reuse the caller-provided block as #1 when given, otherwise create a fresh one.
        let pin_obj_bb = bb.unwrap_or_else(|| {
            self.elaborator.patch().new_block(BasicBlockData {
                statements: vec![],
                terminator: Some(Terminator {
                    // Temporary terminator, will be replaced by patch
                    source_info: self.source_info,
                    kind: TerminatorKind::Return,
                }),
                is_cleanup: false,
            })
        });

        // Resolve the function to call in #2 and the future type it returns.
        let (fut_ty, drop_fn_def_id, trait_args) = if call_destructor_only {
            // Resolving obj.<AsyncDrop::drop>()
            let trait_ref =
                ty::TraitRef::new(tcx, tcx.require_lang_item(LangItem::AsyncDrop, span), [drop_ty]);
            let (drop_trait, trait_args) = match tcx.codegen_select_candidate(
                ty::TypingEnv::fully_monomorphized().as_query_input(trait_ref),
            ) {
                Ok(traits::ImplSource::UserDefined(traits::ImplSourceUserDefinedData {
                    impl_def_id,
                    args,
                    ..
                })) => (*impl_def_id, *args),
                impl_source => {
                    span_bug!(span, "invalid `AsyncDrop` impl_source: {:?}", impl_source);
                }
            };
            // impl_item_refs may be empty if drop fn is not implemented in 'impl AsyncDrop for ...'
            // (#140974).
            // Such code will report error, so just generate sync drop here and return
            let Some(drop_fn_def_id) = tcx
                .associated_item_def_ids(drop_trait)
                .first()
                .and_then(|def_id| {
                    if tcx.def_kind(def_id) == DefKind::AssocFn
                        && tcx.check_args_compatible(*def_id, trait_args)
                    {
                        Some(def_id)
                    } else {
                        None
                    }
                })
                .copied()
            else {
                tcx.dcx().span_delayed_bug(
                    self.elaborator.body().span,
                    "AsyncDrop type without correct `async fn drop(...)`.",
                );
                // Fall back to a plain synchronous drop terminator.
                self.elaborator.patch().patch_terminator(
                    pin_obj_bb,
                    TerminatorKind::Drop {
                        place,
                        target: succ,
                        unwind: unwind.into_action(),
                        replace: false,
                        drop: None,
                        async_fut: None,
                    },
                );
                return pin_obj_bb;
            };
            let drop_fn = Ty::new_fn_def(tcx, drop_fn_def_id, trait_args);
            let sig = drop_fn.fn_sig(tcx);
            let sig = tcx.instantiate_bound_regions_with_erased(sig);
            (sig.output(), drop_fn_def_id, trait_args)
        } else {
            // Resolving async_drop_in_place<T> function for drop_ty
            let drop_fn_def_id = tcx.require_lang_item(LangItem::AsyncDropInPlace, span);
            let trait_args = tcx.mk_args(&[drop_ty.into()]);
            let sig = tcx.fn_sig(drop_fn_def_id).instantiate(tcx, trait_args);
            let sig = tcx.instantiate_bound_regions_with_erased(sig);
            (sig.output(), drop_fn_def_id, trait_args)
        };

        // Temporary holding the drop future produced by the call in #2.
        let fut = Place::from(self.new_temp(fut_ty));

        // #1:pin_obj_bb >>> obj_ref = &mut obj
        let obj_ref_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, drop_ty);
        let obj_ref_place = Place::from(self.new_temp(obj_ref_ty));

        let term_loc = self.elaborator.terminator_loc(pin_obj_bb);
        self.elaborator.patch().add_assign(
            term_loc,
            obj_ref_place,
            Rvalue::Ref(
                tcx.lifetimes.re_erased,
                BorrowKind::Mut { kind: MutBorrowKind::Default },
                place,
            ),
        );

        // pin_obj_place preparation
        let pin_obj_new_unchecked_fn = Ty::new_fn_def(
            tcx,
            tcx.require_lang_item(LangItem::PinNewUnchecked, span),
            [GenericArg::from(obj_ref_ty)],
        );
        let pin_obj_ty = pin_obj_new_unchecked_fn.fn_sig(tcx).output().no_bound_vars().unwrap();
        let pin_obj_place = Place::from(self.new_temp(pin_obj_ty));
        let pin_obj_new_unchecked_fn = Operand::Constant(Box::new(ConstOperand {
            span,
            user_ty: None,
            const_: Const::zero_sized(pin_obj_new_unchecked_fn),
        }));

        // #3:drop_term_bb
        let drop_term_bb = self.new_block(
            unwind,
            TerminatorKind::Drop {
                place,
                target: succ,
                unwind: unwind.into_action(),
                replace: false,
                drop: dropline,
                async_fut: Some(fut.local),
            },
        );

        // #2:call_drop_bb
        let mut call_statements = Vec::new();
        let drop_arg = if call_destructor_only {
            // AsyncDrop::drop takes the pinned object directly.
            pin_obj_place
        } else {
            // async_drop_in_place takes a raw *mut T: unwrap the Pin to get at the
            // inner pointer-like field, then take the raw address of the pointee.
            let ty::Adt(adt_def, adt_args) = pin_obj_ty.kind() else {
                bug!();
            };
            let obj_ptr_ty = Ty::new_mut_ptr(tcx, drop_ty);
            let unwrap_ty = adt_def.non_enum_variant().fields[FieldIdx::ZERO].ty(tcx, adt_args);
            let obj_ref_place = Place::from(self.new_temp(unwrap_ty));
            call_statements.push(self.assign(
                obj_ref_place,
                Rvalue::Use(Operand::Copy(tcx.mk_place_field(
                    pin_obj_place,
                    FieldIdx::ZERO,
                    unwrap_ty,
                ))),
            ));

            let obj_ptr_place = Place::from(self.new_temp(obj_ptr_ty));

            let addr = Rvalue::RawPtr(RawPtrKind::Mut, tcx.mk_place_deref(obj_ref_place));
            call_statements.push(self.assign(obj_ptr_place, addr));
            obj_ptr_place
        };
        call_statements.push(Statement {
            source_info: self.source_info,
            kind: StatementKind::StorageLive(fut.local),
        });

        let call_drop_bb = self.new_block_with_statements(
            unwind,
            call_statements,
            TerminatorKind::Call {
                func: Operand::function_handle(tcx, drop_fn_def_id, trait_args, span),
                args: [Spanned { node: Operand::Move(drop_arg), span: DUMMY_SP }].into(),
                destination: fut,
                target: Some(drop_term_bb),
                unwind: unwind.into_action(),
                call_source: CallSource::Misc,
                fn_span: self.source_info.span,
            },
        );

        // StorageDead(fut) in self.succ block (at the begin)
        self.elaborator.patch().add_statement(
            Location { block: self.succ, statement_index: 0 },
            StatementKind::StorageDead(fut.local),
        );

        // #1:pin_obj_bb >>> call Pin<ObjTy>::new_unchecked(&mut obj)
        self.elaborator.patch().patch_terminator(
            pin_obj_bb,
            TerminatorKind::Call {
                func: pin_obj_new_unchecked_fn,
                args: [dummy_spanned(Operand::Move(obj_ref_place))].into(),
                destination: pin_obj_place,
                target: Some(call_drop_bb),
                unwind: unwind.into_action(),
                call_source: CallSource::Misc,
                fn_span: span,
            },
        );
        pin_obj_bb
    }
409
    /// Patches `bb`'s terminator with the actual drop: an async drop expansion when
    /// we are in a coroutine body, async drops are enabled and needed for the type,
    /// and `bb` is not on a cleanup path — otherwise a plain `Drop` terminator.
    fn build_drop(&mut self, bb: BasicBlock) {
        let drop_ty = self.place_ty(self.place);
        if self.tcx().features().async_drop()
            && self.elaborator.body().coroutine.is_some()
            && self.elaborator.allow_async_drops()
            && !self.elaborator.patch_ref().block(self.elaborator.body(), bb).is_cleanup
            && drop_ty.needs_async_drop(self.tcx(), self.elaborator.typing_env())
        {
            self.build_async_drop(
                self.place,
                drop_ty,
                Some(bb),
                self.succ,
                self.unwind,
                self.dropline,
                false,
            );
        } else {
            self.elaborator.patch().patch_terminator(
                bb,
                TerminatorKind::Drop {
                    place: self.place,
                    target: self.succ,
                    unwind: self.unwind.into_action(),
                    replace: false,
                    drop: None,
                    async_fut: None,
                },
            );
        }
    }
441
    /// This elaborates a single drop instruction, located at `bb`, and
    /// patches over it.
    ///
    /// The elaborated drop checks the drop flags to only drop what
    /// is initialized.
    ///
    /// In addition, the relevant drop flags also need to be cleared
    /// to avoid double-drops. However, in the middle of a complex
    /// drop, one must avoid clearing some of the flags before they
    /// are read, as that would cause a memory leak.
    ///
    /// In particular, when dropping an ADT, multiple fields may be
    /// joined together under the `rest` subpath. They are all controlled
    /// by the primary drop flag, but only the last rest-field dropped
    /// should clear it (and it must also not clear anything else).
    //
    // FIXME: I think we should just control the flags externally,
    // and then we do not need this machinery.
    #[instrument(level = "debug")]
    fn elaborate_drop(&mut self, bb: BasicBlock) {
        match self.elaborator.drop_style(self.path, DropFlagMode::Deep) {
            // Known-uninitialized: the drop becomes a plain jump to the successor.
            DropStyle::Dead => {
                self.elaborator
                    .patch()
                    .patch_terminator(bb, TerminatorKind::Goto { target: self.succ });
            }
            // Known-initialized: emit the drop unconditionally.
            DropStyle::Static => {
                self.build_drop(bb);
            }
            // Must consult the drop flag at runtime.
            DropStyle::Conditional => {
                let drop_bb = self.complete_drop(self.succ, self.unwind);
                self.elaborator
                    .patch()
                    .patch_terminator(bb, TerminatorKind::Goto { target: drop_bb });
            }
            // Expand into drops of the value's individual fields.
            DropStyle::Open => {
                let drop_bb = self.open_drop();
                self.elaborator
                    .patch()
                    .patch_terminator(bb, TerminatorKind::Goto { target: drop_bb });
            }
        }
    }
485
    /// Returns the place and move path for each field of `variant`,
    /// (the move path is `None` if the field is a rest field).
    fn move_paths_for_fields(
        &self,
        base_place: Place<'tcx>,
        variant_path: D::Path,
        variant: &'tcx ty::VariantDef,
        args: GenericArgsRef<'tcx>,
    ) -> Vec<(Place<'tcx>, Option<D::Path>)> {
        variant
            .fields
            .iter_enumerated()
            .map(|(field_idx, field)| {
                let subpath = self.elaborator.field_subpath(variant_path, field_idx);
                let tcx = self.tcx();

                // Drop elaboration runs after analysis, so normalization must succeed
                // barring bugs elsewhere; a failure is downgraded to a delayed bug
                // and an error type rather than an ICE here.
                assert_eq!(self.elaborator.typing_env().typing_mode, ty::TypingMode::PostAnalysis);
                let field_ty = match tcx.try_normalize_erasing_regions(
                    self.elaborator.typing_env(),
                    field.ty(tcx, args),
                ) {
                    Ok(t) => t,
                    Err(_) => Ty::new_error(
                        self.tcx(),
                        self.tcx().dcx().span_delayed_bug(
                            self.elaborator.body().span,
                            "Error normalizing in drop elaboration.",
                        ),
                    ),
                };

                (tcx.mk_place_field(base_place, field_idx, field_ty), subpath)
            })
            .collect()
    }
521
522    fn drop_subpath(
523        &mut self,
524        place: Place<'tcx>,
525        path: Option<D::Path>,
526        succ: BasicBlock,
527        unwind: Unwind,
528        dropline: Option<BasicBlock>,
529    ) -> BasicBlock {
530        if let Some(path) = path {
531            debug!("drop_subpath: for std field {:?}", place);
532
533            DropCtxt {
534                elaborator: self.elaborator,
535                source_info: self.source_info,
536                path,
537                place,
538                succ,
539                unwind,
540                dropline,
541            }
542            .elaborated_drop_block()
543        } else {
544            debug!("drop_subpath: for rest field {:?}", place);
545
546            DropCtxt {
547                elaborator: self.elaborator,
548                source_info: self.source_info,
549                place,
550                succ,
551                unwind,
552                dropline,
553                // Using `self.path` here to condition the drop on
554                // our own drop flag.
555                path: self.path,
556            }
557            .complete_drop(succ, unwind)
558        }
559    }
560
    /// Creates one-half of the drop ladder for a list of fields, and return
    /// the list of steps in it in reverse order, with the first step
    /// dropping 0 fields and so on.
    ///
    /// `unwind_ladder` is such a list of steps in reverse order,
    /// which is called if the matching step of the drop glue panics.
    ///
    /// `dropline_ladder` is a similar list of steps in reverse order,
    /// which is called if the matching step of the drop glue will contain async drop
    /// (expanded later to Yield) and the containing coroutine will be dropped at this point.
    fn drop_halfladder(
        &mut self,
        unwind_ladder: &[Unwind],
        dropline_ladder: &[Option<BasicBlock>],
        mut succ: BasicBlock,
        fields: &[(Place<'tcx>, Option<D::Path>)],
    ) -> Vec<BasicBlock> {
        // Walk the fields in reverse: each new step drops one more field and then
        // falls through to the previously built step (threaded through `succ`).
        iter::once(succ)
            .chain(itertools::izip!(fields.iter().rev(), unwind_ladder, dropline_ladder).map(
                |(&(place, path), &unwind_succ, &dropline_to)| {
                    succ = self.drop_subpath(place, path, succ, unwind_succ, dropline_to);
                    succ
                },
            ))
            .collect()
    }
587
588    fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind, Option<BasicBlock>) {
589        // Clear the "master" drop flag at the end. This is needed
590        // because the "master" drop protects the ADT's discriminant,
591        // which is invalidated after the ADT is dropped.
592        (
593            self.drop_flag_reset_block(DropFlagMode::Shallow, self.succ, self.unwind),
594            self.unwind,
595            self.dropline,
596        )
597    }
598
    /// Creates a full drop ladder, consisting of 2 connected half-drop-ladders
    ///
    /// For example, with 3 fields, the drop ladder is
    ///
    /// .d0:
    ///     ELAB(drop location.0 [target=.d1, unwind=.c1])
    /// .d1:
    ///     ELAB(drop location.1 [target=.d2, unwind=.c2])
    /// .d2:
    ///     ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`])
    /// .c1:
    ///     ELAB(drop location.1 [target=.c2])
    /// .c2:
    ///     ELAB(drop location.2 [target=`self.unwind`])
    ///
    /// For possible-async drops in coroutines we also need dropline ladder
    /// .d0 (mainline):
    ///     ELAB(drop location.0 [target=.d1, unwind=.c1, drop=.e1])
    /// .d1 (mainline):
    ///     ELAB(drop location.1 [target=.d2, unwind=.c2, drop=.e2])
    /// .d2 (mainline):
    ///     ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`, drop=`self.drop`])
    /// .c1 (unwind):
    ///     ELAB(drop location.1 [target=.c2])
    /// .c2 (unwind):
    ///     ELAB(drop location.2 [target=`self.unwind`])
    /// .e1 (dropline):
    ///     ELAB(drop location.1 [target=.e2, unwind=.c2])
    /// .e2 (dropline):
    ///     ELAB(drop location.2 [target=`self.drop`, unwind=`self.unwind`])
    ///
    /// NOTE: this does not clear the master drop flag, so you need
    /// to point succ/unwind on a `drop_ladder_bottom`.
    fn drop_ladder(
        &mut self,
        fields: Vec<(Place<'tcx>, Option<D::Path>)>,
        succ: BasicBlock,
        unwind: Unwind,
        dropline: Option<BasicBlock>,
    ) -> (BasicBlock, Unwind, Option<BasicBlock>) {
        debug!("drop_ladder({:?}, {:?})", self, fields);
        assert!(
            if unwind.is_cleanup() { dropline.is_none() } else { true },
            "Dropline is set for cleanup drop ladder"
        );

        // Only fields whose type actually needs dropping get a ladder step.
        let mut fields = fields;
        fields.retain(|&(place, _)| {
            self.place_ty(place).needs_drop(self.tcx(), self.elaborator.typing_env())
        });

        debug!("drop_ladder - fields needing drop: {:?}", fields);

        // Build the unwind (.c*) and dropline (.e*) half-ladders first, because the
        // mainline (.d*) steps reference them as their unwind/drop targets.
        let dropline_ladder: Vec<Option<BasicBlock>> = vec![None; fields.len() + 1];
        let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
        let unwind_ladder: Vec<_> = if let Unwind::To(succ) = unwind {
            let halfladder = self.drop_halfladder(&unwind_ladder, &dropline_ladder, succ, &fields);
            halfladder.into_iter().map(Unwind::To).collect()
        } else {
            unwind_ladder
        };
        let dropline_ladder: Vec<_> = if let Some(succ) = dropline {
            let halfladder = self.drop_halfladder(&unwind_ladder, &dropline_ladder, succ, &fields);
            halfladder.into_iter().map(Some).collect()
        } else {
            dropline_ladder
        };

        let normal_ladder = self.drop_halfladder(&unwind_ladder, &dropline_ladder, succ, &fields);

        // The last elements are the entry points of each ladder (dropping all fields).
        (
            *normal_ladder.last().unwrap(),
            *unwind_ladder.last().unwrap(),
            *dropline_ladder.last().unwrap(),
        )
    }
675
676    fn open_drop_for_tuple(&mut self, tys: &[Ty<'tcx>]) -> BasicBlock {
677        debug!("open_drop_for_tuple({:?}, {:?})", self, tys);
678
679        let fields = tys
680            .iter()
681            .enumerate()
682            .map(|(i, &ty)| {
683                (
684                    self.tcx().mk_place_field(self.place, FieldIdx::new(i), ty),
685                    self.elaborator.field_subpath(self.path, FieldIdx::new(i)),
686                )
687            })
688            .collect();
689
690        let (succ, unwind, dropline) = self.drop_ladder_bottom();
691        self.drop_ladder(fields, succ, unwind, dropline).0
692    }
693
    /// Drops the T contained in a `Box<T>` if it has not been moved out of
    #[instrument(level = "debug", ret)]
    fn open_drop_for_box_contents(
        &mut self,
        adt: ty::AdtDef<'tcx>,
        args: GenericArgsRef<'tcx>,
        succ: BasicBlock,
        unwind: Unwind,
        dropline: Option<BasicBlock>,
    ) -> BasicBlock {
        // drop glue is sent straight to codegen
        // box cannot be directly dereferenced
        // Peel Box -> Unique -> NonNull by field projection, then transmute the
        // NonNull to a raw pointer we can dereference.
        let unique_ty = adt.non_enum_variant().fields[FieldIdx::ZERO].ty(self.tcx(), args);
        let unique_variant = unique_ty.ty_adt_def().unwrap().non_enum_variant();
        let nonnull_ty = unique_variant.fields[FieldIdx::ZERO].ty(self.tcx(), args);
        let ptr_ty = Ty::new_imm_ptr(self.tcx(), args[0].expect_ty());

        let unique_place = self.tcx().mk_place_field(self.place, FieldIdx::ZERO, unique_ty);
        let nonnull_place = self.tcx().mk_place_field(unique_place, FieldIdx::ZERO, nonnull_ty);

        let ptr_local = self.new_temp(ptr_ty);

        let interior = self.tcx().mk_place_deref(Place::from(ptr_local));
        let interior_path = self.elaborator.deref_subpath(self.path);

        let do_drop_bb = self.drop_subpath(interior, interior_path, succ, unwind, dropline);

        // Prepended block: extract the raw pointer, then jump to the interior drop.
        let setup_bbd = BasicBlockData {
            statements: vec![self.assign(
                Place::from(ptr_local),
                Rvalue::Cast(CastKind::Transmute, Operand::Copy(nonnull_place), ptr_ty),
            )],
            terminator: Some(Terminator {
                kind: TerminatorKind::Goto { target: do_drop_bb },
                source_info: self.source_info,
            }),
            is_cleanup: unwind.is_cleanup(),
        };
        self.elaborator.patch().new_block(setup_bbd)
    }
734
    /// Builds the open drop for an ADT: drops the contents (fields/variants) and,
    /// for boxes and types with destructors, arranges the destructor call.
    #[instrument(level = "debug", ret)]
    fn open_drop_for_adt(
        &mut self,
        adt: ty::AdtDef<'tcx>,
        args: GenericArgsRef<'tcx>,
    ) -> BasicBlock {
        // An uninhabited (variant-less) ADT can never exist, so its drop is unreachable.
        if adt.variants().is_empty() {
            return self.elaborator.patch().new_block(BasicBlockData {
                statements: vec![],
                terminator: Some(Terminator {
                    source_info: self.source_info,
                    kind: TerminatorKind::Unreachable,
                }),
                is_cleanup: self.unwind.is_cleanup(),
            });
        }

        // Unions and ManuallyDrop never drop their contents.
        let skip_contents = adt.is_union() || adt.is_manually_drop();
        let contents_drop = if skip_contents {
            (self.succ, self.unwind, self.dropline)
        } else {
            self.open_drop_for_adt_contents(adt, args)
        };

        if adt.is_box() {
            // we need to drop the inside of the box before running the destructor
            let succ = self.destructor_call_block_sync((contents_drop.0, contents_drop.1));
            let unwind = contents_drop
                .1
                .map(|unwind| self.destructor_call_block_sync((unwind, Unwind::InCleanup)));
            let dropline = contents_drop
                .2
                .map(|dropline| self.destructor_call_block_sync((dropline, contents_drop.1)));

            self.open_drop_for_box_contents(adt, args, succ, unwind, dropline)
        } else if adt.has_dtor(self.tcx()) {
            self.destructor_call_block(contents_drop)
        } else {
            contents_drop.0
        }
    }
776
777    fn open_drop_for_adt_contents(
778        &mut self,
779        adt: ty::AdtDef<'tcx>,
780        args: GenericArgsRef<'tcx>,
781    ) -> (BasicBlock, Unwind, Option<BasicBlock>) {
782        let (succ, unwind, dropline) = self.drop_ladder_bottom();
783        if !adt.is_enum() {
784            let fields =
785                self.move_paths_for_fields(self.place, self.path, adt.variant(FIRST_VARIANT), args);
786            self.drop_ladder(fields, succ, unwind, dropline)
787        } else {
788            self.open_drop_for_multivariant(adt, args, succ, unwind, dropline)
789        }
790    }
791
    /// Builds the drop for an enum value: a switch on the discriminant that
    /// jumps to a per-variant drop ladder. When unwinding is possible, a
    /// duplicate switch plus half-ladders is built for the cleanup path, and
    /// a third switch is built for the `dropline` (coroutine-drop) path when
    /// one exists.
    fn open_drop_for_multivariant(
        &mut self,
        adt: ty::AdtDef<'tcx>,
        args: GenericArgsRef<'tcx>,
        succ: BasicBlock,
        unwind: Unwind,
        dropline: Option<BasicBlock>,
    ) -> (BasicBlock, Unwind, Option<BasicBlock>) {
        let mut values = Vec::with_capacity(adt.variants().len());
        let mut normal_blocks = Vec::with_capacity(adt.variants().len());
        // Unwind ladders are only needed when we are not already on the
        // cleanup path ourselves.
        let mut unwind_blocks =
            if unwind.is_cleanup() { None } else { Some(Vec::with_capacity(adt.variants().len())) };
        // Dropline ladders are only needed when a dropline target exists.
        let mut dropline_blocks =
            if dropline.is_none() { None } else { Some(Vec::with_capacity(adt.variants().len())) };

        // Variants without a tracked move-out subpath all share the switch's
        // "otherwise" target; record whether any such variant exists and
        // whether any of them actually needs drop glue.
        let mut have_otherwise_with_drop_glue = false;
        let mut have_otherwise = false;
        let tcx = self.tcx();

        for (variant_index, discr) in adt.discriminants(tcx) {
            let variant = &adt.variant(variant_index);
            let subpath = self.elaborator.downcast_subpath(self.path, variant_index);

            if let Some(variant_path) = subpath {
                // This variant has its own move path, so individual fields may
                // have been moved out: build a full per-field drop ladder.
                let base_place = tcx.mk_place_elem(
                    self.place,
                    ProjectionElem::Downcast(Some(variant.name), variant_index),
                );
                let fields = self.move_paths_for_fields(base_place, variant_path, variant, args);
                values.push(discr.val);
                if let Unwind::To(unwind) = unwind {
                    // We can't use the half-ladder from the original
                    // drop ladder, because this breaks the
                    // "funclet can't have 2 successor funclets"
                    // requirement from MSVC:
                    //
                    //           switch       unwind-switch
                    //          /      \         /        \
                    //         v1.0    v2.0  v2.0-unwind  v1.0-unwind
                    //         |        |      /             |
                    //    v1.1-unwind  v2.1-unwind           |
                    //      ^                                |
                    //       \-------------------------------/
                    //
                    // Create a duplicate half-ladder to avoid that. We
                    // could technically only do this on MSVC, but
                    // I want to minimize the divergence between MSVC
                    // and non-MSVC.

                    let unwind_blocks = unwind_blocks.as_mut().unwrap();
                    let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
                    let dropline_ladder: Vec<Option<BasicBlock>> = vec![None; fields.len() + 1];
                    let halfladder =
                        self.drop_halfladder(&unwind_ladder, &dropline_ladder, unwind, &fields);
                    unwind_blocks.push(halfladder.last().cloned().unwrap());
                }
                let (normal, _, drop_bb) = self.drop_ladder(fields, succ, unwind, dropline);
                normal_blocks.push(normal);
                if dropline.is_some() {
                    dropline_blocks.as_mut().unwrap().push(drop_bb.unwrap());
                }
            } else {
                // No subpath: this variant goes to the "otherwise" arm.
                have_otherwise = true;

                let typing_env = self.elaborator.typing_env();
                let have_field_with_drop_glue = variant
                    .fields
                    .iter()
                    .any(|field| field.ty(tcx, args).needs_drop(tcx, typing_env));
                if have_field_with_drop_glue {
                    have_otherwise_with_drop_glue = true;
                }
            }
        }

        if !have_otherwise {
            // Every variant got its own ladder: turn the last ladder into the
            // switch's "otherwise" arm by dropping its discriminant value.
            values.pop();
        } else if !have_otherwise_with_drop_glue {
            // The "otherwise" variants have nothing to drop: just jump on.
            normal_blocks.push(self.goto_block(succ, unwind));
            if let Unwind::To(unwind) = unwind {
                unwind_blocks.as_mut().unwrap().push(self.goto_block(unwind, Unwind::InCleanup));
            }
        } else {
            // Some "otherwise" variant has drop glue: emit a full drop.
            normal_blocks.push(self.drop_block(succ, unwind));
            if let Unwind::To(unwind) = unwind {
                unwind_blocks.as_mut().unwrap().push(self.drop_block(unwind, Unwind::InCleanup));
            }
        }

        // Assemble the three discriminant switches (normal / unwind / dropline).
        (
            self.adt_switch_block(adt, normal_blocks, &values, succ, unwind),
            unwind.map(|unwind| {
                self.adt_switch_block(
                    adt,
                    unwind_blocks.unwrap(),
                    &values,
                    unwind,
                    Unwind::InCleanup,
                )
            }),
            dropline.map(|dropline| {
                self.adt_switch_block(adt, dropline_blocks.unwrap(), &values, dropline, unwind)
            }),
        )
    }
897
    /// Creates a block that reads the discriminant of `self.place` into a
    /// temporary and `SwitchInt`s on it, pairing `values[i]` with `blocks[i]`
    /// and using the final block as the `otherwise` target. The switch itself
    /// is guarded by a drop-flag test.
    fn adt_switch_block(
        &mut self,
        adt: ty::AdtDef<'tcx>,
        blocks: Vec<BasicBlock>,
        values: &[u128],
        succ: BasicBlock,
        unwind: Unwind,
    ) -> BasicBlock {
        // If anything within the enum is initialized, then so is its
        // discriminant (tracked by the base path) — so it is safe to read it
        // here, behind the drop-flag test.
        //
        // Additionally, we do not want to switch on the
        // discriminant after it is free-ed, because that
        // way lies only trouble.
        let discr_ty = adt.repr().discr_type().to_ty(self.tcx());
        let discr = Place::from(self.new_temp(discr_ty));
        let discr_rv = Rvalue::Discriminant(self.place);
        let switch_block = BasicBlockData {
            statements: vec![self.assign(discr, discr_rv)],
            terminator: Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::SwitchInt {
                    discr: Operand::Move(discr),
                    targets: SwitchTargets::new(
                        // `values` may be one shorter than `blocks`; the
                        // last block then serves as the `otherwise` arm.
                        values.iter().copied().zip(blocks.iter().copied()),
                        *blocks.last().unwrap(),
                    ),
                },
            }),
            is_cleanup: unwind.is_cleanup(),
        };
        let switch_block = self.elaborator.patch().new_block(switch_block);
        self.drop_flag_test_block(switch_block, succ, unwind)
    }
933
    /// Creates a block that synchronously calls `Drop::drop` on `self.place`
    /// through a fresh `&mut` borrow. The shallow drop flag is cleared at the
    /// start of that block, and the whole thing is guarded by a drop-flag
    /// test.
    fn destructor_call_block_sync(&mut self, (succ, unwind): (BasicBlock, Unwind)) -> BasicBlock {
        debug!("destructor_call_block_sync({:?}, {:?})", self, succ);
        let tcx = self.tcx();
        let drop_trait = tcx.require_lang_item(LangItem::Drop, DUMMY_SP);
        // The `Drop` trait's sole associated item is its `drop` method.
        let drop_fn = tcx.associated_item_def_ids(drop_trait)[0];
        let ty = self.place_ty(self.place);

        // Temporaries for the `&mut` receiver and the `()` return value.
        let ref_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, ty);
        let ref_place = self.new_temp(ref_ty);
        let unit_temp = Place::from(self.new_temp(tcx.types.unit));

        // `_ref = &mut place; Drop::drop(move _ref) -> [return: succ, unwind]`
        let result = BasicBlockData {
            statements: vec![self.assign(
                Place::from(ref_place),
                Rvalue::Ref(
                    tcx.lifetimes.re_erased,
                    BorrowKind::Mut { kind: MutBorrowKind::Default },
                    self.place,
                ),
            )],
            terminator: Some(Terminator {
                kind: TerminatorKind::Call {
                    func: Operand::function_handle(
                        tcx,
                        drop_fn,
                        [ty.into()],
                        self.source_info.span,
                    ),
                    args: [Spanned { node: Operand::Move(Place::from(ref_place)), span: DUMMY_SP }]
                        .into(),
                    destination: unit_temp,
                    target: Some(succ),
                    unwind: unwind.into_action(),
                    call_source: CallSource::Misc,
                    fn_span: self.source_info.span,
                },
                source_info: self.source_info,
            }),
            is_cleanup: unwind.is_cleanup(),
        };

        let destructor_block = self.elaborator.patch().new_block(result);

        // Clear the (shallow) drop flag before the destructor runs, so a
        // later drop of this path does not run it again.
        let block_start = Location { block: destructor_block, statement_index: 0 };
        self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);

        self.drop_flag_test_block(destructor_block, succ, unwind)
    }
982
    /// Creates the destructor call for `self.place`: the async drop machinery
    /// when every async-drop precondition holds (feature gate enabled, we are
    /// in a coroutine body, the elaborator allows async drops, we are not on
    /// a cleanup path, and the type uses `AsyncDrop`); otherwise a plain
    /// synchronous `Drop::drop` call.
    fn destructor_call_block(
        &mut self,
        (succ, unwind, dropline): (BasicBlock, Unwind, Option<BasicBlock>),
    ) -> BasicBlock {
        debug!("destructor_call_block({:?}, {:?})", self, succ);
        let ty = self.place_ty(self.place);
        if self.tcx().features().async_drop()
            && self.elaborator.body().coroutine.is_some()
            && self.elaborator.allow_async_drops()
            && !unwind.is_cleanup()
            && ty.is_async_drop(self.tcx(), self.elaborator.typing_env())
        {
            let destructor_block =
                self.build_async_drop(self.place, ty, None, succ, unwind, dropline, true);

            // Same flag discipline as the sync path: clear the shallow drop
            // flag at the start of the destructor block, then guard with a
            // drop-flag test.
            let block_start = Location { block: destructor_block, statement_index: 0 };
            self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);

            self.drop_flag_test_block(destructor_block, succ, unwind)
        } else {
            self.destructor_call_block_sync((succ, unwind))
        }
    }
1006
    /// Create a loop that drops an array:
    ///
    /// ```text
    /// loop-block:
    ///    can_go = cur == len
    ///    if can_go then succ else drop-block
    /// drop-block:
    ///    ptr = &raw mut P[cur]
    ///    cur = cur + 1
    ///    drop(ptr)
    /// ```
    ///
    /// `cur` and `len` are caller-supplied locals; the caller is responsible
    /// for initializing them before the returned `loop-block` runs (see
    /// `drop_loop_trio_for_slice`).
    fn drop_loop(
        &mut self,
        succ: BasicBlock,
        cur: Local,
        len: Local,
        ety: Ty<'tcx>,
        unwind: Unwind,
        dropline: Option<BasicBlock>,
    ) -> BasicBlock {
        let copy = |place: Place<'tcx>| Operand::Copy(place);
        let move_ = |place: Place<'tcx>| Operand::Move(place);
        let tcx = self.tcx();

        let ptr_ty = Ty::new_mut_ptr(tcx, ety);
        let ptr = Place::from(self.new_temp(ptr_ty));
        let can_go = Place::from(self.new_temp(tcx.types.bool));
        let one = self.constant_usize(1);

        // drop-block: compute `ptr = &raw mut P[cur]`, then advance `cur`
        // before the drop runs; the element itself is dropped through `ptr`.
        let drop_block = BasicBlockData {
            statements: vec![
                self.assign(
                    ptr,
                    Rvalue::RawPtr(RawPtrKind::Mut, tcx.mk_place_index(self.place, cur)),
                ),
                self.assign(
                    cur.into(),
                    Rvalue::BinaryOp(BinOp::Add, Box::new((move_(cur.into()), one))),
                ),
            ],
            is_cleanup: unwind.is_cleanup(),
            terminator: Some(Terminator {
                source_info: self.source_info,
                // this gets overwritten by drop elaboration.
                kind: TerminatorKind::Unreachable,
            }),
        };
        let drop_block = self.elaborator.patch().new_block(drop_block);

        // loop-block: exit to `succ` once `cur == len`, else drop an element.
        let loop_block = BasicBlockData {
            statements: vec![self.assign(
                can_go,
                Rvalue::BinaryOp(BinOp::Eq, Box::new((copy(Place::from(cur)), copy(len.into())))),
            )],
            is_cleanup: unwind.is_cleanup(),
            terminator: Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::if_(move_(can_go), succ, drop_block),
            }),
        };
        let loop_block = self.elaborator.patch().new_block(loop_block);

        // Now fill in drop-block's terminator: either the async drop
        // machinery or a plain `Drop` of `*ptr` looping back to loop-block.
        let place = tcx.mk_place_deref(ptr);
        if self.tcx().features().async_drop()
            && self.elaborator.body().coroutine.is_some()
            && self.elaborator.allow_async_drops()
            && !unwind.is_cleanup()
            && ety.needs_async_drop(self.tcx(), self.elaborator.typing_env())
        {
            self.build_async_drop(
                place,
                ety,
                Some(drop_block),
                loop_block,
                unwind,
                dropline,
                false,
            );
        } else {
            self.elaborator.patch().patch_terminator(
                drop_block,
                TerminatorKind::Drop {
                    place,
                    target: loop_block,
                    unwind: unwind.into_action(),
                    replace: false,
                    drop: None,
                    async_fut: None,
                },
            );
        }
        loop_block
    }
1100
    /// Drops the contents of an array of element type `ety`.
    ///
    /// When `opt_size` is known and some elements were moved out (there is an
    /// `array_subpath` for them), emits a drop ladder over the kept indexes
    /// and the still-dropped subslices. Otherwise unsizes the array to a
    /// slice behind a raw pointer and delegates to the slice drop-loop trio.
    fn open_drop_for_array(
        &mut self,
        array_ty: Ty<'tcx>,
        ety: Ty<'tcx>,
        opt_size: Option<u64>,
    ) -> BasicBlock {
        debug!("open_drop_for_array({:?}, {:?}, {:?})", array_ty, ety, opt_size);
        let tcx = self.tcx();

        if let Some(size) = opt_size {
            enum ProjectionKind<Path> {
                Drop(std::ops::Range<u64>),
                Keep(u64, Path),
            }
            // Previously, we'd make a projection for every element in the array and create a drop
            // ladder if any `array_subpath` was `Some`, i.e. moving out with an array pattern.
            // This caused huge memory usage when generating the drops for large arrays, so we instead
            // record the *subslices* which are dropped and the *indexes* which are kept
            let mut drop_ranges = vec![];
            // `dropping` tracks whether we are currently inside a run of
            // to-be-dropped elements starting at `start`.
            let mut dropping = true;
            let mut start = 0;
            for i in 0..size {
                let path = self.elaborator.array_subpath(self.path, i, size);
                if dropping && path.is_some() {
                    // A tracked (kept) index terminates the current drop run.
                    drop_ranges.push(ProjectionKind::Drop(start..i));
                    dropping = false;
                } else if !dropping && path.is_none() {
                    dropping = true;
                    start = i;
                }
                if let Some(path) = path {
                    drop_ranges.push(ProjectionKind::Keep(i, path));
                }
            }
            // `drop_ranges` is only non-empty if at least one subpath exists,
            // i.e. some element was moved out of the array.
            if !drop_ranges.is_empty() {
                if dropping {
                    drop_ranges.push(ProjectionKind::Drop(start..size));
                }
                // Turn each range/index into a place projection; reversed
                // because the drop ladder is built bottom-up.
                let fields = drop_ranges
                    .iter()
                    .rev()
                    .map(|p| {
                        let (project, path) = match p {
                            ProjectionKind::Drop(r) => (
                                ProjectionElem::Subslice {
                                    from: r.start,
                                    to: r.end,
                                    from_end: false,
                                },
                                None,
                            ),
                            &ProjectionKind::Keep(offset, path) => (
                                ProjectionElem::ConstantIndex {
                                    offset,
                                    min_length: size,
                                    from_end: false,
                                },
                                Some(path),
                            ),
                        };
                        (tcx.mk_place_elem(self.place, project), path)
                    })
                    .collect::<Vec<_>>();
                let (succ, unwind, dropline) = self.drop_ladder_bottom();
                return self.drop_ladder(fields, succ, unwind, dropline).0;
            }
        }

        // Fallback: cast `&raw mut place` to a `*mut [ety]` and let the slice
        // drop loop handle the elements.
        let array_ptr_ty = Ty::new_mut_ptr(tcx, array_ty);
        let array_ptr = self.new_temp(array_ptr_ty);

        let slice_ty = Ty::new_slice(tcx, ety);
        let slice_ptr_ty = Ty::new_mut_ptr(tcx, slice_ty);
        let slice_ptr = self.new_temp(slice_ptr_ty);

        let mut delegate_block = BasicBlockData {
            statements: vec![
                self.assign(Place::from(array_ptr), Rvalue::RawPtr(RawPtrKind::Mut, self.place)),
                self.assign(
                    Place::from(slice_ptr),
                    Rvalue::Cast(
                        CastKind::PointerCoercion(
                            PointerCoercion::Unsize,
                            CoercionSource::Implicit,
                        ),
                        Operand::Move(Place::from(array_ptr)),
                        slice_ptr_ty,
                    ),
                ),
            ],
            is_cleanup: self.unwind.is_cleanup(),
            terminator: None,
        };

        // Temporarily retarget `self.place` at `*slice_ptr` while building
        // the slice loops, then restore the original array place.
        let array_place = mem::replace(
            &mut self.place,
            Place::from(slice_ptr).project_deeper(&[PlaceElem::Deref], tcx),
        );
        let slice_block = self.drop_loop_trio_for_slice(ety);
        self.place = array_place;

        delegate_block.terminator = Some(Terminator {
            source_info: self.source_info,
            kind: TerminatorKind::Goto { target: slice_block },
        });
        self.elaborator.patch().new_block(delegate_block)
    }
1208
    /// Creates a trio of drop-loops of `place`, which drops its contents,
    /// even in the case of a panic or in the case of coroutine drop.
    ///
    /// All three loops (normal, unwind/cleanup, and — when a dropline exists —
    /// coroutine-drop) share the same `cur`/`len` locals, which are
    /// initialized in the entry block built here.
    fn drop_loop_trio_for_slice(&mut self, ety: Ty<'tcx>) -> BasicBlock {
        debug!("drop_loop_trio_for_slice({:?})", ety);
        let tcx = self.tcx();
        let len = self.new_temp(tcx.types.usize);
        let cur = self.new_temp(tcx.types.usize);

        // Cleanup loop, entered if a drop in the main loop panics. Built
        // first so the other loops can reference it as their unwind target.
        let unwind = self
            .unwind
            .map(|unwind| self.drop_loop(unwind, cur, len, ety, Unwind::InCleanup, None));

        // Coroutine-drop loop, if this drop has a dropline target.
        let dropline =
            self.dropline.map(|dropline| self.drop_loop(dropline, cur, len, ety, unwind, None));

        // The main drop loop.
        let loop_block = self.drop_loop(self.succ, cur, len, ety, unwind, dropline);

        // `len` is recovered from the slice pointer's metadata below, so the
        // place must be exactly a deref of a local (`*_n`).
        let [PlaceElem::Deref] = self.place.projection.as_slice() else {
            span_bug!(
                self.source_info.span,
                "Expected place for slice drop shim to be *_n, but it's {:?}",
                self.place,
            );
        };

        // Entry block: `len = PtrMetadata(_n); cur = 0; goto loop_block`.
        let zero = self.constant_usize(0);
        let block = BasicBlockData {
            statements: vec![
                self.assign(
                    len.into(),
                    Rvalue::UnaryOp(
                        UnOp::PtrMetadata,
                        Operand::Copy(Place::from(self.place.local)),
                    ),
                ),
                self.assign(cur.into(), Rvalue::Use(zero)),
            ],
            is_cleanup: unwind.is_cleanup(),
            terminator: Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::Goto { target: loop_block },
            }),
        };

        let drop_block = self.elaborator.patch().new_block(block);
        // FIXME(#34708): handle partially-dropped array/slice elements.
        let reset_block = self.drop_flag_reset_block(DropFlagMode::Deep, drop_block, unwind);
        self.drop_flag_test_block(reset_block, self.succ, unwind)
    }
1258
    /// The slow-path - create an "open", elaborated drop for a type
    /// which is moved-out-of only partially, and patch `bb` to a jump
    /// to it. This must not be called on ADTs with a destructor,
    /// as these can't be moved-out-of, except for `Box<T>`, which is
    /// special-cased.
    ///
    /// This creates a "drop ladder" that drops the needed fields of the
    /// ADT, both in the success case or if one of the destructors fail.
    fn open_drop(&mut self) -> BasicBlock {
        let ty = self.place_ty(self.place);
        match ty.kind() {
            // Closure-like types drop their captured upvars like a tuple.
            ty::Closure(_, args) => self.open_drop_for_tuple(args.as_closure().upvar_tys()),
            ty::CoroutineClosure(_, args) => {
                self.open_drop_for_tuple(args.as_coroutine_closure().upvar_tys())
            }
            // Note that `elaborate_drops` only drops the upvars of a coroutine,
            // and this is ok because `open_drop` here can only be reached
            // within that own coroutine's resume function.
            // This should only happen for the self argument on the resume function.
            // It effectively only contains upvars until the coroutine transformation runs.
            // See librustc_body/transform/coroutine.rs for more details.
            ty::Coroutine(_, args) => self.open_drop_for_tuple(args.as_coroutine().upvar_tys()),
            ty::Tuple(fields) => self.open_drop_for_tuple(fields),
            ty::Adt(def, args) => self.open_drop_for_adt(*def, args),
            // `dyn Trait`: no fields to recurse into; emit a plain drop.
            ty::Dynamic(..) => self.complete_drop(self.succ, self.unwind),
            ty::Array(ety, size) => {
                // A length that isn't statically known falls back to the
                // slice drop loop inside `open_drop_for_array`.
                let size = size.try_to_target_usize(self.tcx());
                self.open_drop_for_array(ty, *ety, size)
            }
            ty::Slice(ety) => self.drop_loop_trio_for_slice(*ety),

            ty::UnsafeBinder(_) => {
                // Unsafe binders may elaborate drops if their inner type isn't copy.
                // This is enforced in typeck, so this should never happen.
                self.tcx().dcx().span_delayed_bug(
                    self.source_info.span,
                    "open drop for unsafe binder shouldn't be encountered",
                );
                self.elaborator.patch().new_block(BasicBlockData {
                    statements: vec![],
                    terminator: Some(Terminator {
                        source_info: self.source_info,
                        kind: TerminatorKind::Unreachable,
                    }),
                    is_cleanup: self.unwind.is_cleanup(),
                })
            }

            _ => span_bug!(self.source_info.span, "open drop from non-ADT `{:?}`", ty),
        }
    }
1310
1311    fn complete_drop(&mut self, succ: BasicBlock, unwind: Unwind) -> BasicBlock {
1312        debug!("complete_drop(succ={:?}, unwind={:?})", succ, unwind);
1313
1314        let drop_block = self.drop_block(succ, unwind);
1315
1316        self.drop_flag_test_block(drop_block, succ, unwind)
1317    }
1318
1319    /// Creates a block that resets the drop flag. If `mode` is deep, all children drop flags will
1320    /// also be cleared.
1321    fn drop_flag_reset_block(
1322        &mut self,
1323        mode: DropFlagMode,
1324        succ: BasicBlock,
1325        unwind: Unwind,
1326    ) -> BasicBlock {
1327        debug!("drop_flag_reset_block({:?},{:?})", self, mode);
1328
1329        if unwind.is_cleanup() {
1330            // The drop flag isn't read again on the unwind path, so don't
1331            // bother setting it.
1332            return succ;
1333        }
1334        let block = self.new_block(unwind, TerminatorKind::Goto { target: succ });
1335        let block_start = Location { block, statement_index: 0 };
1336        self.elaborator.clear_drop_flag(block_start, self.path, mode);
1337        block
1338    }
1339
1340    fn elaborated_drop_block(&mut self) -> BasicBlock {
1341        debug!("elaborated_drop_block({:?})", self);
1342        let blk = self.drop_block_simple(self.succ, self.unwind);
1343        self.elaborate_drop(blk);
1344        blk
1345    }
1346
1347    fn drop_block_simple(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
1348        let block = TerminatorKind::Drop {
1349            place: self.place,
1350            target,
1351            unwind: unwind.into_action(),
1352            replace: false,
1353            drop: self.dropline,
1354            async_fut: None,
1355        };
1356        self.new_block(unwind, block)
1357    }
1358
    /// Creates a block that drops `self.place`. When all async-drop
    /// preconditions hold (feature gate, coroutine body, async drops allowed,
    /// not a cleanup path, type needs async drop), builds the async drop
    /// machinery; otherwise a plain `Drop` terminator targeting `target`.
    ///
    /// NOTE(review): the async path uses `self.succ`/`self.dropline` rather
    /// than the `target` parameter, unlike the sync path and
    /// `drop_block_simple` — confirm this asymmetry is intended for every
    /// caller that passes `target != self.succ`.
    fn drop_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
        let drop_ty = self.place_ty(self.place);
        if self.tcx().features().async_drop()
            && self.elaborator.body().coroutine.is_some()
            && self.elaborator.allow_async_drops()
            && !unwind.is_cleanup()
            && drop_ty.needs_async_drop(self.tcx(), self.elaborator.typing_env())
        {
            self.build_async_drop(
                self.place,
                drop_ty,
                None,
                self.succ,
                unwind,
                self.dropline,
                false,
            )
        } else {
            let block = TerminatorKind::Drop {
                place: self.place,
                target,
                unwind: unwind.into_action(),
                replace: false,
                drop: None,
                async_fut: None,
            };
            self.new_block(unwind, block)
        }
    }
1388
1389    fn goto_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
1390        let block = TerminatorKind::Goto { target };
1391        self.new_block(unwind, block)
1392    }
1393
1394    /// Returns the block to jump to in order to test the drop flag and execute the drop.
1395    ///
1396    /// Depending on the required `DropStyle`, this might be a generated block with an `if`
1397    /// terminator (for dynamic/open drops), or it might be `on_set` or `on_unset` itself, in case
1398    /// the drop can be statically determined.
1399    fn drop_flag_test_block(
1400        &mut self,
1401        on_set: BasicBlock,
1402        on_unset: BasicBlock,
1403        unwind: Unwind,
1404    ) -> BasicBlock {
1405        let style = self.elaborator.drop_style(self.path, DropFlagMode::Shallow);
1406        debug!(
1407            "drop_flag_test_block({:?},{:?},{:?},{:?}) - {:?}",
1408            self, on_set, on_unset, unwind, style
1409        );
1410
1411        match style {
1412            DropStyle::Dead => on_unset,
1413            DropStyle::Static => on_set,
1414            DropStyle::Conditional | DropStyle::Open => {
1415                let flag = self.elaborator.get_drop_flag(self.path).unwrap();
1416                let term = TerminatorKind::if_(flag, on_set, on_unset);
1417                self.new_block(unwind, term)
1418            }
1419        }
1420    }
1421
1422    fn new_block(&mut self, unwind: Unwind, k: TerminatorKind<'tcx>) -> BasicBlock {
1423        self.elaborator.patch().new_block(BasicBlockData {
1424            statements: vec![],
1425            terminator: Some(Terminator { source_info: self.source_info, kind: k }),
1426            is_cleanup: unwind.is_cleanup(),
1427        })
1428    }
1429
1430    fn new_block_with_statements(
1431        &mut self,
1432        unwind: Unwind,
1433        statements: Vec<Statement<'tcx>>,
1434        k: TerminatorKind<'tcx>,
1435    ) -> BasicBlock {
1436        self.elaborator.patch().new_block(BasicBlockData {
1437            statements,
1438            terminator: Some(Terminator { source_info: self.source_info, kind: k }),
1439            is_cleanup: unwind.is_cleanup(),
1440        })
1441    }
1442
1443    fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
1444        self.elaborator.patch().new_temp(ty, self.source_info.span)
1445    }
1446
1447    fn constant_usize(&self, val: u16) -> Operand<'tcx> {
1448        Operand::Constant(Box::new(ConstOperand {
1449            span: self.source_info.span,
1450            user_ty: None,
1451            const_: Const::from_usize(self.tcx(), val.into()),
1452        }))
1453    }
1454
1455    fn assign(&self, lhs: Place<'tcx>, rhs: Rvalue<'tcx>) -> Statement<'tcx> {
1456        Statement {
1457            source_info: self.source_info,
1458            kind: StatementKind::Assign(Box::new((lhs, rhs))),
1459        }
1460    }
1461}