Commit e1ed8e2
Unrolled build for #143672

Rollup merge of #143672 - beepster4096:box_drop_flags_again, r=oli-obk

Fix Box allocator drop elaboration

New version of #131146. Clearing Box's drop flag after running its destructor can cause it to skip dropping its allocator, so just don't. It's cleared by the drop ladder code afterwards already.

Unlike the last PR, this also handles other types with destructors properly, in the event that we can have open drops on them in the future (by partial initialization or DerefMove or something).

Finally, I also added tests for the interaction with async drop here, but I discovered #143658, so one of the tests has a `known-bug` annotation. Not sure if it should be in this PR at all, though.

Fixes #131082

r? wesleywiser (prev. reviewer)

2 parents 32e7a4b + ccc2f78 commit e1ed8e2
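For context, the bug this fixes is observable from safe code. The sketch below is not part of this commit; it is a runnable variant of the regression test added later in this commit (nightly-only, since `allocator_api` and `Box::new_in` are unstable). Before this change, the branch that moves the box's contents out could skip the allocator's destructor, so "allocator dropped" would not print on every path.

#![feature(allocator_api)]

use std::alloc::{AllocError, Allocator, Global, Layout};
use std::ptr::NonNull;

struct DropAllocator;

unsafe impl Allocator for DropAllocator {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        Global.allocate(layout)
    }
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        Global.deallocate(ptr, layout);
    }
}

impl Drop for DropAllocator {
    // Must run whenever the Box itself is dropped, even if its
    // contents were already moved out.
    fn drop(&mut self) { println!("allocator dropped"); }
}

struct HasDrop;
impl Drop for HasDrop {
    fn drop(&mut self) { println!("contents dropped"); }
}

fn main() {
    let b = Box::new_in(HasDrop, DropAllocator);
    if true {
        drop(*b); // moves the contents out; the Box (and its allocator) drop at scope end
    } else {
        drop(b);
    }
}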

File tree

8 files changed: +551 -26 lines changed


compiler/rustc_mir_transform/src/elaborate_drop.rs

Lines changed: 28 additions & 26 deletions
@@ -761,24 +761,37 @@ where
 
         let skip_contents = adt.is_union() || adt.is_manually_drop();
         let contents_drop = if skip_contents {
+            if adt.has_dtor(self.tcx()) && self.elaborator.get_drop_flag(self.path).is_some() {
+                // the top-level drop flag is usually cleared by open_drop_for_adt_contents
+                // types with destructors would still need an empty drop ladder to clear it
+
+                // however, these types are only open dropped in `DropShimElaborator`
+                // which does not have drop flags
+                // a future box-like "DerefMove" trait would allow for this case to happen
+                span_bug!(self.source_info.span, "open dropping partially moved union");
+            }
+
             (self.succ, self.unwind, self.dropline)
         } else {
             self.open_drop_for_adt_contents(adt, args)
         };
 
-        if adt.is_box() {
-            // we need to drop the inside of the box before running the destructor
-            let succ = self.destructor_call_block_sync((contents_drop.0, contents_drop.1));
-            let unwind = contents_drop
-                .1
-                .map(|unwind| self.destructor_call_block_sync((unwind, Unwind::InCleanup)));
-            let dropline = contents_drop
-                .2
-                .map(|dropline| self.destructor_call_block_sync((dropline, contents_drop.1)));
-
-            self.open_drop_for_box_contents(adt, args, succ, unwind, dropline)
-        } else if adt.has_dtor(self.tcx()) {
-            self.destructor_call_block(contents_drop)
+        if adt.has_dtor(self.tcx()) {
+            let destructor_block = if adt.is_box() {
+                // we need to drop the inside of the box before running the destructor
+                let succ = self.destructor_call_block_sync((contents_drop.0, contents_drop.1));
+                let unwind = contents_drop
+                    .1
+                    .map(|unwind| self.destructor_call_block_sync((unwind, Unwind::InCleanup)));
+                let dropline = contents_drop
+                    .2
+                    .map(|dropline| self.destructor_call_block_sync((dropline, contents_drop.1)));
+                self.open_drop_for_box_contents(adt, args, succ, unwind, dropline)
+            } else {
+                self.destructor_call_block(contents_drop)
+            };
+
+            self.drop_flag_test_block(destructor_block, contents_drop.0, contents_drop.1)
         } else {
             contents_drop.0
         }
@@ -982,12 +995,7 @@ where
             unwind.is_cleanup(),
         );
 
-        let destructor_block = self.elaborator.patch().new_block(result);
-
-        let block_start = Location { block: destructor_block, statement_index: 0 };
-        self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
-
-        self.drop_flag_test_block(destructor_block, succ, unwind)
+        self.elaborator.patch().new_block(result)
     }
 
     fn destructor_call_block(
@@ -1002,13 +1010,7 @@ where
             && !unwind.is_cleanup()
             && ty.is_async_drop(self.tcx(), self.elaborator.typing_env())
         {
-            let destructor_block =
-                self.build_async_drop(self.place, ty, None, succ, unwind, dropline, true);
-
-            let block_start = Location { block: destructor_block, statement_index: 0 };
-            self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
-
-            self.drop_flag_test_block(destructor_block, succ, unwind)
+            self.build_async_drop(self.place, ty, None, succ, unwind, dropline, true)
         } else {
             self.destructor_call_block_sync((succ, unwind))
         }
box_conditional_drop_allocator.main.ElaborateDrops.diff (new file)

Lines changed: 186 additions & 0 deletions

@@ -0,0 +1,186 @@
- // MIR for `main` before ElaborateDrops
+ // MIR for `main` after ElaborateDrops
  
  fn main() -> () {
      let mut _0: ();
      let _1: std::boxed::Box<HasDrop, DropAllocator>;
      let mut _2: HasDrop;
      let mut _3: DropAllocator;
      let mut _4: bool;
      let _5: ();
      let mut _6: HasDrop;
      let _7: ();
      let mut _8: std::boxed::Box<HasDrop, DropAllocator>;
+     let mut _9: bool;
+     let mut _10: &mut std::boxed::Box<HasDrop, DropAllocator>;
+     let mut _11: ();
+     let mut _12: &mut std::boxed::Box<HasDrop, DropAllocator>;
+     let mut _13: ();
+     let mut _14: *const HasDrop;
+     let mut _15: &mut std::boxed::Box<HasDrop, DropAllocator>;
+     let mut _16: ();
+     let mut _17: *const HasDrop;
      scope 1 {
          debug b => _1;
      }
  
      bb0: {
+         _9 = const false;
          StorageLive(_1);
          StorageLive(_2);
          _2 = HasDrop;
          StorageLive(_3);
          _3 = DropAllocator;
          _1 = Box::<HasDrop, DropAllocator>::new_in(move _2, move _3) -> [return: bb1, unwind: bb11];
      }
  
      bb1: {
+         _9 = const true;
          StorageDead(_3);
          StorageDead(_2);
          StorageLive(_4);
          _4 = const true;
          switchInt(move _4) -> [0: bb4, otherwise: bb2];
      }
  
      bb2: {
          StorageLive(_5);
          StorageLive(_6);
          _6 = move (*_1);
          _5 = std::mem::drop::<HasDrop>(move _6) -> [return: bb3, unwind: bb9];
      }
  
      bb3: {
          StorageDead(_6);
          StorageDead(_5);
          _0 = const ();
          goto -> bb6;
      }
  
      bb4: {
          StorageLive(_7);
          StorageLive(_8);
+         _9 = const false;
          _8 = move _1;
          _7 = std::mem::drop::<Box<HasDrop, DropAllocator>>(move _8) -> [return: bb5, unwind: bb8];
      }
  
      bb5: {
          StorageDead(_8);
          StorageDead(_7);
          _0 = const ();
          goto -> bb6;
      }
  
      bb6: {
          StorageDead(_4);
-         drop(_1) -> [return: bb7, unwind continue];
+         goto -> bb23;
      }
  
      bb7: {
+         _9 = const false;
          StorageDead(_1);
          return;
      }
  
      bb8 (cleanup): {
-         drop(_8) -> [return: bb10, unwind terminate(cleanup)];
+         goto -> bb10;
      }
  
      bb9 (cleanup): {
-         drop(_6) -> [return: bb10, unwind terminate(cleanup)];
+         goto -> bb10;
      }
  
      bb10 (cleanup): {
-         drop(_1) -> [return: bb13, unwind terminate(cleanup)];
+         goto -> bb29;
      }
  
      bb11 (cleanup): {
-         drop(_3) -> [return: bb12, unwind terminate(cleanup)];
+         goto -> bb12;
      }
  
      bb12 (cleanup): {
-         drop(_2) -> [return: bb13, unwind terminate(cleanup)];
+         goto -> bb13;
      }
  
      bb13 (cleanup): {
          resume;
+     }
+ 
+     bb14: {
+         _9 = const false;
+         goto -> bb7;
+     }
+ 
+     bb15 (cleanup): {
+         drop((_1.1: DropAllocator)) -> [return: bb13, unwind terminate(cleanup)];
+     }
+ 
+     bb16 (cleanup): {
+         switchInt(copy _9) -> [0: bb13, otherwise: bb15];
+     }
+ 
+     bb17: {
+         drop((_1.1: DropAllocator)) -> [return: bb14, unwind: bb13];
+     }
+ 
+     bb18: {
+         switchInt(copy _9) -> [0: bb14, otherwise: bb17];
+     }
+ 
+     bb19: {
+         _10 = &mut _1;
+         _11 = <Box<HasDrop, DropAllocator> as Drop>::drop(move _10) -> [return: bb18, unwind: bb16];
+     }
+ 
+     bb20 (cleanup): {
+         _12 = &mut _1;
+         _13 = <Box<HasDrop, DropAllocator> as Drop>::drop(move _12) -> [return: bb16, unwind terminate(cleanup)];
+     }
+ 
+     bb21: {
+         goto -> bb19;
+     }
+ 
+     bb22: {
+         _14 = copy ((_1.0: std::ptr::Unique<HasDrop>).0: std::ptr::NonNull<HasDrop>) as *const HasDrop (Transmute);
+         goto -> bb21;
+     }
+ 
+     bb23: {
+         switchInt(copy _9) -> [0: bb18, otherwise: bb22];
+     }
+ 
+     bb24 (cleanup): {
+         drop((_1.1: DropAllocator)) -> [return: bb13, unwind terminate(cleanup)];
+     }
+ 
+     bb25 (cleanup): {
+         switchInt(copy _9) -> [0: bb13, otherwise: bb24];
+     }
+ 
+     bb26 (cleanup): {
+         _15 = &mut _1;
+         _16 = <Box<HasDrop, DropAllocator> as Drop>::drop(move _15) -> [return: bb25, unwind terminate(cleanup)];
+     }
+ 
+     bb27 (cleanup): {
+         goto -> bb26;
+     }
+ 
+     bb28 (cleanup): {
+         _17 = copy ((_1.0: std::ptr::Unique<HasDrop>).0: std::ptr::NonNull<HasDrop>) as *const HasDrop (Transmute);
+         goto -> bb27;
+     }
+ 
+     bb29 (cleanup): {
+         switchInt(copy _9) -> [0: bb25, otherwise: bb28];
      }
  }
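The fix is directly visible in the elaborated MIR above: the scope-end drop of _1 (bb6) and the unwind path (bb10) now branch into a flag-tested ladder (bb23 and bb29) that drops the allocator field (_1.1: DropAllocator) on both the return path (bb17) and the cleanup paths (bb15, bb24), and the shallow drop flag _9 is cleared only after the ladder completes (bb14, bb7).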
box_conditional_drop_allocator.rs (new file)

Lines changed: 39 additions & 0 deletions

@@ -0,0 +1,39 @@
+// skip-filecheck
+//@ test-mir-pass: ElaborateDrops
+//@ needs-unwind
+#![feature(allocator_api)]
+
+// Regression test for #131082.
+// Testing that the allocator of a Box is dropped in conditional drops
+
+use std::alloc::{AllocError, Allocator, Global, Layout};
+use std::ptr::NonNull;
+
+struct DropAllocator;
+
+unsafe impl Allocator for DropAllocator {
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+        Global.allocate(layout)
+    }
+    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+        Global.deallocate(ptr, layout);
+    }
+}
+impl Drop for DropAllocator {
+    fn drop(&mut self) {}
+}
+
+struct HasDrop;
+impl Drop for HasDrop {
+    fn drop(&mut self) {}
+}
+
+// EMIT_MIR box_conditional_drop_allocator.main.ElaborateDrops.diff
+fn main() {
+    let b = Box::new_in(HasDrop, DropAllocator);
+    if true {
+        drop(*b);
+    } else {
+        drop(b);
+    }
+}
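If the expected MIR needs regenerating after further changes to the pass, the standard mir-opt workflow should apply (assuming the test lives under tests/mir-opt/, as the EMIT_MIR directive suggests): ./x test tests/mir-opt --bless.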
