Commit 29a56a3

Auto merge of #122053 - erikdesjardins:alloca, r=nikic

Stop using LLVM struct types for alloca

The alloca type has no semantic meaning; only the size (and alignment, but we specify it explicitly) matters. Using `[N x i8]` is a more direct way to specify that we want `N` bytes, and avoids relying on LLVM's struct layout. It is likely that a future LLVM version will change to an untyped alloca representation.

Split out from #121577.

r? `@ghost`
2 parents c1feb3e + 6df27ef commit 29a56a3

36 files changed (+234, -226 lines)
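To make the change concrete, here is a minimal, self-contained sketch of the byte-based interface the commit message describes. `Size`, `Align`, and `TextBuilder` below are simplified stand-ins invented for illustration, not the real `rustc_target`/`rustc_codegen` types; the point is only that callers now hand the builder a byte count plus an explicit alignment, and the backend materializes an `[N x i8]` stack slot itself.

```rust
// Hypothetical stand-ins for rustc_target::abi::{Size, Align}.
#[derive(Clone, Copy)]
struct Size(u64);
impl Size {
    fn from_bytes(bytes: u64) -> Self { Size(bytes) }
    fn bytes(self) -> u64 { self.0 }
}

#[derive(Clone, Copy)]
struct Align(u64);
impl Align {
    fn bytes(self) -> u64 { self.0 }
}

// A toy "builder" that prints the IR it would emit instead of calling LLVM.
struct TextBuilder { stack_var_count: u32 }

impl TextBuilder {
    // Old shape: `fn alloca(&mut self, ty: Type, align: Align)`.
    // New shape: the type argument is gone; only size and alignment remain.
    fn alloca(&mut self, size: Size, align: Align) -> String {
        self.stack_var_count += 1;
        // The stack slot carries no struct layout, just `size` raw bytes.
        format!(
            "%stack_var_{} = alloca [{} x i8], align {}",
            self.stack_var_count,
            size.bytes(),
            align.bytes()
        )
    }
}

fn main() {
    let mut bx = TextBuilder { stack_var_count: 0 };
    // A 16-byte scratch slot aligned to 8, e.g. for a cast-ABI spill.
    println!("{}", bx.alloca(Size::from_bytes(16), Align(8)));
}
```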

compiler/rustc_codegen_gcc/src/builder.rs (+4, -10)

@@ -898,26 +898,20 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
         self.gcc_checked_binop(oop, typ, lhs, rhs)
     }
 
-    fn alloca(&mut self, ty: Type<'gcc>, align: Align) -> RValue<'gcc> {
-        // FIXME(antoyo): this check that we don't call get_aligned() a second time on a type.
-        // Ideally, we shouldn't need to do this check.
-        let aligned_type = if ty == self.cx.u128_type || ty == self.cx.i128_type {
-            ty
-        } else {
-            ty.get_aligned(align.bytes())
-        };
+    fn alloca(&mut self, size: Size, align: Align) -> RValue<'gcc> {
+        let ty = self.cx.type_array(self.cx.type_i8(), size.bytes()).get_aligned(align.bytes());
         // TODO(antoyo): It might be better to return a LValue, but fixing the rustc API is non-trivial.
         self.stack_var_count.set(self.stack_var_count.get() + 1);
         self.current_func()
             .new_local(
                 self.location,
-                aligned_type,
+                ty,
                 &format!("stack_var_{}", self.stack_var_count.get()),
             )
             .get_address(self.location)
     }
 
-    fn byte_array_alloca(&mut self, _len: RValue<'gcc>, _align: Align) -> RValue<'gcc> {
+    fn dynamic_alloca(&mut self, _len: RValue<'gcc>, _align: Align) -> RValue<'gcc> {
         unimplemented!();
     }

compiler/rustc_codegen_gcc/src/intrinsic/mod.rs (+1, -1)

@@ -531,7 +531,7 @@ impl<'gcc, 'tcx> ArgAbiExt<'gcc, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
             // We instead thus allocate some scratch space...
             let scratch_size = cast.size(bx);
             let scratch_align = cast.align(bx);
-            let llscratch = bx.alloca(cast.gcc_type(bx), scratch_align);
+            let llscratch = bx.alloca(scratch_size, scratch_align);
             bx.lifetime_start(llscratch, scratch_size);
 
             // ... where we first store the value...

compiler/rustc_codegen_gcc/src/intrinsic/simd.rs (+2, -2)

@@ -18,7 +18,7 @@ use rustc_middle::span_bug;
 use rustc_middle::ty::layout::HasTyCtxt;
 use rustc_middle::ty::{self, Ty};
 use rustc_span::{sym, Span, Symbol};
-use rustc_target::abi::Align;
+use rustc_target::abi::{Align, Size};
 
 use crate::builder::Builder;
 #[cfg(not(feature = "master"))]
@@ -558,7 +558,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
             let ze = bx.zext(result, bx.type_ix(expected_bytes * 8));
 
             // Convert the integer to a byte array
-            let ptr = bx.alloca(bx.type_ix(expected_bytes * 8), Align::ONE);
+            let ptr = bx.alloca(Size::from_bytes(expected_bytes), Align::ONE);
             bx.store(ze, ptr, Align::ONE);
             let array_ty = bx.type_array(bx.type_i8(), expected_bytes);
             let ptr = bx.pointercast(ptr, bx.cx.type_ptr_to(array_ty));

compiler/rustc_codegen_llvm/src/abi.rs (+1, -1)

@@ -227,7 +227,7 @@ impl<'ll, 'tcx> ArgAbiExt<'ll, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
                 // when passed by value, making it larger.
                 let copy_bytes = cmp::min(scratch_size.bytes(), self.layout.size.bytes());
                 // Allocate some scratch space...
-                let llscratch = bx.alloca(cast.llvm_type(bx), scratch_align);
+                let llscratch = bx.alloca(scratch_size, scratch_align);
                 bx.lifetime_start(llscratch, scratch_size);
                 // ...store the value...
                 bx.store(val, llscratch, scratch_align);

compiler/rustc_codegen_llvm/src/builder.rs (+4, -3)

@@ -468,20 +468,21 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         val
     }
 
-    fn alloca(&mut self, ty: &'ll Type, align: Align) -> &'ll Value {
+    fn alloca(&mut self, size: Size, align: Align) -> &'ll Value {
         let mut bx = Builder::with_cx(self.cx);
         bx.position_at_start(unsafe { llvm::LLVMGetFirstBasicBlock(self.llfn()) });
+        let ty = self.cx().type_array(self.cx().type_i8(), size.bytes());
         unsafe {
             let alloca = llvm::LLVMBuildAlloca(bx.llbuilder, ty, UNNAMED);
             llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
             alloca
         }
     }
 
-    fn byte_array_alloca(&mut self, len: &'ll Value, align: Align) -> &'ll Value {
+    fn dynamic_alloca(&mut self, size: &'ll Value, align: Align) -> &'ll Value {
         unsafe {
             let alloca =
-                llvm::LLVMBuildArrayAlloca(self.llbuilder, self.cx().type_i8(), len, UNNAMED);
+                llvm::LLVMBuildArrayAlloca(self.llbuilder, self.cx().type_i8(), size, UNNAMED);
             llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
             alloca
         }

compiler/rustc_codegen_llvm/src/intrinsic.rs (+10, -10)

@@ -18,7 +18,7 @@ use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt, LayoutOf};
 use rustc_middle::ty::{self, GenericArgsRef, Ty};
 use rustc_middle::{bug, span_bug};
 use rustc_span::{sym, Span, Symbol};
-use rustc_target::abi::{self, Align, HasDataLayout, Primitive};
+use rustc_target::abi::{self, Align, HasDataLayout, Primitive, Size};
 use rustc_target::spec::{HasTargetSpec, PanicStrategy};
 
 use std::cmp::Ordering;
@@ -649,8 +649,9 @@ fn codegen_msvc_try<'ll>(
     // }
     //
     // More information can be found in libstd's seh.rs implementation.
+    let ptr_size = bx.tcx().data_layout.pointer_size;
     let ptr_align = bx.tcx().data_layout.pointer_align.abi;
-    let slot = bx.alloca(bx.type_ptr(), ptr_align);
+    let slot = bx.alloca(ptr_size, ptr_align);
     let try_func_ty = bx.type_func(&[bx.type_ptr()], bx.type_void());
     bx.invoke(try_func_ty, None, None, try_func, &[data], normal, catchswitch, None, None);
 
@@ -920,15 +921,14 @@ fn codegen_emcc_try<'ll>(
 
     // We need to pass two values to catch_func (ptr and is_rust_panic), so
     // create an alloca and pass a pointer to that.
+    let ptr_size = bx.tcx().data_layout.pointer_size;
     let ptr_align = bx.tcx().data_layout.pointer_align.abi;
     let i8_align = bx.tcx().data_layout.i8_align.abi;
-    let catch_data_type = bx.type_struct(&[bx.type_ptr(), bx.type_bool()], false);
-    let catch_data = bx.alloca(catch_data_type, ptr_align);
-    let catch_data_0 =
-        bx.inbounds_gep(catch_data_type, catch_data, &[bx.const_usize(0), bx.const_usize(0)]);
-    bx.store(ptr, catch_data_0, ptr_align);
-    let catch_data_1 =
-        bx.inbounds_gep(catch_data_type, catch_data, &[bx.const_usize(0), bx.const_usize(1)]);
+    // Required in order for there to be no padding between the fields.
+    assert!(i8_align <= ptr_align);
+    let catch_data = bx.alloca(2 * ptr_size, ptr_align);
+    bx.store(ptr, catch_data, ptr_align);
+    let catch_data_1 = bx.inbounds_ptradd(catch_data, bx.const_usize(ptr_size.bytes()));
     bx.store(is_rust_panic, catch_data_1, i8_align);
 
     let catch_ty = bx.type_func(&[bx.type_ptr(), bx.type_ptr()], bx.type_void());
@@ -1374,7 +1374,7 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
             let ze = bx.zext(i_, bx.type_ix(expected_bytes * 8));
 
             // Convert the integer to a byte array
-            let ptr = bx.alloca(bx.type_ix(expected_bytes * 8), Align::ONE);
+            let ptr = bx.alloca(Size::from_bytes(expected_bytes), Align::ONE);
             bx.store(ze, ptr, Align::ONE);
             let array_ty = bx.type_array(bx.type_i8(), expected_bytes);
             return Ok(bx.load(array_ty, ptr, Align::ONE));
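The `codegen_emcc_try` hunk above is the one place where dropping the struct-typed alloca forces a manual layout decision: the `{ ptr, bool }` pair is now stored into a raw slot of `2 * ptr_size` bytes, so the second field's offset has to be computed by hand, and that is only `ptr_size` when the bool's alignment does not exceed the pointer's (hence the new `assert!`). A small plain-Rust sketch of that reasoning, with a hypothetical `manual_field_offsets` helper that is not part of the compiler:

```rust
// Sketch of the no-padding precondition behind the codegen_emcc_try change.
fn manual_field_offsets(ptr_size: u64, ptr_align: u64, i8_align: u64) -> (u64, u64) {
    // Same precondition the real code asserts: no padding between the fields.
    assert!(i8_align <= ptr_align);
    let ptr_field_offset = 0;
    let bool_field_offset = ptr_size; // the flag starts right after the pointer
    (ptr_field_offset, bool_field_offset)
}

fn main() {
    // e.g. a 64-bit target: 8-byte pointers aligned to 8, bools aligned to 1.
    let (p, b) = manual_field_offsets(8, 8, 1);
    println!("ptr at offset {p}, is_rust_panic flag at offset {b}"); // 0 and 8
}
```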

compiler/rustc_codegen_ssa/src/base.rs (+1, -1)

@@ -508,7 +508,7 @@ fn get_argc_argv<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
         let ptr_size = bx.tcx().data_layout.pointer_size;
         let ptr_align = bx.tcx().data_layout.pointer_align.abi;
         let arg_argc = bx.const_int(cx.type_isize(), 2);
-        let arg_argv = bx.alloca(cx.type_array(cx.type_ptr(), 2), ptr_align);
+        let arg_argv = bx.alloca(2 * ptr_size, ptr_align);
         bx.store(param_handle, arg_argv, ptr_align);
         let arg_argv_el1 = bx.inbounds_ptradd(arg_argv, bx.const_usize(ptr_size.bytes()));
         bx.store(param_system_table, arg_argv_el1, ptr_align);

compiler/rustc_codegen_ssa/src/mir/block.rs (+1, -1)

@@ -1517,7 +1517,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                     // when passed by value, making it larger.
                     let copy_bytes = cmp::min(scratch_size.bytes(), arg.layout.size.bytes());
                     // Allocate some scratch space...
-                    let llscratch = bx.alloca(bx.cast_backend_type(cast), scratch_align);
+                    let llscratch = bx.alloca(scratch_size, scratch_align);
                     bx.lifetime_start(llscratch, scratch_size);
                     // ...memcpy the value...
                     bx.memcpy(

compiler/rustc_codegen_ssa/src/mir/operand.rs (+2, -2)

@@ -327,7 +327,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
                 let llfield_ty = bx.cx().backend_type(field);
 
                 // Can't bitcast an aggregate, so round trip through memory.
-                let llptr = bx.alloca(llfield_ty, field.align.abi);
+                let llptr = bx.alloca(field.size, field.align.abi);
                 bx.store(*llval, llptr, field.align.abi);
                 *llval = bx.load(llfield_ty, llptr, field.align.abi);
             }
@@ -470,7 +470,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
         let align_minus_1 = bx.sub(align, one);
         let size_extra = bx.add(size, align_minus_1);
         let min_align = Align::ONE;
-        let alloca = bx.byte_array_alloca(size_extra, min_align);
+        let alloca = bx.dynamic_alloca(size_extra, min_align);
         let address = bx.ptrtoint(alloca, bx.type_isize());
         let neg_address = bx.neg(address);
         let offset = bx.and(neg_address, align_minus_1);
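The surrounding context lines of that second hunk (unchanged by this commit apart from the `dynamic_alloca` rename) over-allocate by `align - 1` bytes at minimal alignment and then bump the start forward by `(-address) & (align - 1)`. A plain-Rust sketch of that align-up arithmetic, illustrative only and not compiler code:

```rust
// Offset needed to round `address` up to the next multiple of `align`
// (a power of two), matching the neg-and-mask trick in the context lines.
fn align_up_offset(address: u64, align: u64) -> u64 {
    assert!(align.is_power_of_two());
    address.wrapping_neg() & (align - 1)
}

fn main() {
    let align = 16;
    for address in [32u64, 33, 40, 47, 48] {
        let offset = align_up_offset(address, align);
        assert_eq!((address + offset) % align, 0);
        println!("address {address} + offset {offset} -> {}", address + offset);
    }
}
```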

compiler/rustc_codegen_ssa/src/mir/place.rs (+1, -1)

@@ -81,7 +81,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
         align: Align,
     ) -> Self {
         assert!(layout.is_sized(), "tried to statically allocate unsized place");
-        let tmp = bx.alloca(bx.cx().backend_type(layout), align);
+        let tmp = bx.alloca(layout.size, align);
         Self::new_sized_aligned(tmp, layout, align)
     }

compiler/rustc_codegen_ssa/src/traits/builder.rs (+2, -2)

@@ -144,8 +144,8 @@ pub trait BuilderMethods<'a, 'tcx>:
     }
     fn to_immediate_scalar(&mut self, val: Self::Value, scalar: Scalar) -> Self::Value;
 
-    fn alloca(&mut self, ty: Self::Type, align: Align) -> Self::Value;
-    fn byte_array_alloca(&mut self, len: Self::Value, align: Align) -> Self::Value;
+    fn alloca(&mut self, size: Size, align: Align) -> Self::Value;
+    fn dynamic_alloca(&mut self, size: Self::Value, align: Align) -> Self::Value;
 
     fn load(&mut self, ty: Self::Type, ptr: Self::Value, align: Align) -> Self::Value;
     fn volatile_load(&mut self, ty: Self::Type, ptr: Self::Value) -> Self::Value;

tests/assembly/stack-protector/stack-protector-heuristics-effect-windows-32bit.rs (+8, -22)

@@ -37,23 +37,9 @@ pub fn array_char(f: fn(*const char)) {
     f(&b as *const _);
     f(&c as *const _);
 
-    // Any type of local array variable leads to stack protection with the
-    // "strong" heuristic. The 'basic' heuristic only adds stack protection to
-    // functions with local array variables of a byte-sized type, however. Since
-    // 'char' is 4 bytes in Rust, this function is not protected by the 'basic'
-    // heuristic
-    //
-    // (This test *also* takes the address of the local stack variables. We
-    // cannot know that this isn't what triggers the `strong` heuristic.
-    // However, the test strategy of passing the address of a stack array to an
-    // external function is sufficient to trigger the `basic` heuristic (see
-    // test `array_u8_large()`). Since the `basic` heuristic only checks for the
-    // presence of stack-local array variables, we can be confident that this
-    // test also captures this part of the `strong` heuristic specification.)
-
     // all: __security_check_cookie
     // strong: __security_check_cookie
-    // basic-NOT: __security_check_cookie
+    // basic: __security_check_cookie
     // none-NOT: __security_check_cookie
     // missing-NOT: __security_check_cookie
 }
@@ -231,8 +217,8 @@ pub fn local_large_var_moved(f: fn(Gigastruct)) {
     // Even though the local variable conceptually doesn't have its address
     // taken, it's so large that the "move" is implemented with a reference to a
     // stack-local variable in the ABI. Consequently, this function *is*
-    // protected by the `strong` heuristic. This is also the case for
-    // rvalue-references in C++, regardless of struct size:
+    // protected. This is also the case for rvalue-references in C++,
+    // regardless of struct size:
     // ```
     // cat <<EOF | clang++ -O2 -fstack-protector-strong -S -x c++ - -o - | grep stack_chk
     // #include <cstdint>
@@ -246,7 +232,7 @@ pub fn local_large_var_moved(f: fn(Gigastruct)) {
 
     // all: __security_check_cookie
     // strong: __security_check_cookie
-    // basic-NOT: __security_check_cookie
+    // basic: __security_check_cookie
     // none-NOT: __security_check_cookie
     // missing-NOT: __security_check_cookie
 }
@@ -259,9 +245,9 @@ pub fn local_large_var_cloned(f: fn(Gigastruct)) {
     // A new instance of `Gigastruct` is passed to `f()`, without any apparent
    // connection to this stack frame. Still, since instances of `Gigastruct`
     // are sufficiently large, it is allocated in the caller stack frame and
-    // passed as a pointer. As such, this function is *also* protected by the
-    // `strong` heuristic, just like `local_large_var_moved`. This is also the
-    // case for pass-by-value of sufficiently large structs in C++:
+    // passed as a pointer. As such, this function is *also* protected, just
+    // like `local_large_var_moved`. This is also the case for pass-by-value
+    // of sufficiently large structs in C++:
     // ```
     // cat <<EOF | clang++ -O2 -fstack-protector-strong -S -x c++ - -o - | grep stack_chk
     // #include <cstdint>
@@ -276,7 +262,7 @@ pub fn local_large_var_cloned(f: fn(Gigastruct)) {
 
     // all: __security_check_cookie
     // strong: __security_check_cookie
-    // basic-NOT: __security_check_cookie
+    // basic: __security_check_cookie
     // none-NOT: __security_check_cookie
     // missing-NOT: __security_check_cookie
 }

tests/assembly/stack-protector/stack-protector-heuristics-effect-windows-64bit.rs (+8, -22)

@@ -37,23 +37,9 @@ pub fn array_char(f: fn(*const char)) {
     f(&b as *const _);
     f(&c as *const _);
 
-    // Any type of local array variable leads to stack protection with the
-    // "strong" heuristic. The 'basic' heuristic only adds stack protection to
-    // functions with local array variables of a byte-sized type, however. Since
-    // 'char' is 4 bytes in Rust, this function is not protected by the 'basic'
-    // heuristic
-    //
-    // (This test *also* takes the address of the local stack variables. We
-    // cannot know that this isn't what triggers the `strong` heuristic.
-    // However, the test strategy of passing the address of a stack array to an
-    // external function is sufficient to trigger the `basic` heuristic (see
-    // test `array_u8_large()`). Since the `basic` heuristic only checks for the
-    // presence of stack-local array variables, we can be confident that this
-    // test also captures this part of the `strong` heuristic specification.)
-
     // all: __security_check_cookie
     // strong: __security_check_cookie
-    // basic-NOT: __security_check_cookie
+    // basic: __security_check_cookie
     // none-NOT: __security_check_cookie
     // missing-NOT: __security_check_cookie
 }
@@ -239,8 +225,8 @@ pub fn local_large_var_moved(f: fn(Gigastruct)) {
     // Even though the local variable conceptually doesn't have its address
     // taken, it's so large that the "move" is implemented with a reference to a
     // stack-local variable in the ABI. Consequently, this function *is*
-    // protected by the `strong` heuristic. This is also the case for
-    // rvalue-references in C++, regardless of struct size:
+    // protected. This is also the case for rvalue-references in C++,
+    // regardless of struct size:
     // ```
     // cat <<EOF | clang++ -O2 -fstack-protector-strong -S -x c++ - -o - | grep stack_chk
     // #include <cstdint>
@@ -254,7 +240,7 @@ pub fn local_large_var_moved(f: fn(Gigastruct)) {
 
     // all: __security_check_cookie
     // strong: __security_check_cookie
-    // basic-NOT: __security_check_cookie
+    // basic: __security_check_cookie
     // none-NOT: __security_check_cookie
     // missing-NOT: __security_check_cookie
 }
@@ -267,9 +253,9 @@ pub fn local_large_var_cloned(f: fn(Gigastruct)) {
     // A new instance of `Gigastruct` is passed to `f()`, without any apparent
     // connection to this stack frame. Still, since instances of `Gigastruct`
     // are sufficiently large, it is allocated in the caller stack frame and
-    // passed as a pointer. As such, this function is *also* protected by the
-    // `strong` heuristic, just like `local_large_var_moved`. This is also the
-    // case for pass-by-value of sufficiently large structs in C++:
+    // passed as a pointer. As such, this function is *also* protected, just
+    // like `local_large_var_moved`. This is also the case for pass-by-value
+    // of sufficiently large structs in C++:
     // ```
     // cat <<EOF | clang++ -O2 -fstack-protector-strong -S -x c++ - -o - | grep stack_chk
     // #include <cstdint>
@@ -284,7 +270,7 @@ pub fn local_large_var_cloned(f: fn(Gigastruct)) {
 
     // all: __security_check_cookie
     // strong: __security_check_cookie
-    // basic-NOT: __security_check_cookie
+    // basic: __security_check_cookie
     // none-NOT: __security_check_cookie
     // missing-NOT: __security_check_cookie
 }
