Rollup of 8 pull requests #123429

Merged 26 commits on Apr 3, 2024
Commits (26)
df93364
Added ability to report on generic argument mismatch better
strottos Feb 25, 2024
8a5245e
Refactored a few bits:
strottos Feb 26, 2024
4498cd6
extend extern tests to include FiveU16s
erikdesjardins Mar 17, 2024
41c6fa8
sparc64: fix crash in ABI code for { f64, f32 } struct
erikdesjardins Mar 17, 2024
74ef47e
make CastTarget::size and CastTarget::llvm_type consistent, remove
erikdesjardins Mar 17, 2024
8841315
make PassMode::Cast consistently copy between Rust/ABI representation
erikdesjardins Mar 17, 2024
8d5fd94
add tests for PassMode::Cast fixes
erikdesjardins Mar 17, 2024
6577aef
Revert "sparc64: fix crash in ABI code for { f64, f32 } struct"
erikdesjardins Mar 17, 2024
dec81ac
disable crashing test on sparc
erikdesjardins Mar 17, 2024
4c0aea0
Move some tests
c410-f3r Mar 31, 2024
8cf2c0d
Improve debugging experience
Nadrieril Mar 29, 2024
db9b4ea
Add tests
Nadrieril Mar 31, 2024
27704c7
Fix union handling in exhaustiveness
Nadrieril Mar 31, 2024
ab821ae
Fix precedence of postfix match
compiler-errors Apr 2, 2024
4cb5643
Fix contains_exterior_struct_lit
compiler-errors Apr 2, 2024
989660c
rename `expose_addr` to `expose_provenance`
joboet Apr 3, 2024
b40ea03
rustc_index: Add a `ZERO` constant to index types
petrochenkov Apr 3, 2024
6edb021
Fix target name in NetBSD platform-support doc
taiki-e Apr 3, 2024
32c8c5c
Rollup merge of #121595 - strottos:issue_116615, r=compiler-errors
matthiaskrgr Apr 3, 2024
bc8415b
Rollup merge of #122619 - erikdesjardins:cast, r=compiler-errors
matthiaskrgr Apr 3, 2024
80d592c
Rollup merge of #122964 - joboet:pointer_expose, r=Amanieu
matthiaskrgr Apr 3, 2024
0c8c18f
Rollup merge of #123291 - c410-f3r:testsssssss, r=petrochenkov
matthiaskrgr Apr 3, 2024
5f74403
Rollup merge of #123301 - Nadrieril:unions, r=compiler-errors
matthiaskrgr Apr 3, 2024
202509b
Rollup merge of #123395 - compiler-errors:postfix-matches-fixes-2, r=…
matthiaskrgr Apr 3, 2024
25b0e84
Rollup merge of #123419 - petrochenkov:zeroindex, r=compiler-errors
matthiaskrgr Apr 3, 2024
65398c4
Rollup merge of #123421 - taiki-e:netbsd-doc, r=Nilstrieb
matthiaskrgr Apr 3, 2024
3 changes: 2 additions & 1 deletion compiler/rustc_ast/src/ast.rs
@@ -1276,7 +1276,8 @@ impl Expr {
ExprKind::While(..) => ExprPrecedence::While,
ExprKind::ForLoop { .. } => ExprPrecedence::ForLoop,
ExprKind::Loop(..) => ExprPrecedence::Loop,
ExprKind::Match(..) => ExprPrecedence::Match,
ExprKind::Match(_, _, MatchKind::Prefix) => ExprPrecedence::Match,
ExprKind::Match(_, _, MatchKind::Postfix) => ExprPrecedence::PostfixMatch,
ExprKind::Closure(..) => ExprPrecedence::Closure,
ExprKind::Block(..) => ExprPrecedence::Block,
ExprKind::TryBlock(..) => ExprPrecedence::TryBlock,
7 changes: 5 additions & 2 deletions compiler/rustc_ast/src/util/parser.rs
@@ -281,6 +281,7 @@ pub enum ExprPrecedence {
ForLoop,
Loop,
Match,
PostfixMatch,
ConstBlock,
Block,
TryBlock,
@@ -334,7 +335,8 @@ impl ExprPrecedence {
| ExprPrecedence::InlineAsm
| ExprPrecedence::Mac
| ExprPrecedence::FormatArgs
| ExprPrecedence::OffsetOf => PREC_POSTFIX,
| ExprPrecedence::OffsetOf
| ExprPrecedence::PostfixMatch => PREC_POSTFIX,

// Never need parens
ExprPrecedence::Array
@@ -390,7 +392,8 @@ pub fn contains_exterior_struct_lit(value: &ast::Expr) -> bool {
| ast::ExprKind::Cast(x, _)
| ast::ExprKind::Type(x, _)
| ast::ExprKind::Field(x, _)
| ast::ExprKind::Index(x, _, _) => {
| ast::ExprKind::Index(x, _, _)
| ast::ExprKind::Match(x, _, ast::MatchKind::Postfix) => {
// &X { y: 1 }, X { y: 1 }.y
contains_exterior_struct_lit(x)
}
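Note: the hunks above in ast.rs and parser.rs all serve the unstable postfix `match` syntax: it gets its own `PostfixMatch` precedence so it binds like other postfix operators, and `contains_exterior_struct_lit` now looks through its scrutinee. A minimal nightly-only sketch of the affected expressions (assumes `#![feature(postfix_match)]`; not part of this diff):

// Nightly-only sketch; assumes #![feature(postfix_match)] is available.
#![feature(postfix_match)]

struct Point {
    x: i32,
}

fn main() {
    let p = Point { x: -3 };

    // `ExprPrecedence::PostfixMatch` maps to `PREC_POSTFIX`, so a postfix match
    // chains like a method call or field access: this parses as
    // `(p.match { .. }).abs()`.
    let distance = p.match {
        Point { x } => x,
    }
    .abs();

    // `contains_exterior_struct_lit` walks into a postfix-match scrutinee, so a
    // bare struct literal used as the scrutinee inside an `if` or `while`
    // condition still has to be parenthesized, just as it does for a field
    // access like `Point { x: 1 }.x`.
    assert_eq!(distance, 3);
}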
4 changes: 2 additions & 2 deletions compiler/rustc_ast_lowering/src/index.rs
@@ -3,7 +3,7 @@ use rustc_hir as hir;
use rustc_hir::def_id::{LocalDefId, LocalDefIdMap};
use rustc_hir::intravisit::Visitor;
use rustc_hir::*;
use rustc_index::{Idx, IndexVec};
use rustc_index::IndexVec;
use rustc_middle::span_bug;
use rustc_middle::ty::TyCtxt;
use rustc_span::{Span, DUMMY_SP};
@@ -31,7 +31,7 @@ pub(super) fn index_hir<'hir>(
bodies: &SortedMap<ItemLocalId, &'hir Body<'hir>>,
num_nodes: usize,
) -> (IndexVec<ItemLocalId, ParentedNode<'hir>>, LocalDefIdMap<ItemLocalId>) {
let zero_id = ItemLocalId::new(0);
let zero_id = ItemLocalId::ZERO;
let err_node = ParentedNode { parent: zero_id, node: Node::Err(item.span()) };
let mut nodes = IndexVec::from_elem_n(err_node, num_nodes);
// This node's parent should never be accessed: the owner's parent is computed by the
4 changes: 2 additions & 2 deletions compiler/rustc_ast_lowering/src/item.rs
@@ -11,7 +11,7 @@ use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::{LocalDefId, CRATE_DEF_ID};
use rustc_hir::PredicateOrigin;
use rustc_index::{Idx, IndexSlice, IndexVec};
use rustc_index::{IndexSlice, IndexVec};
use rustc_middle::span_bug;
use rustc_middle::ty::{ResolverAstLowering, TyCtxt};
use rustc_span::edit_distance::find_best_match_for_name;
@@ -563,7 +563,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
let kind =
this.lower_use_tree(use_tree, &prefix, id, vis_span, &mut ident, attrs);
if let Some(attrs) = attrs {
this.attrs.insert(hir::ItemLocalId::new(0), attrs);
this.attrs.insert(hir::ItemLocalId::ZERO, attrs);
}

let item = hir::Item {
8 changes: 4 additions & 4 deletions compiler/rustc_ast_lowering/src/lib.rs
@@ -157,7 +157,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
attrs: SortedMap::default(),
children: Vec::default(),
current_hir_id_owner: hir::CRATE_OWNER_ID,
item_local_id_counter: hir::ItemLocalId::new(0),
item_local_id_counter: hir::ItemLocalId::ZERO,
node_id_to_local_id: Default::default(),
trait_map: Default::default(),

@@ -583,7 +583,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
// and the caller to refer to some of the subdefinitions' nodes' `LocalDefId`s.

// Always allocate the first `HirId` for the owner itself.
let _old = self.node_id_to_local_id.insert(owner, hir::ItemLocalId::new(0));
let _old = self.node_id_to_local_id.insert(owner, hir::ItemLocalId::ZERO);
debug_assert_eq!(_old, None);

let item = f(self);
@@ -677,7 +677,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
v.insert(local_id);
self.item_local_id_counter.increment_by(1);

assert_ne!(local_id, hir::ItemLocalId::new(0));
assert_ne!(local_id, hir::ItemLocalId::ZERO);
if let Some(def_id) = self.opt_local_def_id(ast_node_id) {
self.children.push((def_id, hir::MaybeOwner::NonOwner(hir_id)));
}
fn next_id(&mut self) -> hir::HirId {
let owner = self.current_hir_id_owner;
let local_id = self.item_local_id_counter;
assert_ne!(local_id, hir::ItemLocalId::new(0));
assert_ne!(local_id, hir::ItemLocalId::ZERO);
self.item_local_id_counter.increment_by(1);
hir::HirId { owner, local_id }
}
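The `ItemLocalId::ZERO` replacements above use the new `ZERO` associated constant that #123419 adds to `rustc_index` index types. A standalone sketch of the pattern with a hand-rolled newtype (rustc generates the real thing through its `newtype_index!` machinery; the type here is only a stand-in):

// Standalone sketch; `ItemLocalId` here is a hand-rolled stand-in, not rustc's type.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct ItemLocalId(u32);

impl ItemLocalId {
    /// The first index, usable in `const` contexts; replaces `ItemLocalId::new(0)`.
    const ZERO: Self = ItemLocalId(0);

    const fn new(value: u32) -> Self {
        ItemLocalId(value)
    }

    fn increment_by(&mut self, amount: u32) {
        self.0 += amount;
    }
}

fn main() {
    // Mirrors the lowering code above: the owner always takes local id zero,
    // and every id handed out afterwards must be non-zero.
    let mut item_local_id_counter = ItemLocalId::ZERO;
    item_local_id_counter.increment_by(1);
    assert_ne!(item_local_id_counter, ItemLocalId::ZERO);
    assert_eq!(item_local_id_counter, ItemLocalId::new(1));
}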
2 changes: 1 addition & 1 deletion compiler/rustc_borrowck/src/borrow_set.rs
@@ -159,7 +159,7 @@ impl<'tcx> BorrowSet<'tcx> {
}

pub(crate) fn indices(&self) -> impl Iterator<Item = BorrowIndex> {
BorrowIndex::from_usize(0)..BorrowIndex::from_usize(self.len())
BorrowIndex::ZERO..BorrowIndex::from_usize(self.len())
}

pub(crate) fn iter_enumerated(&self) -> impl Iterator<Item = (BorrowIndex, &BorrowData<'tcx>)> {
4 changes: 2 additions & 2 deletions compiler/rustc_borrowck/src/type_check/mod.rs
@@ -2261,7 +2261,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
}
}

CastKind::PointerExposeAddress => {
CastKind::PointerExposeProvenance => {
let ty_from = op.ty(body, tcx);
let cast_ty_from = CastTy::from_ty(ty_from);
let cast_ty_to = CastTy::from_ty(*ty);
@@ -2271,7 +2271,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
span_mirbug!(
self,
rvalue,
"Invalid PointerExposeAddress cast {:?} -> {:?}",
"Invalid PointerExposeProvenance cast {:?} -> {:?}",
ty_from,
ty
)
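The `PointerExposeAddress` to `PointerExposeProvenance` renames in this and the following hunks mirror the library-level rename of `expose_addr` to `expose_provenance` from #122964. A small user-level sketch of the renamed API (unstable behind `exposed_provenance` on nightlies of this era; the names shown are the post-rename ones):

// User-level sketch; on 2024-era nightlies this needed #![feature(exposed_provenance)].
fn main() {
    let value = 42u32;
    let ptr: *const u32 = &value;

    // Formerly `expose_addr`: return the address and mark its provenance as exposed.
    let addr: usize = ptr.expose_provenance();

    // Formerly `from_exposed_addr`: rebuild a pointer from a previously exposed address.
    let rebuilt: *const u32 = std::ptr::with_exposed_provenance(addr);

    // SAFETY: `addr` comes from `ptr`, whose provenance was exposed above, and
    // `value` is still live here.
    assert_eq!(unsafe { *rebuilt }, 42);
}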
2 changes: 1 addition & 1 deletion compiler/rustc_codegen_cranelift/src/base.rs
@@ -649,7 +649,7 @@ fn codegen_stmt<'tcx>(
| CastKind::IntToFloat
| CastKind::FnPtrToPtr
| CastKind::PtrToPtr
| CastKind::PointerExposeAddress
| CastKind::PointerExposeProvenance
| CastKind::PointerWithExposedProvenance,
ref operand,
to_ty,
@@ -1393,7 +1393,7 @@ fn llvm_add_sub<'tcx>(

// c + carry -> c + first intermediate carry or borrow respectively
let int0 = crate::num::codegen_checked_int_binop(fx, bin_op, a, b);
let c = int0.value_field(fx, FieldIdx::new(0));
let c = int0.value_field(fx, FieldIdx::ZERO);
let cb0 = int0.value_field(fx, FieldIdx::new(1)).load_scalar(fx);

// c + carry -> c + second intermediate carry or borrow respectively
2 changes: 1 addition & 1 deletion compiler/rustc_codegen_cranelift/src/intrinsics/simd.rs
@@ -965,7 +965,7 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
});
}

sym::simd_expose_addr | sym::simd_with_exposed_provenance | sym::simd_cast_ptr => {
sym::simd_expose_provenance | sym::simd_with_exposed_provenance | sym::simd_cast_ptr => {
intrinsic_args!(fx, args => (arg); intrinsic);
ret.write_cvalue_transmute(fx, arg);
}
2 changes: 1 addition & 1 deletion compiler/rustc_codegen_cranelift/src/vtable.rs
@@ -61,7 +61,7 @@ pub(crate) fn get_ptr_and_method_ref<'tcx>(
if ty.is_dyn_star() {
let inner_layout = fx.layout_of(arg.layout().ty.builtin_deref(true).unwrap().ty);
let dyn_star = CPlace::for_ptr(Pointer::new(arg.load_scalar(fx)), inner_layout);
let ptr = dyn_star.place_field(fx, FieldIdx::new(0)).to_ptr();
let ptr = dyn_star.place_field(fx, FieldIdx::ZERO).to_ptr();
let vtable =
dyn_star.place_field(fx, FieldIdx::new(1)).to_cvalue(fx).load_scalar(fx);
break 'block (ptr, vtable);
120 changes: 51 additions & 69 deletions compiler/rustc_codegen_llvm/src/abi.rs
@@ -16,13 +16,15 @@ pub use rustc_middle::ty::layout::{FAT_PTR_ADDR, FAT_PTR_EXTRA};
use rustc_middle::ty::Ty;
use rustc_session::config;
pub use rustc_target::abi::call::*;
use rustc_target::abi::{self, HasDataLayout, Int};
use rustc_target::abi::{self, HasDataLayout, Int, Size};
pub use rustc_target::spec::abi::Abi;
use rustc_target::spec::SanitizerSet;

use libc::c_uint;
use smallvec::SmallVec;

use std::cmp;

pub trait ArgAttributesExt {
fn apply_attrs_to_llfn(&self, idx: AttributePlace, cx: &CodegenCx<'_, '_>, llfn: &Value);
fn apply_attrs_to_callsite(
@@ -130,42 +132,36 @@ impl LlvmType for Reg {
impl LlvmType for CastTarget {
fn llvm_type<'ll>(&self, cx: &CodegenCx<'ll, '_>) -> &'ll Type {
let rest_ll_unit = self.rest.unit.llvm_type(cx);
let (rest_count, rem_bytes) = if self.rest.unit.size.bytes() == 0 {
(0, 0)
let rest_count = if self.rest.total == Size::ZERO {
0
} else {
(
self.rest.total.bytes() / self.rest.unit.size.bytes(),
self.rest.total.bytes() % self.rest.unit.size.bytes(),
)
assert_ne!(
self.rest.unit.size,
Size::ZERO,
"total size {:?} cannot be divided into units of zero size",
self.rest.total
);
if self.rest.total.bytes() % self.rest.unit.size.bytes() != 0 {
assert_eq!(self.rest.unit.kind, RegKind::Integer, "only int regs can be split");
}
self.rest.total.bytes().div_ceil(self.rest.unit.size.bytes())
};

// Simplify to a single unit or an array if there's no prefix.
// This produces the same layout, but using a simpler type.
if self.prefix.iter().all(|x| x.is_none()) {
// Simplify to a single unit when there is no prefix and size <= unit size
if self.rest.total <= self.rest.unit.size {
if rest_count == 1 {
return rest_ll_unit;
}

// Simplify to array when all chunks are the same size and type
if rem_bytes == 0 {
return cx.type_array(rest_ll_unit, rest_count);
}
}

// Create list of fields in the main structure
let mut args: Vec<_> = self
.prefix
.iter()
.flat_map(|option_reg| option_reg.map(|reg| reg.llvm_type(cx)))
.chain((0..rest_count).map(|_| rest_ll_unit))
.collect();

// Append final integer
if rem_bytes != 0 {
// Only integers can be really split further.
assert_eq!(self.rest.unit.kind, RegKind::Integer);
args.push(cx.type_ix(rem_bytes * 8));
return cx.type_array(rest_ll_unit, rest_count);
}

// Generate a struct type with the prefix and the "rest" arguments.
let prefix_args =
self.prefix.iter().flat_map(|option_reg| option_reg.map(|reg| reg.llvm_type(cx)));
let rest_args = (0..rest_count).map(|_| rest_ll_unit);
let args: Vec<_> = prefix_args.chain(rest_args).collect();
cx.type_struct(&args, false)
}
}
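The rewritten `llvm_type` above derives the number of `rest` units with a rounding-up division instead of tracking a separate remainder, and only integer registers may be split across a partial trailing unit. A tiny sketch of that arithmetic with illustrative numbers (not taken from any particular ABI):

// Illustrative numbers only: a 20-byte "rest" area passed in 8-byte integer units.
fn main() {
    let rest_total_bytes: u64 = 20;
    let unit_size_bytes: u64 = 8;

    // Old scheme: whole units plus a separate remainder, lowered to a trailing iN.
    let (count, rem) = (rest_total_bytes / unit_size_bytes, rest_total_bytes % unit_size_bytes);
    assert_eq!((count, rem), (2, 4)); // roughly `{ i64, i64, i32 }` when there is no prefix

    // New scheme: round up to whole units; a partial trailing unit is only legal
    // for integer registers, which can be split.
    let rest_count = rest_total_bytes.div_ceil(unit_size_bytes);
    assert_eq!(rest_count, 3); // roughly `[3 x i64]`, which keeps llvm_type in step with CastTarget::size
}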
@@ -215,47 +211,33 @@ impl<'ll, 'tcx> ArgAbiExt<'ll, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
bug!("unsized `ArgAbi` must be handled through `store_fn_arg`");
}
PassMode::Cast { cast, pad_i32: _ } => {
// FIXME(eddyb): Figure out when the simpler Store is safe, clang
// uses it for i16 -> {i8, i8}, but not for i24 -> {i8, i8, i8}.
let can_store_through_cast_ptr = false;
if can_store_through_cast_ptr {
bx.store(val, dst.llval, self.layout.align.abi);
} else {
// The actual return type is a struct, but the ABI
// adaptation code has cast it into some scalar type. The
// code that follows is the only reliable way I have
// found to do a transform like i64 -> {i32,i32}.
// Basically we dump the data onto the stack then memcpy it.
//
// Other approaches I tried:
// - Casting rust ret pointer to the foreign type and using Store
// is (a) unsafe if size of foreign type > size of rust type and
// (b) runs afoul of strict aliasing rules, yielding invalid
// assembly under -O (specifically, the store gets removed).
// - Truncating foreign type to correct integral type and then
// bitcasting to the struct type yields invalid cast errors.

// We instead thus allocate some scratch space...
let scratch_size = cast.size(bx);
let scratch_align = cast.align(bx);
let llscratch = bx.alloca(cast.llvm_type(bx), scratch_align);
bx.lifetime_start(llscratch, scratch_size);

// ... where we first store the value...
bx.store(val, llscratch, scratch_align);

// ... and then memcpy it to the intended destination.
bx.memcpy(
dst.llval,
self.layout.align.abi,
llscratch,
scratch_align,
bx.const_usize(self.layout.size.bytes()),
MemFlags::empty(),
);

bx.lifetime_end(llscratch, scratch_size);
}
// The ABI mandates that the value is passed as a different struct representation.
// Spill and reload it from the stack to convert from the ABI representation to
// the Rust representation.
let scratch_size = cast.size(bx);
let scratch_align = cast.align(bx);
// Note that the ABI type may be either larger or smaller than the Rust type,
// due to the presence or absence of trailing padding. For example:
// - On some ABIs, the Rust layout { f64, f32, <f32 padding> } may omit padding
// when passed by value, making it smaller.
// - On some ABIs, the Rust layout { u16, u16, u16 } may be padded up to 8 bytes
// when passed by value, making it larger.
let copy_bytes = cmp::min(scratch_size.bytes(), self.layout.size.bytes());
// Allocate some scratch space...
let llscratch = bx.alloca(cast.llvm_type(bx), scratch_align);
bx.lifetime_start(llscratch, scratch_size);
// ...store the value...
bx.store(val, llscratch, scratch_align);
// ... and then memcpy it to the intended destination.
bx.memcpy(
dst.llval,
self.layout.align.abi,
llscratch,
scratch_align,
bx.const_usize(copy_bytes),
MemFlags::empty(),
);
bx.lifetime_end(llscratch, scratch_size);
}
_ => {
OperandRef::from_immediate_or_packed_pair(bx, val, self.layout).val.store(bx, dst);
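Both this store path and the argument-load path in `rustc_codegen_ssa` further down copy only the smaller of the ABI-cast size and the Rust layout size between the scratch slot and the Rust value. A minimal model of that clamp, with sizes that are illustrative rather than normative:

use std::cmp;

// Model of the copy-size clamp: only the smaller of the ABI-cast size and the
// Rust layout size is memcpy'd, so neither representation is read or written
// past its end.
fn copy_bytes(abi_size_bytes: u64, rust_size_bytes: u64) -> u64 {
    cmp::min(abi_size_bytes, rust_size_bytes)
}

fn main() {
    // { f64, f32 }: Rust pads the struct to 16 bytes, while an ABI may pass only
    // 12 bytes of data by value (illustrative sizes).
    assert_eq!(copy_bytes(12, 16), 12);

    // { u16, u16, u16 }: 6 bytes of Rust data may be widened to one 8-byte
    // register when passed by value (illustrative sizes).
    assert_eq!(copy_bytes(8, 6), 6);
}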
2 changes: 1 addition & 1 deletion compiler/rustc_codegen_llvm/src/intrinsic.rs
@@ -2111,7 +2111,7 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
return Ok(args[0].immediate());
}

if name == sym::simd_expose_addr {
if name == sym::simd_expose_provenance {
let (out_len, out_elem) = require_simd!(ret_ty, SimdReturn);
require!(
in_len == out_len,
32 changes: 29 additions & 3 deletions compiler/rustc_codegen_ssa/src/mir/block.rs
@@ -1505,9 +1505,35 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {

if by_ref && !arg.is_indirect() {
// Have to load the argument, maybe while casting it.
if let PassMode::Cast { cast: ty, .. } = &arg.mode {
let llty = bx.cast_backend_type(ty);
llval = bx.load(llty, llval, align.min(arg.layout.align.abi));
if let PassMode::Cast { cast, pad_i32: _ } = &arg.mode {
// The ABI mandates that the value is passed as a different struct representation.
// Spill and reload it from the stack to convert from the Rust representation to
// the ABI representation.
let scratch_size = cast.size(bx);
let scratch_align = cast.align(bx);
// Note that the ABI type may be either larger or smaller than the Rust type,
// due to the presence or absence of trailing padding. For example:
// - On some ABIs, the Rust layout { f64, f32, <f32 padding> } may omit padding
// when passed by value, making it smaller.
// - On some ABIs, the Rust layout { u16, u16, u16 } may be padded up to 8 bytes
// when passed by value, making it larger.
let copy_bytes = cmp::min(scratch_size.bytes(), arg.layout.size.bytes());
// Allocate some scratch space...
let llscratch = bx.alloca(bx.cast_backend_type(cast), scratch_align);
bx.lifetime_start(llscratch, scratch_size);
// ...memcpy the value...
bx.memcpy(
llscratch,
scratch_align,
llval,
align,
bx.const_usize(copy_bytes),
MemFlags::empty(),
);
// ...and then load it with the ABI type.
let cast_ty = bx.cast_backend_type(cast);
llval = bx.load(cast_ty, llscratch, scratch_align);
bx.lifetime_end(llscratch, scratch_size);
} else {
// We can't use `PlaceRef::load` here because the argument
// may have a type we don't treat as immediate, but the ABI
2 changes: 1 addition & 1 deletion compiler/rustc_codegen_ssa/src/mir/rvalue.rs
@@ -405,7 +405,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let cast = bx.cx().layout_of(self.monomorphize(mir_cast_ty));

let val = match *kind {
mir::CastKind::PointerExposeAddress => {
mir::CastKind::PointerExposeProvenance => {
assert!(bx.cx().is_backend_immediate(cast));
let llptr = operand.immediate();
let llcast_ty = bx.cx().immediate_backend_type(cast);