
Commit 89da361

compiler: make rustc_target have less weird reexports
rustc_target has accumulated a lot of weird reexports for various reasons, but we're now at a point where we can actually start reducing their number. This change removes shadowing-dependent behavior and imports directly from rustc_abi instead of going through renaming reexports. It is only incremental progress and does not entirely fix the crate.
1 parent 5958825 commit 89da361

23 files changed: +143 -125 lines
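To make the pattern concrete before the per-file diffs: the sketch below is a toy model (simplified types, not the real rustc_abi API) of what "shadowing-dependent behavior" means here. The old rustc_target::abi module re-exported enum variants as bare names, so call-convention code matched on `abi::Int(..)` and relied on nothing else shadowing those names; the new code imports the enum and names variants explicitly, as the diffs below show.

// Toy model of the re-export pattern this commit removes (simplified types,
// NOT the real rustc_abi API; the real Primitive variants carry Integer,
// Float, and AddressSpace payloads).
mod abi {
    pub enum Primitive {
        Int(u16, bool), // width in bits, signedness
        Float(u16),     // width in bits
        Pointer,
    }
    // The shadowing-prone part: every variant becomes a bare name in `abi::`,
    // so downstream code can write `abi::Int(..)` without naming the enum.
    pub use self::Primitive::*;
}

use crate::abi::Primitive;

// Old style: which enum `Int`, `Pointer`, and `Float` belong to is invisible
// at the use site.
fn class_old(p: &abi::Primitive) -> &'static str {
    match p {
        abi::Int(..) | abi::Pointer => "integer register class",
        abi::Float(_) => "floating-point register class",
    }
}

// New style (what the diffs below switch to): import the enum and name the
// variants through it, e.g. `Primitive::Int(..)`.
fn class_new(p: &Primitive) -> &'static str {
    match p {
        Primitive::Int(..) | Primitive::Pointer => "integer register class",
        Primitive::Float(_) => "floating-point register class",
    }
}

fn main() {
    println!("{}", class_old(&abi::Int(32, true)));
    println!("{}", class_new(&Primitive::Float(64)));
    println!("{}", class_new(&Primitive::Pointer));
}

In the real crate the enum lives in rustc_abi, so the fix in the diffs is to import rustc_abi::Primitive (and friends such as Variants, FieldsShape, and ExternAbi) directly rather than through crate::abi.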

Diff for: compiler/rustc_target/src/asm/mod.rs

+1 -1

@@ -1,11 +1,11 @@
 use std::fmt;
 use std::str::FromStr;
 
+use rustc_abi::Size;
 use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
 use rustc_macros::{Decodable, Encodable, HashStable_Generic};
 use rustc_span::Symbol;
 
-use crate::abi::Size;
 use crate::spec::{RelocModel, Target};
 
 pub struct ModifierInfo {

Diff for: compiler/rustc_target/src/callconv/aarch64.rs

+1 -2

@@ -1,9 +1,8 @@
 use std::iter;
 
-use rustc_abi::{BackendRepr, Primitive};
+use rustc_abi::{BackendRepr, HasDataLayout, Primitive, TyAbiInterface};
 
 use crate::abi::call::{ArgAbi, FnAbi, Reg, RegKind, Uniform};
-use crate::abi::{HasDataLayout, TyAbiInterface};
 use crate::spec::{HasTargetSpec, Target};
 
 /// Indicates the variant of the AArch64 ABI we are compiling for.

Diff for: compiler/rustc_target/src/callconv/amdgpu.rs

+2 -1

@@ -1,5 +1,6 @@
+use rustc_abi::{HasDataLayout, TyAbiInterface};
+
 use crate::abi::call::{ArgAbi, FnAbi};
-use crate::abi::{HasDataLayout, TyAbiInterface};
 
 fn classify_ret<'a, Ty, C>(_cx: &C, ret: &mut ArgAbi<'a, Ty>)
 where

Diff for: compiler/rustc_target/src/callconv/arm.rs

+2 -1

@@ -1,5 +1,6 @@
+use rustc_abi::{HasDataLayout, TyAbiInterface};
+
 use crate::abi::call::{ArgAbi, Conv, FnAbi, Reg, RegKind, Uniform};
-use crate::abi::{HasDataLayout, TyAbiInterface};
 use crate::spec::HasTargetSpec;
 
 fn is_homogeneous_aggregate<'a, Ty, C>(cx: &C, arg: &mut ArgAbi<'a, Ty>) -> Option<Uniform>

Diff for: compiler/rustc_target/src/callconv/loongarch.rs

+12 -11

@@ -1,9 +1,10 @@
-use crate::abi::call::{ArgAbi, ArgExtension, CastTarget, FnAbi, PassMode, Reg, RegKind, Uniform};
-use crate::abi::{
-    self, BackendRepr, FieldsShape, HasDataLayout, Size, TyAbiInterface, TyAndLayout,
+use rustc_abi::{
+    BackendRepr, ExternAbi, FieldsShape, HasDataLayout, Primitive, Reg, RegKind, Size,
+    TyAbiInterface, TyAndLayout, Variants,
 };
+
+use crate::callconv::{ArgAbi, ArgExtension, CastTarget, FnAbi, PassMode, Uniform};
 use crate::spec::HasTargetSpec;
-use crate::spec::abi::Abi as SpecAbi;
 
 #[derive(Copy, Clone)]
 enum RegPassKind {
@@ -42,7 +43,7 @@ where
 {
     match arg_layout.backend_repr {
         BackendRepr::Scalar(scalar) => match scalar.primitive() {
-            abi::Int(..) | abi::Pointer(_) => {
+            Primitive::Int(..) | Primitive::Pointer(_) => {
                 if arg_layout.size.bits() > xlen {
                     return Err(CannotUseFpConv);
                 }
@@ -62,7 +63,7 @@ where
                 _ => return Err(CannotUseFpConv),
             }
         }
-        abi::Float(_) => {
+        Primitive::Float(_) => {
             if arg_layout.size.bits() > flen {
                 return Err(CannotUseFpConv);
             }
@@ -115,8 +116,8 @@ where
             }
             FieldsShape::Arbitrary { .. } => {
                 match arg_layout.variants {
-                    abi::Variants::Multiple { .. } => return Err(CannotUseFpConv),
-                    abi::Variants::Single { .. } | abi::Variants::Empty => (),
+                    Variants::Multiple { .. } => return Err(CannotUseFpConv),
+                    Variants::Single { .. } | Variants::Empty => (),
                 }
                 for i in arg_layout.fields.index_by_increasing_offset() {
                    let field = arg_layout.field(cx, i);
@@ -314,7 +315,7 @@ fn classify_arg<'a, Ty, C>(
 
 fn extend_integer_width<Ty>(arg: &mut ArgAbi<'_, Ty>, xlen: u64) {
     if let BackendRepr::Scalar(scalar) = arg.layout.backend_repr {
-        if let abi::Int(i, _) = scalar.primitive() {
+        if let Primitive::Int(i, _) = scalar.primitive() {
            // 32-bit integers are always sign-extended
            if i.size().bits() == 32 && xlen > 32 {
                if let PassMode::Direct(ref mut attrs) = arg.mode {
@@ -363,12 +364,12 @@ where
     }
 }
 
-pub(crate) fn compute_rust_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>, abi: SpecAbi)
+pub(crate) fn compute_rust_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>, abi: ExternAbi)
 where
     Ty: TyAbiInterface<'a, C> + Copy,
     C: HasDataLayout + HasTargetSpec,
 {
-    if abi == SpecAbi::RustIntrinsic {
+    if abi == ExternAbi::RustIntrinsic {
         return;
     }
 
Diff for: compiler/rustc_target/src/callconv/mips.rs

+2 -1

@@ -1,5 +1,6 @@
+use rustc_abi::{HasDataLayout, Size};
+
 use crate::abi::call::{ArgAbi, FnAbi, Reg, Uniform};
-use crate::abi::{HasDataLayout, Size};
 
 fn classify_ret<Ty, C>(cx: &C, ret: &mut ArgAbi<'_, Ty>, offset: &mut Size)
 where

Diff for: compiler/rustc_target/src/callconv/mips64.rs

+18 -15

@@ -1,12 +1,15 @@
-use crate::abi::call::{
-    ArgAbi, ArgAttribute, ArgAttributes, ArgExtension, CastTarget, FnAbi, PassMode, Reg, Uniform,
+use rustc_abi::{
+    BackendRepr, FieldsShape, Float, HasDataLayout, Primitive, Reg, Size, TyAbiInterface,
+};
+
+use crate::callconv::{
+    ArgAbi, ArgAttribute, ArgAttributes, ArgExtension, CastTarget, FnAbi, PassMode, Uniform,
 };
-use crate::abi::{self, HasDataLayout, Size, TyAbiInterface};
 
 fn extend_integer_width_mips<Ty>(arg: &mut ArgAbi<'_, Ty>, bits: u64) {
     // Always sign extend u32 values on 64-bit mips
-    if let abi::BackendRepr::Scalar(scalar) = arg.layout.backend_repr {
-        if let abi::Int(i, signed) = scalar.primitive() {
+    if let BackendRepr::Scalar(scalar) = arg.layout.backend_repr {
+        if let Primitive::Int(i, signed) = scalar.primitive() {
             if !signed && i.size().bits() == 32 {
                 if let PassMode::Direct(ref mut attrs) = arg.mode {
                     attrs.ext(ArgExtension::Sext);
@@ -25,9 +28,9 @@ where
     C: HasDataLayout,
 {
     match ret.layout.field(cx, i).backend_repr {
-        abi::BackendRepr::Scalar(scalar) => match scalar.primitive() {
-            abi::Float(abi::F32) => Some(Reg::f32()),
-            abi::Float(abi::F64) => Some(Reg::f64()),
+        BackendRepr::Scalar(scalar) => match scalar.primitive() {
+            Primitive::Float(Float::F32) => Some(Reg::f32()),
+            Primitive::Float(Float::F64) => Some(Reg::f64()),
             _ => None,
         },
         _ => None,
@@ -51,7 +54,7 @@ where
     // use of float registers to structures (not unions) containing exactly one or two
     // float fields.
 
-    if let abi::FieldsShape::Arbitrary { .. } = ret.layout.fields {
+    if let FieldsShape::Arbitrary { .. } = ret.layout.fields {
         if ret.layout.fields.count() == 1 {
             if let Some(reg) = float_reg(cx, ret, 0) {
                 ret.cast_to(reg);
@@ -90,16 +93,16 @@ where
     let mut prefix_index = 0;
 
     match arg.layout.fields {
-        abi::FieldsShape::Primitive => unreachable!(),
-        abi::FieldsShape::Array { .. } => {
+        FieldsShape::Primitive => unreachable!(),
+        FieldsShape::Array { .. } => {
            // Arrays are passed indirectly
            arg.make_indirect();
            return;
        }
-        abi::FieldsShape::Union(_) => {
+        FieldsShape::Union(_) => {
            // Unions and are always treated as a series of 64-bit integer chunks
        }
-        abi::FieldsShape::Arbitrary { .. } => {
+        FieldsShape::Arbitrary { .. } => {
            // Structures are split up into a series of 64-bit integer chunks, but any aligned
            // doubles not part of another aggregate are passed as floats.
            let mut last_offset = Size::ZERO;
@@ -109,8 +112,8 @@ where
                let offset = arg.layout.fields.offset(i);
 
                // We only care about aligned doubles
-                if let abi::BackendRepr::Scalar(scalar) = field.backend_repr {
-                    if scalar.primitive() == abi::Float(abi::F64) {
+                if let BackendRepr::Scalar(scalar) = field.backend_repr {
+                    if scalar.primitive() == Primitive::Float(Float::F64) {
                        if offset.is_aligned(dl.f64_align.abi) {
                            // Insert enough integers to cover [last_offset, offset)
                            assert!(last_offset.is_aligned(dl.f64_align.abi));

Diff for: compiler/rustc_target/src/callconv/mod.rs

+11 -9

@@ -1,14 +1,14 @@
 use std::str::FromStr;
 use std::{fmt, iter};
 
-pub use rustc_abi::{ExternAbi, Reg, RegKind};
+use rustc_abi::{
+    AddressSpace, Align, BackendRepr, ExternAbi, HasDataLayout, Scalar, Size, TyAbiInterface,
+    TyAndLayout,
+};
+pub use rustc_abi::{Primitive, Reg, RegKind};
 use rustc_macros::HashStable_Generic;
 use rustc_span::Symbol;
 
-use crate::abi::{
-    self, AddressSpace, Align, BackendRepr, HasDataLayout, Pointer, Size, TyAbiInterface,
-    TyAndLayout,
-};
 use crate::spec::{HasTargetSpec, HasWasmCAbiOpt, HasX86AbiOpt, WasmCAbi};
 
 mod aarch64;
@@ -349,7 +349,7 @@ impl<'a, Ty> ArgAbi<'a, Ty> {
     pub fn new(
         cx: &impl HasDataLayout,
         layout: TyAndLayout<'a, Ty>,
-        scalar_attrs: impl Fn(&TyAndLayout<'a, Ty>, abi::Scalar, Size) -> ArgAttributes,
+        scalar_attrs: impl Fn(&TyAndLayout<'a, Ty>, Scalar, Size) -> ArgAttributes,
     ) -> Self {
         let mode = match layout.backend_repr {
             BackendRepr::Uninhabited => PassMode::Ignore,
@@ -464,7 +464,7 @@ impl<'a, Ty> ArgAbi<'a, Ty> {
     pub fn extend_integer_width_to(&mut self, bits: u64) {
         // Only integers have signedness
         if let BackendRepr::Scalar(scalar) = self.layout.backend_repr {
-            if let abi::Int(i, signed) = scalar.primitive() {
+            if let Primitive::Int(i, signed) = scalar.primitive() {
                if i.size().bits() < bits {
                    if let PassMode::Direct(ref mut attrs) = self.mode {
                        if signed {
@@ -756,7 +756,9 @@ impl<'a, Ty> FnAbi<'a, Ty> {
                continue;
            }
 
-            if arg_idx.is_none() && arg.layout.size > Pointer(AddressSpace::DATA).size(cx) * 2 {
+            if arg_idx.is_none()
+                && arg.layout.size > Primitive::Pointer(AddressSpace::DATA).size(cx) * 2
+            {
                // Return values larger than 2 registers using a return area
                // pointer. LLVM and Cranelift disagree about how to return
                // values that don't fit in the registers designated for return
@@ -837,7 +839,7 @@ impl<'a, Ty> FnAbi<'a, Ty> {
            assert!(is_indirect_not_on_stack);
 
            let size = arg.layout.size;
-            if !arg.layout.is_unsized() && size <= Pointer(AddressSpace::DATA).size(cx) {
+            if !arg.layout.is_unsized() && size <= Primitive::Pointer(AddressSpace::DATA).size(cx) {
                // We want to pass small aggregates as immediates, but using
                // an LLVM aggregate type for this leads to bad optimizations,
                // so we pick an appropriately sized integer type instead.

Diff for: compiler/rustc_target/src/callconv/nvptx64.rs

+3 -2

@@ -1,6 +1,7 @@
+use rustc_abi::{HasDataLayout, Reg, Size, TyAbiInterface};
+
 use super::{ArgAttribute, ArgAttributes, ArgExtension, CastTarget};
-use crate::abi::call::{ArgAbi, FnAbi, Reg, Size, Uniform};
-use crate::abi::{HasDataLayout, TyAbiInterface};
+use crate::abi::call::{ArgAbi, FnAbi, Uniform};
 
 fn classify_ret<Ty>(ret: &mut ArgAbi<'_, Ty>) {
     if ret.layout.is_aggregate() && ret.layout.is_sized() {

Diff for: compiler/rustc_target/src/callconv/powerpc64.rs

+2 -1

@@ -2,8 +2,9 @@
 // Alignment of 128 bit types is not currently handled, this will
 // need to be fixed when PowerPC vector support is added.
 
+use rustc_abi::{Endian, HasDataLayout, TyAbiInterface};
+
 use crate::abi::call::{Align, ArgAbi, FnAbi, Reg, RegKind, Uniform};
-use crate::abi::{Endian, HasDataLayout, TyAbiInterface};
 use crate::spec::HasTargetSpec;
 
 #[derive(Debug, Clone, Copy, PartialEq)]

Diff for: compiler/rustc_target/src/callconv/riscv.rs

+12 -11

@@ -4,12 +4,13 @@
 // Reference: Clang RISC-V ELF psABI lowering code
 // https://github.com/llvm/llvm-project/blob/8e780252a7284be45cf1ba224cabd884847e8e92/clang/lib/CodeGen/TargetInfo.cpp#L9311-L9773
 
-use rustc_abi::{BackendRepr, FieldsShape, HasDataLayout, Size, TyAbiInterface, TyAndLayout};
+use rustc_abi::{
+    BackendRepr, ExternAbi, FieldsShape, HasDataLayout, Primitive, Reg, RegKind, Size,
+    TyAbiInterface, TyAndLayout, Variants,
+};
 
-use crate::abi;
-use crate::abi::call::{ArgAbi, ArgExtension, CastTarget, FnAbi, PassMode, Reg, RegKind, Uniform};
+use crate::abi::call::{ArgAbi, ArgExtension, CastTarget, FnAbi, PassMode, Uniform};
 use crate::spec::HasTargetSpec;
-use crate::spec::abi::Abi as SpecAbi;
 
 #[derive(Copy, Clone)]
 enum RegPassKind {
@@ -48,7 +49,7 @@ where
 {
     match arg_layout.backend_repr {
         BackendRepr::Scalar(scalar) => match scalar.primitive() {
-            abi::Int(..) | abi::Pointer(_) => {
+            Primitive::Int(..) | Primitive::Pointer(_) => {
                 if arg_layout.size.bits() > xlen {
                     return Err(CannotUseFpConv);
                 }
@@ -68,7 +69,7 @@ where
                 _ => return Err(CannotUseFpConv),
             }
         }
-        abi::Float(_) => {
+        Primitive::Float(_) => {
             if arg_layout.size.bits() > flen {
                 return Err(CannotUseFpConv);
             }
@@ -121,8 +122,8 @@ where
             }
             FieldsShape::Arbitrary { .. } => {
                 match arg_layout.variants {
-                    abi::Variants::Multiple { .. } => return Err(CannotUseFpConv),
-                    abi::Variants::Single { .. } | abi::Variants::Empty => (),
+                    Variants::Multiple { .. } => return Err(CannotUseFpConv),
+                    Variants::Single { .. } | Variants::Empty => (),
                 }
                 for i in arg_layout.fields.index_by_increasing_offset() {
                    let field = arg_layout.field(cx, i);
@@ -320,7 +321,7 @@ fn classify_arg<'a, Ty, C>(
 
 fn extend_integer_width<Ty>(arg: &mut ArgAbi<'_, Ty>, xlen: u64) {
     if let BackendRepr::Scalar(scalar) = arg.layout.backend_repr {
-        if let abi::Int(i, _) = scalar.primitive() {
+        if let Primitive::Int(i, _) = scalar.primitive() {
            // 32-bit integers are always sign-extended
            if i.size().bits() == 32 && xlen > 32 {
                if let PassMode::Direct(ref mut attrs) = arg.mode {
@@ -369,12 +370,12 @@ where
     }
 }
 
-pub(crate) fn compute_rust_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>, abi: SpecAbi)
+pub(crate) fn compute_rust_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>, abi: ExternAbi)
 where
     Ty: TyAbiInterface<'a, C> + Copy,
     C: HasDataLayout + HasTargetSpec,
 {
-    if abi == SpecAbi::RustIntrinsic {
+    if abi == ExternAbi::RustIntrinsic {
         return;
     }
 

Diff for: compiler/rustc_target/src/callconv/s390x.rs

+2 -1

@@ -1,8 +1,9 @@
 // Reference: ELF Application Binary Interface s390x Supplement
 // https://github.com/IBM/s390x-abi
 
+use rustc_abi::{BackendRepr, HasDataLayout, TyAbiInterface};
+
 use crate::abi::call::{ArgAbi, FnAbi, Reg, RegKind};
-use crate::abi::{BackendRepr, HasDataLayout, TyAbiInterface};
 use crate::spec::HasTargetSpec;
 
 fn classify_ret<Ty>(ret: &mut ArgAbi<'_, Ty>) {

Diff for: compiler/rustc_target/src/callconv/sparc.rs

+2 -1

@@ -1,5 +1,6 @@
+use rustc_abi::{HasDataLayout, Size};
+
 use crate::abi::call::{ArgAbi, FnAbi, Reg, Uniform};
-use crate::abi::{HasDataLayout, Size};
 
 fn classify_ret<Ty, C>(cx: &C, ret: &mut ArgAbi<'_, Ty>, offset: &mut Size)
 where
