// Not in `interpret`, to make sure we do not use private implementation details.
use std::convert::TryFrom;
use rustc_hir::lang_items::LangItem;
use rustc_hir::Mutability;
use rustc_middle::mir::{self, interpret::ConstAlloc};
use rustc_middle::ty::layout::LayoutOf;
use rustc_middle::ty::{self, ScalarInt, Ty, TyCtxt};
use rustc_span::{source_map::DUMMY_SP, symbol::Symbol};
use rustc_target::abi::Size;
use crate::interpret::{
self, intern_const_alloc_recursive, ConstValue, InternKind, InterpCx, InterpResult, MPlaceTy,
MemPlaceMeta, Scalar,
};

mod error;
mod eval_queries;
mod fn_queries;
mod machine;

pub use error::*;
pub use eval_queries::*;
pub use fn_queries::*;
pub use machine::*;
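
/// Allocates a `core::panic::Location` value for the given caller location
/// (`file`, `line`, `col`), interns it as an immutable constant allocation,
/// and returns a pointer to it as a `ConstValue::Scalar`.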
pub(crate) fn const_caller_location(
tcx: TyCtxt<'_>,
(file, line, col): (Symbol, u32, u32),
) -> ConstValue<'_> {
trace!("const_caller_location: {}:{}:{}", file, line, col);
let mut ecx = mk_eval_cx(tcx, DUMMY_SP, ty::ParamEnv::reveal_all(), false);
let loc_place = ecx.alloc_caller_location(file, line, col);
if intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &loc_place).is_err() {
bug!("intern_const_alloc_recursive should not error in this case")
}
ConstValue::Scalar(Scalar::from_pointer(loc_place.ptr.into_pointer_or_addr().unwrap(), &tcx))
}
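
/// Builds the `TypeId` constant for `ty`: computes the type's hash and mangled
/// name, writes them into a freshly allocated `TypeId` value (with the mangling
/// stored behind a reference as an unsized `TypeManglingStr`), and interns the
/// result, returning it as a `ConstValue::ByRef`.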
pub(crate) fn const_type_id<'tcx>(
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
ty: Ty<'tcx>,
) -> ConstValue<'tcx> {
trace!("const_type_id: {}", ty);
// Compute (logical) `TypeId` field values, before trying to encode them.
let hash = tcx.type_id_hash(ty);
let mangling = tcx.type_id_mangling(param_env.and(ty)).name;
let mut ecx = mk_eval_cx(tcx, DUMMY_SP, param_env, false);
let type_id_ty = tcx.type_of(tcx.require_lang_item(LangItem::TypeId, None));
let type_id_layout = ecx.layout_of(type_id_ty).unwrap();
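// Illustrative sketch (not the actual lang item definitions) of the shape this
// code assumes, judging by how the fields are accessed below:
//
//     struct TypeId { hash: u64, mangling: &TypeManglingStr }
//     struct TypeManglingStr { len: usize, /* mangled name bytes, unsized tail */ }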
// Encode `TypeId` field values, before putting together the allocation.
let hash_val = Scalar::from_u64(hash);
let mangling_val = {
let mangling_len = u64::try_from(mangling.len()).unwrap();
let mangling_len_val = Scalar::from_machine_usize(mangling_len, &ecx);
// The field is `mangling: &TypeManglingStr`, get `TypeManglingStr` from it.
let mangling_field_ty = type_id_layout.field(&ecx, 1).ty;
let type_mangling_str_ty = mangling_field_ty.builtin_deref(true).unwrap().ty;
// Allocate memory for `TypeManglingStr` struct.
let type_mangling_str_layout = ecx.layout_of(type_mangling_str_ty).unwrap();
let type_mangling_str_place = {
// NOTE(eddyb) this is similar to the `ecx.allocate(...)` used below
// for `type_id_place`, except with an additional size for the
// string bytes (`mangling`) being added to the `TypeManglingStr`
// (which is unsized, using an `extern { type }` tail).
let layout = type_mangling_str_layout;
let size = layout.size + Size::from_bytes(mangling_len);
let ptr = ecx
.allocate_ptr(size, layout.align.abi, interpret::MemoryKind::IntrinsicGlobal)
.unwrap();
MPlaceTy::from_aligned_ptr(ptr.into(), layout)
};
// Initialize `TypeManglingStr` fields.
ecx.write_scalar(
mangling_len_val,
&ecx.mplace_field(&type_mangling_str_place, 0).unwrap().into(),
)
.unwrap();
ecx.write_bytes_ptr(
ecx.mplace_field(&type_mangling_str_place, 1).unwrap().ptr,
mangling.bytes(),
)
.unwrap();
// `&TypeManglingStr` has no metadata, thanks to the length being stored
// behind the reference (in the first field of `TypeManglingStr`).
type_mangling_str_place.to_ref(&ecx).to_scalar().unwrap()
};
// FIXME(eddyb) everything below would be unnecessary if `ConstValue` could
// hold a pair of `Scalar`s, or if we moved to valtrees.
// Allocate memory for `TypeId` struct.
let type_id_place =
ecx.allocate(type_id_layout, interpret::MemoryKind::IntrinsicGlobal).unwrap();
// Initialize `TypeId` fields.
ecx.write_scalar(hash_val, &ecx.mplace_field(&type_id_place, 0).unwrap().into()).unwrap();
ecx.write_scalar(mangling_val, &ecx.mplace_field(&type_id_place, 1).unwrap().into()).unwrap();
// Convert the `TypeId` allocation from being in `ecx`, to a global `ConstValue`.
if intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &type_id_place).is_err() {
bug!("intern_const_alloc_recursive should not error in this case")
}
let (type_id_alloc_id, type_id_offset) =
type_id_place.ptr.into_pointer_or_addr().unwrap().into_parts();
ConstValue::ByRef {
alloc: tcx.global_alloc(type_id_alloc_id).unwrap_memory(),
offset: type_id_offset,
}
}

/// Converts an evaluated constant into a type-level constant.
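/// Returns `None` if the value cannot be represented as a valtree, e.g. if it
/// contains raw pointers or trait objects.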
pub(crate) fn const_to_valtree<'tcx>(
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
raw: ConstAlloc<'tcx>,
) -> Option<ty::ValTree<'tcx>> {
let ecx = mk_eval_cx(
tcx, DUMMY_SP, param_env,
// It is absolutely crucial for soundness that
// we do not read from static items or other mutable memory.
false,
);
let place = ecx.raw_const_to_mplace(raw).unwrap();
const_to_valtree_inner(&ecx, &place)
}
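
/// Recursive helper for `const_to_valtree`: primitives become `ValTree::Leaf`,
/// aggregates (tuples, arrays, ADTs) become `ValTree::Branch` over their fields
/// (with the variant index prepended for enums), and types that cannot appear
/// in type-level constants yield `None`.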
fn const_to_valtree_inner<'tcx>(
ecx: &CompileTimeEvalContext<'tcx, 'tcx>,
place: &MPlaceTy<'tcx>,
) -> Option<ty::ValTree<'tcx>> {
let branches = |n, variant| {
let place = match variant {
Some(variant) => ecx.mplace_downcast(&place, variant).unwrap(),
None => *place,
};
let variant =
variant.map(|variant| Some(ty::ValTree::Leaf(ScalarInt::from(variant.as_u32()))));
let fields = (0..n).map(|i| {
let field = ecx.mplace_field(&place, i).unwrap();
const_to_valtree_inner(ecx, &field)
});
// For enums, we prepend their variant index before the variant's fields so we can figure out
// the variant again when just seeing a valtree.
let branches = variant.into_iter().chain(fields);
Some(ty::ValTree::Branch(
ecx.tcx.arena.alloc_from_iter(branches.collect::<Option<Vec<_>>>()?),
))
};
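// Illustrative example (not from the original source): a value like `Some(3_u8)`
// becomes `Branch([Leaf(1), Leaf(3)])`: the variant index leaf first, followed
// by one entry per field.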
match place.layout.ty.kind() {
ty::FnDef(..) => Some(ty::ValTree::zst()),
ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char => {
let val = ecx.read_immediate(&place.into()).unwrap();
let val = val.to_scalar().unwrap();
Some(ty::ValTree::Leaf(val.assert_int()))
}
// Raw pointers are not allowed in type level constants, as we cannot properly test them for
// equality at compile-time (see `ptr_guaranteed_eq`/`_ne`).
// Technically we could allow function pointers (represented as `ty::Instance`), but this is not guaranteed to
// agree with runtime equality tests.
ty::FnPtr(_) | ty::RawPtr(_) => None,
ty::Ref(..) => unimplemented!("need to use deref_const"),
// Trait objects are not allowed in type level constants, as we have no concept for
// resolving their backing type, even if we can do that at const eval time. We may
// hypothetically be able to allow `dyn StructuralEq` trait objects in the future,
// but it is unclear if this is useful.
ty::Dynamic(..) => None,
ty::Slice(_) | ty::Str => {
unimplemented!("need to find the backing data of the slice/str and recurse on that")
}
ty::Tuple(substs) => branches(substs.len(), None),
ty::Array(_, len) => branches(usize::try_from(len.eval_usize(ecx.tcx.tcx, ecx.param_env)).unwrap(), None),
ty::Adt(def, _) => {
if def.variants().is_empty() {
bug!("uninhabited types should have errored and never gotten converted to valtree")
}
let variant = ecx.read_discriminant(&place.into()).unwrap().1;
branches(def.variant(variant).fields.len(), def.is_enum().then_some(variant))
}
ty::Never
| ty::Error(_)
| ty::Foreign(..)
| ty::Infer(ty::FreshIntTy(_))
| ty::Infer(ty::FreshFloatTy(_))
| ty::Projection(..)
| ty::Param(_)
| ty::Bound(..)
| ty::Placeholder(..)
// FIXME(oli-obk): we could look behind opaque types
| ty::Opaque(..)
| ty::Infer(_)
// FIXME(oli-obk): we can probably encode closures just like structs
| ty::Closure(..)
| ty::Generator(..)
| ty::GeneratorWitness(..) => None,
}
}

/// This function should never fail for validated constants. However, it is also invoked from the
/// pretty printer, which might attempt to format invalid constants; in that case it may fail.
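/// On success, the result holds the variant index (`Some` for ADTs, `None` for
/// tuples and arrays) and one `ty::Const` per field.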
pub(crate) fn try_destructure_const<'tcx>(
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
val: ty::Const<'tcx>,
) -> InterpResult<'tcx, mir::DestructuredConst<'tcx>> {
trace!("destructure_const: {:?}", val);
let ecx = mk_eval_cx(tcx, DUMMY_SP, param_env, false);
let op = ecx.const_to_op(val, None)?;
// We go to `usize` as we cannot allocate anything bigger anyway.
let (field_count, variant, down) = match val.ty().kind() {
ty::Array(_, len) => (usize::try_from(len.eval_usize(tcx, param_env)).unwrap(), None, op),
// Check that we have at least one variant, to avoid downcasting to a non-existing variant
// (when there are no variants, `read_discriminant` still successfully returns a non-existing
// variant index).
ty::Adt(def, _) if def.variants().is_empty() => throw_ub!(Unreachable),
ty::Adt(def, _) => {
let variant = ecx.read_discriminant(&op)?.1;
let down = ecx.operand_downcast(&op, variant)?;
(def.variant(variant).fields.len(), Some(variant), down)
}
ty::Tuple(substs) => (substs.len(), None, op),
_ => bug!("cannot destructure constant {:?}", val),
};
let fields = (0..field_count)
.map(|i| {
let field_op = ecx.operand_field(&down, i)?;
let val = op_to_const(&ecx, &field_op);
Ok(ty::Const::from_value(tcx, val, field_op.layout.ty))
})
.collect::<InterpResult<'tcx, Vec<_>>>()?;
let fields = tcx.arena.alloc_from_iter(fields);
Ok(mir::DestructuredConst { variant, fields })
}
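
/// Dereferences a reference/pointer constant and returns the pointee as a new
/// `ty::Const`. Only immutable allocations may be dereferenced here, so that
/// pattern matching cannot observe mutable statics. For slice pointees, the
/// length stored in the metadata is used to produce the corresponding array type.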
pub(crate) fn deref_const<'tcx>(
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
val: ty::Const<'tcx>,
) -> ty::Const<'tcx> {
trace!("deref_const: {:?}", val);
let ecx = mk_eval_cx(tcx, DUMMY_SP, param_env, false);
let op = ecx.const_to_op(val, None).unwrap();
let mplace = ecx.deref_operand(&op).unwrap();
if let Some(alloc_id) = mplace.ptr.provenance {
assert_eq!(
tcx.get_global_alloc(alloc_id).unwrap().unwrap_memory().inner().mutability,
Mutability::Not,
"deref_const cannot be used with mutable allocations as \
that could allow pattern matching to observe mutable statics",
);
}
let ty = match mplace.meta {
MemPlaceMeta::None => mplace.layout.ty,
MemPlaceMeta::Poison => bug!("poison metadata in `deref_const`: {:#?}", mplace),
// In case of unsized types, figure out the real type behind the metadata.
MemPlaceMeta::Meta(scalar) => match mplace.layout.ty.kind() {
ty::Str => bug!("there's no sized equivalent of a `str`"),
ty::Slice(elem_ty) => tcx.mk_array(*elem_ty, scalar.to_machine_usize(&tcx).unwrap()),
_ => bug!(
"type {} should not have metadata, but had {:?}",
mplace.layout.ty,
mplace.meta
),
},
};
tcx.mk_const(ty::ConstS { val: ty::ConstKind::Value(op_to_const(&ecx, &mplace.into())), ty })
}