@@ -51,6 +51,11 @@ pub struct Memory<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
     /// a static creates a copy here, in the machine.
     alloc_map: FxHashMap<AllocId, (MemoryKind<M::MemoryKinds>, Allocation)>,
 
+    /// To be able to compare pointers with NULL, and to check alignment for accesses
+    /// to ZSTs (where pointers may dangle), we keep track of the size even for allocations
+    /// that do not exist any more.
+    dead_alloc_map: FxHashMap<AllocId, (Size, Align)>,
+
     pub tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
 }
 
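The new `dead_alloc_map` field is the core of this change: when an allocation is freed, its size and alignment survive in this map so that later NULL comparisons and ZST alignment checks can still be answered. A minimal standalone sketch of that bookkeeping, using plain `std` types instead of miri's `AllocId`/`Size`/`Align` (all names below are illustrative, not miri's API):

    use std::collections::HashMap;

    struct Memory {
        // live allocations: id -> (bytes, align)
        alloc_map: HashMap<u64, (Vec<u8>, u64)>,
        // freed allocations: id -> (size, align), kept so that dangling
        // pointers can still be bounds- and alignment-checked
        dead_alloc_map: HashMap<u64, (u64, u64)>,
    }

    impl Memory {
        fn deallocate(&mut self, id: u64) {
            if let Some((bytes, align)) = self.alloc_map.remove(&id) {
                // remember the size and align of the now-dead allocation
                let old = self.dead_alloc_map.insert(id, (bytes.len() as u64, align));
                assert!(old.is_none(), "nothing can be deallocated twice");
            }
        }

        fn size_and_align(&self, id: u64) -> Option<(u64, u64)> {
            // prefer the live allocation, otherwise consult the dead map
            self.alloc_map
                .get(&id)
                .map(|(bytes, align)| (bytes.len() as u64, *align))
                .or_else(|| self.dead_alloc_map.get(&id).copied())
        }
    }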
@@ -74,6 +79,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
         Memory {
             data,
             alloc_map: FxHashMap::default(),
+            dead_alloc_map: FxHashMap::default(),
             tcx,
         }
     }
@@ -150,6 +156,8 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
         size_and_align: Option<(Size, Align)>,
         kind: MemoryKind<M::MemoryKinds>,
     ) -> EvalResult<'tcx> {
+        debug!("deallocating: {}", ptr.alloc_id);
+
         if ptr.offset.bytes() != 0 {
             return err!(DeallocateNonBasePtr);
         }
@@ -189,23 +197,41 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
             }
         }
 
-        debug!("deallocated : {}", ptr.alloc_id);
+        // Don't forget to remember size and align of this now-dead allocation
+        let old = self.dead_alloc_map.insert(
+            ptr.alloc_id,
+            (Size::from_bytes(alloc.bytes.len() as u64), alloc.align)
+        );
+        if old.is_some() {
+            bug!("Nothing can be deallocated twice");
+        }
 
         Ok(())
     }
 
-    /// Check that the pointer is aligned AND non-NULL. This supports scalars
-    /// for the benefit of other parts of miri that need to check alignment even for ZST.
+    /// Check that the pointer is aligned AND non-NULL. This supports ZSTs in two ways:
+    /// You can pass a scalar, and a `Pointer` does not have to actually still be allocated.
     pub fn check_align(&self, ptr: Scalar, required_align: Align) -> EvalResult<'tcx> {
         // Check non-NULL/Undef, extract offset
         let (offset, alloc_align) = match ptr {
             Scalar::Ptr(ptr) => {
-                let alloc = self.get(ptr.alloc_id)?;
-                (ptr.offset.bytes(), alloc.align)
+                let (size, align) = self.get_size_and_align(ptr.alloc_id)?;
+                // check this is not NULL -- which we can ensure only if this is in-bounds
+                // of some (potentially dead) allocation.
+                if ptr.offset > size {
+                    return err!(PointerOutOfBounds {
+                        ptr,
+                        access: true,
+                        allocation_size: size,
+                    });
+                };
+                // keep data for alignment check
+                (ptr.offset.bytes(), align)
             }
             Scalar::Bits { bits, size } => {
                 assert_eq!(size as u64, self.pointer_size().bytes());
                 assert!(bits < (1u128 << self.pointer_size().bits()));
+                // check this is not NULL
                 if bits == 0 {
                     return err!(InvalidNullPointerUsage);
                 }
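For the `Scalar::Ptr` case, `check_align` no longer needs a live allocation, only its size and alignment: being in bounds of some (possibly dead) allocation is what proves the pointer is non-NULL, and using `ptr.offset > size` rather than `>=` deliberately accepts one-past-the-end pointers, where ZST pointers often sit. A simplified sketch of the two checks on plain integers; the final alignment comparison is outside this hunk, so the modulo test below is an assumption about how it works, not miri's exact code:

    /// Hypothetical helper mirroring the logic above: `offset` is the pointer's
    /// offset into an allocation of `alloc_size` bytes aligned to `alloc_align`
    /// (all values in bytes, all names illustrative).
    fn check_align(offset: u64, alloc_size: u64, alloc_align: u64, required_align: u64) -> Result<(), String> {
        // In-bounds check: offset == alloc_size (one past the end) is allowed,
        // and any in-bounds offset implies the pointer is not NULL.
        if offset > alloc_size {
            return Err(format!("pointer out of bounds: offset {offset} in allocation of size {alloc_size}"));
        }
        // Assumed alignment check: the allocation must be sufficiently aligned
        // and the offset must preserve that alignment.
        if alloc_align >= required_align && offset % required_align == 0 {
            Ok(())
        } else {
            Err(format!("alignment {required_align} required, but allocation offers {alloc_align} at offset {offset}"))
        }
    }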
@@ -306,6 +332,21 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
         }
     }
 
+    pub fn get_size_and_align(&self, id: AllocId) -> EvalResult<'tcx, (Size, Align)> {
+        Ok(match self.get(id) {
+            Ok(alloc) => (Size::from_bytes(alloc.bytes.len() as u64), alloc.align),
+            Err(err) => match err.kind {
+                EvalErrorKind::DanglingPointerDeref =>
+                    // This should be in the dead allocation map
+                    *self.dead_alloc_map.get(&id).expect(
+                        "allocation missing in dead_alloc_map"
+                    ),
+                // E.g. a function ptr allocation
+                _ => return Err(err)
+            }
+        })
+    }
+
     pub fn get_mut(
         &mut self,
         id: AllocId,
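Taken together, these pieces let the interpreter answer questions about pointers whose backing allocation is already gone. A hedged, user-level illustration of the kind of code this is meant to support (not taken from the PR's test suite):

    fn main() {
        let b = Box::new(42u32);
        let p = &*b as *const u32;
        drop(b); // the allocation behind `p` is now dead

        // Comparing a dangling raw pointer with NULL only inspects its address,
        // so this is fine at runtime; under the interpreter, answering it means
        // knowing the (now-dead) allocation's extent, which dead_alloc_map keeps.
        assert!(!p.is_null());
    }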