@@ -408,10 +408,18 @@ impl Cache {
                 // First up, test if this `lib` has any segment containing the
                 // `addr` (handling relocation). If this check passes then we
                 // can continue below and actually translate the address.
+                //
+                // Note that we're using `wrapping_add` here to avoid overflow
+                // checks. It's been seen in the wild that the SVMA + bias
+                // computation overflows. It seems a bit odd that would happen
+                // but there's not a huge amount we can do about it other than
+                // probably just ignore those segments since they're likely
+                // pointing off into space. This originally came up in
+                // rust-lang/backtrace-rs#329.
                 if !lib.segments.iter().any(|s| {
-                    let svma = s.stated_virtual_memory_address as usize;
-                    let start = svma + lib.bias as usize;
-                    let end = start + s.len;
+                    let svma = s.stated_virtual_memory_address;
+                    let start = svma.wrapping_add(lib.bias);
+                    let end = start.wrapping_add(s.len);
                     let address = addr as usize;
                     start <= address && address < end
                 }) {
@@ -420,7 +428,7 @@ impl Cache {
 
                 // Now that we know `lib` contains `addr`, we can offset with
                 // the bias to find the stated virtual memory address.
-                let svma = addr as usize - lib.bias as usize;
+                let svma = (addr as usize).wrapping_sub(lib.bias);
                 Some((i, svma as *const u8))
             })
             .next()
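
For context, here is a small, self-contained sketch of the behavior this change guards against. The `Segment`, `contains`, and `avma_to_svma` names below are illustrative stand-ins, not the crate's real types or functions; the sketch only mirrors the logic in the diff. The point is that with plain `+`, a bogus SVMA near `usize::MAX` would overflow and panic in debug builds, whereas `wrapping_add`/`wrapping_sub` let such a segment simply fail the range check and be ignored.

// Illustrative sketch only: hypothetical stand-ins for the crate's
// segment handling, mirroring the wrapping arithmetic in the diff.

struct Segment {
    stated_virtual_memory_address: usize,
    len: usize,
}

// Mirrors the first hunk: does this segment, once relocated by `bias`,
// contain `addr`? Wrapping arithmetic means a bogus SVMA just fails
// the range check instead of overflowing (and panicking in debug builds).
fn contains(seg: &Segment, bias: usize, addr: usize) -> bool {
    let start = seg.stated_virtual_memory_address.wrapping_add(bias);
    let end = start.wrapping_add(seg.len);
    start <= addr && addr < end
}

// Mirrors the second hunk: translate an actual (loaded) address back
// to the stated virtual memory address by undoing the bias.
fn avma_to_svma(seg: &Segment, bias: usize, addr: usize) -> Option<usize> {
    if !contains(seg, bias, addr) {
        return None;
    }
    Some(addr.wrapping_sub(bias))
}

fn main() {
    // A normal segment: SVMA 0x1000, length 0x2000, loaded with a bias
    // of 0x5000, so it occupies [0x6000, 0x8000) in memory.
    let seg = Segment {
        stated_virtual_memory_address: 0x1000,
        len: 0x2000,
    };
    assert_eq!(avma_to_svma(&seg, 0x5000, 0x6800), Some(0x1800));
    assert_eq!(avma_to_svma(&seg, 0x5000, 0x9000), None);

    // A pathological segment whose SVMA + bias overflows: with
    // `wrapping_add` it is simply ignored rather than panicking.
    let bogus = Segment {
        stated_virtual_memory_address: usize::MAX - 0x10,
        len: 0x100,
    };
    assert_eq!(avma_to_svma(&bogus, 0x5000, 0x6800), None);
}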