 extern crate libc;

 use core::ptr;
+use core::sync::atomic::{AtomicPtr, AtomicUsize, Ordering};

 // The minimum alignment guaranteed by the architecture. This value is used to
 // add fast paths for low alignment values. In practice, the alignment is a
@@ -47,26 +48,29 @@ const MIN_ALIGN: usize = 16;
 const CHUNK_SIZE: usize = 4096 * 16;
 const CHUNK_ALIGN: usize = 4096;

-static mut HEAP: *mut u8 = ptr::null_mut();
-static mut HEAP_LEFT: usize = 0;
+static HEAP: AtomicPtr<u8> = AtomicPtr::new(ptr::null_mut());
+static HEAP_LEFT: AtomicUsize = AtomicUsize::new(0);

 #[no_mangle]
 pub extern "C" fn __rust_allocate(size: usize, align: usize) -> *mut u8 {
     let new_align = if align < MIN_ALIGN { MIN_ALIGN } else { align };
     let new_size = (size + new_align - 1) & !(new_align - 1);

     unsafe {
-        if new_size < HEAP_LEFT {
-            HEAP_LEFT -= new_size;
-            let p = HEAP;
-            HEAP = HEAP.offset(new_size as isize);
-            return p;
-        } else if new_size > CHUNK_SIZE {
+        if new_size > CHUNK_SIZE {
             return imp::allocate(size, align);
+        }
+
+        let heap = HEAP.load(Ordering::SeqCst);
+        let heap_left = HEAP_LEFT.load(Ordering::SeqCst);
+        if new_size < heap_left {
+            HEAP_LEFT.store(heap_left - new_size, Ordering::SeqCst);
+            HEAP.store(heap.offset(new_size as isize), Ordering::SeqCst);
+            return heap;
         } else {
-            HEAP_LEFT = CHUNK_SIZE - new_size;
+            HEAP_LEFT.store(CHUNK_SIZE - new_size, Ordering::SeqCst);
             let p = imp::allocate(CHUNK_SIZE, CHUNK_ALIGN);
-            HEAP.store(p.offset(new_size as isize), Ordering::SeqCst);
             return p;
         }
     }
 }
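Note on the change above: each load and store on HEAP and HEAP_LEFT is individually atomic, but the bump step as a whole (load, compute, store) is not, so two threads allocating at the same time can still observe the same HEAP value. A minimal sketch of how the same bump step could be expressed as a single compare-and-swap loop, using the same HEAP / HEAP_LEFT statics; try_bump is a hypothetical helper for illustration, not part of this commit:

// Hypothetical sketch: advance HEAP by `new_size` bytes with a CAS loop, so a
// concurrent caller that races on the same chunk retries instead of receiving
// the same pointer twice. Not part of the commit above.
fn try_bump(new_size: usize) -> Option<*mut u8> {
    loop {
        let heap = HEAP.load(Ordering::SeqCst);
        let heap_left = HEAP_LEFT.load(Ordering::SeqCst);
        if heap.is_null() || new_size >= heap_left {
            // No chunk yet, or not enough room left: caller must grab a new chunk.
            return None;
        }
        let next = unsafe { heap.offset(new_size as isize) };
        // Publish the new bump pointer only if no other thread moved it first.
        if HEAP
            .compare_exchange(heap, next, Ordering::SeqCst, Ordering::SeqCst)
            .is_ok()
        {
            // HEAP_LEFT is updated separately here, so it can still lag under
            // contention; a fuller fix would track an end pointer instead.
            HEAP_LEFT.store(heap_left - new_size, Ordering::SeqCst);
            return Some(heap);
        }
        // Another thread won the race; retry with fresh values.
    }
}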