 extern crate libc;

 use core::ptr;
+use core::sync::atomic::{AtomicPtr, AtomicUsize, Ordering};

 // The minimum alignment guaranteed by the architecture. This value is used to
 // add fast paths for low alignment values. In practice, the alignment is a
@@ -47,26 +48,29 @@ const MIN_ALIGN: usize = 16;
 const CHUNK_SIZE: usize = 4096 * 16;
 const CHUNK_ALIGN: usize = 4096;

-static mut HEAP: *mut u8 = ptr::null_mut();
-static mut HEAP_LEFT: usize = 0;
+static HEAP: AtomicPtr<u8> = AtomicPtr::new(ptr::null_mut());
+static HEAP_LEFT: AtomicUsize = AtomicUsize::new(0);

 #[no_mangle]
 pub extern "C" fn __rust_allocate(size: usize, align: usize) -> *mut u8 {
     let new_align = if align < MIN_ALIGN { MIN_ALIGN } else { align };
     let new_size = (size + new_align - 1) & !(new_align - 1);

     unsafe {
-        if new_size < HEAP_LEFT {
-            HEAP_LEFT -= new_size;
-            let p = HEAP;
-            HEAP = HEAP.offset(new_size as isize);
-            return p;
-        } else if new_size > CHUNK_SIZE {
+        if new_size > CHUNK_SIZE {
             return imp::allocate(size, align);
+        }
+
+        let heap = HEAP.load(Ordering::SeqCst);
+        let heap_left = HEAP_LEFT.load(Ordering::SeqCst);
+        if new_size < heap_left {
+            HEAP_LEFT.store(heap_left - new_size, Ordering::SeqCst);
+            HEAP.store(heap.offset(new_size as isize), Ordering::SeqCst);
+            return heap;
         } else {
-            HEAP_LEFT = CHUNK_SIZE - new_size;
+            HEAP_LEFT.store(CHUNK_SIZE - new_size, Ordering::SeqCst);
             let p = imp::allocate(CHUNK_SIZE, CHUNK_ALIGN);
-            HEAP = p.offset(new_size as isize);
+            HEAP.store(p.offset(new_size as isize), Ordering::SeqCst);
             return p;
         }
     }
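As an aside, the round-up `(size + new_align - 1) & !(new_align - 1)` is the usual power-of-two trick: overshoot by `new_align - 1`, then clear the low bits. A quick check with illustrative values (the numbers below are assumptions, not from the commit):

```rust
// Illustrative values only; the trick requires `new_align` to be a power of two.
let (size, new_align) = (20usize, 16usize);
let new_size = (size + new_align - 1) & !(new_align - 1);
assert_eq!(new_size, 32); // 20 rounded up to the next multiple of 16
```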
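One caveat worth noting: the new code still performs the bump as a separate load and store on each atomic, so two threads can read the same `HEAP` value and hand out the same pointer. A minimal sketch of one way to make the bump itself atomic, assuming a single fixed chunk and hypothetical names (`CHUNK`, `CURSOR`, `bump`) that are not part of this commit: keep the allocation cursor in one `AtomicUsize` and claim each range with a compare-exchange loop.

```rust
use core::ptr;
use core::sync::atomic::{AtomicUsize, Ordering};

const CHUNK_SIZE: usize = 4096 * 16;

// Hypothetical backing storage: one fixed chunk instead of refilling from
// imp::allocate, to keep the sketch self-contained. It also ignores
// CHUNK_ALIGN; a real version would align the backing storage.
static mut CHUNK: [u8; CHUNK_SIZE] = [0; CHUNK_SIZE];
// Offset of the next free byte within CHUNK.
static CURSOR: AtomicUsize = AtomicUsize::new(0);

fn bump(new_size: usize) -> *mut u8 {
    let mut offset = CURSOR.load(Ordering::Relaxed);
    loop {
        let end = match offset.checked_add(new_size) {
            Some(end) if end <= CHUNK_SIZE => end,
            _ => return ptr::null_mut(), // chunk exhausted
        };
        // A single compare-exchange claims the range [offset, end);
        // a loser retries with the cursor value the winner installed.
        match CURSOR.compare_exchange_weak(offset, end, Ordering::SeqCst, Ordering::Relaxed) {
            Ok(_) => return unsafe { (ptr::addr_of_mut!(CHUNK) as *mut u8).add(offset) },
            Err(seen) => offset = seen,
        }
    }
}
```

With a single atomic word there is no window between reading the old cursor and publishing the new one, which is exactly the window the load/store pair above leaves open; refilling from a fresh chunk would still need its own synchronization.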