 #![feature(alloc)]
 #![feature(box_syntax)]
 #![feature(core_intrinsics)]
+#![feature(drop_in_place)]
+#![feature(raw)]
 #![feature(heap_api)]
 #![feature(oom)]
-#![feature(ptr_as_ref)]
 #![feature(raw)]
 #![feature(staged_api)]
 #![feature(dropck_parametricity)]
@@ -47,9 +48,13 @@ use std::intrinsics;
 use std::marker;
 use std::mem;
 use std::ptr;
+use std::raw;
+use std::raw::Repr;
 use std::rc::Rc;
+use std::slice;

-use alloc::heap::{allocate, deallocate};
+use alloc::heap;
+use alloc::raw_vec::RawVec;

 // The way arena uses arrays is really deeply awful. The arrays are
 // allocated, and have capacities reserved, but the fill for the array
@@ -366,115 +371,87 @@ fn test_arena_destructors_fail() {
 /// A faster arena that can hold objects of only one type.
 pub struct TypedArena<T> {
     /// A pointer to the next object to be allocated.
-    ptr: Cell<*const T>,
+    ptr: Cell<*mut T>,

     /// A pointer to the end of the allocated area. When this pointer is
     /// reached, a new chunk is allocated.
-    end: Cell<*const T>,
+    end: Cell<*mut T>,

-    /// A pointer to the first arena segment.
-    first: RefCell<*mut TypedArenaChunk<T>>,
+    /// A vector of arena segments.
+    chunks: RefCell<Vec<TypedArenaChunk<T>>>,

     /// Marker indicating that dropping the arena causes its owned
     /// instances of `T` to be dropped.
     _own: marker::PhantomData<T>,
 }

 struct TypedArenaChunk<T> {
-    marker: marker::PhantomData<T>,
-
     /// Pointer to the next arena segment.
-    next: *mut TypedArenaChunk<T>,
-
-    /// The number of elements that this chunk can hold.
-    capacity: usize,
-
-    // Objects follow here, suitably aligned.
-}
-
-fn calculate_size<T>(capacity: usize) -> usize {
-    let mut size = mem::size_of::<TypedArenaChunk<T>>();
-    size = round_up(size, mem::align_of::<T>());
-    let elem_size = mem::size_of::<T>();
-    let elems_size = elem_size.checked_mul(capacity).unwrap();
-    size = size.checked_add(elems_size).unwrap();
-    size
+    storage: RawVec<T>,
 }

 impl<T> TypedArenaChunk<T> {
     #[inline]
-    unsafe fn new(next: *mut TypedArenaChunk<T>, capacity: usize) -> *mut TypedArenaChunk<T> {
-        let size = calculate_size::<T>(capacity);
-        let chunk =
-            allocate(size, mem::align_of::<TypedArenaChunk<T>>()) as *mut TypedArenaChunk<T>;
-        if chunk.is_null() {
-            alloc::oom()
-        }
-        (*chunk).next = next;
-        (*chunk).capacity = capacity;
-        chunk
+    unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
+        TypedArenaChunk { storage: RawVec::with_capacity(capacity) }
     }

-    /// Destroys this arena chunk. If the type descriptor is supplied, the
-    /// drop glue is called; otherwise, drop glue is not called.
+    /// Destroys this arena chunk.
     #[inline]
     unsafe fn destroy(&mut self, len: usize) {
-        // Destroy all the allocated objects.
+        // The branch on needs_drop() is an -O1 performance optimization.
+        // Without the branch, dropping TypedArena<u8> takes linear time.
         if intrinsics::needs_drop::<T>() {
             let mut start = self.start();
+            // Destroy all allocated objects.
             for _ in 0..len {
-                ptr::read(start as *const T); // run the destructor on the pointer
-                start = start.offset(mem::size_of::<T>() as isize)
+                ptr::drop_in_place(start);
+                start = start.offset(1);
             }
         }
-
-        // Destroy the next chunk.
-        let next = self.next;
-        let size = calculate_size::<T>(self.capacity);
-        let self_ptr: *mut TypedArenaChunk<T> = self;
-        deallocate(self_ptr as *mut u8,
-                   size,
-                   mem::align_of::<TypedArenaChunk<T>>());
-        if !next.is_null() {
-            let capacity = (*next).capacity;
-            (*next).destroy(capacity);
-        }
     }

     // Returns a pointer to the first allocated object.
     #[inline]
-    fn start(&self) -> *const u8 {
-        let this: *const TypedArenaChunk<T> = self;
-        unsafe { round_up(this.offset(1) as usize, mem::align_of::<T>()) as *const u8 }
+    fn start(&self) -> *mut T {
+        self.storage.ptr()
     }

     // Returns a pointer to the end of the allocated space.
     #[inline]
-    fn end(&self) -> *const u8 {
+    fn end(&self) -> *mut T {
         unsafe {
-            let size = mem::size_of::<T>().checked_mul(self.capacity).unwrap();
-            self.start().offset(size as isize)
+            if mem::size_of::<T>() == 0 {
+                // A pointer as large as possible for zero-sized elements.
+                !0 as *mut T
+            } else {
+                self.start().offset(self.storage.cap() as isize)
+            }
         }
     }
 }

+const PAGE: usize = 4096;
+
 impl<T> TypedArena<T> {
-    /// Creates a new `TypedArena` with preallocated space for eight objects.
+    /// Creates a new `TypedArena` with preallocated space for many objects.
     #[inline]
     pub fn new() -> TypedArena<T> {
-        TypedArena::with_capacity(8)
+        // Reserve at least one page.
+        let elem_size = cmp::max(1, mem::size_of::<T>());
+        TypedArena::with_capacity(PAGE / elem_size)
     }

     /// Creates a new `TypedArena` with preallocated space for the given number of
     /// objects.
     #[inline]
     pub fn with_capacity(capacity: usize) -> TypedArena<T> {
         unsafe {
-            let chunk = TypedArenaChunk::<T>::new(ptr::null_mut(), capacity);
+            let chunk = TypedArenaChunk::<T>::new(cmp::max(1, capacity));
             TypedArena {
-                ptr: Cell::new((*chunk).start() as *const T),
-                end: Cell::new((*chunk).end() as *const T),
-                first: RefCell::new(chunk),
+                ptr: Cell::new(chunk.start()),
+                end: Cell::new(chunk.end()),
+                chunks: RefCell::new(vec![chunk]),
                 _own: marker::PhantomData,
             }
         }
@@ -488,23 +465,39 @@ impl<T> TypedArena<T> {
         }

         unsafe {
-            let ptr: &mut T = &mut *(self.ptr.get() as *mut T);
-            ptr::write(ptr, object);
-            self.ptr.set(self.ptr.get().offset(1));
-            ptr
+            if mem::size_of::<T>() == 0 {
+                self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);
+                let ptr = heap::EMPTY as *mut T;
+                // Don't drop the object. This `write` is equivalent to `forget`.
+                ptr::write(ptr, object);
+                &mut *ptr
+            } else {
+                let ptr = self.ptr.get();
+                // Advance the pointer.
+                self.ptr.set(self.ptr.get().offset(1));
+                // Write into uninitialized memory.
+                ptr::write(ptr, object);
+                &mut *ptr
+            }
         }
     }

     /// Grows the arena.
     #[inline(never)]
+    #[cold]
     fn grow(&self) {
         unsafe {
-            let chunk = *self.first.borrow_mut();
-            let new_capacity = (*chunk).capacity.checked_mul(2).unwrap();
-            let chunk = TypedArenaChunk::<T>::new(chunk, new_capacity);
-            self.ptr.set((*chunk).start() as *const T);
-            self.end.set((*chunk).end() as *const T);
-            *self.first.borrow_mut() = chunk
+            let mut chunks = self.chunks.borrow_mut();
+            let prev_capacity = chunks.last().unwrap().storage.cap();
+            let new_capacity = prev_capacity.checked_mul(2).unwrap();
+            if chunks.last_mut().unwrap().storage.double_in_place() {
+                self.end.set(chunks.last().unwrap().end());
+            } else {
+                let chunk = TypedArenaChunk::<T>::new(new_capacity);
+                self.ptr.set(chunk.start());
+                self.end.set(chunk.end());
+                chunks.push(chunk);
+            }
         }
     }
 }
@@ -514,12 +507,26 @@ impl<T> Drop for TypedArena<T> {
     fn drop(&mut self) {
         unsafe {
             // Determine how much was filled.
-            let start = self.first.borrow().as_ref().unwrap().start() as usize;
+            let mut chunks_borrow = self.chunks.borrow_mut();
+            let mut last_chunk = chunks_borrow.pop().unwrap();
+            let start = last_chunk.start() as usize;
             let end = self.ptr.get() as usize;
-            let diff = (end - start) / mem::size_of::<T>();
+            let diff = if mem::size_of::<T>() == 0 {
+                // Avoid division by zero.
+                end - start
+            } else {
+                (end - start) / mem::size_of::<T>()
+            };

             // Pass that to the `destroy` method.
-            (**self.first.borrow_mut()).destroy(diff)
+            last_chunk.destroy(diff);
+            // Destroy this chunk.
+            let _: RawVec<T> = mem::transmute(last_chunk);
+
+            for chunk in chunks_borrow.iter_mut() {
+                let cap = chunk.storage.cap();
+                chunk.destroy(cap);
+            }
         }
     }
 }
@@ -657,4 +664,12 @@ mod tests {
             })
         })
     }
+
+    #[test]
+    pub fn test_zero_sized() {
+        let arena = TypedArena::new();
+        for _ in 0..100000 {
+            arena.alloc(());
+        }
+    }
 }
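
For reference, a minimal usage sketch of the arena after this change. It assumes only the `TypedArena::new`, `TypedArena::with_capacity`, and `alloc` API visible in the diff; the zero-sized case mirrors the new `test_zero_sized` test above.

// Sketch only: exercises the TypedArena API as it appears in this diff.
fn usage_sketch() {
    // Sized values: each `alloc` hands back a `&mut T` that lives as long as the arena,
    // written into the current chunk via a simple pointer bump.
    let arena: TypedArena<u64> = TypedArena::with_capacity(1024);
    let a = arena.alloc(1);
    let b = arena.alloc(2);
    assert_eq!(*a + *b, 3);

    // Zero-sized values never touch chunk memory; the arena only bumps its pointer
    // so that Drop can later count how many values were allocated.
    let units: TypedArena<()> = TypedArena::new();
    for _ in 0..100_000 {
        units.alloc(());
    }
}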