@@ -81,8 +81,8 @@ pub struct Arena {
     // The head is separated out from the list as an unbenchmarked
     // microoptimization, to avoid needing to case on the list to access the
     // head.
-    head: RefCell<Chunk>,
-    copy_head: RefCell<Chunk>,
+    head: Chunk,
+    copy_head: Chunk,
     chunks: RefCell<Vec<Chunk>>,
 }
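
This hunk sets up the whole patch: `head` and `copy_head` lose their `RefCell` wrappers, so the arena mutates plain fields through `&mut self` instead of paying a runtime borrow check. A minimal sketch of the trade-off, in modern Rust syntax (the `Before`/`After` types are illustrative, not from the patch):

    use std::cell::RefCell;

    struct Chunk { fill: usize }

    // Before: interior mutability lets methods take `&self`,
    // at the cost of a dynamic borrow check on every access.
    struct Before { head: RefCell<Chunk> }
    impl Before {
        fn bump(&self, n: usize) {
            self.head.borrow_mut().fill += n; // checked at runtime
        }
    }

    // After: a plain field requires `&mut self`, but access is free
    // and checked statically.
    struct After { head: Chunk }
    impl After {
        fn bump(&mut self, n: usize) {
            self.head.fill += n;
        }
    }
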
@@ -95,8 +95,8 @@ impl Arena {
     /// Allocate a new Arena with `initial_size` bytes preallocated.
     pub fn new_with_size(initial_size: uint) -> Arena {
         Arena {
-            head: RefCell::new(chunk(initial_size, false)),
-            copy_head: RefCell::new(chunk(initial_size, true)),
+            head: chunk(initial_size, false),
+            copy_head: chunk(initial_size, true),
             chunks: RefCell::new(Vec::new()),
         }
     }
@@ -114,7 +114,7 @@ fn chunk(size: uint, is_copy: bool) -> Chunk {
 impl Drop for Arena {
     fn drop(&mut self) {
         unsafe {
-            destroy_chunk(&*self.head.borrow());
+            destroy_chunk(&self.head);
             for chunk in self.chunks.borrow().iter() {
                 if !chunk.is_copy.get() {
                     destroy_chunk(chunk);
@@ -171,40 +171,38 @@ fn un_bitpack_tydesc_ptr(p: uint) -> (*TyDesc, bool) {

 impl Arena {
     fn chunk_size(&self) -> uint {
-        self.copy_head.borrow().capacity()
+        self.copy_head.capacity()
     }
-
     // Functions for the POD part of the arena
-    fn alloc_copy_grow(&self, n_bytes: uint, align: uint) -> *u8 {
+    fn alloc_copy_grow(&mut self, n_bytes: uint, align: uint) -> *u8 {
         // Allocate a new chunk.
         let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
-        self.chunks.borrow_mut().push(self.copy_head.borrow().clone());
-
-        *self.copy_head.borrow_mut() =
+        self.chunks.borrow_mut().push(self.copy_head.clone());
+        self.copy_head =
             chunk(num::next_power_of_two(new_min_chunk_size + 1u), true);

         return self.alloc_copy_inner(n_bytes, align);
     }

     #[inline]
-    fn alloc_copy_inner(&self, n_bytes: uint, align: uint) -> *u8 {
-        let start = round_up(self.copy_head.borrow().fill.get(), align);
-
-        let end = start + n_bytes;
-        if end > self.chunk_size() {
-            return self.alloc_copy_grow(n_bytes, align);
-        }
+    fn alloc_copy_inner(&mut self, n_bytes: uint, align: uint) -> *u8 {
+        unsafe {
+            let start = round_up(self.copy_head.fill.get(), align);
+            let end = start + n_bytes;
+            if end > self.chunk_size() {
+                return self.alloc_copy_grow(n_bytes, align);
+            }
+            self.copy_head.fill.set(end);

-        let copy_head = self.copy_head.borrow();
-        copy_head.fill.set(end);
+            //debug!("idx = {}, size = {}, align = {}, fill = {}",
+            //       start, n_bytes, align, head.fill.get());

-        unsafe {
-            copy_head.as_ptr().offset(start as int)
+            self.copy_head.as_ptr().offset(start as int)
         }
     }

     #[inline]
-    fn alloc_copy<'a, T>(&'a self, op: || -> T) -> &'a T {
+    fn alloc_copy<'a, T>(&'a mut self, op: || -> T) -> &'a T {
         unsafe {
             let ptr = self.alloc_copy_inner(mem::size_of::<T>(),
                                             mem::min_align_of::<T>());
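
`alloc_copy_inner` is a textbook bump allocator: round the chunk's fill offset up to the requested alignment, fail over to `alloc_copy_grow` when the chunk is full, otherwise advance the fill. A standalone sketch of that arithmetic in modern Rust (`round_up` mirrors the helper defined elsewhere in this file; `try_bump` is an illustrative name, not the patch's API):

    fn round_up(base: usize, align: usize) -> usize {
        (base + align - 1) & !(align - 1) // `align` must be a power of two
    }

    // Returns the byte offset of the allocation within the chunk,
    // or None when the caller must grow a new chunk.
    fn try_bump(fill: &mut usize, capacity: usize,
                n_bytes: usize, align: usize) -> Option<usize> {
        let start = round_up(*fill, align);
        let end = start + n_bytes;
        if end > capacity {
            return None;
        }
        *fill = end;
        Some(start)
    }

For example, with fill = 3 and align = 8, a 16-byte request starts at offset 8 and leaves fill = 24.
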
@@ -215,48 +213,42 @@ impl Arena {
     }

     // Functions for the non-POD part of the arena
-    fn alloc_noncopy_grow(&self, n_bytes: uint, align: uint) -> (*u8, *u8) {
+    fn alloc_noncopy_grow(&mut self, n_bytes: uint, align: uint)
+                          -> (*u8, *u8) {
         // Allocate a new chunk.
         let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
-        self.chunks.borrow_mut().push(self.head.borrow().clone());
-
-        *self.head.borrow_mut() =
+        self.chunks.borrow_mut().push(self.head.clone());
+        self.head =
             chunk(num::next_power_of_two(new_min_chunk_size + 1u), false);

         return self.alloc_noncopy_inner(n_bytes, align);
     }

     #[inline]
-    fn alloc_noncopy_inner(&self, n_bytes: uint, align: uint) -> (*u8, *u8) {
-        // Be careful to not maintain any `head` borrows active, because
-        // `alloc_noncopy_grow` borrows it mutably.
-        let (start, end, tydesc_start, head_capacity) = {
-            let head = self.head.borrow();
-            let fill = head.fill.get();
-
-            let tydesc_start = fill;
-            let after_tydesc = fill + mem::size_of::<*TyDesc>();
+    fn alloc_noncopy_inner(&mut self, n_bytes: uint, align: uint)
+                           -> (*u8, *u8) {
+        unsafe {
+            let tydesc_start = self.head.fill.get();
+            let after_tydesc = self.head.fill.get() + mem::size_of::<*TyDesc>();
             let start = round_up(after_tydesc, align);
             let end = start + n_bytes;

-            (start, end, tydesc_start, head.capacity())
-        };
+            if end > self.head.capacity() {
+                return self.alloc_noncopy_grow(n_bytes, align);
+            }

-        if end > head_capacity {
-            return self.alloc_noncopy_grow(n_bytes, align);
-        }
+            self.head.fill.set(round_up(end, mem::align_of::<*TyDesc>()));

-        let head = self.head.borrow();
-        head.fill.set(round_up(end, mem::align_of::<*TyDesc>()));
+            //debug!("idx = {}, size = {}, align = {}, fill = {}",
+            //       start, n_bytes, align, head.fill);

-        unsafe {
-            let buf = head.as_ptr();
+            let buf = self.head.as_ptr();

             return (buf.offset(tydesc_start as int), buf.offset(start as int));
         }
     }

     #[inline]
-    fn alloc_noncopy<'a, T>(&'a self, op: || -> T) -> &'a T {
+    fn alloc_noncopy<'a, T>(&'a mut self, op: || -> T) -> &'a T {
         unsafe {
             let tydesc = get_tydesc::<T>();
             let (ty_ptr, ptr) =
@@ -282,10 +274,12 @@ impl Arena {
     #[inline]
     pub fn alloc<'a, T>(&'a self, op: || -> T) -> &'a T {
         unsafe {
+            // FIXME #13933: Remove/justify all `&T` to `&mut T` transmutes
+            let this: &mut Arena = mem::transmute::<&_, &mut _>(self);
             if intrinsics::needs_drop::<T>() {
-                self.alloc_noncopy(op)
+                this.alloc_noncopy(op)
             } else {
-                self.alloc_copy(op)
+                this.alloc_copy(op)
             }
         }
     }
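
The FIXME marks the load-bearing trick of the patch: `alloc` keeps its `&self` signature so callers can allocate through a shared reference, but internally converts to `&mut Arena` to reach the now-plain fields. Transmuting `&T` to `&mut T` is exactly what the FIXME wants removed or justified; the sketch below shows the shape of the sound alternative in modern Rust, using `UnsafeCell` (illustrative only, not the patch's code):

    use std::cell::UnsafeCell;

    struct Arena { fill: UnsafeCell<usize> }

    impl Arena {
        // Advance the fill offset by `n` bytes through `&self`.
        fn bump(&self, n: usize) -> usize {
            // SAFETY: UnsafeCell makes Arena !Sync, and no reference to
            // `fill` escapes this call, so this is the only live access.
            unsafe {
                let fill = &mut *self.fill.get();
                let start = *fill;
                *fill += n;
                start
            }
        }
    }
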
@@ -304,20 +298,6 @@ fn test_arena_destructors() {
     }
 }

-#[test]
-fn test_arena_alloc_nested() {
-    struct Inner { value: uint }
-    struct Outer<'a> { inner: &'a Inner }
-
-    let arena = Arena::new();
-
-    let result = arena.alloc(|| Outer {
-        inner: arena.alloc(|| Inner { value: 10 })
-    });
-
-    assert_eq!(result.inner.value, 10);
-}
-
 #[test]
 #[should_fail]
 fn test_arena_destructors_fail() {
@@ -345,20 +325,19 @@ fn test_arena_destructors_fail() {
 /// run again for these objects.
 pub struct TypedArena<T> {
     /// A pointer to the next object to be allocated.
-    ptr: Cell<*T>,
+    ptr: *T,

     /// A pointer to the end of the allocated area. When this pointer is
     /// reached, a new chunk is allocated.
-    end: Cell<*T>,
+    end: *T,

     /// A pointer to the first arena segment.
-    first: RefCell<TypedArenaChunkRef<T>>,
+    first: Option<Box<TypedArenaChunk<T>>>,
 }
-type TypedArenaChunkRef<T> = Option<Box<TypedArenaChunk<T>>>;

 struct TypedArenaChunk<T> {
     /// Pointer to the next arena segment.
-    next: TypedArenaChunkRef<T>,
+    next: Option<Box<TypedArenaChunk<T>>>,

     /// The number of elements that this chunk can hold.
     capacity: uint,
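
With the `TypedArenaChunkRef` alias gone, the chunk list is spelled out as `Option<Box<TypedArenaChunk<T>>>`: an owned singly linked list whose head can be moved out wholesale. A minimal sketch of that shape and of how growth prepends a chunk (modern Rust, illustrative names):

    struct Chunk {
        next: Option<Box<Chunk>>, // owns the rest of the chain
        capacity: usize,
    }

    // Growing prepends: the old head chain becomes `next` of the new
    // head, as `grow` does below with take_unwrap.
    fn prepend(first: &mut Option<Box<Chunk>>, new_capacity: usize) {
        let old = first.take();
        *first = Some(Box::new(Chunk { next: old, capacity: new_capacity }));
    }
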
@@ -444,52 +423,53 @@ impl<T> TypedArena<T> {
     pub fn with_capacity(capacity: uint) -> TypedArena<T> {
         let chunk = TypedArenaChunk::<T>::new(None, capacity);
         TypedArena {
-            ptr: Cell::new(chunk.start() as *T),
-            end: Cell::new(chunk.end() as *T),
-            first: RefCell::new(Some(chunk)),
+            ptr: chunk.start() as *T,
+            end: chunk.end() as *T,
+            first: Some(chunk),
         }
     }

     /// Allocates an object in the TypedArena, returning a reference to it.
     #[inline]
     pub fn alloc<'a>(&'a self, object: T) -> &'a T {
-        if self.ptr == self.end {
-            self.grow()
-        }
+        unsafe {
+            // FIXME #13933: Remove/justify all `&T` to `&mut T` transmutes
+            let this: &mut TypedArena<T> = mem::transmute::<&_, &mut _>(self);
+            if this.ptr == this.end {
+                this.grow()
+            }

-        let ptr: &'a T = unsafe {
-            let ptr: &'a mut T = mem::transmute(self.ptr);
+            let ptr: &'a mut T = mem::transmute(this.ptr);
             ptr::write(ptr, object);
-            self.ptr.set(self.ptr.get().offset(1));
+            this.ptr = this.ptr.offset(1);
+            let ptr: &'a T = ptr;
             ptr
-        };
-
-        ptr
+        }
     }

     /// Grows the arena.
     #[inline(never)]
-    fn grow(&self) {
-        let chunk = self.first.borrow_mut().take_unwrap();
+    fn grow(&mut self) {
+        let chunk = self.first.take_unwrap();
         let new_capacity = chunk.capacity.checked_mul(&2).unwrap();
         let chunk = TypedArenaChunk::<T>::new(Some(chunk), new_capacity);
-        self.ptr.set(chunk.start() as *T);
-        self.end.set(chunk.end() as *T);
-        *self.first.borrow_mut() = Some(chunk)
+        self.ptr = chunk.start() as *T;
+        self.end = chunk.end() as *T;
+        self.first = Some(chunk)
     }
 }

 #[unsafe_destructor]
 impl<T> Drop for TypedArena<T> {
     fn drop(&mut self) {
         // Determine how much was filled.
-        let start = self.first.borrow().get_ref().start() as uint;
-        let end = self.ptr.get() as uint;
+        let start = self.first.get_ref().start() as uint;
+        let end = self.ptr as uint;
         let diff = (end - start) / mem::size_of::<T>();

         // Pass that to the `destroy` method.
         unsafe {
-            self.first.borrow_mut().get_mut_ref().destroy(diff)
+            self.first.get_mut_ref().destroy(diff)
         }
     }
 }
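
The destructor recovers how many elements of the newest chunk were initialized purely from pointer arithmetic: `(end - start) / size_of::<T>()`, where `start` is the chunk's data pointer and `end` is the bump pointer. A tiny worked check of that arithmetic (illustrative function, modern Rust):

    fn live_elements(start: usize, end: usize, elem_size: usize) -> usize {
        (end - start) / elem_size
    }

    fn main() {
        // 0x30 bytes filled / 8 bytes per element = 6 elements to destroy.
        assert_eq!(live_elements(0x1000, 0x1030, 8), 6);
    }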