#[feature(managed_boxes)];
-
+#[allow(dead_code)]; // re-enable the check when done with dev.
#[allow(unused_imports)];
use std::cast;
use std::mem;
use std::ptr;
-use std::rt::local_heap::{Box};
+use RawBox = std::unstable::raw::Box;
use std::rt::global_heap;
use std::unstable::intrinsics;
use std::unstable::intrinsics::{TyDesc};

// Reminder:
-// struct Box<T> { refc: uint, desc: *TyDesc, links: Lnx<T>, data: T }
-// where
-//     struct Lnx<T> { prev: *mut Box<T>, next: *mut Box<T> }
+// struct Box<T> {
+//     ref_count: uint, type_desc: *TyDesc,
+//     prev: *mut Box<T>, next: *mut Box<T>, data: T }

/// A Span holds the range `[start, limit)` for a contiguous area of memory.
struct Span {
@@ -22,12 +22,15 @@ struct Span {

impl Span {
    fn new(start: *uint, limit: *uint) -> Span {
-        Span { start: start, limit: limit }
+        let ret = Span { start: start, limit: limit };
+        debug!("Span::new({}, {})", start, limit);
+        ret
    }
    fn tup(&self) -> (*uint, *uint) { (self.start, self.limit) }
    fn from((start, limit): (*uint, *uint)) -> Span { Span::new(start, limit) }
    fn size_bytes(&self) -> uint { (self.limit as uint) - (self.start as uint) }
    fn can_fit(&self, bytes: uint) -> bool { bytes <= self.size_bytes() }
+    fn would_exhaust(&self, bytes: uint) -> bool { !self.can_fit(bytes) }
    unsafe fn shift_start(&mut self, bytes: uint) {
        assert!(self.can_fit(bytes));
        assert!(bytes as int >= 0);
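A minimal sketch of how a `Span` is driven by the bump allocator (the addresses are hypothetical, and `shift_start` is taken to advance `start` by `bytes`):

    let mut avail = Span::new(0x1000 as *uint, 0x2000 as *uint); // 4 KiB region, made-up addresses
    assert!(avail.can_fit(64));
    unsafe { avail.shift_start(64); }           // carve off a 64-byte allocation
    assert!(avail.size_bytes() == 0x1000 - 64); // 4032 bytes remain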
@@ -62,11 +65,16 @@ impl Chunk {
        assert!(size >= mem::size_of::<FutureBox<()>>());
        assert!(size >= mem::size_of::<ForwardedBox<()>>());

+        let word_size = mem::size_of::<uint>();
+        if 0 != size % word_size {
+            fail!("chunks must be multiples of machine words.");
+        }
+
        unsafe {
            let chunk_mem = global_heap::malloc_raw(size);
            let start: *uint = cast::transmute(chunk_mem);
            assert!((size as int) >= 0);
-            let limit: *uint = ptr::offset(start, size as int);
+            let limit: *uint = ptr::offset(start, (size / word_size) as int);
            let block: *mut BigBlock = cast::transmute(start);
            (*block).next = ptr::null();
            (*block).limit = limit;
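The new `limit` computation is the substantive fix in this hunk: `ptr::offset` counts in units of the pointee type rather than bytes, so offsetting a `*uint` by `size` overshot the chunk by a factor of the word size. A small illustration, assuming a 64-bit target where `word_size == 8` (addresses made up):

    let start = 0x1000 as *uint;
    let size = 64u;                   // chunk size in bytes
    // offsetting by `size` would step 64 uints = 512 bytes, ending at 0x1200;
    // offsetting by size / word_size steps 64 bytes, ending at the true limit.
    let limit = unsafe { ptr::offset(start, (size / 8) as int) };
    assert!(limit as uint == 0x1040);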
@@ -90,6 +98,18 @@ impl Chunk {
        let limit = (*b).limit;
        Span::new(start, limit)
    }
+
+    unsafe fn free_all(&mut self) {
+        let mut ptr = self.span.start;
+        let mut next = self.next;
+        loop {
+            global_heap::free_raw(ptr);
+            match next {
+                None => break,
+                Some(p) => { ptr = (*p).span.start; next = (*p).next; }
+            }
+        }
+    }
}
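`free_all` walks the chunk list and copies each node's successor link out before freeing that node, so the loop never reads released memory. The same read-the-link-then-free pattern in isolation, using a hypothetical `Node` type for illustration:

    struct Node { next: *mut Node }

    unsafe fn free_list(head: *mut Node) {
        let mut cur = head;
        while !cur.is_null() {
            let nxt = (*cur).next;                       // copy the link out first...
            global_heap::free_raw(cast::transmute(cur)); // ...then release the node
            cur = nxt;
        }
    }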

/// A Block is a contiguous slice of memory within a Chunk. When
@@ -158,18 +178,24 @@ impl Gc {
    }

    pub fn alloc<T>(&mut self, arg: T) -> @T {
-        #[allow(unused_variable)];
-
        unsafe {
            let tydesc = intrinsics::get_tydesc::<T>();
            let obj = self.alloc_ty_instance(tydesc);
-            fail!("GC::alloc not yet implemented");
+            let obj: *mut RawBox<T> = cast::transmute(obj);
+            // Artificially bump the ref-count so that the Cheney
+            // collector will manage this object.
+            (*obj).ref_count += 1;
+            (*obj).type_desc = tydesc;
+            (*obj).prev = ptr::mut_null();
+            (*obj).next = ptr::mut_null();
+            (*obj).data = arg;
+            let obj: @T = cast::transmute(obj);
+            return obj;
        }
    }
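The final `transmute` leans on the layout spelled out in the `Reminder` comment at the top of the file: an `@T` is represented as a pointer to a box whose header (`ref_count`, `type_desc`, `prev`, `next`) sits directly in front of `data`. A sketch of that assumption in isolation (a hypothetical test, not part of this commit):

    unsafe {
        let b: @int = @5;
        let raw: *mut RawBox<int> = cast::transmute(b); // consumes `b`
        assert!((*raw).ref_count >= 1);
        assert!((*raw).data == 5);
        let _b: @int = cast::transmute(raw); // reconstitute so the box is released normally
    }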

    unsafe fn alloc_ty_instance(&mut self, tydesc: *TyDesc) -> *uint {
        let total_size = global_heap::get_box_size((*tydesc).size, (*tydesc).align);
-        if self.avail.can_fit(total_size) {
+        if self.avail.would_exhaust(total_size) {
            // TODO: if total_size is large enough, consider
            // allocating a separate chunk for it rather than
            // immediately jumping into a Gc attempt.
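The flipped predicate is the real fix in this hunk: the old test took the collection path precisely when the request *could* fit, i.e. on nearly every allocation. With `would_exhaust`, the slow path runs only when bump allocation cannot satisfy the request. For example (sizes made up):

    let avail = Span::new(0x1000 as *uint, 0x1100 as *uint); // 256 bytes free
    assert!(!avail.would_exhaust(128)); // fits: fast path, just bump `start`
    assert!(avail.would_exhaust(512));  // does not fit: collect or grow first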
@@ -186,9 +212,11 @@ impl Gc {
    fn fill_remaining_space(&mut self) {
        // TODO: inject a placeholder object with a fixed-size header so as
        // to spend O(1) effort rather than O(|remaining space|).
+        let mut a = self.avail.start;
+        let lim = self.avail.limit;
+        println!("fill_remaining_space: a: {} lim: {} lim-a: {} bytes",
+                 a, lim, (lim as uint) - (a as uint));
        unsafe {
-            let mut a = self.avail.start;
-            let lim = self.avail.limit;
            while a < lim {
                {
                    let a: *mut uint = cast::transmute(a);
@@ -204,7 +232,7 @@ impl Gc {
        #[allow(unused_variable)];

        let owned_objects_to_scan: ~[*()] = ~[];
-        let pinned_shared_to_scan: ~[*Box] = ~[];
+        let pinned_shared_to_scan: ~[*RawBox<()>] = ~[];
        let scan_ptr: *uint;
        let to_ptr: *uint;
        let limit: *uint;
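For orientation, `scan_ptr` and `to_ptr` are the two fingers of Cheney's algorithm: `to_ptr` marks where the next evacuated object will be copied into to-space, while `scan_ptr` trails behind it fixing up the pointer fields of objects already copied; collection finishes when `scan_ptr` catches up with `to_ptr`. In pseudocode (helper names hypothetical):

    // while scan_ptr < to_ptr {
    //     let obj = scan_ptr as *mut RawBox<()>;
    //     for each pointer field p of (*obj) {   // found via (*obj).type_desc
    //         *p = evacuate(*p);                 // copying a referent advances to_ptr
    //     }
    //     scan_ptr = scan_ptr + size_of_box(obj);
    // }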
@@ -272,3 +300,14 @@ fn main() {
    let i3 = gc.alloc::<int>(3);
    println!("i3: {:?}", i3);
}
+
+impl Drop for Gc {
+    fn drop(&mut self) {
+        unsafe {
+            self.normal_chunks.free_all();
+            match self.large_objects.take() {
+                None => {},
+                Some(c) => c.free_all(),
+            }
+        }
+    }
+}
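With `Drop` in place, chunk memory is reclaimed as soon as the `Gc` value goes out of scope. A usage sketch, assuming some constructor (written here as `Gc::new()`, which this excerpt does not show):

    fn demo() {
        let mut gc = Gc::new();      // hypothetical constructor
        let i = gc.alloc::<int>(7);
        println!("i: {:?}", i);
    }                                // `gc` dropped here: free_all releases every chunk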