Skip to content

Commit ecd4de2

Browse files
committed
---
yaml --- r: 152448 b: refs/heads/try2 c: c54ce27 h: refs/heads/master v: v3
1 parent 0489d45 commit ecd4de2

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

103 files changed

+822
-344
lines changed

[refs]

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -5,7 +5,7 @@ refs/heads/snap-stage3: 78a7676898d9f80ab540c6df5d4c9ce35bb50463
55
refs/heads/try: 519addf6277dbafccbb4159db4b710c37eaa2ec5
66
refs/tags/release-0.1: 1f5c5126e96c79d22cb7862f75304136e204f105
77
refs/heads/ndm: f3868061cd7988080c30d6d5bf352a5a5fe2460b
8-
refs/heads/try2: 02866e6fe26307b34db2aa414b52a08718bfc9f7
8+
refs/heads/try2: c54ce27b392e9f16a3cc70fed415af8c274af148
99
refs/heads/dist-snap: ba4081a5a8573875fed17545846f6f6902c8ba8d
1010
refs/tags/release-0.2: c870d2dffb391e14efb05aa27898f1f6333a9596
1111
refs/tags/release-0.3: b5f0d0f648d9a6153664837026ba1be43d3e2503

branches/try2/mk/docs.mk

Lines changed: 6 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -273,14 +273,18 @@ LIB_DOC_DEP_$(1) = \
273273
$$(RSINPUTS_$(1)) \
274274
$$(RUSTDOC_EXE) \
275275
$$(foreach dep,$$(RUST_DEPS_$(1)), \
276-
$$(TLIB2_T_$(CFG_BUILD)_H_$(CFG_BUILD))/stamp.$$(dep))
276+
$$(TLIB2_T_$(CFG_BUILD)_H_$(CFG_BUILD))/stamp.$$(dep) \
277+
doc/$$(dep)/)
277278
else
278279
LIB_DOC_DEP_$(1) = $$(CRATEFILE_$(1)) $$(RSINPUTS_$(1))
279280
endif
280281

282+
doc/$(1)/:
283+
$$(Q)mkdir -p $$@
284+
281285
$(2) += doc/$(1)/index.html
282286
doc/$(1)/index.html: CFG_COMPILER_HOST_TRIPLE = $(CFG_TARGET)
283-
doc/$(1)/index.html: $$(LIB_DOC_DEP_$(1))
287+
doc/$(1)/index.html: $$(LIB_DOC_DEP_$(1)) doc/$(1)/
284288
@$$(call E, rustdoc $$@)
285289
$$(Q)$$(RUSTDOC) --cfg dox --cfg stage2 $$<
286290
endef

branches/try2/src/liballoc/arc.rs

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -184,7 +184,7 @@ impl<T: Share + Send> Drop for Arc<T> {
184184

185185
// This fence is needed to prevent reordering of use of the data and
186186
// deletion of the data. Because it is marked `Release`, the
187-
// decreasing of the reference count sychronizes with this `Acquire`
187+
// decreasing of the reference count synchronizes with this `Acquire`
188188
// fence. This means that use of the data happens before decreasing
189189
// the refernce count, which happens before this fence, which
190190
// happens before the deletion of the data.

branches/try2/src/libarena/lib.rs

Lines changed: 87 additions & 67 deletions
Original file line number | Diff line number | Diff line change
@@ -81,8 +81,8 @@ pub struct Arena {
8181
// The head is separated out from the list as a unbenchmarked
8282
// microoptimization, to avoid needing to case on the list to access the
8383
// head.
84-
head: Chunk,
85-
copy_head: Chunk,
84+
head: RefCell<Chunk>,
85+
copy_head: RefCell<Chunk>,
8686
chunks: RefCell<Vec<Chunk>>,
8787
}
8888

@@ -95,8 +95,8 @@ impl Arena {
9595
/// Allocate a new Arena with `initial_size` bytes preallocated.
9696
pub fn new_with_size(initial_size: uint) -> Arena {
9797
Arena {
98-
head: chunk(initial_size, false),
99-
copy_head: chunk(initial_size, true),
98+
head: RefCell::new(chunk(initial_size, false)),
99+
copy_head: RefCell::new(chunk(initial_size, true)),
100100
chunks: RefCell::new(Vec::new()),
101101
}
102102
}
@@ -114,7 +114,7 @@ fn chunk(size: uint, is_copy: bool) -> Chunk {
114114
impl Drop for Arena {
115115
fn drop(&mut self) {
116116
unsafe {
117-
destroy_chunk(&self.head);
117+
destroy_chunk(&*self.head.borrow());
118118
for chunk in self.chunks.borrow().iter() {
119119
if !chunk.is_copy.get() {
120120
destroy_chunk(chunk);
@@ -171,38 +171,40 @@ fn un_bitpack_tydesc_ptr(p: uint) -> (*TyDesc, bool) {
171171

172172
impl Arena {
173173
fn chunk_size(&self) -> uint {
174-
self.copy_head.capacity()
174+
self.copy_head.borrow().capacity()
175175
}
176+
176177
// Functions for the POD part of the arena
177-
fn alloc_copy_grow(&mut self, n_bytes: uint, align: uint) -> *u8 {
178+
fn alloc_copy_grow(&self, n_bytes: uint, align: uint) -> *u8 {
178179
// Allocate a new chunk.
179180
let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
180-
self.chunks.borrow_mut().push(self.copy_head.clone());
181-
self.copy_head =
181+
self.chunks.borrow_mut().push(self.copy_head.borrow().clone());
182+
183+
*self.copy_head.borrow_mut() =
182184
chunk(num::next_power_of_two(new_min_chunk_size + 1u), true);
183185

184186
return self.alloc_copy_inner(n_bytes, align);
185187
}
186188

187189
#[inline]
188-
fn alloc_copy_inner(&mut self, n_bytes: uint, align: uint) -> *u8 {
189-
unsafe {
190-
let start = round_up(self.copy_head.fill.get(), align);
191-
let end = start + n_bytes;
192-
if end > self.chunk_size() {
193-
return self.alloc_copy_grow(n_bytes, align);
194-
}
195-
self.copy_head.fill.set(end);
190+
fn alloc_copy_inner(&self, n_bytes: uint, align: uint) -> *u8 {
191+
let start = round_up(self.copy_head.borrow().fill.get(), align);
192+
193+
let end = start + n_bytes;
194+
if end > self.chunk_size() {
195+
return self.alloc_copy_grow(n_bytes, align);
196+
}
196197

197-
//debug!("idx = {}, size = {}, align = {}, fill = {}",
198-
// start, n_bytes, align, head.fill.get());
198+
let copy_head = self.copy_head.borrow();
199+
copy_head.fill.set(end);
199200

200-
self.copy_head.as_ptr().offset(start as int)
201+
unsafe {
202+
copy_head.as_ptr().offset(start as int)
201203
}
202204
}
203205

204206
#[inline]
205-
fn alloc_copy<'a, T>(&'a mut self, op: || -> T) -> &'a T {
207+
fn alloc_copy<'a, T>(&'a self, op: || -> T) -> &'a T {
206208
unsafe {
207209
let ptr = self.alloc_copy_inner(mem::size_of::<T>(),
208210
mem::min_align_of::<T>());
@@ -213,42 +215,48 @@ impl Arena {
213215
}
214216

215217
// Functions for the non-POD part of the arena
216-
fn alloc_noncopy_grow(&mut self, n_bytes: uint, align: uint)
217-
-> (*u8, *u8) {
218+
fn alloc_noncopy_grow(&self, n_bytes: uint, align: uint) -> (*u8, *u8) {
218219
// Allocate a new chunk.
219220
let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
220-
self.chunks.borrow_mut().push(self.head.clone());
221-
self.head =
221+
self.chunks.borrow_mut().push(self.head.borrow().clone());
222+
223+
*self.head.borrow_mut() =
222224
chunk(num::next_power_of_two(new_min_chunk_size + 1u), false);
223225

224226
return self.alloc_noncopy_inner(n_bytes, align);
225227
}
226228

227229
#[inline]
228-
fn alloc_noncopy_inner(&mut self, n_bytes: uint, align: uint)
229-
-> (*u8, *u8) {
230-
unsafe {
231-
let tydesc_start = self.head.fill.get();
232-
let after_tydesc = self.head.fill.get() + mem::size_of::<*TyDesc>();
230+
fn alloc_noncopy_inner(&self, n_bytes: uint, align: uint) -> (*u8, *u8) {
231+
// Be careful to not maintain any `head` borrows active, because
232+
// `alloc_noncopy_grow` borrows it mutably.
233+
let (start, end, tydesc_start, head_capacity) = {
234+
let head = self.head.borrow();
235+
let fill = head.fill.get();
236+
237+
let tydesc_start = fill;
238+
let after_tydesc = fill + mem::size_of::<*TyDesc>();
233239
let start = round_up(after_tydesc, align);
234240
let end = start + n_bytes;
235241

236-
if end > self.head.capacity() {
237-
return self.alloc_noncopy_grow(n_bytes, align);
238-
}
242+
(start, end, tydesc_start, head.capacity())
243+
};
239244

240-
self.head.fill.set(round_up(end, mem::align_of::<*TyDesc>()));
245+
if end > head_capacity {
246+
return self.alloc_noncopy_grow(n_bytes, align);
247+
}
241248

242-
//debug!("idx = {}, size = {}, align = {}, fill = {}",
243-
// start, n_bytes, align, head.fill);
249+
let head = self.head.borrow();
250+
head.fill.set(round_up(end, mem::align_of::<*TyDesc>()));
244251

245-
let buf = self.head.as_ptr();
252+
unsafe {
253+
let buf = head.as_ptr();
246254
return (buf.offset(tydesc_start as int), buf.offset(start as int));
247255
}
248256
}
249257

250258
#[inline]
251-
fn alloc_noncopy<'a, T>(&'a mut self, op: || -> T) -> &'a T {
259+
fn alloc_noncopy<'a, T>(&'a self, op: || -> T) -> &'a T {
252260
unsafe {
253261
let tydesc = get_tydesc::<T>();
254262
let (ty_ptr, ptr) =
@@ -274,12 +282,10 @@ impl Arena {
274282
#[inline]
275283
pub fn alloc<'a, T>(&'a self, op: || -> T) -> &'a T {
276284
unsafe {
277-
// FIXME #13933: Remove/justify all `&T` to `&mut T` transmutes
278-
let this: &mut Arena = mem::transmute::<&_, &mut _>(self);
279285
if intrinsics::needs_drop::<T>() {
280-
this.alloc_noncopy(op)
286+
self.alloc_noncopy(op)
281287
} else {
282-
this.alloc_copy(op)
288+
self.alloc_copy(op)
283289
}
284290
}
285291
}
@@ -298,6 +304,20 @@ fn test_arena_destructors() {
298304
}
299305
}
300306

307+
#[test]
308+
fn test_arena_alloc_nested() {
309+
struct Inner { value: uint }
310+
struct Outer<'a> { inner: &'a Inner }
311+
312+
let arena = Arena::new();
313+
314+
let result = arena.alloc(|| Outer {
315+
inner: arena.alloc(|| Inner { value: 10 })
316+
});
317+
318+
assert_eq!(result.inner.value, 10);
319+
}
320+
301321
#[test]
302322
#[should_fail]
303323
fn test_arena_destructors_fail() {
@@ -325,19 +345,20 @@ fn test_arena_destructors_fail() {
325345
/// run again for these objects.
326346
pub struct TypedArena<T> {
327347
/// A pointer to the next object to be allocated.
328-
ptr: *T,
348+
ptr: Cell<*T>,
329349

330350
/// A pointer to the end of the allocated area. When this pointer is
331351
/// reached, a new chunk is allocated.
332-
end: *T,
352+
end: Cell<*T>,
333353

334354
/// A pointer to the first arena segment.
335-
first: Option<Box<TypedArenaChunk<T>>>,
355+
first: RefCell<TypedArenaChunkRef<T>>,
336356
}
357+
type TypedArenaChunkRef<T> = Option<Box<TypedArenaChunk<T>>>;
337358

338359
struct TypedArenaChunk<T> {
339360
/// Pointer to the next arena segment.
340-
next: Option<Box<TypedArenaChunk<T>>>,
361+
next: TypedArenaChunkRef<T>,
341362

342363
/// The number of elements that this chunk can hold.
343364
capacity: uint,
@@ -423,53 +444,52 @@ impl<T> TypedArena<T> {
423444
pub fn with_capacity(capacity: uint) -> TypedArena<T> {
424445
let chunk = TypedArenaChunk::<T>::new(None, capacity);
425446
TypedArena {
426-
ptr: chunk.start() as *T,
427-
end: chunk.end() as *T,
428-
first: Some(chunk),
447+
ptr: Cell::new(chunk.start() as *T),
448+
end: Cell::new(chunk.end() as *T),
449+
first: RefCell::new(Some(chunk)),
429450
}
430451
}
431452

432453
/// Allocates an object in the TypedArena, returning a reference to it.
433454
#[inline]
434455
pub fn alloc<'a>(&'a self, object: T) -> &'a T {
435-
unsafe {
436-
// FIXME #13933: Remove/justify all `&T` to `&mut T` transmutes
437-
let this: &mut TypedArena<T> = mem::transmute::<&_, &mut _>(self);
438-
if this.ptr == this.end {
439-
this.grow()
440-
}
456+
if self.ptr == self.end {
457+
self.grow()
458+
}
441459

442-
let ptr: &'a mut T = mem::transmute(this.ptr);
460+
let ptr: &'a T = unsafe {
461+
let ptr: &'a mut T = mem::transmute(self.ptr);
443462
ptr::write(ptr, object);
444-
this.ptr = this.ptr.offset(1);
445-
let ptr: &'a T = ptr;
463+
self.ptr.set(self.ptr.get().offset(1));
446464
ptr
447-
}
465+
};
466+
467+
ptr
448468
}
449469

450470
/// Grows the arena.
451471
#[inline(never)]
452-
fn grow(&mut self) {
453-
let chunk = self.first.take_unwrap();
472+
fn grow(&self) {
473+
let chunk = self.first.borrow_mut().take_unwrap();
454474
let new_capacity = chunk.capacity.checked_mul(&2).unwrap();
455475
let chunk = TypedArenaChunk::<T>::new(Some(chunk), new_capacity);
456-
self.ptr = chunk.start() as *T;
457-
self.end = chunk.end() as *T;
458-
self.first = Some(chunk)
476+
self.ptr.set(chunk.start() as *T);
477+
self.end.set(chunk.end() as *T);
478+
*self.first.borrow_mut() = Some(chunk)
459479
}
460480
}
461481

462482
#[unsafe_destructor]
463483
impl<T> Drop for TypedArena<T> {
464484
fn drop(&mut self) {
465485
// Determine how much was filled.
466-
let start = self.first.get_ref().start() as uint;
467-
let end = self.ptr as uint;
486+
let start = self.first.borrow().get_ref().start() as uint;
487+
let end = self.ptr.get() as uint;
468488
let diff = (end - start) / mem::size_of::<T>();
469489

470490
// Pass that to the `destroy` method.
471491
unsafe {
472-
self.first.get_mut_ref().destroy(diff)
492+
self.first.borrow_mut().get_mut_ref().destroy(diff)
473493
}
474494
}
475495
}

branches/try2/src/libcore/fmt/mod.rs

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -539,7 +539,7 @@ impl<'a> Formatter<'a> {
539539
}
540540

541541
/// Runs a callback, emitting the correct padding either before or
542-
/// afterwards depending on whether right or left alingment is requested.
542+
/// afterwards depending on whether right or left alignment is requested.
543543
fn with_padding(&mut self,
544544
padding: uint,
545545
default: rt::Alignment,

branches/try2/src/libcore/result.rs

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -105,7 +105,7 @@
105105
//! *Note: The actual definition of `Writer` uses `IoResult`, which
106106
//! is just a synonym for `Result<T, IoError>`.*
107107
//!
108-
//! This method doesn`t produce a value, but the write may
108+
//! This method doesn't produce a value, but the write may
109109
//! fail. It's crucial to handle the error case, and *not* write
110110
//! something like this:
111111
//!

branches/try2/src/libgetopts/lib.rs

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -163,7 +163,7 @@ pub struct OptGroup {
163163
pub occur: Occur
164164
}
165165

166-
/// Describes wether an option is given at all or has a value.
166+
/// Describes whether an option is given at all or has a value.
167167
#[deriving(Clone, PartialEq)]
168168
enum Optval {
169169
Val(String),

branches/try2/src/libglob/lib.rs

Lines changed: 0 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -44,7 +44,6 @@ use std::string::String;
4444
* pattern - see the `glob` function for more details.
4545
*/
4646
pub struct Paths {
47-
root: Path,
4847
dir_patterns: Vec<Pattern>,
4948
require_dir: bool,
5049
options: MatchOptions,
@@ -108,7 +107,6 @@ pub fn glob_with(pattern: &str, options: MatchOptions) -> Paths {
108107
// FIXME: How do we want to handle verbatim paths? I'm inclined to return nothing,
109108
// since we can't very well find all UNC shares with a 1-letter server name.
110109
return Paths {
111-
root: root,
112110
dir_patterns: Vec::new(),
113111
require_dir: false,
114112
options: options,
@@ -134,7 +132,6 @@ pub fn glob_with(pattern: &str, options: MatchOptions) -> Paths {
134132
}
135133

136134
Paths {
137-
root: root,
138135
dir_patterns: dir_patterns,
139136
require_dir: require_dir,
140137
options: options,

branches/try2/src/libgreen/context.rs

Lines changed: 2 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -143,6 +143,7 @@ extern {
143143
// stacks are disabled.
144144

145145
#[cfg(target_arch = "x86")]
146+
#[repr(C)]
146147
struct Registers {
147148
eax: u32, ebx: u32, ecx: u32, edx: u32,
148149
ebp: u32, esi: u32, edi: u32, esp: u32,
@@ -226,7 +227,7 @@ fn initialize_call_frame(regs: &mut Registers, fptr: InitFn, arg: uint,
226227
regs[RUSTRT_R14] = procedure.env as uint;
227228
regs[RUSTRT_R15] = fptr as uint;
228229

229-
// These registers are picked up by the regulard context switch paths. These
230+
// These registers are picked up by the regular context switch paths. These
230231
// will put us in "mostly the right context" except for frobbing all the
231232
// arguments to the right place. We have the small trampoline code inside of
232233
// rust_bootstrap_green_task to do that.

0 commit comments

Comments
 (0)