Commit 19d0b53

Auto merge of #51263 - cramertj:futures-in-core, r=aturon
Add Future and task system to the standard library

This adds preliminary versions of the `std::future` and `std::task` modules in order to unblock development of async/await (#50547). These shouldn't be considered the final form of these libraries; design questions about them should be left on rust-lang/rfcs#2418. Once that RFC (or a successor) is merged, these APIs will be adjusted as necessary.

r? @aturon
2 parents cb8ab33 + a6055c8 commit 19d0b53
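For context, the preliminary `Future` trait these changes build on is polled through `PinMut` rather than `Pin<&mut Self>`. Its rough shape, paraphrased from how it is used in the diff below (not quoted from this commit, and details such as lifetimes on `Context` may differ), is:

```rust
// Approximate shape of the unstable core::future::Future trait that the
// impls in this diff are written against (a sketch, not the exact source).
use core::mem::PinMut;
use core::task::{Context, Poll};

pub trait Future {
    type Output;

    /// Attempt to resolve the future, arranging for the current task to be
    /// woken via `cx` if it is not ready yet.
    fn poll(self: PinMut<Self>, cx: &mut Context) -> Poll<Self::Output>;
}
```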

File tree

8 files changed: +961 -0 lines changed

src/liballoc/boxed.rs (+93)

@@ -59,12 +59,14 @@ use core::any::Any;
 use core::borrow;
 use core::cmp::Ordering;
 use core::fmt;
+use core::future::Future;
 use core::hash::{Hash, Hasher};
 use core::iter::FusedIterator;
 use core::marker::{Unpin, Unsize};
 use core::mem::{self, PinMut};
 use core::ops::{CoerceUnsized, Deref, DerefMut, Generator, GeneratorState};
 use core::ptr::{self, NonNull, Unique};
+use core::task::{Context, Poll, UnsafePoll, TaskObj};
 use core::convert::From;

 use raw_vec::RawVec;
@@ -755,6 +757,7 @@ impl<T> Generator for Box<T>
 /// A pinned, heap allocated reference.
 #[unstable(feature = "pin", issue = "49150")]
 #[fundamental]
+#[repr(transparent)]
 pub struct PinBox<T: ?Sized> {
     inner: Box<T>,
 }
@@ -771,14 +774,72 @@ impl<T> PinBox<T> {
 #[unstable(feature = "pin", issue = "49150")]
 impl<T: ?Sized> PinBox<T> {
     /// Get a pinned reference to the data in this PinBox.
+    #[inline]
     pub fn as_pin_mut<'a>(&'a mut self) -> PinMut<'a, T> {
         unsafe { PinMut::new_unchecked(&mut *self.inner) }
     }

+    /// Constructs a `PinBox` from a raw pointer.
+    ///
+    /// After calling this function, the raw pointer is owned by the
+    /// resulting `PinBox`. Specifically, the `PinBox` destructor will call
+    /// the destructor of `T` and free the allocated memory. Since the
+    /// way `PinBox` allocates and releases memory is unspecified, the
+    /// only valid pointer to pass to this function is the one taken
+    /// from another `PinBox` via the [`PinBox::into_raw`] function.
+    ///
+    /// This function is unsafe because improper use may lead to
+    /// memory problems. For example, a double-free may occur if the
+    /// function is called twice on the same raw pointer.
+    ///
+    /// [`PinBox::into_raw`]: struct.PinBox.html#method.into_raw
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(pin)]
+    /// use std::boxed::PinBox;
+    /// let x = PinBox::new(5);
+    /// let ptr = PinBox::into_raw(x);
+    /// let x = unsafe { PinBox::from_raw(ptr) };
+    /// ```
+    #[inline]
+    pub unsafe fn from_raw(raw: *mut T) -> Self {
+        PinBox { inner: Box::from_raw(raw) }
+    }
+
+    /// Consumes the `PinBox`, returning the wrapped raw pointer.
+    ///
+    /// After calling this function, the caller is responsible for the
+    /// memory previously managed by the `PinBox`. In particular, the
+    /// caller should properly destroy `T` and release the memory. The
+    /// proper way to do so is to convert the raw pointer back into a
+    /// `PinBox` with the [`PinBox::from_raw`] function.
+    ///
+    /// Note: this is an associated function, which means that you have
+    /// to call it as `PinBox::into_raw(b)` instead of `b.into_raw()`. This
+    /// is so that there is no conflict with a method on the inner type.
+    ///
+    /// [`PinBox::from_raw`]: struct.PinBox.html#method.from_raw
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(pin)]
+    /// use std::boxed::PinBox;
+    /// let x = PinBox::new(5);
+    /// let ptr = PinBox::into_raw(x);
+    /// ```
+    #[inline]
+    pub fn into_raw(b: PinBox<T>) -> *mut T {
+        Box::into_raw(b.inner)
+    }
+
     /// Get a mutable reference to the data inside this PinBox.
     ///
     /// This function is unsafe. Users must guarantee that the data is never
     /// moved out of this reference.
+    #[inline]
     pub unsafe fn get_mut<'a>(this: &'a mut PinBox<T>) -> &'a mut T {
         &mut *this.inner
     }
@@ -787,6 +848,7 @@ impl<T: ?Sized> PinBox<T> {
     ///
     /// This function is unsafe. Users must guarantee that the data is never
     /// moved out of the box.
+    #[inline]
     pub unsafe fn unpin(this: PinBox<T>) -> Box<T> {
         this.inner
     }
@@ -851,3 +913,34 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<PinBox<U>> for PinBox<T> {}

 #[unstable(feature = "pin", issue = "49150")]
 impl<T: ?Sized> Unpin for PinBox<T> {}
+
+#[unstable(feature = "futures_api", issue = "50547")]
+unsafe impl<F: Future<Output = ()> + Send + 'static> UnsafePoll for PinBox<F> {
+    fn into_raw(self) -> *mut () {
+        PinBox::into_raw(self) as *mut ()
+    }
+
+    unsafe fn poll(task: *mut (), cx: &mut Context) -> Poll<()> {
+        let ptr = task as *mut F;
+        let pin: PinMut<F> = PinMut::new_unchecked(&mut *ptr);
+        pin.poll(cx)
+    }
+
+    unsafe fn drop(task: *mut ()) {
+        drop(PinBox::from_raw(task as *mut F))
+    }
+}
+
+#[unstable(feature = "futures_api", issue = "50547")]
+impl<F: Future<Output = ()> + Send + 'static> From<PinBox<F>> for TaskObj {
+    fn from(boxed: PinBox<F>) -> Self {
+        TaskObj::from_poll_task(boxed)
+    }
+}
+
+#[unstable(feature = "futures_api", issue = "50547")]
+impl<F: Future<Output = ()> + Send + 'static> From<Box<F>> for TaskObj {
+    fn from(boxed: Box<F>) -> Self {
+        TaskObj::from_poll_task(PinBox::from(boxed))
+    }
+}
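As a rough usage sketch (not part of the commit): the `UnsafePoll` impl above erases a pinned, boxed `Future<Output = ()>` to a raw pointer plus poll/drop functions, and the two `From` impls let either kind of box be handed to an executor as a `TaskObj`. The `Ready` future, the exact feature gates, and the `std::`-level re-export paths are assumptions for illustration:

```rust
#![feature(pin, arbitrary_self_types, futures_api)]

use std::boxed::PinBox;
use std::future::Future;
use std::mem::PinMut;
use std::task::{Context, Poll, TaskObj};

// Illustrative future that is immediately ready with `()`.
struct Ready;

impl Future for Ready {
    type Output = ();

    fn poll(self: PinMut<Self>, _cx: &mut Context) -> Poll<()> {
        Poll::Ready(())
    }
}

fn main() {
    // The `From` impls added in boxed.rs erase the concrete future type so an
    // executor can own and poll it behind a `TaskObj`, without generics.
    let _from_pin_box: TaskObj = TaskObj::from(PinBox::new(Ready));
    let _from_box: TaskObj = TaskObj::from(Box::new(Ready));
}
```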

src/liballoc/lib.rs (+6)

@@ -95,6 +95,7 @@
 #![feature(fmt_internals)]
 #![feature(from_ref)]
 #![feature(fundamental)]
+#![feature(futures_api)]
 #![feature(lang_items)]
 #![feature(libc)]
 #![feature(needs_allocator)]
@@ -103,6 +104,7 @@
 #![feature(pin)]
 #![feature(ptr_internals)]
 #![feature(ptr_offset_from)]
+#![feature(repr_transparent)]
 #![feature(rustc_attrs)]
 #![feature(specialization)]
 #![feature(staged_api)]
@@ -155,6 +157,10 @@ pub mod heap {
     pub use alloc::*;
 }

+#[unstable(feature = "futures_api",
+           reason = "futures in libcore are unstable",
+           issue = "50547")]
+pub mod task;

 // Primitive types using the heaps above

src/liballoc/task.rs (new file, +140)

@@ -0,0 +1,140 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Types and Traits for working with asynchronous tasks.
+
+pub use core::task::*;
+
+#[cfg(target_has_atomic = "ptr")]
+pub use self::if_arc::*;
+
+#[cfg(target_has_atomic = "ptr")]
+mod if_arc {
+    use super::*;
+    use arc::Arc;
+    use core::marker::PhantomData;
+    use core::mem;
+    use core::ptr::{self, NonNull};
+
+    /// A way of waking up a specific task.
+    ///
+    /// Any task executor must provide a way of signaling that a task it owns
+    /// is ready to be `poll`ed again. Executors do so by implementing this trait.
+    pub trait Wake: Send + Sync {
+        /// Indicates that the associated task is ready to make progress and should
+        /// be `poll`ed.
+        ///
+        /// Executors generally maintain a queue of "ready" tasks; `wake` should place
+        /// the associated task onto this queue.
+        fn wake(arc_self: &Arc<Self>);
+
+        /// Indicates that the associated task is ready to make progress and should
+        /// be `poll`ed. This function is like `wake`, but can only be called from the
+        /// thread on which this `Wake` was created.
+        ///
+        /// Executors generally maintain a queue of "ready" tasks; `wake_local` should place
+        /// the associated task onto this queue.
+        #[inline]
+        unsafe fn wake_local(arc_self: &Arc<Self>) {
+            Self::wake(arc_self);
+        }
+    }
+
+    #[cfg(target_has_atomic = "ptr")]
+    struct ArcWrapped<T>(PhantomData<T>);
+
+    unsafe impl<T: Wake + 'static> UnsafeWake for ArcWrapped<T> {
+        #[inline]
+        unsafe fn clone_raw(&self) -> Waker {
+            let me: *const ArcWrapped<T> = self;
+            let arc = (*(&me as *const *const ArcWrapped<T> as *const Arc<T>)).clone();
+            Waker::from(arc)
+        }
+
+        #[inline]
+        unsafe fn drop_raw(&self) {
+            let mut me: *const ArcWrapped<T> = self;
+            let me = &mut me as *mut *const ArcWrapped<T> as *mut Arc<T>;
+            ptr::drop_in_place(me);
+        }
+
+        #[inline]
+        unsafe fn wake(&self) {
+            let me: *const ArcWrapped<T> = self;
+            T::wake(&*(&me as *const *const ArcWrapped<T> as *const Arc<T>))
+        }
+
+        #[inline]
+        unsafe fn wake_local(&self) {
+            let me: *const ArcWrapped<T> = self;
+            T::wake_local(&*(&me as *const *const ArcWrapped<T> as *const Arc<T>))
+        }
+    }
+
+    impl<T> From<Arc<T>> for Waker
+        where T: Wake + 'static,
+    {
+        fn from(rc: Arc<T>) -> Self {
+            unsafe {
+                let ptr = mem::transmute::<Arc<T>, NonNull<ArcWrapped<T>>>(rc);
+                Waker::new(ptr)
+            }
+        }
+    }
+
+    /// Creates a `LocalWaker` from a local `wake`.
+    ///
+    /// This function requires that `wake` is "local" (created on the current thread).
+    /// The resulting `LocalWaker` will call `wake.wake_local()` when awoken, and
+    /// will call `wake.wake()` if awoken after being converted to a `Waker`.
+    #[inline]
+    pub unsafe fn local_waker<W: Wake + 'static>(wake: Arc<W>) -> LocalWaker {
+        let ptr = mem::transmute::<Arc<W>, NonNull<ArcWrapped<W>>>(wake);
+        LocalWaker::new(ptr)
+    }
+
+    struct NonLocalAsLocal<T>(ArcWrapped<T>);
+
+    unsafe impl<T: Wake + 'static> UnsafeWake for NonLocalAsLocal<T> {
+        #[inline]
+        unsafe fn clone_raw(&self) -> Waker {
+            self.0.clone_raw()
+        }
+
+        #[inline]
+        unsafe fn drop_raw(&self) {
+            self.0.drop_raw()
+        }
+
+        #[inline]
+        unsafe fn wake(&self) {
+            self.0.wake()
+        }
+
+        #[inline]
+        unsafe fn wake_local(&self) {
+            // Since we're nonlocal, we can't call wake_local
+            self.0.wake()
+        }
+    }
+
+    /// Creates a `LocalWaker` from a non-local `wake`.
+    ///
+    /// This function is similar to `local_waker`, but does not require that `wake`
+    /// is local to the current thread. The resulting `LocalWaker` will call
+    /// `wake.wake()` when awoken.
+    #[inline]
+    pub fn local_waker_from_nonlocal<W: Wake + 'static>(wake: Arc<W>) -> LocalWaker {
+        unsafe {
+            let ptr = mem::transmute::<Arc<W>, NonNull<NonLocalAsLocal<W>>>(wake);
+            LocalWaker::new(ptr)
+        }
+    }
+}
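A hedged sketch (not in the commit) of how an executor might use this module, assuming `std::task` re-exports the items shown above: implement `Wake` for the executor's task handle, then obtain a `Waker` through the `From<Arc<T>>` impl, or a `LocalWaker` through `local_waker_from_nonlocal`. The `QueueTask` type is an illustrative assumption, and `Waker::wake` comes from `core::task`, not this file:

```rust
#![feature(futures_api)]

use std::sync::{Arc, Mutex};
use std::task::{local_waker_from_nonlocal, LocalWaker, Wake, Waker};

// Illustrative executor-side task handle: waking it records its id on a
// shared "ready" list that the executor would drain before polling again.
struct QueueTask {
    id: usize,
    ready: Mutex<Vec<usize>>,
}

impl Wake for QueueTask {
    fn wake(arc_self: &Arc<Self>) {
        arc_self.ready.lock().unwrap().push(arc_self.id);
    }
}

fn main() {
    let task = Arc::new(QueueTask { id: 0, ready: Mutex::new(Vec::new()) });

    // Uses the `impl<T: Wake> From<Arc<T>> for Waker` defined in this file.
    let waker: Waker = Waker::from(task.clone());
    waker.wake();

    // Safe constructor for a LocalWaker backed by a possibly non-local Wake;
    // it will fall back to `wake` rather than `wake_local` when invoked.
    let _local: LocalWaker = local_waker_from_nonlocal(task.clone());

    assert_eq!(task.ready.lock().unwrap().len(), 1);
}
```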
