Skip to content

Commit e8ada6a

Browse files
committed
Auto merge of rust-lang#125313 - matthiaskrgr:rollup-65etxv0, r=matthiaskrgr
Rollup of 5 pull requests Successful merges: - rust-lang#125034 (Weekly `cargo update`) - rust-lang#125093 (Add `fn into_raw_with_allocator` to Rc/Arc/Weak.) - rust-lang#125282 (Never type unsafe lint improvements) - rust-lang#125301 (fix suggestion in E0373 for !Unpin coroutines) - rust-lang#125302 (defrost `RUST_MIN_STACK=ice rustc hello.rs`) r? `@ghost` `@rustbot` modify labels: rollup
2 parents f092f73 + ecbd110 commit e8ada6a

18 files changed

+445
-150
lines changed

Cargo.lock

+138-83
Large diffs are not rendered by default.

compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs

+4
Original file line number | Diff line number | Diff line change
@@ -3343,6 +3343,10 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
33433343
} else if string.starts_with("gen") {
33443344
// `gen` is 3 chars long
33453345
Some(3)
3346+
} else if string.starts_with("static") {
3347+
// `static` is 6 chars long
3348+
// This is used for `!Unpin` coroutines
3349+
Some(6)
33463350
} else {
33473351
None
33483352
};

compiler/rustc_hir_typeck/src/fallback.rs

+40-35
Original file line number | Diff line number | Diff line change
@@ -364,41 +364,11 @@ impl<'tcx> FnCtxt<'_, 'tcx> {
364364
};
365365

366366
let mut fallback_to = |ty| {
367-
let unsafe_infer_vars = unsafe_infer_vars.get_or_init(|| {
368-
let unsafe_infer_vars = compute_unsafe_infer_vars(self.root_ctxt, self.body_id);
369-
debug!(?unsafe_infer_vars);
370-
unsafe_infer_vars
371-
});
372-
373-
let affected_unsafe_infer_vars =
374-
graph::depth_first_search_as_undirected(&coercion_graph, root_vid)
375-
.filter_map(|x| unsafe_infer_vars.get(&x).copied())
376-
.collect::<Vec<_>>();
377-
378-
for (hir_id, span, reason) in affected_unsafe_infer_vars {
379-
self.tcx.emit_node_span_lint(
380-
lint::builtin::NEVER_TYPE_FALLBACK_FLOWING_INTO_UNSAFE,
381-
hir_id,
382-
span,
383-
match reason {
384-
UnsafeUseReason::Call => {
385-
errors::NeverTypeFallbackFlowingIntoUnsafe::Call
386-
}
387-
UnsafeUseReason::Method => {
388-
errors::NeverTypeFallbackFlowingIntoUnsafe::Method
389-
}
390-
UnsafeUseReason::Path => {
391-
errors::NeverTypeFallbackFlowingIntoUnsafe::Path
392-
}
393-
UnsafeUseReason::UnionField => {
394-
errors::NeverTypeFallbackFlowingIntoUnsafe::UnionField
395-
}
396-
UnsafeUseReason::Deref => {
397-
errors::NeverTypeFallbackFlowingIntoUnsafe::Deref
398-
}
399-
},
400-
);
401-
}
367+
self.lint_never_type_fallback_flowing_into_unsafe_code(
368+
&unsafe_infer_vars,
369+
&coercion_graph,
370+
root_vid,
371+
);
402372

403373
diverging_fallback.insert(diverging_ty, ty);
404374
};
@@ -464,6 +434,41 @@ impl<'tcx> FnCtxt<'_, 'tcx> {
464434
diverging_fallback
465435
}
466436

437+
fn lint_never_type_fallback_flowing_into_unsafe_code(
438+
&self,
439+
unsafe_infer_vars: &OnceCell<UnordMap<ty::TyVid, (HirId, Span, UnsafeUseReason)>>,
440+
coercion_graph: &VecGraph<ty::TyVid, true>,
441+
root_vid: ty::TyVid,
442+
) {
443+
let unsafe_infer_vars = unsafe_infer_vars.get_or_init(|| {
444+
let unsafe_infer_vars = compute_unsafe_infer_vars(self.root_ctxt, self.body_id);
445+
debug!(?unsafe_infer_vars);
446+
unsafe_infer_vars
447+
});
448+
449+
let affected_unsafe_infer_vars =
450+
graph::depth_first_search_as_undirected(&coercion_graph, root_vid)
451+
.filter_map(|x| unsafe_infer_vars.get(&x).copied())
452+
.collect::<Vec<_>>();
453+
454+
for (hir_id, span, reason) in affected_unsafe_infer_vars {
455+
self.tcx.emit_node_span_lint(
456+
lint::builtin::NEVER_TYPE_FALLBACK_FLOWING_INTO_UNSAFE,
457+
hir_id,
458+
span,
459+
match reason {
460+
UnsafeUseReason::Call => errors::NeverTypeFallbackFlowingIntoUnsafe::Call,
461+
UnsafeUseReason::Method => errors::NeverTypeFallbackFlowingIntoUnsafe::Method,
462+
UnsafeUseReason::Path => errors::NeverTypeFallbackFlowingIntoUnsafe::Path,
463+
UnsafeUseReason::UnionField => {
464+
errors::NeverTypeFallbackFlowingIntoUnsafe::UnionField
465+
}
466+
UnsafeUseReason::Deref => errors::NeverTypeFallbackFlowingIntoUnsafe::Deref,
467+
},
468+
);
469+
}
470+
}
471+
467472
/// Returns a graph whose nodes are (unresolved) inference variables and where
468473
/// an edge `?A -> ?B` indicates that the variable `?A` is coerced to `?B`.
469474
fn create_coercion_graph(&self) -> VecGraph<ty::TyVid, true> {

compiler/rustc_interface/src/interface.rs

+1
Original file line number | Diff line number | Diff line change
@@ -389,6 +389,7 @@ pub fn run_compiler<R: Send>(config: Config, f: impl FnOnce(&Compiler) -> R + Se
389389
let hash_kind = config.opts.unstable_opts.src_hash_algorithm(&target);
390390

391391
util::run_in_thread_pool_with_globals(
392+
&early_dcx,
392393
config.opts.edition,
393394
config.opts.unstable_opts.threads,
394395
SourceMapInputs { file_loader, path_mapping, hash_kind },

compiler/rustc_interface/src/util.rs

+32-9
Original file line number | Diff line number | Diff line change
@@ -51,20 +51,38 @@ pub fn add_configuration(cfg: &mut Cfg, sess: &mut Session, codegen_backend: &dy
5151
pub static STACK_SIZE: OnceLock<usize> = OnceLock::new();
5252
pub const DEFAULT_STACK_SIZE: usize = 8 * 1024 * 1024;
5353

54-
fn init_stack_size() -> usize {
54+
fn init_stack_size(early_dcx: &EarlyDiagCtxt) -> usize {
5555
// Obey the environment setting or default
5656
*STACK_SIZE.get_or_init(|| {
5757
env::var_os("RUST_MIN_STACK")
58-
.map(|os_str| os_str.to_string_lossy().into_owned())
59-
// ignore if it is set to nothing
60-
.filter(|s| s.trim() != "")
61-
.map(|s| s.trim().parse::<usize>().unwrap())
58+
.as_ref()
59+
.map(|os_str| os_str.to_string_lossy())
60+
// if someone finds out `export RUST_MIN_STACK=640000` isn't enough stack
61+
// they might try to "unset" it by running `RUST_MIN_STACK= rustc code.rs`
62+
// this is wrong, but std would nonetheless "do what they mean", so let's do likewise
63+
.filter(|s| !s.trim().is_empty())
64+
// rustc is a batch program, so error early on inputs which are unlikely to be intended
65+
// so no one thinks we parsed them setting `RUST_MIN_STACK="64 megabytes"`
66+
// FIXME: we could accept `RUST_MIN_STACK=64MB`, perhaps?
67+
.map(|s| {
68+
let s = s.trim();
69+
// FIXME(workingjubilee): add proper diagnostics when we factor out "pre-run" setup
70+
#[allow(rustc::untranslatable_diagnostic, rustc::diagnostic_outside_of_impl)]
71+
s.parse::<usize>().unwrap_or_else(|_| {
72+
let mut err = early_dcx.early_struct_fatal(format!(
73+
r#"`RUST_MIN_STACK` should be a number of bytes, but was "{s}""#,
74+
));
75+
err.note("you can also unset `RUST_MIN_STACK` to use the default stack size");
76+
err.emit()
77+
})
78+
})
6279
// otherwise pick a consistent default
6380
.unwrap_or(DEFAULT_STACK_SIZE)
6481
})
6582
}
6683

6784
fn run_in_thread_with_globals<F: FnOnce(CurrentGcx) -> R + Send, R: Send>(
85+
thread_stack_size: usize,
6886
edition: Edition,
6987
sm_inputs: SourceMapInputs,
7088
f: F,
@@ -75,7 +93,7 @@ fn run_in_thread_with_globals<F: FnOnce(CurrentGcx) -> R + Send, R: Send>(
7593
// the parallel compiler, in particular to ensure there is no accidental
7694
// sharing of data between the main thread and the compilation thread
7795
// (which might cause problems for the parallel compiler).
78-
let builder = thread::Builder::new().name("rustc".to_string()).stack_size(init_stack_size());
96+
let builder = thread::Builder::new().name("rustc".to_string()).stack_size(thread_stack_size);
7997

8098
// We build the session globals and run `f` on the spawned thread, because
8199
// `SessionGlobals` does not impl `Send` in the non-parallel compiler.
@@ -100,16 +118,19 @@ fn run_in_thread_with_globals<F: FnOnce(CurrentGcx) -> R + Send, R: Send>(
100118

101119
#[cfg(not(parallel_compiler))]
102120
pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce(CurrentGcx) -> R + Send, R: Send>(
121+
thread_builder_diag: &EarlyDiagCtxt,
103122
edition: Edition,
104123
_threads: usize,
105124
sm_inputs: SourceMapInputs,
106125
f: F,
107126
) -> R {
108-
run_in_thread_with_globals(edition, sm_inputs, f)
127+
let thread_stack_size = init_stack_size(thread_builder_diag);
128+
run_in_thread_with_globals(thread_stack_size, edition, sm_inputs, f)
109129
}
110130

111131
#[cfg(parallel_compiler)]
112132
pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce(CurrentGcx) -> R + Send, R: Send>(
133+
thread_builder_diag: &EarlyDiagCtxt,
113134
edition: Edition,
114135
threads: usize,
115136
sm_inputs: SourceMapInputs,
@@ -121,10 +142,12 @@ pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce(CurrentGcx) -> R + Send,
121142
use rustc_query_system::query::{break_query_cycles, QueryContext};
122143
use std::process;
123144

145+
let thread_stack_size = init_stack_size(thread_builder_diag);
146+
124147
let registry = sync::Registry::new(std::num::NonZero::new(threads).unwrap());
125148

126149
if !sync::is_dyn_thread_safe() {
127-
return run_in_thread_with_globals(edition, sm_inputs, |current_gcx| {
150+
return run_in_thread_with_globals(thread_stack_size, edition, sm_inputs, |current_gcx| {
128151
// Register the thread for use with the `WorkerLocal` type.
129152
registry.register();
130153

@@ -167,7 +190,7 @@ pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce(CurrentGcx) -> R + Send,
167190
})
168191
.unwrap();
169192
})
170-
.stack_size(init_stack_size());
193+
.stack_size(thread_stack_size);
171194

172195
// We create the session globals on the main thread, then create the thread
173196
// pool. Upon creation, each worker thread created gets a copy of the

compiler/rustc_lint_defs/src/builtin.rs

+7-3
Original file line number | Diff line number | Diff line change
@@ -4263,8 +4263,7 @@ declare_lint! {
42634263
///
42644264
/// // where absurd is a function with the following signature
42654265
/// // (it's sound, because `!` always marks unreachable code):
4266-
/// fn absurd<T>(_: !) -> T { ... }
4267-
// FIXME: use `core::convert::absurd` here instead, once it's merged
4266+
/// fn absurd<T>(never: !) -> T { ... }
42684267
/// ```
42694268
///
42704269
/// While it's convenient to be able to use non-diverging code in one of the branches (like
@@ -4321,7 +4320,12 @@ declare_lint! {
43214320
/// [`()`]: https://doc.rust-lang.org/core/primitive.unit.html
43224321
pub NEVER_TYPE_FALLBACK_FLOWING_INTO_UNSAFE,
43234322
Warn,
4324-
"never type fallback affecting unsafe function calls"
4323+
"never type fallback affecting unsafe function calls",
4324+
@future_incompatible = FutureIncompatibleInfo {
4325+
reason: FutureIncompatibilityReason::FutureReleaseSemanticsChange,
4326+
reference: "issue #123748 <https://github.com/rust-lang/rust/issues/123748>",
4327+
};
4328+
report_in_external_macro
43254329
}
43264330

43274331
declare_lint! {

library/alloc/src/rc.rs

+40-10
Original file line number | Diff line number | Diff line change
@@ -1356,6 +1356,33 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
13561356
ptr
13571357
}
13581358

1359+
/// Consumes the `Rc`, returning the wrapped pointer and allocator.
1360+
///
1361+
/// To avoid a memory leak the pointer must be converted back to an `Rc` using
1362+
/// [`Rc::from_raw_in`].
1363+
///
1364+
/// # Examples
1365+
///
1366+
/// ```
1367+
/// #![feature(allocator_api)]
1368+
/// use std::rc::Rc;
1369+
/// use std::alloc::System;
1370+
///
1371+
/// let x = Rc::new_in("hello".to_owned(), System);
1372+
/// let (ptr, alloc) = Rc::into_raw_with_allocator(x);
1373+
/// assert_eq!(unsafe { &*ptr }, "hello");
1374+
/// let x = unsafe { Rc::from_raw_in(ptr, alloc) };
1375+
/// assert_eq!(&*x, "hello");
1376+
/// ```
1377+
#[unstable(feature = "allocator_api", issue = "32838")]
1378+
pub fn into_raw_with_allocator(this: Self) -> (*const T, A) {
1379+
let this = mem::ManuallyDrop::new(this);
1380+
let ptr = Self::as_ptr(&this);
1381+
// Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
1382+
let alloc = unsafe { ptr::read(&this.alloc) };
1383+
(ptr, alloc)
1384+
}
1385+
13591386
/// Provides a raw pointer to the data.
13601387
///
13611388
/// The counts are not affected in any way and the `Rc` is not consumed. The pointer is valid
@@ -3024,39 +3051,42 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
30243051
result
30253052
}
30263053

3027-
/// Consumes the `Weak<T>` and turns it into a raw pointer.
3054+
/// Consumes the `Weak<T>`, returning the wrapped pointer and allocator.
30283055
///
30293056
/// This converts the weak pointer into a raw pointer, while still preserving the ownership of
30303057
/// one weak reference (the weak count is not modified by this operation). It can be turned
3031-
/// back into the `Weak<T>` with [`from_raw`].
3058+
/// back into the `Weak<T>` with [`from_raw_in`].
30323059
///
30333060
/// The same restrictions of accessing the target of the pointer as with
30343061
/// [`as_ptr`] apply.
30353062
///
30363063
/// # Examples
30373064
///
30383065
/// ```
3066+
/// #![feature(allocator_api)]
30393067
/// use std::rc::{Rc, Weak};
3068+
/// use std::alloc::System;
30403069
///
3041-
/// let strong = Rc::new("hello".to_owned());
3070+
/// let strong = Rc::new_in("hello".to_owned(), System);
30423071
/// let weak = Rc::downgrade(&strong);
3043-
/// let raw = weak.into_raw();
3072+
/// let (raw, alloc) = weak.into_raw_with_allocator();
30443073
///
30453074
/// assert_eq!(1, Rc::weak_count(&strong));
30463075
/// assert_eq!("hello", unsafe { &*raw });
30473076
///
3048-
/// drop(unsafe { Weak::from_raw(raw) });
3077+
/// drop(unsafe { Weak::from_raw_in(raw, alloc) });
30493078
/// assert_eq!(0, Rc::weak_count(&strong));
30503079
/// ```
30513080
///
3052-
/// [`from_raw`]: Weak::from_raw
3081+
/// [`from_raw_in`]: Weak::from_raw_in
30533082
/// [`as_ptr`]: Weak::as_ptr
30543083
#[inline]
30553084
#[unstable(feature = "allocator_api", issue = "32838")]
3056-
pub fn into_raw_and_alloc(self) -> (*const T, A) {
3057-
let rc = mem::ManuallyDrop::new(self);
3058-
let result = rc.as_ptr();
3059-
let alloc = unsafe { ptr::read(&rc.alloc) };
3085+
pub fn into_raw_with_allocator(self) -> (*const T, A) {
3086+
let this = mem::ManuallyDrop::new(self);
3087+
let result = this.as_ptr();
3088+
// Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
3089+
let alloc = unsafe { ptr::read(&this.alloc) };
30603090
(result, alloc)
30613091
}
30623092

library/alloc/src/sync.rs

+67
Original file line number | Diff line number | Diff line change
@@ -1496,6 +1496,34 @@ impl<T: ?Sized, A: Allocator> Arc<T, A> {
14961496
ptr
14971497
}
14981498

1499+
/// Consumes the `Arc`, returning the wrapped pointer and allocator.
1500+
///
1501+
/// To avoid a memory leak the pointer must be converted back to an `Arc` using
1502+
/// [`Arc::from_raw_in`].
1503+
///
1504+
/// # Examples
1505+
///
1506+
/// ```
1507+
/// #![feature(allocator_api)]
1508+
/// use std::sync::Arc;
1509+
/// use std::alloc::System;
1510+
///
1511+
/// let x = Arc::new_in("hello".to_owned(), System);
1512+
/// let (ptr, alloc) = Arc::into_raw_with_allocator(x);
1513+
/// assert_eq!(unsafe { &*ptr }, "hello");
1514+
/// let x = unsafe { Arc::from_raw_in(ptr, alloc) };
1515+
/// assert_eq!(&*x, "hello");
1516+
/// ```
1517+
#[must_use = "losing the pointer will leak memory"]
1518+
#[unstable(feature = "allocator_api", issue = "32838")]
1519+
pub fn into_raw_with_allocator(this: Self) -> (*const T, A) {
1520+
let this = mem::ManuallyDrop::new(this);
1521+
let ptr = Self::as_ptr(&this);
1522+
// Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
1523+
let alloc = unsafe { ptr::read(&this.alloc) };
1524+
(ptr, alloc)
1525+
}
1526+
14991527
/// Provides a raw pointer to the data.
15001528
///
15011529
/// The counts are not affected in any way and the `Arc` is not consumed. The pointer is valid for
@@ -2740,6 +2768,45 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
27402768
result
27412769
}
27422770

2771+
/// Consumes the `Weak<T>`, returning the wrapped pointer and allocator.
2772+
///
2773+
/// This converts the weak pointer into a raw pointer, while still preserving the ownership of
2774+
/// one weak reference (the weak count is not modified by this operation). It can be turned
2775+
/// back into the `Weak<T>` with [`from_raw_in`].
2776+
///
2777+
/// The same restrictions of accessing the target of the pointer as with
2778+
/// [`as_ptr`] apply.
2779+
///
2780+
/// # Examples
2781+
///
2782+
/// ```
2783+
/// #![feature(allocator_api)]
2784+
/// use std::sync::{Arc, Weak};
2785+
/// use std::alloc::System;
2786+
///
2787+
/// let strong = Arc::new_in("hello".to_owned(), System);
2788+
/// let weak = Arc::downgrade(&strong);
2789+
/// let (raw, alloc) = weak.into_raw_with_allocator();
2790+
///
2791+
/// assert_eq!(1, Arc::weak_count(&strong));
2792+
/// assert_eq!("hello", unsafe { &*raw });
2793+
///
2794+
/// drop(unsafe { Weak::from_raw_in(raw, alloc) });
2795+
/// assert_eq!(0, Arc::weak_count(&strong));
2796+
/// ```
2797+
///
2798+
/// [`from_raw_in`]: Weak::from_raw_in
2799+
/// [`as_ptr`]: Weak::as_ptr
2800+
#[must_use = "losing the pointer will leak memory"]
2801+
#[unstable(feature = "allocator_api", issue = "32838")]
2802+
pub fn into_raw_with_allocator(self) -> (*const T, A) {
2803+
let this = mem::ManuallyDrop::new(self);
2804+
let result = this.as_ptr();
2805+
// Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
2806+
let alloc = unsafe { ptr::read(&this.alloc) };
2807+
(result, alloc)
2808+
}
2809+
27432810
/// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>` in the provided
27442811
/// allocator.
27452812
///

0 commit comments

Comments (0)